mirror of
https://github.com/lightpanda-io/browser.git
synced 2026-03-28 15:40:04 +00:00
Compare commits
43 Commits
v0.2.0
...
fetch_lazy
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
afa0d5ba12 | ||
|
|
4d1e416299 | ||
|
|
3badcdbdbd | ||
|
|
fcd82b2c14 | ||
|
|
d0621510cc | ||
|
|
2a7a8bc2a6 | ||
|
|
af916dea1d | ||
|
|
31335fc4fb | ||
|
|
c84634093d | ||
|
|
37d8d2642d | ||
|
|
0423a178e9 | ||
|
|
7acf67d668 | ||
|
|
ef1fece40c | ||
|
|
ebb590250f | ||
|
|
03130a95d8 | ||
|
|
e133717f7f | ||
|
|
968c695da1 | ||
|
|
707116a030 | ||
|
|
01966f41ff | ||
|
|
141d17dd55 | ||
|
|
a3c2daf306 | ||
|
|
dc60fac90d | ||
|
|
a5e2e8ea15 | ||
|
|
8295c2abe5 | ||
|
|
5997be89f6 | ||
|
|
1c89cfe5d4 | ||
|
|
b5021bd9fa | ||
|
|
4fd365b520 | ||
|
|
479cd5ab1a | ||
|
|
8285cbcaa9 | ||
|
|
545d97b5c0 | ||
|
|
11016abdd3 | ||
|
|
066df87dd4 | ||
|
|
91899912d8 | ||
|
|
4ceca6b90b | ||
|
|
ec936417c6 | ||
|
|
4b75b33eb3 | ||
|
|
1d7e731034 | ||
|
|
ab60f64452 | ||
|
|
9757ea7b0f | ||
|
|
855583874f | ||
|
|
9efc27c2bb | ||
|
|
cab5117d85 |
45
.github/actions/install/action.yml
vendored
45
.github/actions/install/action.yml
vendored
@@ -2,6 +2,10 @@ name: "Browsercore install"
|
|||||||
description: "Install deps for the project browsercore"
|
description: "Install deps for the project browsercore"
|
||||||
|
|
||||||
inputs:
|
inputs:
|
||||||
|
zig:
|
||||||
|
description: 'Zig version to install'
|
||||||
|
required: false
|
||||||
|
default: '0.15.1'
|
||||||
arch:
|
arch:
|
||||||
description: 'CPU arch used to select the v8 lib'
|
description: 'CPU arch used to select the v8 lib'
|
||||||
required: false
|
required: false
|
||||||
@@ -13,7 +17,7 @@ inputs:
|
|||||||
zig-v8:
|
zig-v8:
|
||||||
description: 'zig v8 version to install'
|
description: 'zig v8 version to install'
|
||||||
required: false
|
required: false
|
||||||
default: 'v0.2.2'
|
default: 'v0.1.30'
|
||||||
v8:
|
v8:
|
||||||
description: 'v8 version to install'
|
description: 'v8 version to install'
|
||||||
required: false
|
required: false
|
||||||
@@ -34,11 +38,9 @@ runs:
|
|||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install -y wget xz-utils python3 ca-certificates git pkg-config libglib2.0-dev gperf libexpat1-dev cmake clang
|
sudo apt-get install -y wget xz-utils python3 ca-certificates git pkg-config libglib2.0-dev gperf libexpat1-dev cmake clang
|
||||||
|
|
||||||
# Zig version used from the `minimum_zig_version` field in build.zig.zon
|
|
||||||
- uses: mlugg/setup-zig@v2
|
- uses: mlugg/setup-zig@v2
|
||||||
|
with:
|
||||||
# Rust Toolchain for html5ever
|
version: ${{ inputs.zig }}
|
||||||
- uses: dtolnay/rust-toolchain@stable
|
|
||||||
|
|
||||||
- name: Cache v8
|
- name: Cache v8
|
||||||
id: cache-v8
|
id: cache-v8
|
||||||
@@ -59,5 +61,34 @@ runs:
|
|||||||
- name: install v8
|
- name: install v8
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
mkdir -p v8
|
mkdir -p v8/out/${{ inputs.os }}/debug/obj/zig/
|
||||||
ln -s ${{ inputs.cache-dir }}/v8/libc_v8.a v8/libc_v8.a
|
ln -s ${{ inputs.cache-dir }}/v8/libc_v8.a v8/out/${{ inputs.os }}/debug/obj/zig/libc_v8.a
|
||||||
|
|
||||||
|
mkdir -p v8/out/${{ inputs.os }}/release/obj/zig/
|
||||||
|
ln -s ${{ inputs.cache-dir }}/v8/libc_v8.a v8/out/${{ inputs.os }}/release/obj/zig/libc_v8.a
|
||||||
|
|
||||||
|
- name: Cache libiconv
|
||||||
|
id: cache-libiconv
|
||||||
|
uses: actions/cache@v4
|
||||||
|
env:
|
||||||
|
cache-name: cache-libiconv
|
||||||
|
with:
|
||||||
|
path: ${{ inputs.cache-dir }}/libiconv
|
||||||
|
key: vendor/libiconv/libiconv-1.17
|
||||||
|
|
||||||
|
- name: download libiconv
|
||||||
|
if: ${{ steps.cache-libiconv.outputs.cache-hit != 'true' }}
|
||||||
|
shell: bash
|
||||||
|
run: make download-libiconv
|
||||||
|
|
||||||
|
- name: build libiconv
|
||||||
|
shell: bash
|
||||||
|
run: make build-libiconv
|
||||||
|
|
||||||
|
- name: build mimalloc
|
||||||
|
shell: bash
|
||||||
|
run: make install-mimalloc
|
||||||
|
|
||||||
|
- name: build netsurf
|
||||||
|
shell: bash
|
||||||
|
run: make install-netsurf
|
||||||
|
|||||||
45
.github/workflows/build.yml
vendored
45
.github/workflows/build.yml
vendored
@@ -5,12 +5,8 @@ env:
|
|||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.NIGHTLY_BUILD_AWS_SECRET_ACCESS_KEY }}
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.NIGHTLY_BUILD_AWS_SECRET_ACCESS_KEY }}
|
||||||
AWS_BUCKET: ${{ vars.NIGHTLY_BUILD_AWS_BUCKET }}
|
AWS_BUCKET: ${{ vars.NIGHTLY_BUILD_AWS_BUCKET }}
|
||||||
AWS_REGION: ${{ vars.NIGHTLY_BUILD_AWS_REGION }}
|
AWS_REGION: ${{ vars.NIGHTLY_BUILD_AWS_REGION }}
|
||||||
RELEASE: ${{ github.ref_type == 'tag' && github.ref_name || 'nightly' }}
|
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
|
||||||
tags:
|
|
||||||
- '*'
|
|
||||||
schedule:
|
schedule:
|
||||||
- cron: "2 2 * * *"
|
- cron: "2 2 * * *"
|
||||||
|
|
||||||
@@ -30,7 +26,7 @@ jobs:
|
|||||||
timeout-minutes: 15
|
timeout-minutes: 15
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||||
@@ -40,13 +36,9 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
os: ${{env.OS}}
|
os: ${{env.OS}}
|
||||||
arch: ${{env.ARCH}}
|
arch: ${{env.ARCH}}
|
||||||
mode: 'release'
|
|
||||||
|
|
||||||
- name: v8 snapshot
|
|
||||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
|
||||||
|
|
||||||
- name: zig build
|
- name: zig build
|
||||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
run: zig build --release=safe -Doptimize=ReleaseSafe -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||||
|
|
||||||
- name: Rename binary
|
- name: Rename binary
|
||||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||||
@@ -61,7 +53,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
allowUpdates: true
|
allowUpdates: true
|
||||||
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||||
tag: ${{ env.RELEASE }}
|
tag: nightly
|
||||||
|
|
||||||
build-linux-aarch64:
|
build-linux-aarch64:
|
||||||
env:
|
env:
|
||||||
@@ -82,13 +74,9 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
os: ${{env.OS}}
|
os: ${{env.OS}}
|
||||||
arch: ${{env.ARCH}}
|
arch: ${{env.ARCH}}
|
||||||
mode: 'release'
|
|
||||||
|
|
||||||
- name: v8 snapshot
|
|
||||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
|
||||||
|
|
||||||
- name: zig build
|
- name: zig build
|
||||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
run: zig build --release=safe -Doptimize=ReleaseSafe -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||||
|
|
||||||
- name: Rename binary
|
- name: Rename binary
|
||||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||||
@@ -103,7 +91,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
allowUpdates: true
|
allowUpdates: true
|
||||||
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||||
tag: ${{ env.RELEASE }}
|
tag: nightly
|
||||||
|
|
||||||
build-macos-aarch64:
|
build-macos-aarch64:
|
||||||
env:
|
env:
|
||||||
@@ -126,13 +114,9 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
os: ${{env.OS}}
|
os: ${{env.OS}}
|
||||||
arch: ${{env.ARCH}}
|
arch: ${{env.ARCH}}
|
||||||
mode: 'release'
|
|
||||||
|
|
||||||
- name: v8 snapshot
|
|
||||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
|
||||||
|
|
||||||
- name: zig build
|
- name: zig build
|
||||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
run: zig build --release=safe -Doptimize=ReleaseSafe -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||||
|
|
||||||
- name: Rename binary
|
- name: Rename binary
|
||||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||||
@@ -147,14 +131,19 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
allowUpdates: true
|
allowUpdates: true
|
||||||
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||||
tag: ${{ env.RELEASE }}
|
tag: nightly
|
||||||
|
|
||||||
build-macos-x86_64:
|
build-macos-x86_64:
|
||||||
env:
|
env:
|
||||||
ARCH: x86_64
|
ARCH: x86_64
|
||||||
OS: macos
|
OS: macos
|
||||||
|
|
||||||
runs-on: macos-14-large
|
# macos-13 runs on x86 CPU. see
|
||||||
|
# https://github.com/actions/runner-images?tab=readme-ov-file
|
||||||
|
# If we want to build for macos-14 or superior, we need to switch to
|
||||||
|
# macos-14-large.
|
||||||
|
# No need for now, but maybe we will need it in the short term.
|
||||||
|
runs-on: macos-13
|
||||||
timeout-minutes: 15
|
timeout-minutes: 15
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
@@ -168,13 +157,9 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
os: ${{env.OS}}
|
os: ${{env.OS}}
|
||||||
arch: ${{env.ARCH}}
|
arch: ${{env.ARCH}}
|
||||||
mode: 'release'
|
|
||||||
|
|
||||||
- name: v8 snapshot
|
|
||||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
|
||||||
|
|
||||||
- name: zig build
|
- name: zig build
|
||||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
run: zig build --release=safe -Doptimize=ReleaseSafe -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||||
|
|
||||||
- name: Rename binary
|
- name: Rename binary
|
||||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||||
@@ -189,4 +174,4 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
allowUpdates: true
|
allowUpdates: true
|
||||||
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||||
tag: ${{ env.RELEASE }}
|
tag: nightly
|
||||||
|
|||||||
68
.github/workflows/e2e-integration-test.yml
vendored
68
.github/workflows/e2e-integration-test.yml
vendored
@@ -1,68 +0,0 @@
|
|||||||
name: e2e-integration-test
|
|
||||||
|
|
||||||
env:
|
|
||||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "4 4 * * *"
|
|
||||||
# Allows you to run this workflow manually from the Actions tab
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
zig-build-release:
|
|
||||||
name: zig build release
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 15
|
|
||||||
|
|
||||||
# Don't run the CI with draft PR.
|
|
||||||
if: github.event.pull_request.draft == false
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
|
||||||
submodules: recursive
|
|
||||||
|
|
||||||
- uses: ./.github/actions/install
|
|
||||||
|
|
||||||
- name: zig build release
|
|
||||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
|
||||||
|
|
||||||
- name: upload artifact
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: lightpanda-build-release
|
|
||||||
path: |
|
|
||||||
zig-out/bin/lightpanda
|
|
||||||
retention-days: 1
|
|
||||||
|
|
||||||
demo-scripts:
|
|
||||||
name: demo-integration-scripts
|
|
||||||
needs: zig-build-release
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 15
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
repository: 'lightpanda-io/demo'
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- run: npm install
|
|
||||||
|
|
||||||
- name: download artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: lightpanda-build-release
|
|
||||||
|
|
||||||
- run: chmod a+x ./lightpanda
|
|
||||||
|
|
||||||
- name: run end to end integration tests
|
|
||||||
run: |
|
|
||||||
./lightpanda serve & echo $! > LPD.pid
|
|
||||||
go run integration/main.go
|
|
||||||
kill `cat LPD.pid`
|
|
||||||
8
.github/workflows/e2e-test.yml
vendored
8
.github/workflows/e2e-test.yml
vendored
@@ -49,18 +49,16 @@ jobs:
|
|||||||
if: github.event.pull_request.draft == false
|
if: github.event.pull_request.draft == false
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
|
|
||||||
- uses: ./.github/actions/install
|
- uses: ./.github/actions/install
|
||||||
with:
|
|
||||||
mode: 'release'
|
|
||||||
|
|
||||||
- name: zig build release
|
- name: zig build release
|
||||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
run: zig build -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||||
|
|
||||||
- name: upload artifact
|
- name: upload artifact
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
@@ -124,7 +122,7 @@ jobs:
|
|||||||
needs: zig-build-release
|
needs: zig-build-release
|
||||||
|
|
||||||
env:
|
env:
|
||||||
MAX_MEMORY: 28000
|
MAX_MEMORY: 27000
|
||||||
MAX_AVG_DURATION: 23
|
MAX_AVG_DURATION: 23
|
||||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||||
|
|
||||||
|
|||||||
4
.github/workflows/wpt.yml
vendored
4
.github/workflows/wpt.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
|||||||
timeout-minutes: 90
|
timeout-minutes: 90
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||||
@@ -31,7 +31,7 @@ jobs:
|
|||||||
- uses: ./.github/actions/install
|
- uses: ./.github/actions/install
|
||||||
|
|
||||||
- name: json output
|
- name: json output
|
||||||
run: zig build wpt -- --json > wpt.json
|
run: zig build -Doptimize=ReleaseFast wpt -- --json > wpt.json
|
||||||
|
|
||||||
- name: write commit
|
- name: write commit
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
13
.github/workflows/zig-fmt.yml
vendored
13
.github/workflows/zig-fmt.yml
vendored
@@ -1,5 +1,8 @@
|
|||||||
name: zig-fmt
|
name: zig-fmt
|
||||||
|
|
||||||
|
env:
|
||||||
|
ZIG_VERSION: 0.15.1
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
pull_request:
|
||||||
|
|
||||||
@@ -29,13 +32,14 @@ jobs:
|
|||||||
timeout-minutes: 15
|
timeout-minutes: 15
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: mlugg/setup-zig@v2
|
||||||
|
with:
|
||||||
|
version: ${{ env.ZIG_VERSION }}
|
||||||
|
|
||||||
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
# Zig version used from the `minimum_zig_version` field in build.zig.zon
|
|
||||||
- uses: mlugg/setup-zig@v2
|
|
||||||
|
|
||||||
- name: Run zig fmt
|
- name: Run zig fmt
|
||||||
id: fmt
|
id: fmt
|
||||||
run: |
|
run: |
|
||||||
@@ -54,7 +58,6 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||||
|
|
||||||
- name: Fail the job
|
- name: Fail the job
|
||||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||||
run: exit 1
|
run: exit 1
|
||||||
|
|||||||
7
.github/workflows/zig-test.yml
vendored
7
.github/workflows/zig-test.yml
vendored
@@ -47,15 +47,16 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
|
|
||||||
- uses: ./.github/actions/install
|
- uses: ./.github/actions/install
|
||||||
|
|
||||||
- name: zig build debug
|
- name: zig build debug
|
||||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a
|
run: zig build
|
||||||
|
|
||||||
- name: upload artifact
|
- name: upload artifact
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
@@ -103,7 +104,7 @@ jobs:
|
|||||||
- uses: ./.github/actions/install
|
- uses: ./.github/actions/install
|
||||||
|
|
||||||
- name: zig build test
|
- name: zig build test
|
||||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a test -- --json > bench.json
|
run: zig build test -- --json > bench.json
|
||||||
|
|
||||||
- name: write commit
|
- name: write commit
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,11 +1,7 @@
|
|||||||
zig-cache
|
zig-cache
|
||||||
/.zig-cache/
|
/.zig-cache/
|
||||||
/.lp-cache/
|
|
||||||
zig-out
|
zig-out
|
||||||
/vendor/netsurf/out
|
/vendor/netsurf/out
|
||||||
/vendor/libiconv/
|
/vendor/libiconv/
|
||||||
lightpanda.id
|
lightpanda.id
|
||||||
/v8/
|
/v8/
|
||||||
/build/
|
|
||||||
/src/html5ever/target/
|
|
||||||
src/snapshot.bin
|
|
||||||
|
|||||||
24
.gitmodules
vendored
24
.gitmodules
vendored
@@ -1,15 +1,33 @@
|
|||||||
|
[submodule "vendor/netsurf/libwapcaplet"]
|
||||||
|
path = vendor/netsurf/libwapcaplet
|
||||||
|
url = https://github.com/lightpanda-io/libwapcaplet.git/
|
||||||
|
[submodule "vendor/netsurf/libparserutils"]
|
||||||
|
path = vendor/netsurf/libparserutils
|
||||||
|
url = https://github.com/lightpanda-io/libparserutils.git/
|
||||||
|
[submodule "vendor/netsurf/libdom"]
|
||||||
|
path = vendor/netsurf/libdom
|
||||||
|
url = https://github.com/lightpanda-io/libdom.git/
|
||||||
|
[submodule "vendor/netsurf/share/netsurf-buildsystem"]
|
||||||
|
path = vendor/netsurf/share/netsurf-buildsystem
|
||||||
|
url = https://github.com/lightpanda-io/netsurf-buildsystem.git
|
||||||
|
[submodule "vendor/netsurf/libhubbub"]
|
||||||
|
path = vendor/netsurf/libhubbub
|
||||||
|
url = https://github.com/lightpanda-io/libhubbub.git/
|
||||||
[submodule "tests/wpt"]
|
[submodule "tests/wpt"]
|
||||||
path = tests/wpt
|
path = tests/wpt
|
||||||
url = https://github.com/lightpanda-io/wpt
|
url = https://github.com/lightpanda-io/wpt
|
||||||
|
[submodule "vendor/mimalloc"]
|
||||||
|
path = vendor/mimalloc
|
||||||
|
url = https://github.com/microsoft/mimalloc.git/
|
||||||
[submodule "vendor/nghttp2"]
|
[submodule "vendor/nghttp2"]
|
||||||
path = vendor/nghttp2
|
path = vendor/nghttp2
|
||||||
url = https://github.com/nghttp2/nghttp2.git
|
url = https://github.com/nghttp2/nghttp2.git
|
||||||
|
[submodule "vendor/mbedtls"]
|
||||||
|
path = vendor/mbedtls
|
||||||
|
url = https://github.com/Mbed-TLS/mbedtls.git
|
||||||
[submodule "vendor/zlib"]
|
[submodule "vendor/zlib"]
|
||||||
path = vendor/zlib
|
path = vendor/zlib
|
||||||
url = https://github.com/madler/zlib.git
|
url = https://github.com/madler/zlib.git
|
||||||
[submodule "vendor/curl"]
|
[submodule "vendor/curl"]
|
||||||
path = vendor/curl
|
path = vendor/curl
|
||||||
url = https://github.com/curl/curl.git
|
url = https://github.com/curl/curl.git
|
||||||
[submodule "vendor/brotli"]
|
|
||||||
path = vendor/brotli
|
|
||||||
url = https://github.com/google/brotli
|
|
||||||
|
|||||||
63
Dockerfile
63
Dockerfile
@@ -1,68 +1,60 @@
|
|||||||
FROM debian:stable-slim
|
FROM debian:stable
|
||||||
|
|
||||||
ARG MINISIG=0.12
|
ARG MINISIG=0.12
|
||||||
|
ARG ZIG=0.15.1
|
||||||
ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
|
ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
|
||||||
ARG V8=14.0.365.4
|
ARG V8=14.0.365.4
|
||||||
ARG ZIG_V8=v0.2.2
|
ARG ZIG_V8=v0.1.30
|
||||||
ARG TARGETPLATFORM
|
ARG TARGETPLATFORM
|
||||||
|
|
||||||
RUN apt-get update -yq && \
|
RUN apt-get update -yq && \
|
||||||
apt-get install -yq xz-utils ca-certificates \
|
apt-get install -yq xz-utils \
|
||||||
clang make curl git
|
python3 ca-certificates git \
|
||||||
|
pkg-config libglib2.0-dev \
|
||||||
# Get Rust
|
gperf libexpat1-dev \
|
||||||
RUN curl https://sh.rustup.rs -sSf | sh -s -- --profile minimal -y
|
cmake clang \
|
||||||
ENV PATH="/root/.cargo/bin:${PATH}"
|
curl git
|
||||||
|
|
||||||
# install minisig
|
# install minisig
|
||||||
RUN curl --fail -L -O https://github.com/jedisct1/minisign/releases/download/${MINISIG}/minisign-${MINISIG}-linux.tar.gz && \
|
RUN curl --fail -L -O https://github.com/jedisct1/minisign/releases/download/${MINISIG}/minisign-${MINISIG}-linux.tar.gz && \
|
||||||
tar xvzf minisign-${MINISIG}-linux.tar.gz -C /
|
tar xvzf minisign-${MINISIG}-linux.tar.gz
|
||||||
|
|
||||||
# clone lightpanda
|
|
||||||
RUN git clone https://github.com/lightpanda-io/browser.git
|
|
||||||
WORKDIR /browser
|
|
||||||
|
|
||||||
# install zig
|
# install zig
|
||||||
RUN ZIG=$(grep '\.minimum_zig_version = "' "build.zig.zon" | cut -d'"' -f2) && \
|
RUN case $TARGETPLATFORM in \
|
||||||
case $TARGETPLATFORM in \
|
|
||||||
"linux/arm64") ARCH="aarch64" ;; \
|
"linux/arm64") ARCH="aarch64" ;; \
|
||||||
*) ARCH="x86_64" ;; \
|
*) ARCH="x86_64" ;; \
|
||||||
esac && \
|
esac && \
|
||||||
curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-${ARCH}-linux-${ZIG}.tar.xz && \
|
curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-${ARCH}-linux-${ZIG}.tar.xz && \
|
||||||
curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-${ARCH}-linux-${ZIG}.tar.xz.minisig && \
|
curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-${ARCH}-linux-${ZIG}.tar.xz.minisig && \
|
||||||
/minisign-linux/${ARCH}/minisign -Vm zig-${ARCH}-linux-${ZIG}.tar.xz -P ${ZIG_MINISIG} && \
|
minisign-linux/${ARCH}/minisign -Vm zig-${ARCH}-linux-${ZIG}.tar.xz -P ${ZIG_MINISIG} && \
|
||||||
tar xvf zig-${ARCH}-linux-${ZIG}.tar.xz && \
|
tar xvf zig-${ARCH}-linux-${ZIG}.tar.xz && \
|
||||||
mv zig-${ARCH}-linux-${ZIG} /usr/local/lib && \
|
mv zig-${ARCH}-linux-${ZIG} /usr/local/lib && \
|
||||||
ln -s /usr/local/lib/zig-${ARCH}-linux-${ZIG}/zig /usr/local/bin/zig
|
ln -s /usr/local/lib/zig-${ARCH}-linux-${ZIG}/zig /usr/local/bin/zig
|
||||||
|
|
||||||
|
# clone lightpanda
|
||||||
|
RUN git clone https://github.com/lightpanda-io/browser.git
|
||||||
|
|
||||||
|
WORKDIR /browser
|
||||||
|
|
||||||
# install deps
|
# install deps
|
||||||
RUN git submodule init && \
|
RUN git submodule init && \
|
||||||
git submodule update --recursive
|
git submodule update --recursive
|
||||||
|
|
||||||
|
RUN make install-libiconv && \
|
||||||
|
make install-netsurf && \
|
||||||
|
make install-mimalloc
|
||||||
|
|
||||||
# download and install v8
|
# download and install v8
|
||||||
RUN case $TARGETPLATFORM in \
|
RUN case $TARGETPLATFORM in \
|
||||||
"linux/arm64") ARCH="aarch64" ;; \
|
"linux/arm64") ARCH="aarch64" ;; \
|
||||||
*) ARCH="x86_64" ;; \
|
*) ARCH="x86_64" ;; \
|
||||||
esac && \
|
esac && \
|
||||||
curl --fail -L -o libc_v8.a https://github.com/lightpanda-io/zig-v8-fork/releases/download/${ZIG_V8}/libc_v8_${V8}_linux_${ARCH}.a && \
|
curl --fail -L -o libc_v8.a https://github.com/lightpanda-io/zig-v8-fork/releases/download/${ZIG_V8}/libc_v8_${V8}_linux_${ARCH}.a && \
|
||||||
mkdir -p v8/ && \
|
mkdir -p v8/out/linux/release/obj/zig/ && \
|
||||||
mv libc_v8.a v8/libc_v8.a
|
mv libc_v8.a v8/out/linux/release/obj/zig/libc_v8.a
|
||||||
|
|
||||||
# build v8 snapshot
|
|
||||||
RUN zig build -Doptimize=ReleaseFast \
|
|
||||||
-Dprebuilt_v8_path=v8/libc_v8.a \
|
|
||||||
snapshot_creator -- src/snapshot.bin
|
|
||||||
|
|
||||||
# build release
|
# build release
|
||||||
RUN zig build -Doptimize=ReleaseFast \
|
RUN make build
|
||||||
-Dsnapshot_path=../../snapshot.bin \
|
|
||||||
-Dprebuilt_v8_path=v8/libc_v8.a \
|
|
||||||
-Dgit_commit=$(git rev-parse --short HEAD)
|
|
||||||
|
|
||||||
FROM debian:stable-slim
|
|
||||||
|
|
||||||
RUN apt-get update -yq && \
|
|
||||||
apt-get install -yq tini
|
|
||||||
|
|
||||||
FROM debian:stable-slim
|
FROM debian:stable-slim
|
||||||
|
|
||||||
@@ -70,12 +62,7 @@ FROM debian:stable-slim
|
|||||||
COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt
|
COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt
|
||||||
|
|
||||||
COPY --from=0 /browser/zig-out/bin/lightpanda /bin/lightpanda
|
COPY --from=0 /browser/zig-out/bin/lightpanda /bin/lightpanda
|
||||||
COPY --from=1 /usr/bin/tini /usr/bin/tini
|
|
||||||
|
|
||||||
EXPOSE 9222/tcp
|
EXPOSE 9222/tcp
|
||||||
|
|
||||||
# Lightpanda install only some signal handlers, and PID 1 doesn't have a default SIGTERM signal handler.
|
CMD ["/bin/lightpanda", "serve", "--host", "0.0.0.0", "--port", "9222"]
|
||||||
# Using "tini" as PID1 ensures that signals work as expected, so e.g. "docker stop" will not hang.
|
|
||||||
# (See https://github.com/krallin/tini#why-tini).
|
|
||||||
ENTRYPOINT ["/usr/bin/tini", "--"]
|
|
||||||
CMD ["/bin/lightpanda", "serve", "--host", "0.0.0.0", "--port", "9222", "--log_level", "info"]
|
|
||||||
|
|||||||
@@ -5,6 +5,14 @@ List](https://spdx.org/licenses/).
|
|||||||
|
|
||||||
The default license for this project is [AGPL-3.0-only](LICENSE).
|
The default license for this project is [AGPL-3.0-only](LICENSE).
|
||||||
|
|
||||||
|
## MIT
|
||||||
|
|
||||||
|
The following files are licensed under MIT:
|
||||||
|
|
||||||
|
```
|
||||||
|
src/polyfill/fetch.js
|
||||||
|
```
|
||||||
|
|
||||||
The following directories and their subdirectories are licensed under their
|
The following directories and their subdirectories are licensed under their
|
||||||
original upstream licenses:
|
original upstream licenses:
|
||||||
|
|
||||||
|
|||||||
197
Makefile
197
Makefile
@@ -34,7 +34,7 @@ endif
|
|||||||
|
|
||||||
## Display this help screen
|
## Display this help screen
|
||||||
help:
|
help:
|
||||||
@printf "\033[36m%-35s %s\033[0m\n" "Command" "Usage"
|
@printf "\e[36m%-35s %s\e[0m\n" "Command" "Usage"
|
||||||
@sed -n -e '/^## /{'\
|
@sed -n -e '/^## /{'\
|
||||||
-e 's/## //g;'\
|
-e 's/## //g;'\
|
||||||
-e 'h;'\
|
-e 'h;'\
|
||||||
@@ -47,76 +47,201 @@ help:
|
|||||||
|
|
||||||
# $(ZIG) commands
|
# $(ZIG) commands
|
||||||
# ------------
|
# ------------
|
||||||
.PHONY: build build-v8-snapshot build-dev run run-release shell test bench wpt data end2end
|
.PHONY: build build-dev run run-release shell test bench download-zig wpt data get-v8 build-v8 build-v8-dev
|
||||||
|
.PHONY: end2end
|
||||||
|
|
||||||
## Build v8 snapshot
|
zig_version = $(shell grep 'recommended_zig_version = "' "vendor/zig-js-runtime/build.zig" | cut -d'"' -f2)
|
||||||
build-v8-snapshot:
|
|
||||||
@printf "\033[36mBuilding v8 snapshot (release safe)...\033[0m\n"
|
|
||||||
@$(ZIG) build -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin || (printf "\033[33mBuild ERROR\033[0m\n"; exit 1;)
|
|
||||||
@printf "\033[33mBuild OK\033[0m\n"
|
|
||||||
|
|
||||||
## Build in release-fast mode
|
## Download the zig recommended version
|
||||||
build: build-v8-snapshot
|
download-zig:
|
||||||
@printf "\033[36mBuilding (release safe)...\033[0m\n"
|
$(eval url = "https://ziglang.org/download/$(zig_version)/zig-$(OS)-$(ARCH)-$(zig_version).tar.xz")
|
||||||
@$(ZIG) build -Doptimize=ReleaseFast -Dsnapshot_path=../../snapshot.bin -Dgit_commit=$$(git rev-parse --short HEAD) || (printf "\033[33mBuild ERROR\033[0m\n"; exit 1;)
|
$(eval dest = "/tmp/zig-$(OS)-$(ARCH)-$(zig_version).tar.xz")
|
||||||
@printf "\033[33mBuild OK\033[0m\n"
|
@printf "\e[36mDownload zig version $(zig_version)...\e[0m\n"
|
||||||
|
@curl -o "$(dest)" -L "$(url)" || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||||
|
@printf "\e[33mDownloaded $(dest)\e[0m\n"
|
||||||
|
|
||||||
|
## Build in release-safe mode
|
||||||
|
build:
|
||||||
|
@printf "\e[36mBuilding (release safe)...\e[0m\n"
|
||||||
|
$(ZIG) build -Doptimize=ReleaseSafe -Dgit_commit=$$(git rev-parse --short HEAD) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||||
|
@printf "\e[33mBuild OK\e[0m\n"
|
||||||
|
|
||||||
## Build in debug mode
|
## Build in debug mode
|
||||||
build-dev:
|
build-dev:
|
||||||
@printf "\033[36mBuilding (debug)...\033[0m\n"
|
@printf "\e[36mBuilding (debug)...\e[0m\n"
|
||||||
@$(ZIG) build -Dgit_commit=$$(git rev-parse --short HEAD) || (printf "\033[33mBuild ERROR\033[0m\n"; exit 1;)
|
@$(ZIG) build -Dgit_commit=$$(git rev-parse --short HEAD) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||||
@printf "\033[33mBuild OK\033[0m\n"
|
@printf "\e[33mBuild OK\e[0m\n"
|
||||||
|
|
||||||
## Run the server in release mode
|
## Run the server in release mode
|
||||||
run: build
|
run: build
|
||||||
@printf "\033[36mRunning...\033[0m\n"
|
@printf "\e[36mRunning...\e[0m\n"
|
||||||
@./zig-out/bin/lightpanda || (printf "\033[33mRun ERROR\033[0m\n"; exit 1;)
|
@./zig-out/bin/lightpanda || (printf "\e[33mRun ERROR\e[0m\n"; exit 1;)
|
||||||
|
|
||||||
## Run the server in debug mode
|
## Run the server in debug mode
|
||||||
run-debug: build-dev
|
run-debug: build-dev
|
||||||
@printf "\033[36mRunning...\033[0m\n"
|
@printf "\e[36mRunning...\e[0m\n"
|
||||||
@./zig-out/bin/lightpanda || (printf "\033[33mRun ERROR\033[0m\n"; exit 1;)
|
@./zig-out/bin/lightpanda || (printf "\e[33mRun ERROR\e[0m\n"; exit 1;)
|
||||||
|
|
||||||
## Run a JS shell in debug mode
|
## Run a JS shell in debug mode
|
||||||
shell:
|
shell:
|
||||||
@printf "\033[36mBuilding shell...\033[0m\n"
|
@printf "\e[36mBuilding shell...\e[0m\n"
|
||||||
@$(ZIG) build shell || (printf "\033[33mBuild ERROR\033[0m\n"; exit 1;)
|
@$(ZIG) build shell || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||||
|
|
||||||
## Run WPT tests
|
## Run WPT tests
|
||||||
wpt:
|
wpt:
|
||||||
@printf "\033[36mBuilding wpt...\033[0m\n"
|
@printf "\e[36mBuilding wpt...\e[0m\n"
|
||||||
@$(ZIG) build wpt -- $(filter-out $@,$(MAKECMDGOALS)) || (printf "\033[33mBuild ERROR\033[0m\n"; exit 1;)
|
@$(ZIG) build wpt -- $(filter-out $@,$(MAKECMDGOALS)) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||||
|
|
||||||
wpt-summary:
|
wpt-summary:
|
||||||
@printf "\033[36mBuilding wpt...\033[0m\n"
|
@printf "\e[36mBuilding wpt...\e[0m\n"
|
||||||
@$(ZIG) build wpt -- --summary $(filter-out $@,$(MAKECMDGOALS)) || (printf "\033[33mBuild ERROR\033[0m\n"; exit 1;)
|
@$(ZIG) build wpt -- --summary $(filter-out $@,$(MAKECMDGOALS)) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||||
|
|
||||||
## Test - `grep` is used to filter out the huge compile command on build
|
## Test
|
||||||
ifeq ($(OS), macos)
|
|
||||||
test:
|
test:
|
||||||
@script -q /dev/null sh -c 'TEST_FILTER="${F}" $(ZIG) build test -freference-trace' 2>&1 \
|
@TEST_FILTER='${F}' $(ZIG) build test -freference-trace --summary all
|
||||||
| grep --line-buffered -v "^/.*zig test -freference-trace"
|
|
||||||
else
|
|
||||||
test:
|
|
||||||
@script -qec 'TEST_FILTER="${F}" $(ZIG) build test -freference-trace' /dev/null 2>&1 \
|
|
||||||
| grep --line-buffered -v "^/.*zig test -freference-trace"
|
|
||||||
endif
|
|
||||||
|
|
||||||
## Run demo/runner end to end tests
|
## Run demo/runner end to end tests
|
||||||
end2end:
|
end2end:
|
||||||
@test -d ../demo
|
@test -d ../demo
|
||||||
cd ../demo && go run runner/main.go
|
cd ../demo && go run runner/main.go
|
||||||
|
|
||||||
|
## v8
|
||||||
|
get-v8:
|
||||||
|
@printf "\e[36mGetting v8 source...\e[0m\n"
|
||||||
|
@$(ZIG) build get-v8
|
||||||
|
|
||||||
|
build-v8-dev:
|
||||||
|
@printf "\e[36mBuilding v8 (dev)...\e[0m\n"
|
||||||
|
@$(ZIG) build build-v8
|
||||||
|
|
||||||
|
build-v8:
|
||||||
|
@printf "\e[36mBuilding v8...\e[0m\n"
|
||||||
|
@$(ZIG) build -Doptimize=ReleaseSafe build-v8
|
||||||
|
|
||||||
# Install and build required dependencies commands
|
# Install and build required dependencies commands
|
||||||
# ------------
|
# ------------
|
||||||
.PHONY: install
|
.PHONY: install-submodule
|
||||||
|
.PHONY: install-libiconv
|
||||||
|
.PHONY: _install-netsurf install-netsurf clean-netsurf test-netsurf install-netsurf-dev
|
||||||
|
.PHONY: install-mimalloc install-mimalloc-dev clean-mimalloc
|
||||||
|
.PHONY: install-dev install
|
||||||
|
|
||||||
## Install and build dependencies for release
|
## Install and build dependencies for release
|
||||||
install: install-submodule
|
install: install-submodule install-libiconv install-netsurf install-mimalloc
|
||||||
|
|
||||||
|
## Install and build dependencies for dev
|
||||||
|
install-dev: install-submodule install-libiconv install-netsurf-dev install-mimalloc-dev
|
||||||
|
|
||||||
|
install-netsurf-dev: _install-netsurf
|
||||||
|
install-netsurf-dev: OPTCFLAGS := -O0 -g -DNDEBUG
|
||||||
|
|
||||||
|
install-netsurf: _install-netsurf
|
||||||
|
install-netsurf: OPTCFLAGS := -DNDEBUG
|
||||||
|
|
||||||
|
BC_NS := $(BC)vendor/netsurf/out/$(OS)-$(ARCH)
|
||||||
|
ICONV := $(BC)vendor/libiconv/out/$(OS)-$(ARCH)
|
||||||
|
# TODO: add Linux iconv path (I guess it depends on the distro)
|
||||||
|
# TODO: this way of linking libiconv is not ideal. We should have a more generic way
|
||||||
|
# and stick to a specif version. Maybe build from source. Anyway not now.
|
||||||
|
_install-netsurf: clean-netsurf
|
||||||
|
@printf "\e[36mInstalling NetSurf...\e[0m\n" && \
|
||||||
|
ls $(ICONV)/lib/libiconv.a 1> /dev/null || (printf "\e[33mERROR: you need to execute 'make install-libiconv'\e[0m\n"; exit 1;) && \
|
||||||
|
mkdir -p $(BC_NS) && \
|
||||||
|
cp -R vendor/netsurf/share $(BC_NS) && \
|
||||||
|
export PREFIX=$(BC_NS) && \
|
||||||
|
export OPTLDFLAGS="-L$(ICONV)/lib" && \
|
||||||
|
export OPTCFLAGS="$(OPTCFLAGS) -I$(ICONV)/include" && \
|
||||||
|
printf "\e[33mInstalling libwapcaplet...\e[0m\n" && \
|
||||||
|
cd vendor/netsurf/libwapcaplet && \
|
||||||
|
BUILDDIR=$(BC_NS)/build/libwapcaplet make install && \
|
||||||
|
cd ../libparserutils && \
|
||||||
|
printf "\e[33mInstalling libparserutils...\e[0m\n" && \
|
||||||
|
BUILDDIR=$(BC_NS)/build/libparserutils make install && \
|
||||||
|
cd ../libhubbub && \
|
||||||
|
printf "\e[33mInstalling libhubbub...\e[0m\n" && \
|
||||||
|
BUILDDIR=$(BC_NS)/build/libhubbub make install && \
|
||||||
|
rm src/treebuilder/autogenerated-element-type.c && \
|
||||||
|
cd ../libdom && \
|
||||||
|
printf "\e[33mInstalling libdom...\e[0m\n" && \
|
||||||
|
BUILDDIR=$(BC_NS)/build/libdom make install && \
|
||||||
|
printf "\e[33mRunning libdom example...\e[0m\n" && \
|
||||||
|
cd examples && \
|
||||||
|
$(ZIG) cc \
|
||||||
|
-I$(ICONV)/include \
|
||||||
|
-I$(BC_NS)/include \
|
||||||
|
-L$(ICONV)/lib \
|
||||||
|
-L$(BC_NS)/lib \
|
||||||
|
-liconv \
|
||||||
|
-ldom \
|
||||||
|
-lhubbub \
|
||||||
|
-lparserutils \
|
||||||
|
-lwapcaplet \
|
||||||
|
-o a.out \
|
||||||
|
dom-structure-dump.c \
|
||||||
|
$(ICONV)/lib/libiconv.a && \
|
||||||
|
./a.out > /dev/null && \
|
||||||
|
rm a.out && \
|
||||||
|
printf "\e[36mDone NetSurf $(OS)\e[0m\n"
|
||||||
|
|
||||||
|
clean-netsurf:
|
||||||
|
@printf "\e[36mCleaning NetSurf build...\e[0m\n" && \
|
||||||
|
rm -Rf $(BC_NS)
|
||||||
|
|
||||||
|
test-netsurf:
|
||||||
|
@printf "\e[36mTesting NetSurf...\e[0m\n" && \
|
||||||
|
export PREFIX=$(BC_NS) && \
|
||||||
|
export LDFLAGS="-L$(ICONV)/lib -L$(BC_NS)/lib" && \
|
||||||
|
export CFLAGS="-I$(ICONV)/include -I$(BC_NS)/include" && \
|
||||||
|
cd vendor/netsurf/libdom && \
|
||||||
|
BUILDDIR=$(BC_NS)/build/libdom make test
|
||||||
|
|
||||||
|
download-libiconv:
|
||||||
|
ifeq ("$(wildcard vendor/libiconv/libiconv-1.17)","")
|
||||||
|
@mkdir -p vendor/libiconv
|
||||||
|
@cd vendor/libiconv && \
|
||||||
|
curl -L https://github.com/lightpanda-io/libiconv/releases/download/1.17/libiconv-1.17.tar.gz | tar -xvzf -
|
||||||
|
endif
|
||||||
|
|
||||||
|
build-libiconv: clean-libiconv
|
||||||
|
@cd vendor/libiconv/libiconv-1.17 && \
|
||||||
|
./configure --prefix=$(ICONV) --enable-static && \
|
||||||
|
make && make install
|
||||||
|
|
||||||
|
install-libiconv: download-libiconv build-libiconv
|
||||||
|
|
||||||
|
clean-libiconv:
|
||||||
|
ifneq ("$(wildcard vendor/libiconv/libiconv-1.17/Makefile)","")
|
||||||
|
@cd vendor/libiconv/libiconv-1.17 && \
|
||||||
|
make clean
|
||||||
|
endif
|
||||||
|
|
||||||
data:
|
data:
|
||||||
cd src/data && go run public_suffix_list_gen.go > public_suffix_list.zig
|
cd src/data && go run public_suffix_list_gen.go > public_suffix_list.zig
|
||||||
|
|
||||||
|
.PHONY: _build_mimalloc
|
||||||
|
|
||||||
|
MIMALLOC := $(BC)vendor/mimalloc/out/$(OS)-$(ARCH)
|
||||||
|
_build_mimalloc: clean-mimalloc
|
||||||
|
@mkdir -p $(MIMALLOC)/build && \
|
||||||
|
cd $(MIMALLOC)/build && \
|
||||||
|
cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_OBJECT=OFF -DMI_BUILD_TESTS=OFF -DMI_OVERRIDE=OFF $(OPTS) ../../.. && \
|
||||||
|
make && \
|
||||||
|
mkdir -p $(MIMALLOC)/lib
|
||||||
|
|
||||||
|
install-mimalloc-dev: _build_mimalloc
|
||||||
|
install-mimalloc-dev: OPTS=-DCMAKE_BUILD_TYPE=Debug
|
||||||
|
install-mimalloc-dev:
|
||||||
|
@cd $(MIMALLOC) && \
|
||||||
|
mv build/libmimalloc-debug.a lib/libmimalloc.a
|
||||||
|
|
||||||
|
install-mimalloc: _build_mimalloc
|
||||||
|
install-mimalloc:
|
||||||
|
@cd $(MIMALLOC) && \
|
||||||
|
mv build/libmimalloc.a lib/libmimalloc.a
|
||||||
|
|
||||||
|
clean-mimalloc:
|
||||||
|
@rm -Rf $(MIMALLOC)/build
|
||||||
|
|
||||||
## Init and update git submodule
|
## Init and update git submodule
|
||||||
install-submodule:
|
install-submodule:
|
||||||
@git submodule init && \
|
@git submodule init && \
|
||||||
|
|||||||
98
README.md
98
README.md
@@ -18,7 +18,7 @@ Lightpanda is the open-source browser made for headless usage:
|
|||||||
|
|
||||||
- Javascript execution
|
- Javascript execution
|
||||||
- Support of Web APIs (partial, WIP)
|
- Support of Web APIs (partial, WIP)
|
||||||
- Compatible with Playwright[^1], Puppeteer, chromedp through [CDP](https://chromedevtools.github.io/devtools-protocol/)
|
- Compatible with Playwright[^1], Puppeteer, chromedp through CDP
|
||||||
|
|
||||||
Fast web automation for AI agents, LLM training, scraping and testing:
|
Fast web automation for AI agents, LLM training, scraping and testing:
|
||||||
|
|
||||||
@@ -140,14 +140,13 @@ You may still encounter errors or crashes. Please open an issue with specifics i
|
|||||||
|
|
||||||
Here are the key features we have implemented:
|
Here are the key features we have implemented:
|
||||||
|
|
||||||
- [x] HTTP loader ([Libcurl](https://curl.se/libcurl/))
|
- [x] HTTP loader (based on Libcurl)
|
||||||
- [x] HTML parser ([html5ever](https://github.com/servo/html5ever))
|
- [x] HTML parser and DOM tree (based on Netsurf libs)
|
||||||
- [x] DOM tree
|
- [x] Javascript support (v8)
|
||||||
- [x] Javascript support ([v8](https://v8.dev/))
|
|
||||||
- [x] DOM APIs
|
- [x] DOM APIs
|
||||||
- [x] Ajax
|
- [x] Ajax
|
||||||
- [x] XHR API
|
- [x] XHR API
|
||||||
- [x] Fetch API
|
- [x] Fetch API (polyfill)
|
||||||
- [x] DOM dump
|
- [x] DOM dump
|
||||||
- [x] CDP/websockets server
|
- [x] CDP/websockets server
|
||||||
- [x] Click
|
- [x] Click
|
||||||
@@ -165,34 +164,47 @@ You can also follow the progress of our Javascript support in our dedicated [zig
|
|||||||
|
|
||||||
### Prerequisites
|
### Prerequisites
|
||||||
|
|
||||||
Lightpanda is written with [Zig](https://ziglang.org/) `0.15.2`. You have to
|
Lightpanda is written with [Zig](https://ziglang.org/) `0.15.1`. You have to
|
||||||
install it with the right version in order to build the project.
|
install it with the right version in order to build the project.
|
||||||
|
|
||||||
Lightpanda also depends on
|
Lightpanda also depends on
|
||||||
[zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime/) (with v8),
|
[zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime/) (with v8),
|
||||||
[Libcurl](https://curl.se/libcurl/) and [html5ever](https://github.com/servo/html5ever).
|
[Libcurl](https://curl.se/libcurl/),
|
||||||
|
[Netsurf libs](https://www.netsurf-browser.org/) and
|
||||||
|
[Mimalloc](https://microsoft.github.io/mimalloc).
|
||||||
|
|
||||||
To be able to build the v8 engine for zig-js-runtime, you have to install some libs:
|
To be able to build the v8 engine for zig-js-runtime, you have to install some libs:
|
||||||
|
|
||||||
For **Debian/Ubuntu based Linux**:
|
For Debian/Ubuntu based Linux:
|
||||||
|
|
||||||
```
|
```
|
||||||
sudo apt install xz-utils ca-certificates \
|
sudo apt install xz-utils \
|
||||||
clang make curl git
|
python3 ca-certificates git \
|
||||||
|
pkg-config libglib2.0-dev \
|
||||||
|
gperf libexpat1-dev unzip rsync \
|
||||||
|
cmake clang
|
||||||
```
|
```
|
||||||
You also need to [install Rust](https://rust-lang.org/tools/install/).
|
|
||||||
|
|
||||||
For systems with [**Nix**](https://nixos.org/download/), you can use the devShell:
|
For systems with [Nix](https://nixos.org/download/), you can use the devShell:
|
||||||
```
|
```
|
||||||
nix develop
|
nix develop
|
||||||
```
|
```
|
||||||
|
|
||||||
For **MacOS**, you need cmake and [Rust](https://rust-lang.org/tools/install/).
|
For MacOS, you only need cmake:
|
||||||
|
|
||||||
```
|
```
|
||||||
brew install cmake
|
brew install cmake
|
||||||
```
|
```
|
||||||
|
|
||||||
### Install Git submodules
|
### Install and build dependencies
|
||||||
|
|
||||||
|
#### All in one build
|
||||||
|
|
||||||
|
You can run `make install` to install deps all in one (or `make install-dev` if you need the development versions).
|
||||||
|
|
||||||
|
Be aware that the build task is very long and cpu consuming, as you will build from sources all dependencies, including the v8 Javascript engine.
|
||||||
|
|
||||||
|
#### Step by step build dependency
|
||||||
|
|
||||||
The project uses git submodules for dependencies.
|
The project uses git submodules for dependencies.
|
||||||
|
|
||||||
@@ -202,31 +214,53 @@ To init or update the submodules in the `vendor/` directory:
|
|||||||
make install-submodule
|
make install-submodule
|
||||||
```
|
```
|
||||||
|
|
||||||
This is an alias for `git submodule init && git submodule update`.
|
**iconv**
|
||||||
|
|
||||||
### Build and run
|
libiconv is an internationalization library used by Netsurf.
|
||||||
|
|
||||||
You an build the entire browser with `make build` or `make build-dev` for debug
|
|
||||||
env.
|
|
||||||
|
|
||||||
But you can directly use the zig command: `zig build run`.
|
|
||||||
|
|
||||||
#### Embed v8 snapshot
|
|
||||||
|
|
||||||
Lighpanda uses v8 snapshot. By default, it is created on startup but you can
|
|
||||||
embed it by using the following commands:
|
|
||||||
|
|
||||||
Generate the snapshot.
|
|
||||||
```
|
```
|
||||||
zig build snapshot_creator -- src/snapshot.bin
|
make install-libiconv
|
||||||
```
|
```
|
||||||
|
|
||||||
Build using the snapshot binary.
|
**Netsurf libs**
|
||||||
|
|
||||||
|
Netsurf libs are used for HTML parsing and DOM tree generation.
|
||||||
|
|
||||||
```
|
```
|
||||||
zig build -Dsnapshot_path=../../snapshot.bin
|
make install-netsurf
|
||||||
```
|
```
|
||||||
|
|
||||||
See [#1279](https://github.com/lightpanda-io/browser/pull/1279) for more details.
|
For dev env, use `make install-netsurf-dev`.
|
||||||
|
|
||||||
|
**Mimalloc**
|
||||||
|
|
||||||
|
Mimalloc is used as a C memory allocator.
|
||||||
|
|
||||||
|
```
|
||||||
|
make install-mimalloc
|
||||||
|
```
|
||||||
|
|
||||||
|
For dev env, use `make install-mimalloc-dev`.
|
||||||
|
|
||||||
|
Note: when Mimalloc is built in dev mode, you can dump memory stats with the
|
||||||
|
env var `MIMALLOC_SHOW_STATS=1`. See
|
||||||
|
[https://microsoft.github.io/mimalloc/environment.html](https://microsoft.github.io/mimalloc/environment.html).
|
||||||
|
|
||||||
|
**v8**
|
||||||
|
|
||||||
|
First, get the tools necessary for building V8, as well as the V8 source code:
|
||||||
|
|
||||||
|
```
|
||||||
|
make get-v8
|
||||||
|
```
|
||||||
|
|
||||||
|
Next, build v8. This build task is very long and cpu consuming, as you will build v8 from sources.
|
||||||
|
|
||||||
|
```
|
||||||
|
make build-v8
|
||||||
|
```
|
||||||
|
|
||||||
|
For dev env, use `make build-v8-dev`.
|
||||||
|
|
||||||
## Test
|
## Test
|
||||||
|
|
||||||
|
|||||||
492
build.zig
492
build.zig
@@ -17,210 +17,152 @@
|
|||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
|
const builtin = @import("builtin");
|
||||||
|
|
||||||
const Build = std.Build;
|
const Build = std.Build;
|
||||||
|
|
||||||
|
/// Do not rename this constant. It is scanned by some scripts to determine
|
||||||
|
/// which zig version to install.
|
||||||
|
const recommended_zig_version = "0.15.1";
|
||||||
|
|
||||||
pub fn build(b: *Build) !void {
|
pub fn build(b: *Build) !void {
|
||||||
|
switch (comptime builtin.zig_version.order(std.SemanticVersion.parse(recommended_zig_version) catch unreachable)) {
|
||||||
|
.eq => {},
|
||||||
|
.lt => {
|
||||||
|
@compileError("The minimum version of Zig required to compile is '" ++ recommended_zig_version ++ "', found '" ++ builtin.zig_version_string ++ "'.");
|
||||||
|
},
|
||||||
|
.gt => {
|
||||||
|
std.debug.print(
|
||||||
|
"WARNING: Recommended Zig version '{s}', but found '{s}', build may fail...\n\n",
|
||||||
|
.{ recommended_zig_version, builtin.zig_version_string },
|
||||||
|
);
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var opts = b.addOptions();
|
||||||
|
opts.addOption(
|
||||||
|
[]const u8,
|
||||||
|
"git_commit",
|
||||||
|
b.option([]const u8, "git_commit", "Current git commit") orelse "dev",
|
||||||
|
);
|
||||||
|
|
||||||
const target = b.standardTargetOptions(.{});
|
const target = b.standardTargetOptions(.{});
|
||||||
const optimize = b.standardOptimizeOption(.{});
|
const optimize = b.standardOptimizeOption(.{});
|
||||||
|
|
||||||
const manifest = Manifest.init(b);
|
// We're still using llvm because the new x86 backend seems to crash
|
||||||
|
// with v8. This can be reproduced in zig-v8-fork.
|
||||||
|
|
||||||
const git_commit = b.option([]const u8, "git_commit", "Current git commit");
|
const lightpanda_module = b.addModule("lightpanda", .{
|
||||||
const prebuilt_v8_path = b.option([]const u8, "prebuilt_v8_path", "Path to prebuilt libc_v8.a");
|
.root_source_file = b.path("src/main.zig"),
|
||||||
const snapshot_path = b.option([]const u8, "snapshot_path", "Path to v8 snapshot");
|
|
||||||
|
|
||||||
var opts = b.addOptions();
|
|
||||||
opts.addOption([]const u8, "version", manifest.version);
|
|
||||||
opts.addOption([]const u8, "git_commit", git_commit orelse "dev");
|
|
||||||
opts.addOption(?[]const u8, "snapshot_path", snapshot_path);
|
|
||||||
|
|
||||||
const enable_tsan = b.option(bool, "tsan", "Enable Thread Sanitizer");
|
|
||||||
const enable_csan = b.option(std.zig.SanitizeC, "csan", "Enable C Sanitizers");
|
|
||||||
|
|
||||||
const lightpanda_module = blk: {
|
|
||||||
const mod = b.addModule("lightpanda", .{
|
|
||||||
.root_source_file = b.path("src/lightpanda.zig"),
|
|
||||||
.target = target,
|
.target = target,
|
||||||
.optimize = optimize,
|
.optimize = optimize,
|
||||||
.link_libc = true,
|
.link_libc = true,
|
||||||
.link_libcpp = true,
|
.link_libcpp = true,
|
||||||
.sanitize_c = enable_csan,
|
|
||||||
.sanitize_thread = enable_tsan,
|
|
||||||
});
|
});
|
||||||
|
try addDependencies(b, lightpanda_module, opts);
|
||||||
try addDependencies(b, mod, opts, prebuilt_v8_path);
|
|
||||||
|
|
||||||
break :blk mod;
|
|
||||||
};
|
|
||||||
|
|
||||||
{
|
{
|
||||||
// browser
|
// browser
|
||||||
|
// -------
|
||||||
|
|
||||||
|
// compile and install
|
||||||
const exe = b.addExecutable(.{
|
const exe = b.addExecutable(.{
|
||||||
.name = "lightpanda",
|
.name = "lightpanda",
|
||||||
.use_llvm = true,
|
.use_llvm = true,
|
||||||
.root_module = b.createModule(.{
|
.root_module = lightpanda_module,
|
||||||
.root_source_file = b.path("src/main.zig"),
|
|
||||||
.target = target,
|
|
||||||
.optimize = optimize,
|
|
||||||
.sanitize_c = enable_csan,
|
|
||||||
.sanitize_thread = enable_tsan,
|
|
||||||
.imports = &.{
|
|
||||||
.{ .name = "lightpanda", .module = lightpanda_module },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
});
|
});
|
||||||
b.installArtifact(exe);
|
b.installArtifact(exe);
|
||||||
|
|
||||||
|
// run
|
||||||
const run_cmd = b.addRunArtifact(exe);
|
const run_cmd = b.addRunArtifact(exe);
|
||||||
if (b.args) |args| {
|
if (b.args) |args| {
|
||||||
run_cmd.addArgs(args);
|
run_cmd.addArgs(args);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// step
|
||||||
const run_step = b.step("run", "Run the app");
|
const run_step = b.step("run", "Run the app");
|
||||||
run_step.dependOn(&run_cmd.step);
|
run_step.dependOn(&run_cmd.step);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
// snapshot creator
|
// tests
|
||||||
const exe = b.addExecutable(.{
|
// ----
|
||||||
.name = "lightpanda-snapshot-creator",
|
|
||||||
.use_llvm = true,
|
|
||||||
.root_module = b.createModule(.{
|
|
||||||
.root_source_file = b.path("src/main_snapshot_creator.zig"),
|
|
||||||
.target = target,
|
|
||||||
.optimize = optimize,
|
|
||||||
.imports = &.{
|
|
||||||
.{ .name = "lightpanda", .module = lightpanda_module },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
b.installArtifact(exe);
|
|
||||||
|
|
||||||
const run_cmd = b.addRunArtifact(exe);
|
// compile
|
||||||
if (b.args) |args| {
|
|
||||||
run_cmd.addArgs(args);
|
|
||||||
}
|
|
||||||
const run_step = b.step("snapshot_creator", "Generate a v8 snapshot");
|
|
||||||
run_step.dependOn(&run_cmd.step);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
// test
|
|
||||||
const tests = b.addTest(.{
|
const tests = b.addTest(.{
|
||||||
.root_module = lightpanda_module,
|
.root_module = lightpanda_module,
|
||||||
.use_llvm = true,
|
.use_llvm = true,
|
||||||
.test_runner = .{ .path = b.path("src/test_runner.zig"), .mode = .simple },
|
.test_runner = .{ .path = b.path("src/test_runner.zig"), .mode = .simple },
|
||||||
});
|
});
|
||||||
|
|
||||||
const run_tests = b.addRunArtifact(tests);
|
const run_tests = b.addRunArtifact(tests);
|
||||||
const test_step = b.step("test", "Run unit tests");
|
|
||||||
test_step.dependOn(&run_tests.step);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
// ZIGDOM
|
|
||||||
// browser
|
|
||||||
const exe = b.addExecutable(.{
|
|
||||||
.name = "legacy_test",
|
|
||||||
.use_llvm = true,
|
|
||||||
.root_module = b.createModule(.{
|
|
||||||
.root_source_file = b.path("src/main_legacy_test.zig"),
|
|
||||||
.target = target,
|
|
||||||
.optimize = optimize,
|
|
||||||
.sanitize_c = enable_csan,
|
|
||||||
.sanitize_thread = enable_tsan,
|
|
||||||
.imports = &.{
|
|
||||||
.{ .name = "lightpanda", .module = lightpanda_module },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
b.installArtifact(exe);
|
|
||||||
|
|
||||||
const run_cmd = b.addRunArtifact(exe);
|
|
||||||
if (b.args) |args| {
|
if (b.args) |args| {
|
||||||
run_cmd.addArgs(args);
|
run_tests.addArgs(args);
|
||||||
}
|
}
|
||||||
const run_step = b.step("legacy_test", "Run the app");
|
|
||||||
run_step.dependOn(&run_cmd.step);
|
// step
|
||||||
|
const tests_step = b.step("test", "Run unit tests");
|
||||||
|
tests_step.dependOn(&run_tests.step);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
// wpt
|
// wpt
|
||||||
const exe = b.addExecutable(.{
|
// -----
|
||||||
.name = "lightpanda-wpt",
|
const wpt_module = b.createModule(.{
|
||||||
.use_llvm = true,
|
|
||||||
.root_module = b.createModule(.{
|
|
||||||
.root_source_file = b.path("src/main_wpt.zig"),
|
.root_source_file = b.path("src/main_wpt.zig"),
|
||||||
.target = target,
|
.target = target,
|
||||||
.optimize = optimize,
|
.optimize = optimize,
|
||||||
.sanitize_c = enable_csan,
|
|
||||||
.sanitize_thread = enable_tsan,
|
|
||||||
.imports = &.{
|
|
||||||
.{ .name = "lightpanda", .module = lightpanda_module },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
});
|
});
|
||||||
b.installArtifact(exe);
|
try addDependencies(b, wpt_module, opts);
|
||||||
|
|
||||||
const run_cmd = b.addRunArtifact(exe);
|
// compile and install
|
||||||
|
const wpt = b.addExecutable(.{
|
||||||
|
.name = "lightpanda-wpt",
|
||||||
|
.use_llvm = true,
|
||||||
|
.root_module = wpt_module,
|
||||||
|
});
|
||||||
|
|
||||||
|
// run
|
||||||
|
const wpt_cmd = b.addRunArtifact(wpt);
|
||||||
if (b.args) |args| {
|
if (b.args) |args| {
|
||||||
run_cmd.addArgs(args);
|
wpt_cmd.addArgs(args);
|
||||||
}
|
}
|
||||||
const run_step = b.step("wpt", "Run WPT tests");
|
// step
|
||||||
run_step.dependOn(&run_cmd.step);
|
const wpt_step = b.step("wpt", "WPT tests");
|
||||||
|
wpt_step.dependOn(&wpt_cmd.step);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// get v8
|
||||||
|
// -------
|
||||||
|
const v8 = b.dependency("v8", .{ .target = target, .optimize = optimize });
|
||||||
|
const get_v8 = b.addRunArtifact(v8.artifact("get-v8"));
|
||||||
|
const get_step = b.step("get-v8", "Get v8");
|
||||||
|
get_step.dependOn(&get_v8.step);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// build v8
|
||||||
|
// -------
|
||||||
|
const v8 = b.dependency("v8", .{ .target = target, .optimize = optimize });
|
||||||
|
const build_v8 = b.addRunArtifact(v8.artifact("build-v8"));
|
||||||
|
const build_step = b.step("build-v8", "Build v8");
|
||||||
|
build_step.dependOn(&build_v8.step);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn addDependencies(b: *Build, mod: *Build.Module, opts: *Build.Step.Options, prebuilt_v8_path: ?[]const u8) !void {
|
fn addDependencies(b: *Build, mod: *Build.Module, opts: *Build.Step.Options) !void {
|
||||||
|
try moduleNetSurf(b, mod);
|
||||||
mod.addImport("build_config", opts.createModule());
|
mod.addImport("build_config", opts.createModule());
|
||||||
|
|
||||||
const target = mod.resolved_target.?;
|
const target = mod.resolved_target.?;
|
||||||
const dep_opts = .{
|
const dep_opts = .{
|
||||||
.target = target,
|
.target = target,
|
||||||
.optimize = mod.optimize.?,
|
.optimize = mod.optimize.?,
|
||||||
.prebuilt_v8_path = prebuilt_v8_path,
|
|
||||||
.cache_root = b.pathFromRoot(".lp-cache"),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
mod.addIncludePath(b.path("vendor/lightpanda"));
|
mod.addIncludePath(b.path("vendor/lightpanda"));
|
||||||
|
|
||||||
{
|
|
||||||
// html5ever
|
|
||||||
|
|
||||||
// Build step to install html5ever dependency.
|
|
||||||
const html5ever_argv = blk: {
|
|
||||||
const argv: []const []const u8 = &.{
|
|
||||||
"cargo",
|
|
||||||
"build",
|
|
||||||
// Seems cargo can figure out required paths out of Cargo.toml.
|
|
||||||
"--manifest-path",
|
|
||||||
"src/html5ever/Cargo.toml",
|
|
||||||
// TODO: We can prefer `--artifact-dir` once it become stable.
|
|
||||||
"--target-dir",
|
|
||||||
b.getInstallPath(.prefix, "html5ever"),
|
|
||||||
// This must be the last argument.
|
|
||||||
"--release",
|
|
||||||
};
|
|
||||||
|
|
||||||
break :blk switch (mod.optimize.?) {
|
|
||||||
// Prefer dev build on debug option.
|
|
||||||
.Debug => argv[0 .. argv.len - 1],
|
|
||||||
else => argv,
|
|
||||||
};
|
|
||||||
};
|
|
||||||
const html5ever_exec_cargo = b.addSystemCommand(html5ever_argv);
|
|
||||||
const html5ever_step = b.step("html5ever", "Install html5ever dependency (requires cargo)");
|
|
||||||
html5ever_step.dependOn(&html5ever_exec_cargo.step);
|
|
||||||
opts.step.dependOn(html5ever_step);
|
|
||||||
|
|
||||||
const html5ever_obj = switch (mod.optimize.?) {
|
|
||||||
.Debug => b.getInstallPath(.prefix, "html5ever/debug/liblitefetch_html5ever.a"),
|
|
||||||
// Release builds.
|
|
||||||
else => b.getInstallPath(.prefix, "html5ever/release/liblitefetch_html5ever.a"),
|
|
||||||
};
|
|
||||||
|
|
||||||
mod.addObjectFile(.{ .cwd_relative = html5ever_obj });
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
{
|
||||||
// v8
|
// v8
|
||||||
const v8_opts = b.addOptions();
|
const v8_opts = b.addOptions();
|
||||||
@@ -229,6 +171,36 @@ fn addDependencies(b: *Build, mod: *Build.Module, opts: *Build.Step.Options, pre
|
|||||||
const v8_mod = b.dependency("v8", dep_opts).module("v8");
|
const v8_mod = b.dependency("v8", dep_opts).module("v8");
|
||||||
v8_mod.addOptions("default_exports", v8_opts);
|
v8_mod.addOptions("default_exports", v8_opts);
|
||||||
mod.addImport("v8", v8_mod);
|
mod.addImport("v8", v8_mod);
|
||||||
|
|
||||||
|
const release_dir = if (mod.optimize.? == .Debug) "debug" else "release";
|
||||||
|
const os = switch (target.result.os.tag) {
|
||||||
|
.linux => "linux",
|
||||||
|
.macos => "macos",
|
||||||
|
else => return error.UnsupportedPlatform,
|
||||||
|
};
|
||||||
|
var lib_path = try std.fmt.allocPrint(
|
||||||
|
mod.owner.allocator,
|
||||||
|
"v8/out/{s}/{s}/obj/zig/libc_v8.a",
|
||||||
|
.{ os, release_dir },
|
||||||
|
);
|
||||||
|
std.fs.cwd().access(lib_path, .{}) catch {
|
||||||
|
// legacy path
|
||||||
|
lib_path = try std.fmt.allocPrint(
|
||||||
|
mod.owner.allocator,
|
||||||
|
"v8/out/{s}/obj/zig/libc_v8.a",
|
||||||
|
.{release_dir},
|
||||||
|
);
|
||||||
|
};
|
||||||
|
mod.addObjectFile(mod.owner.path(lib_path));
|
||||||
|
|
||||||
|
switch (target.result.os.tag) {
|
||||||
|
.macos => {
|
||||||
|
// v8 has a dependency, abseil-cpp, which, on Mac, uses CoreFoundation
|
||||||
|
mod.addSystemFrameworkPath(.{ .cwd_relative = "/System/Library/Frameworks" });
|
||||||
|
mod.linkFramework("CoreFoundation", .{});
|
||||||
|
},
|
||||||
|
else => {},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
@@ -273,7 +245,6 @@ fn addDependencies(b: *Build, mod: *Build.Module, opts: *Build.Step.Options, pre
|
|||||||
mod.addCMacro("HAVE_ASSERT_H", "1");
|
mod.addCMacro("HAVE_ASSERT_H", "1");
|
||||||
mod.addCMacro("HAVE_BASENAME", "1");
|
mod.addCMacro("HAVE_BASENAME", "1");
|
||||||
mod.addCMacro("HAVE_BOOL_T", "1");
|
mod.addCMacro("HAVE_BOOL_T", "1");
|
||||||
mod.addCMacro("HAVE_BROTLI", "1");
|
|
||||||
mod.addCMacro("HAVE_BUILTIN_AVAILABLE", "1");
|
mod.addCMacro("HAVE_BUILTIN_AVAILABLE", "1");
|
||||||
mod.addCMacro("HAVE_CLOCK_GETTIME_MONOTONIC", "1");
|
mod.addCMacro("HAVE_CLOCK_GETTIME_MONOTONIC", "1");
|
||||||
mod.addCMacro("HAVE_DLFCN_H", "1");
|
mod.addCMacro("HAVE_DLFCN_H", "1");
|
||||||
@@ -402,27 +373,13 @@ fn addDependencies(b: *Build, mod: *Build.Module, opts: *Build.Step.Options, pre
|
|||||||
mod.addCMacro("STDC_HEADERS", "1");
|
mod.addCMacro("STDC_HEADERS", "1");
|
||||||
mod.addCMacro("TIME_WITH_SYS_TIME", "1");
|
mod.addCMacro("TIME_WITH_SYS_TIME", "1");
|
||||||
mod.addCMacro("USE_NGHTTP2", "1");
|
mod.addCMacro("USE_NGHTTP2", "1");
|
||||||
mod.addCMacro("USE_OPENSSL", "1");
|
mod.addCMacro("USE_MBEDTLS", "1");
|
||||||
mod.addCMacro("OPENSSL_IS_BORINGSSL", "1");
|
|
||||||
mod.addCMacro("USE_THREADS_POSIX", "1");
|
mod.addCMacro("USE_THREADS_POSIX", "1");
|
||||||
mod.addCMacro("USE_UNIX_SOCKETS", "1");
|
mod.addCMacro("USE_UNIX_SOCKETS", "1");
|
||||||
}
|
}
|
||||||
|
|
||||||
try buildZlib(b, mod);
|
try buildZlib(b, mod);
|
||||||
try buildBrotli(b, mod);
|
try buildMbedtls(b, mod);
|
||||||
const boringssl_dep = b.dependency("boringssl-zig", .{
|
|
||||||
.target = target,
|
|
||||||
.optimize = mod.optimize.?,
|
|
||||||
.force_pic = true,
|
|
||||||
});
|
|
||||||
|
|
||||||
const ssl = boringssl_dep.artifact("ssl");
|
|
||||||
ssl.bundle_ubsan_rt = false;
|
|
||||||
const crypto = boringssl_dep.artifact("crypto");
|
|
||||||
crypto.bundle_ubsan_rt = false;
|
|
||||||
|
|
||||||
mod.linkLibrary(ssl);
|
|
||||||
mod.linkLibrary(crypto);
|
|
||||||
try buildNghttp2(b, mod);
|
try buildNghttp2(b, mod);
|
||||||
try buildCurl(b, mod);
|
try buildCurl(b, mod);
|
||||||
|
|
||||||
@@ -438,6 +395,63 @@ fn addDependencies(b: *Build, mod: *Build.Module, opts: *Build.Step.Options, pre
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn moduleNetSurf(b: *Build, mod: *Build.Module) !void {
|
||||||
|
const target = mod.resolved_target.?;
|
||||||
|
const os = target.result.os.tag;
|
||||||
|
const arch = target.result.cpu.arch;
|
||||||
|
|
||||||
|
// iconv
|
||||||
|
const libiconv_lib_path = try std.fmt.allocPrint(
|
||||||
|
b.allocator,
|
||||||
|
"vendor/libiconv/out/{s}-{s}/lib/libiconv.a",
|
||||||
|
.{ @tagName(os), @tagName(arch) },
|
||||||
|
);
|
||||||
|
const libiconv_include_path = try std.fmt.allocPrint(
|
||||||
|
b.allocator,
|
||||||
|
"vendor/libiconv/out/{s}-{s}/lib/libiconv.a",
|
||||||
|
.{ @tagName(os), @tagName(arch) },
|
||||||
|
);
|
||||||
|
mod.addObjectFile(b.path(libiconv_lib_path));
|
||||||
|
mod.addIncludePath(b.path(libiconv_include_path));
|
||||||
|
|
||||||
|
{
|
||||||
|
// mimalloc
|
||||||
|
const mimalloc = "vendor/mimalloc";
|
||||||
|
const lib_path = try std.fmt.allocPrint(
|
||||||
|
b.allocator,
|
||||||
|
mimalloc ++ "/out/{s}-{s}/lib/libmimalloc.a",
|
||||||
|
.{ @tagName(os), @tagName(arch) },
|
||||||
|
);
|
||||||
|
mod.addObjectFile(b.path(lib_path));
|
||||||
|
mod.addIncludePath(b.path(mimalloc ++ "/include"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// netsurf libs
|
||||||
|
const ns = "vendor/netsurf";
|
||||||
|
const ns_include_path = try std.fmt.allocPrint(
|
||||||
|
b.allocator,
|
||||||
|
ns ++ "/out/{s}-{s}/include",
|
||||||
|
.{ @tagName(os), @tagName(arch) },
|
||||||
|
);
|
||||||
|
mod.addIncludePath(b.path(ns_include_path));
|
||||||
|
|
||||||
|
const libs: [4][]const u8 = .{
|
||||||
|
"libdom",
|
||||||
|
"libhubbub",
|
||||||
|
"libparserutils",
|
||||||
|
"libwapcaplet",
|
||||||
|
};
|
||||||
|
inline for (libs) |lib| {
|
||||||
|
const ns_lib_path = try std.fmt.allocPrint(
|
||||||
|
b.allocator,
|
||||||
|
ns ++ "/out/{s}-{s}/lib/" ++ lib ++ ".a",
|
||||||
|
.{ @tagName(os), @tagName(arch) },
|
||||||
|
);
|
||||||
|
mod.addObjectFile(b.path(ns_lib_path));
|
||||||
|
mod.addIncludePath(b.path(ns ++ "/" ++ lib ++ "/src"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn buildZlib(b: *Build, m: *Build.Module) !void {
|
fn buildZlib(b: *Build, m: *Build.Module) !void {
|
||||||
const zlib = b.addLibrary(.{
|
const zlib = b.addLibrary(.{
|
||||||
.name = "zlib",
|
.name = "zlib",
|
||||||
@@ -470,27 +484,123 @@ fn buildZlib(b: *Build, m: *Build.Module) !void {
|
|||||||
} });
|
} });
|
||||||
}
|
}
|
||||||
|
|
||||||
fn buildBrotli(b: *Build, m: *Build.Module) !void {
|
fn buildMbedtls(b: *Build, m: *Build.Module) !void {
|
||||||
const brotli = b.addLibrary(.{
|
const mbedtls = b.addLibrary(.{
|
||||||
.name = "brotli",
|
.name = "mbedtls",
|
||||||
.root_module = m,
|
.root_module = m,
|
||||||
});
|
});
|
||||||
|
|
||||||
const root = "vendor/brotli/c/";
|
const root = "vendor/mbedtls/";
|
||||||
brotli.addIncludePath(b.path(root ++ "include"));
|
mbedtls.addIncludePath(b.path(root ++ "include"));
|
||||||
brotli.addCSourceFiles(.{ .flags = &.{}, .files = &.{
|
mbedtls.addIncludePath(b.path(root ++ "library"));
|
||||||
root ++ "common/constants.c",
|
|
||||||
root ++ "common/context.c",
|
mbedtls.addCSourceFiles(.{ .flags = &.{}, .files = &.{
|
||||||
root ++ "common/dictionary.c",
|
root ++ "library/aes.c",
|
||||||
root ++ "common/platform.c",
|
root ++ "library/aesni.c",
|
||||||
root ++ "common/shared_dictionary.c",
|
root ++ "library/aesce.c",
|
||||||
root ++ "common/transform.c",
|
root ++ "library/aria.c",
|
||||||
root ++ "dec/bit_reader.c",
|
root ++ "library/asn1parse.c",
|
||||||
root ++ "dec/decode.c",
|
root ++ "library/asn1write.c",
|
||||||
root ++ "dec/huffman.c",
|
root ++ "library/base64.c",
|
||||||
root ++ "dec/prefix.c",
|
root ++ "library/bignum.c",
|
||||||
root ++ "dec/state.c",
|
root ++ "library/bignum_core.c",
|
||||||
root ++ "dec/static_init.c",
|
root ++ "library/bignum_mod.c",
|
||||||
|
root ++ "library/bignum_mod_raw.c",
|
||||||
|
root ++ "library/camellia.c",
|
||||||
|
root ++ "library/ccm.c",
|
||||||
|
root ++ "library/chacha20.c",
|
||||||
|
root ++ "library/chachapoly.c",
|
||||||
|
root ++ "library/cipher.c",
|
||||||
|
root ++ "library/cipher_wrap.c",
|
||||||
|
root ++ "library/constant_time.c",
|
||||||
|
root ++ "library/cmac.c",
|
||||||
|
root ++ "library/ctr_drbg.c",
|
||||||
|
root ++ "library/des.c",
|
||||||
|
root ++ "library/dhm.c",
|
||||||
|
root ++ "library/ecdh.c",
|
||||||
|
root ++ "library/ecdsa.c",
|
||||||
|
root ++ "library/ecjpake.c",
|
||||||
|
root ++ "library/ecp.c",
|
||||||
|
root ++ "library/ecp_curves.c",
|
||||||
|
root ++ "library/entropy.c",
|
||||||
|
root ++ "library/entropy_poll.c",
|
||||||
|
root ++ "library/error.c",
|
||||||
|
root ++ "library/gcm.c",
|
||||||
|
root ++ "library/hkdf.c",
|
||||||
|
root ++ "library/hmac_drbg.c",
|
||||||
|
root ++ "library/lmots.c",
|
||||||
|
root ++ "library/lms.c",
|
||||||
|
root ++ "library/md.c",
|
||||||
|
root ++ "library/md5.c",
|
||||||
|
root ++ "library/memory_buffer_alloc.c",
|
||||||
|
root ++ "library/nist_kw.c",
|
||||||
|
root ++ "library/oid.c",
|
||||||
|
root ++ "library/padlock.c",
|
||||||
|
root ++ "library/pem.c",
|
||||||
|
root ++ "library/pk.c",
|
||||||
|
root ++ "library/pk_ecc.c",
|
||||||
|
root ++ "library/pk_wrap.c",
|
||||||
|
root ++ "library/pkcs12.c",
|
||||||
|
root ++ "library/pkcs5.c",
|
||||||
|
root ++ "library/pkparse.c",
|
||||||
|
root ++ "library/pkwrite.c",
|
||||||
|
root ++ "library/platform.c",
|
||||||
|
root ++ "library/platform_util.c",
|
||||||
|
root ++ "library/poly1305.c",
|
||||||
|
root ++ "library/psa_crypto.c",
|
||||||
|
root ++ "library/psa_crypto_aead.c",
|
||||||
|
root ++ "library/psa_crypto_cipher.c",
|
||||||
|
root ++ "library/psa_crypto_client.c",
|
||||||
|
root ++ "library/psa_crypto_ffdh.c",
|
||||||
|
root ++ "library/psa_crypto_driver_wrappers_no_static.c",
|
||||||
|
root ++ "library/psa_crypto_ecp.c",
|
||||||
|
root ++ "library/psa_crypto_hash.c",
|
||||||
|
root ++ "library/psa_crypto_mac.c",
|
||||||
|
root ++ "library/psa_crypto_pake.c",
|
||||||
|
root ++ "library/psa_crypto_rsa.c",
|
||||||
|
root ++ "library/psa_crypto_se.c",
|
||||||
|
root ++ "library/psa_crypto_slot_management.c",
|
||||||
|
root ++ "library/psa_crypto_storage.c",
|
||||||
|
root ++ "library/psa_its_file.c",
|
||||||
|
root ++ "library/psa_util.c",
|
||||||
|
root ++ "library/ripemd160.c",
|
||||||
|
root ++ "library/rsa.c",
|
||||||
|
root ++ "library/rsa_alt_helpers.c",
|
||||||
|
root ++ "library/sha1.c",
|
||||||
|
root ++ "library/sha3.c",
|
||||||
|
root ++ "library/sha256.c",
|
||||||
|
root ++ "library/sha512.c",
|
||||||
|
root ++ "library/threading.c",
|
||||||
|
root ++ "library/timing.c",
|
||||||
|
root ++ "library/version.c",
|
||||||
|
root ++ "library/version_features.c",
|
||||||
|
root ++ "library/pkcs7.c",
|
||||||
|
root ++ "library/x509.c",
|
||||||
|
root ++ "library/x509_create.c",
|
||||||
|
root ++ "library/x509_crl.c",
|
||||||
|
root ++ "library/x509_crt.c",
|
||||||
|
root ++ "library/x509_csr.c",
|
||||||
|
root ++ "library/x509write.c",
|
||||||
|
root ++ "library/x509write_crt.c",
|
||||||
|
root ++ "library/x509write_csr.c",
|
||||||
|
root ++ "library/debug.c",
|
||||||
|
root ++ "library/mps_reader.c",
|
||||||
|
root ++ "library/mps_trace.c",
|
||||||
|
root ++ "library/net_sockets.c",
|
||||||
|
root ++ "library/ssl_cache.c",
|
||||||
|
root ++ "library/ssl_ciphersuites.c",
|
||||||
|
root ++ "library/ssl_client.c",
|
||||||
|
root ++ "library/ssl_cookie.c",
|
||||||
|
root ++ "library/ssl_debug_helpers_generated.c",
|
||||||
|
root ++ "library/ssl_msg.c",
|
||||||
|
root ++ "library/ssl_ticket.c",
|
||||||
|
root ++ "library/ssl_tls.c",
|
||||||
|
root ++ "library/ssl_tls12_client.c",
|
||||||
|
root ++ "library/ssl_tls12_server.c",
|
||||||
|
root ++ "library/ssl_tls13_keys.c",
|
||||||
|
root ++ "library/ssl_tls13_server.c",
|
||||||
|
root ++ "library/ssl_tls13_client.c",
|
||||||
|
root ++ "library/ssl_tls13_generic.c",
|
||||||
} });
|
} });
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -547,8 +657,6 @@ fn buildCurl(b: *Build, m: *Build.Module) !void {
|
|||||||
|
|
||||||
curl.addIncludePath(b.path(root ++ "lib"));
|
curl.addIncludePath(b.path(root ++ "lib"));
|
||||||
curl.addIncludePath(b.path(root ++ "include"));
|
curl.addIncludePath(b.path(root ++ "include"));
|
||||||
curl.addIncludePath(b.path("vendor/zlib"));
|
|
||||||
|
|
||||||
curl.addCSourceFiles(.{
|
curl.addCSourceFiles(.{
|
||||||
.flags = &.{},
|
.flags = &.{},
|
||||||
.files = &.{
|
.files = &.{
|
||||||
@@ -707,37 +815,11 @@ fn buildCurl(b: *Build, m: *Build.Module) !void {
|
|||||||
root ++ "lib/vauth/spnego_sspi.c",
|
root ++ "lib/vauth/spnego_sspi.c",
|
||||||
root ++ "lib/vauth/vauth.c",
|
root ++ "lib/vauth/vauth.c",
|
||||||
root ++ "lib/vtls/cipher_suite.c",
|
root ++ "lib/vtls/cipher_suite.c",
|
||||||
root ++ "lib/vtls/openssl.c",
|
root ++ "lib/vtls/mbedtls.c",
|
||||||
root ++ "lib/vtls/hostcheck.c",
|
root ++ "lib/vtls/mbedtls_threadlock.c",
|
||||||
root ++ "lib/vtls/keylog.c",
|
|
||||||
root ++ "lib/vtls/vtls.c",
|
root ++ "lib/vtls/vtls.c",
|
||||||
root ++ "lib/vtls/vtls_scache.c",
|
root ++ "lib/vtls/vtls_scache.c",
|
||||||
root ++ "lib/vtls/x509asn1.c",
|
root ++ "lib/vtls/x509asn1.c",
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const Manifest = struct {
|
|
||||||
version: []const u8,
|
|
||||||
minimum_zig_version: []const u8,
|
|
||||||
|
|
||||||
fn init(b: *std.Build) Manifest {
|
|
||||||
const input = @embedFile("build.zig.zon");
|
|
||||||
|
|
||||||
var diagnostics: std.zon.parse.Diagnostics = .{};
|
|
||||||
defer diagnostics.deinit(b.allocator);
|
|
||||||
|
|
||||||
return std.zon.parse.fromSlice(Manifest, b.allocator, input, &diagnostics, .{
|
|
||||||
.free_on_error = true,
|
|
||||||
.ignore_unknown_fields = true,
|
|
||||||
}) catch |err| {
|
|
||||||
switch (err) {
|
|
||||||
error.OutOfMemory => @panic("OOM"),
|
|
||||||
error.ParseZon => {
|
|
||||||
std.debug.print("Parse diagnostics:\n{f}\n", .{diagnostics});
|
|
||||||
std.process.exit(1);
|
|
||||||
},
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -2,17 +2,12 @@
|
|||||||
.name = .browser,
|
.name = .browser,
|
||||||
.paths = .{""},
|
.paths = .{""},
|
||||||
.version = "0.0.0",
|
.version = "0.0.0",
|
||||||
.fingerprint = 0xda130f3af836cea0, // Changing this has security and trust implications.
|
.fingerprint = 0xda130f3af836cea0,
|
||||||
.minimum_zig_version = "0.15.2",
|
|
||||||
.dependencies = .{
|
.dependencies = .{
|
||||||
.v8 = .{
|
.v8 = .{
|
||||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/d6b5f89cfc7feece29359e8c848bb916e8ecfab6.tar.gz",
|
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/7177ee1ae267a44751a0e7e012e257177699a375.tar.gz",
|
||||||
.hash = "v8-0.0.0-xddH6_0gBABrJc5cL6-P2wGvvweTTCgWdpmClr9r-C-s",
|
.hash = "v8-0.0.0-xddH63TCAwC1D1hEiOtbEnLBbtz9ZPHrdiGWLcBcYQB7",
|
||||||
},
|
|
||||||
//.v8 = .{ .path = "../zig-v8-fork" },
|
|
||||||
.@"boringssl-zig" = .{
|
|
||||||
.url = "git+https://github.com/Syndica/boringssl-zig.git#c53df00d06b02b755ad88bbf4d1202ed9687b096",
|
|
||||||
.hash = "boringssl-0.1.0-VtJeWehMAAA4RNnwRnzEvKcS9rjsR1QVRw1uJrwXxmVK",
|
|
||||||
},
|
},
|
||||||
|
// .v8 = .{ .path = "../zig-v8-fork" }
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
51
flake.lock
generated
51
flake.lock
generated
@@ -1,26 +1,5 @@
|
|||||||
{
|
{
|
||||||
"nodes": {
|
"nodes": {
|
||||||
"fenix": {
|
|
||||||
"inputs": {
|
|
||||||
"nixpkgs": [
|
|
||||||
"nixpkgs"
|
|
||||||
],
|
|
||||||
"rust-analyzer-src": "rust-analyzer-src"
|
|
||||||
},
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1763016383,
|
|
||||||
"narHash": "sha256-eYmo7FNvm3q08iROzwIi8i9dWuUbJJl3uLR3OLnSmdI=",
|
|
||||||
"owner": "nix-community",
|
|
||||||
"repo": "fenix",
|
|
||||||
"rev": "0fad5c0e5c531358e7174cd666af4608f08bc3ba",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "nix-community",
|
|
||||||
"repo": "fenix",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"flake-compat": {
|
"flake-compat": {
|
||||||
"flake": false,
|
"flake": false,
|
||||||
"locked": {
|
"locked": {
|
||||||
@@ -96,11 +75,11 @@
|
|||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1763043403,
|
"lastModified": 1756822655,
|
||||||
"narHash": "sha256-DgCTbHdIpzbXSlQlOZEWj8oPt2lrRMlSk03oIstvkVQ=",
|
"narHash": "sha256-xQAk8xLy7srAkR5NMZFsQFioL02iTHuuEIs3ohGpgdk=",
|
||||||
"owner": "nixos",
|
"owner": "nixos",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "75e04ecd084f93d4105ce68c07dac7656291fe2e",
|
"rev": "4bdac60bfe32c41103ae500ddf894c258291dd61",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -112,30 +91,12 @@
|
|||||||
},
|
},
|
||||||
"root": {
|
"root": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"fenix": "fenix",
|
|
||||||
"flake-utils": "flake-utils",
|
"flake-utils": "flake-utils",
|
||||||
"nixpkgs": "nixpkgs",
|
"nixpkgs": "nixpkgs",
|
||||||
"zigPkgs": "zigPkgs",
|
"zigPkgs": "zigPkgs",
|
||||||
"zlsPkg": "zlsPkg"
|
"zlsPkg": "zlsPkg"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"rust-analyzer-src": {
|
|
||||||
"flake": false,
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1762860488,
|
|
||||||
"narHash": "sha256-rMfWMCOo/pPefM2We0iMBLi2kLBAnYoB9thi4qS7uk4=",
|
|
||||||
"owner": "rust-lang",
|
|
||||||
"repo": "rust-analyzer",
|
|
||||||
"rev": "2efc80078029894eec0699f62ec8d5c1a56af763",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "rust-lang",
|
|
||||||
"ref": "nightly",
|
|
||||||
"repo": "rust-analyzer",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"systems": {
|
"systems": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1681028828,
|
"lastModified": 1681028828,
|
||||||
@@ -175,11 +136,11 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1762907712,
|
"lastModified": 1756555914,
|
||||||
"narHash": "sha256-VNW/+VYIg6N4b9Iq+F0YZmm22n74IdFS7hsPLblWuOY=",
|
"narHash": "sha256-7yoSPIVEuL+3Wzf6e7NHuW3zmruHizRrYhGerjRHTLI=",
|
||||||
"owner": "mitchellh",
|
"owner": "mitchellh",
|
||||||
"repo": "zig-overlay",
|
"repo": "zig-overlay",
|
||||||
"rev": "d16453ee78765e49527c56d23386cead799b6b53",
|
"rev": "d0df3a2fd0f11134409d6d5ea0e510e5e477f7d6",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
|||||||
12
flake.nix
12
flake.nix
@@ -11,11 +11,6 @@
|
|||||||
zlsPkg.inputs.zig-overlay.follows = "zigPkgs";
|
zlsPkg.inputs.zig-overlay.follows = "zigPkgs";
|
||||||
zlsPkg.inputs.nixpkgs.follows = "nixpkgs";
|
zlsPkg.inputs.nixpkgs.follows = "nixpkgs";
|
||||||
|
|
||||||
fenix = {
|
|
||||||
url = "github:nix-community/fenix";
|
|
||||||
inputs.nixpkgs.follows = "nixpkgs";
|
|
||||||
};
|
|
||||||
|
|
||||||
flake-utils.url = "github:numtide/flake-utils";
|
flake-utils.url = "github:numtide/flake-utils";
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -24,7 +19,6 @@
|
|||||||
nixpkgs,
|
nixpkgs,
|
||||||
zigPkgs,
|
zigPkgs,
|
||||||
zlsPkg,
|
zlsPkg,
|
||||||
fenix,
|
|
||||||
flake-utils,
|
flake-utils,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
@@ -42,8 +36,6 @@
|
|||||||
inherit system overlays;
|
inherit system overlays;
|
||||||
};
|
};
|
||||||
|
|
||||||
rustToolchain = fenix.packages.${system}.stable.toolchain;
|
|
||||||
|
|
||||||
# We need crtbeginS.o for building.
|
# We need crtbeginS.o for building.
|
||||||
crtFiles = pkgs.runCommand "crt-files" { } ''
|
crtFiles = pkgs.runCommand "crt-files" { } ''
|
||||||
mkdir -p $out/lib
|
mkdir -p $out/lib
|
||||||
@@ -57,9 +49,8 @@
|
|||||||
targetPkgs =
|
targetPkgs =
|
||||||
pkgs: with pkgs; [
|
pkgs: with pkgs; [
|
||||||
# Build Tools
|
# Build Tools
|
||||||
zigpkgs."0.15.2"
|
zigpkgs."0.15.1"
|
||||||
zls
|
zls
|
||||||
rustToolchain
|
|
||||||
python3
|
python3
|
||||||
pkg-config
|
pkg-config
|
||||||
cmake
|
cmake
|
||||||
@@ -75,6 +66,7 @@
|
|||||||
glib.dev
|
glib.dev
|
||||||
glibc.dev
|
glibc.dev
|
||||||
zlib
|
zlib
|
||||||
|
zlib.dev
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
in
|
in
|
||||||
|
|||||||
139
src/App.zig
139
src/App.zig
@@ -1,139 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
|
|
||||||
|
|
||||||
const Allocator = std.mem.Allocator;
|
|
||||||
|
|
||||||
const log = @import("log.zig");
|
|
||||||
const Http = @import("http/Http.zig");
|
|
||||||
const Snapshot = @import("browser/js/Snapshot.zig");
|
|
||||||
const Platform = @import("browser/js/Platform.zig");
|
|
||||||
|
|
||||||
const Notification = @import("Notification.zig");
|
|
||||||
const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
|
|
||||||
|
|
||||||
// Container for global state / objects that various parts of the system
|
|
||||||
// might need.
|
|
||||||
const App = @This();
|
|
||||||
|
|
||||||
http: Http,
|
|
||||||
config: Config,
|
|
||||||
platform: Platform,
|
|
||||||
snapshot: Snapshot,
|
|
||||||
telemetry: Telemetry,
|
|
||||||
allocator: Allocator,
|
|
||||||
app_dir_path: ?[]const u8,
|
|
||||||
notification: *Notification,
|
|
||||||
shutdown: bool = false,
|
|
||||||
|
|
||||||
pub const RunMode = enum {
|
|
||||||
help,
|
|
||||||
fetch,
|
|
||||||
serve,
|
|
||||||
version,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const Config = struct {
|
|
||||||
run_mode: RunMode,
|
|
||||||
tls_verify_host: bool = true,
|
|
||||||
http_proxy: ?[:0]const u8 = null,
|
|
||||||
proxy_bearer_token: ?[:0]const u8 = null,
|
|
||||||
http_timeout_ms: ?u31 = null,
|
|
||||||
http_connect_timeout_ms: ?u31 = null,
|
|
||||||
http_max_host_open: ?u8 = null,
|
|
||||||
http_max_concurrent: ?u8 = null,
|
|
||||||
user_agent: [:0]const u8,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub fn init(allocator: Allocator, config: Config) !*App {
|
|
||||||
const app = try allocator.create(App);
|
|
||||||
errdefer allocator.destroy(app);
|
|
||||||
|
|
||||||
app.config = config;
|
|
||||||
app.allocator = allocator;
|
|
||||||
|
|
||||||
app.notification = try Notification.init(allocator, null);
|
|
||||||
errdefer app.notification.deinit();
|
|
||||||
|
|
||||||
app.http = try Http.init(allocator, .{
|
|
||||||
.max_host_open = config.http_max_host_open orelse 4,
|
|
||||||
.max_concurrent = config.http_max_concurrent orelse 10,
|
|
||||||
.timeout_ms = config.http_timeout_ms orelse 5000,
|
|
||||||
.connect_timeout_ms = config.http_connect_timeout_ms orelse 0,
|
|
||||||
.http_proxy = config.http_proxy,
|
|
||||||
.tls_verify_host = config.tls_verify_host,
|
|
||||||
.proxy_bearer_token = config.proxy_bearer_token,
|
|
||||||
.user_agent = config.user_agent,
|
|
||||||
});
|
|
||||||
errdefer app.http.deinit();
|
|
||||||
|
|
||||||
app.platform = try Platform.init();
|
|
||||||
errdefer app.platform.deinit();
|
|
||||||
|
|
||||||
app.snapshot = try Snapshot.load(allocator);
|
|
||||||
errdefer app.snapshot.deinit(allocator);
|
|
||||||
|
|
||||||
app.app_dir_path = getAndMakeAppDir(allocator);
|
|
||||||
|
|
||||||
app.telemetry = try Telemetry.init(app, config.run_mode);
|
|
||||||
errdefer app.telemetry.deinit();
|
|
||||||
|
|
||||||
try app.telemetry.register(app.notification);
|
|
||||||
|
|
||||||
return app;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deinit(self: *App) void {
|
|
||||||
if (@atomicRmw(bool, &self.shutdown, .Xchg, true, .monotonic)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const allocator = self.allocator;
|
|
||||||
if (self.app_dir_path) |app_dir_path| {
|
|
||||||
allocator.free(app_dir_path);
|
|
||||||
self.app_dir_path = null;
|
|
||||||
}
|
|
||||||
self.telemetry.deinit();
|
|
||||||
self.notification.deinit();
|
|
||||||
self.http.deinit();
|
|
||||||
self.snapshot.deinit(allocator);
|
|
||||||
self.platform.deinit();
|
|
||||||
|
|
||||||
allocator.destroy(self);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn getAndMakeAppDir(allocator: Allocator) ?[]const u8 {
|
|
||||||
if (@import("builtin").is_test) {
|
|
||||||
return allocator.dupe(u8, "/tmp") catch unreachable;
|
|
||||||
}
|
|
||||||
const app_dir_path = std.fs.getAppDataDir(allocator, "lightpanda") catch |err| {
|
|
||||||
log.warn(.app, "get data dir", .{ .err = err });
|
|
||||||
return null;
|
|
||||||
};
|
|
||||||
|
|
||||||
std.fs.cwd().makePath(app_dir_path) catch |err| switch (err) {
|
|
||||||
error.PathAlreadyExists => return app_dir_path,
|
|
||||||
else => {
|
|
||||||
allocator.free(app_dir_path);
|
|
||||||
log.warn(.app, "create data dir", .{ .err = err, .path = app_dir_path });
|
|
||||||
return null;
|
|
||||||
},
|
|
||||||
};
|
|
||||||
return app_dir_path;
|
|
||||||
}
|
|
||||||
@@ -1,412 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
|
|
||||||
|
|
||||||
const log = @import("log.zig");
|
|
||||||
const Page = @import("browser/Page.zig");
|
|
||||||
const Transfer = @import("http/Client.zig").Transfer;
|
|
||||||
|
|
||||||
const Allocator = std.mem.Allocator;
|
|
||||||
|
|
||||||
const List = std.DoublyLinkedList;
|
|
||||||
|
|
||||||
// Allows code to register for and emit events.
|
|
||||||
// Keeps two lists
|
|
||||||
// 1 - for a given event type, a linked list of all the listeners
|
|
||||||
// 2 - for a given listener, a list of all it's registration
|
|
||||||
// The 2nd one is so that a listener can unregister all of it's listeners
|
|
||||||
// (there's currently no need for a listener to unregister only 1 or more
|
|
||||||
// specific listener).
|
|
||||||
//
|
|
||||||
// Scoping is important. Imagine we created a global singleton registry, and our
|
|
||||||
// CDP code registers for the "network_bytes_sent" event, because it needs to
|
|
||||||
// send messages to the client when this happens. Our HTTP client could then
|
|
||||||
// emit a "network_bytes_sent" message. It would be easy, and it would work.
|
|
||||||
// That is, it would work until the Telemetry code makes an HTTP request, and
|
|
||||||
// because everything's just one big global, that gets picked up by the
|
|
||||||
// registered CDP listener, and the telemetry network activity gets sent to the
|
|
||||||
// CDP client.
|
|
||||||
//
|
|
||||||
// To avoid this, one way or another, we need scoping. We could still have
|
|
||||||
// a global registry but every "register" and every "emit" has some type of
|
|
||||||
// "scope". This would have a run-time cost and still require some coordination
|
|
||||||
// between components to share a common scope.
|
|
||||||
//
|
|
||||||
// Instead, the approach that we take is to have a notification instance per
|
|
||||||
// scope. This makes some things harder, but we only plan on having 2
|
|
||||||
// notification instances at a given time: one in a Browser and one in the App.
|
|
||||||
// What about something like Telemetry, which lives outside of a Browser but
|
|
||||||
// still cares about Browser-events (like .page_navigate)? When the Browser
|
|
||||||
// notification is created, a `notification_created` event is raised in the
|
|
||||||
// App's notification, which Telemetry is registered for. This allows Telemetry
|
|
||||||
// to register for events in the Browser notification. See the Telemetry's
|
|
||||||
// register function.
|
|
||||||
const Notification = @This();
|
|
||||||
// Every event type (which are hard-coded), has a list of Listeners.
|
|
||||||
// When the event happens, we dispatch to those listener.
|
|
||||||
event_listeners: EventListeners,
|
|
||||||
|
|
||||||
// list of listeners for a specified receiver
|
|
||||||
// @intFromPtr(receiver) -> [listener1, listener2, ...]
|
|
||||||
// Used when `unregisterAll` is called.
|
|
||||||
listeners: std.AutoHashMapUnmanaged(usize, std.ArrayListUnmanaged(*Listener)),
|
|
||||||
|
|
||||||
allocator: Allocator,
|
|
||||||
mem_pool: std.heap.MemoryPool(Listener),
|
|
||||||
|
|
||||||
const EventListeners = struct {
|
|
||||||
page_remove: List = .{},
|
|
||||||
page_created: List = .{},
|
|
||||||
page_navigate: List = .{},
|
|
||||||
page_navigated: List = .{},
|
|
||||||
page_network_idle: List = .{},
|
|
||||||
page_network_almost_idle: List = .{},
|
|
||||||
http_request_fail: List = .{},
|
|
||||||
http_request_start: List = .{},
|
|
||||||
http_request_intercept: List = .{},
|
|
||||||
http_request_done: List = .{},
|
|
||||||
http_request_auth_required: List = .{},
|
|
||||||
http_response_data: List = .{},
|
|
||||||
http_response_header_done: List = .{},
|
|
||||||
notification_created: List = .{},
|
|
||||||
};
|
|
||||||
|
|
||||||
const Events = union(enum) {
|
|
||||||
page_remove: PageRemove,
|
|
||||||
page_created: *Page,
|
|
||||||
page_navigate: *const PageNavigate,
|
|
||||||
page_navigated: *const PageNavigated,
|
|
||||||
page_network_idle: *const PageNetworkIdle,
|
|
||||||
page_network_almost_idle: *const PageNetworkAlmostIdle,
|
|
||||||
http_request_fail: *const RequestFail,
|
|
||||||
http_request_start: *const RequestStart,
|
|
||||||
http_request_intercept: *const RequestIntercept,
|
|
||||||
http_request_auth_required: *const RequestAuthRequired,
|
|
||||||
http_request_done: *const RequestDone,
|
|
||||||
http_response_data: *const ResponseData,
|
|
||||||
http_response_header_done: *const ResponseHeaderDone,
|
|
||||||
notification_created: *Notification,
|
|
||||||
};
|
|
||||||
const EventType = std.meta.FieldEnum(Events);
|
|
||||||
|
|
||||||
pub const PageRemove = struct {};
|
|
||||||
|
|
||||||
pub const PageNavigate = struct {
|
|
||||||
req_id: usize,
|
|
||||||
timestamp: u64,
|
|
||||||
url: [:0]const u8,
|
|
||||||
opts: Page.NavigateOpts,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const PageNavigated = struct {
|
|
||||||
req_id: usize,
|
|
||||||
timestamp: u64,
|
|
||||||
url: [:0]const u8,
|
|
||||||
opts: Page.NavigatedOpts,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const PageNetworkIdle = struct {
|
|
||||||
timestamp: u64,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const PageNetworkAlmostIdle = struct {
|
|
||||||
timestamp: u64,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const RequestStart = struct {
|
|
||||||
transfer: *Transfer,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const RequestIntercept = struct {
|
|
||||||
transfer: *Transfer,
|
|
||||||
wait_for_interception: *bool,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const RequestAuthRequired = struct {
|
|
||||||
transfer: *Transfer,
|
|
||||||
wait_for_interception: *bool,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const ResponseData = struct {
|
|
||||||
data: []const u8,
|
|
||||||
transfer: *Transfer,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const ResponseHeaderDone = struct {
|
|
||||||
transfer: *Transfer,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const RequestDone = struct {
|
|
||||||
transfer: *Transfer,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const RequestFail = struct {
|
|
||||||
transfer: *Transfer,
|
|
||||||
err: anyerror,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub fn init(allocator: Allocator, parent: ?*Notification) !*Notification {
|
|
||||||
|
|
||||||
// This is put on the heap because we want to raise a .notification_created
|
|
||||||
// event, so that, something like Telemetry, can receive the
|
|
||||||
// .page_navigate event on all notification instances. That can only work
|
|
||||||
// if we dispatch .notification_created with a *Notification.
|
|
||||||
const notification = try allocator.create(Notification);
|
|
||||||
errdefer allocator.destroy(notification);
|
|
||||||
|
|
||||||
notification.* = .{
|
|
||||||
.listeners = .{},
|
|
||||||
.event_listeners = .{},
|
|
||||||
.allocator = allocator,
|
|
||||||
.mem_pool = std.heap.MemoryPool(Listener).init(allocator),
|
|
||||||
};
|
|
||||||
|
|
||||||
if (parent) |pn| {
|
|
||||||
pn.dispatch(.notification_created, notification);
|
|
||||||
}
|
|
||||||
|
|
||||||
return notification;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deinit(self: *Notification) void {
|
|
||||||
const allocator = self.allocator;
|
|
||||||
|
|
||||||
var it = self.listeners.valueIterator();
|
|
||||||
while (it.next()) |listener| {
|
|
||||||
listener.deinit(allocator);
|
|
||||||
}
|
|
||||||
self.listeners.deinit(allocator);
|
|
||||||
self.mem_pool.deinit();
|
|
||||||
allocator.destroy(self);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn register(self: *Notification, comptime event: EventType, receiver: anytype, func: EventFunc(event)) !void {
|
|
||||||
var list = &@field(self.event_listeners, @tagName(event));
|
|
||||||
|
|
||||||
var listener = try self.mem_pool.create();
|
|
||||||
errdefer self.mem_pool.destroy(listener);
|
|
||||||
|
|
||||||
listener.* = .{
|
|
||||||
.node = .{},
|
|
||||||
.list = list,
|
|
||||||
.receiver = receiver,
|
|
||||||
.event = event,
|
|
||||||
.func = @ptrCast(func),
|
|
||||||
.struct_name = @typeName(@typeInfo(@TypeOf(receiver)).pointer.child),
|
|
||||||
};
|
|
||||||
|
|
||||||
const allocator = self.allocator;
|
|
||||||
const gop = try self.listeners.getOrPut(allocator, @intFromPtr(receiver));
|
|
||||||
if (gop.found_existing == false) {
|
|
||||||
gop.value_ptr.* = .{};
|
|
||||||
}
|
|
||||||
try gop.value_ptr.append(allocator, listener);
|
|
||||||
|
|
||||||
// we don't add this until we've successfully added the entry to
|
|
||||||
// self.listeners
|
|
||||||
list.append(&listener.node);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn unregister(self: *Notification, comptime event: EventType, receiver: anytype) void {
|
|
||||||
var listeners = self.listeners.getPtr(@intFromPtr(receiver)) orelse return;
|
|
||||||
|
|
||||||
var i: usize = 0;
|
|
||||||
while (i < listeners.items.len) {
|
|
||||||
const listener = listeners.items[i];
|
|
||||||
if (listener.event != event) {
|
|
||||||
i += 1;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
listener.list.remove(&listener.node);
|
|
||||||
self.mem_pool.destroy(listener);
|
|
||||||
_ = listeners.swapRemove(i);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (listeners.items.len == 0) {
|
|
||||||
listeners.deinit(self.allocator);
|
|
||||||
const removed = self.listeners.remove(@intFromPtr(receiver));
|
|
||||||
std.debug.assert(removed == true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn unregisterAll(self: *Notification, receiver: *anyopaque) void {
|
|
||||||
var kv = self.listeners.fetchRemove(@intFromPtr(receiver)) orelse return;
|
|
||||||
for (kv.value.items) |listener| {
|
|
||||||
listener.list.remove(&listener.node);
|
|
||||||
self.mem_pool.destroy(listener);
|
|
||||||
}
|
|
||||||
kv.value.deinit(self.allocator);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn dispatch(self: *Notification, comptime event: EventType, data: ArgType(event)) void {
|
|
||||||
const list = &@field(self.event_listeners, @tagName(event));
|
|
||||||
|
|
||||||
var node = list.first;
|
|
||||||
while (node) |n| {
|
|
||||||
const listener: *Listener = @fieldParentPtr("node", n);
|
|
||||||
const func: EventFunc(event) = @ptrCast(@alignCast(listener.func));
|
|
||||||
func(listener.receiver, data) catch |err| {
|
|
||||||
log.err(.app, "dispatch error", .{
|
|
||||||
.err = err,
|
|
||||||
.event = event,
|
|
||||||
.source = "notification",
|
|
||||||
.listener = listener.struct_name,
|
|
||||||
});
|
|
||||||
};
|
|
||||||
node = n.next;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Given an event type enum, returns the type of arg the event emits
|
|
||||||
fn ArgType(comptime event: Notification.EventType) type {
|
|
||||||
inline for (std.meta.fields(Notification.Events)) |f| {
|
|
||||||
if (std.mem.eql(u8, f.name, @tagName(event))) {
|
|
||||||
return f.type;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
unreachable;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Given an event type enum, returns the listening function type
|
|
||||||
fn EventFunc(comptime event: Notification.EventType) type {
|
|
||||||
return *const fn (*anyopaque, ArgType(event)) anyerror!void;
|
|
||||||
}
|
|
||||||
|
|
||||||
// A listener. This is 1 receiver, with its function, and the linked list
|
|
||||||
// node that goes in the appropriate EventListeners list.
|
|
||||||
const Listener = struct {
|
|
||||||
// the receiver of the event, i.e. the self parameter to `func`
|
|
||||||
receiver: *anyopaque,
|
|
||||||
|
|
||||||
// the function to call
|
|
||||||
func: *const anyopaque,
|
|
||||||
|
|
||||||
// For logging slightly better error
|
|
||||||
struct_name: []const u8,
|
|
||||||
|
|
||||||
event: Notification.EventType,
|
|
||||||
|
|
||||||
// intrusive linked list node
|
|
||||||
node: List.Node,
|
|
||||||
|
|
||||||
// The event list this listener belongs to.
|
|
||||||
// We need this in order to be able to remove the node from the list
|
|
||||||
list: *List,
|
|
||||||
};
|
|
||||||
|
|
||||||
const testing = std.testing;

test "Notification" {
    var notifier = try Notification.init(testing.allocator, null);
    defer notifier.deinit();

    // Dispatching with no registered listener is a no-op.
    notifier.dispatch(.page_navigate, &.{
        .req_id = 1,
        .timestamp = 4,
        .url = undefined,
        .opts = .{},
    });

    var client = TestClient{};

    // A registered listener receives the dispatched payload.
    try notifier.register(.page_navigate, &client, TestClient.pageNavigate);
    notifier.dispatch(.page_navigate, &.{
        .req_id = 1,
        .timestamp = 4,
        .url = undefined,
        .opts = .{},
    });
    try testing.expectEqual(4, client.page_navigate);

    // After unregisterAll, dispatches no longer reach the client.
    notifier.unregisterAll(&client);
    notifier.dispatch(.page_navigate, &.{
        .req_id = 1,
        .timestamp = 10,
        .url = undefined,
        .opts = .{},
    });
    try testing.expectEqual(4, client.page_navigate);

    // Two distinct events can be registered for the same client.
    try notifier.register(.page_navigate, &client, TestClient.pageNavigate);
    try notifier.register(.page_navigated, &client, TestClient.pageNavigated);
    notifier.dispatch(.page_navigate, &.{
        .req_id = 1,
        .timestamp = 10,
        .url = undefined,
        .opts = .{},
    });
    notifier.dispatch(.page_navigated, &.{ .req_id = 1, .timestamp = 6, .url = undefined, .opts = .{} });
    try testing.expectEqual(14, client.page_navigate);
    try testing.expectEqual(6, client.page_navigated);

    // unregisterAll removes both registrations at once.
    notifier.unregisterAll(&client);
    notifier.dispatch(.page_navigate, &.{
        .req_id = 1,
        .timestamp = 100,
        .url = undefined,
        .opts = .{},
    });
    notifier.dispatch(.page_navigated, &.{ .req_id = 1, .timestamp = 100, .url = undefined, .opts = .{} });
    try testing.expectEqual(14, client.page_navigate);
    try testing.expectEqual(6, client.page_navigated);

    {
        // Targeted unregister: each event can be removed independently.
        try notifier.register(.page_navigate, &client, TestClient.pageNavigate);
        try notifier.register(.page_navigated, &client, TestClient.pageNavigated);
        notifier.dispatch(.page_navigate, &.{ .req_id = 1, .timestamp = 100, .url = undefined, .opts = .{} });
        notifier.dispatch(.page_navigated, &.{ .req_id = 1, .timestamp = 1000, .url = undefined, .opts = .{} });
        try testing.expectEqual(114, client.page_navigate);
        try testing.expectEqual(1006, client.page_navigated);

        notifier.unregister(.page_navigate, &client);
        notifier.dispatch(.page_navigate, &.{ .req_id = 1, .timestamp = 100, .url = undefined, .opts = .{} });
        notifier.dispatch(.page_navigated, &.{ .req_id = 1, .timestamp = 1000, .url = undefined, .opts = .{} });
        try testing.expectEqual(114, client.page_navigate);
        try testing.expectEqual(2006, client.page_navigated);

        notifier.unregister(.page_navigated, &client);
        notifier.dispatch(.page_navigate, &.{ .req_id = 1, .timestamp = 100, .url = undefined, .opts = .{} });
        notifier.dispatch(.page_navigated, &.{ .req_id = 1, .timestamp = 1000, .url = undefined, .opts = .{} });
        try testing.expectEqual(114, client.page_navigate);
        try testing.expectEqual(2006, client.page_navigated);

        // Unregistering an event that is no longer registered is a safe no-op.
        notifier.unregister(.page_navigated, &client);
        notifier.dispatch(.page_navigate, &.{ .req_id = 1, .timestamp = 100, .url = undefined, .opts = .{} });
        notifier.dispatch(.page_navigated, &.{ .req_id = 1, .timestamp = 1000, .url = undefined, .opts = .{} });
        try testing.expectEqual(114, client.page_navigate);
        try testing.expectEqual(2006, client.page_navigated);
    }
}
|
|
||||||
|
|
||||||
// Minimal notification client used by the test above. Each handler folds the
// event's timestamp into a counter so assertions can observe which dispatches
// actually reached it.
const TestClient = struct {
    page_navigate: u64 = 0,
    page_navigated: u64 = 0,

    fn pageNavigate(ptr: *anyopaque, data: *const Notification.PageNavigate) !void {
        const self: *TestClient = @ptrCast(@alignCast(ptr));
        self.page_navigate += data.timestamp;
    }

    fn pageNavigated(ptr: *anyopaque, data: *const Notification.PageNavigated) !void {
        const self: *TestClient = @ptrCast(@alignCast(ptr));
        self.page_navigated += data.timestamp;
    }
};
|
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
//! Processes operating system signals (SIGINT, SIGTERM) and runs registered
//! callbacks so the system can shut down gracefully.
//!
//! Memory allocated from `arena` is never freed here; the owner is expected
//! to tear down the whole arena when the program exits.
const std = @import("std");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const lp = @import("lightpanda");

const log = lp.log;

const SigHandler = @This();

arena: Allocator,

sigset: std.posix.sigset_t = undefined,
handle_thread: ?std.Thread = null,

// How many INT/TERM signals have been seen; the third one force-exits.
attempt: u32 = 0,
listeners: std.ArrayList(Listener) = .empty,

pub const Listener = struct {
    // Type-erased, arena-copied argument tuple handed back to `start`.
    args: []const u8,
    start: *const fn (context: *const anyopaque) void,
};

// Blocks SIGINT, SIGTERM and SIGQUIT for the current thread (and every
// thread spawned from it), then starts a detached thread that waits for
// those signals synchronously.
pub fn install(self: *SigHandler) !void {
    self.sigset = std.posix.sigemptyset();
    std.posix.sigaddset(&self.sigset, std.posix.SIG.INT);
    std.posix.sigaddset(&self.sigset, std.posix.SIG.TERM);
    std.posix.sigaddset(&self.sigset, std.posix.SIG.QUIT);
    std.posix.sigprocmask(std.posix.SIG.BLOCK, &self.sigset, null);

    self.handle_thread = try std.Thread.spawn(.{ .allocator = self.arena }, SigHandler.sighandle, .{self});
    self.handle_thread.?.detach();
}

// Registers `func` (which must return void) to be invoked when a termination
// signal arrives. The argument tuple is copied into the arena so it outlives
// the caller's frame.
// NOTE(review): `listeners` is read by the signal thread without locking;
// presumably all `on` calls happen before `install` — confirm with callers.
pub fn on(self: *SigHandler, func: anytype, args: std.meta.ArgsTuple(@TypeOf(func))) !void {
    assert(@typeInfo(@TypeOf(func)).@"fn".return_type.? == void);

    const Args = @TypeOf(args);
    const TypeErased = struct {
        fn start(context: *const anyopaque) void {
            const args_casted: *const Args = @ptrCast(@alignCast(context));
            @call(.auto, func, args_casted.*);
        }
    };

    const buffer = try self.arena.alignedAlloc(u8, .of(Args), @sizeOf(Args));
    errdefer self.arena.free(buffer);

    // Copy the tuple's raw bytes into the arena-owned buffer.
    @memcpy(buffer, std.mem.asBytes(&args));

    try self.listeners.append(self.arena, .{
        .args = buffer,
        .start = TypeErased.start,
    });
}

// Signal-wait loop. The first two INT/TERM signals run the listeners
// (graceful shutdown); the third exits immediately. Any other unblocked
// signal (e.g. QUIT) is ignored.
fn sighandle(self: *SigHandler) noreturn {
    while (true) {
        var sig: c_int = 0;

        const rc = std.c.sigwait(&self.sigset, &sig);
        if (rc != 0) {
            log.err(.app, "Unable to process signal {}", .{rc});
            std.process.exit(1);
        }

        switch (sig) {
            std.posix.SIG.INT, std.posix.SIG.TERM => {
                if (self.attempt > 1) {
                    std.process.exit(1);
                }
                self.attempt += 1;

                log.info(.app, "Received termination signal...", .{});
                for (self.listeners.items) |*item| {
                    item.start(item.args.ptr);
                }
                continue;
            },
            else => continue,
        }
    }
}
|
|
||||||
@@ -1,21 +1,3 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
|
|
||||||
const TestHTTPServer = @This();
|
const TestHTTPServer = @This();
|
||||||
@@ -79,7 +61,6 @@ fn handleConnection(self: *TestHTTPServer, conn: std.net.Server.Connection) !voi
|
|||||||
return err;
|
return err;
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
self.handler(&req) catch |err| {
|
self.handler(&req) catch |err| {
|
||||||
std.debug.print("test http error '{s}': {}\n", .{ req.head.target, err });
|
std.debug.print("test http error '{s}': {}\n", .{ req.head.target, err });
|
||||||
try req.respond("server error", .{ .status = .internal_server_error });
|
try req.respond("server error", .{ .status = .internal_server_error });
|
||||||
@@ -132,11 +113,6 @@ fn getContentType(file_path: []const u8) []const u8 {
|
|||||||
return "text/xml";
|
return "text/xml";
|
||||||
}
|
}
|
||||||
|
|
||||||
if (std.mem.endsWith(u8, file_path, ".mjs")) {
|
|
||||||
// mjs are ECMAScript modules
|
|
||||||
return "application/json";
|
|
||||||
}
|
|
||||||
|
|
||||||
std.debug.print("TestHTTPServer asked to serve an unknown file type: {s}\n", .{file_path});
|
std.debug.print("TestHTTPServer asked to serve an unknown file type: {s}\n", .{file_path});
|
||||||
return "text/html";
|
return "text/html";
|
||||||
}
|
}
|
||||||
|
|||||||
113
src/app.zig
Normal file
113
src/app.zig
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
const std = @import("std");

const Allocator = std.mem.Allocator;

const log = @import("log.zig");
const Http = @import("http/Http.zig");
const Platform = @import("runtime/js.zig").Platform;

const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
const Notification = @import("notification.zig").Notification;

// Container for global state / objects that various parts of the system
// might need.
pub const App = struct {
    http: Http,
    config: Config,
    platform: Platform,
    allocator: Allocator,
    telemetry: Telemetry,
    // Per-user application data directory; null when it could not be
    // determined or created. Owned by `allocator`.
    app_dir_path: ?[]const u8,
    notification: *Notification,

    pub const RunMode = enum {
        help,
        fetch,
        serve,
        version,
    };

    pub const Config = struct {
        run_mode: RunMode,
        tls_verify_host: bool = true,
        http_proxy: ?[:0]const u8 = null,
        proxy_bearer_token: ?[:0]const u8 = null,
        http_timeout_ms: ?u31 = null,
        http_connect_timeout_ms: ?u31 = null,
        http_max_host_open: ?u8 = null,
        http_max_concurrent: ?u8 = null,
    };

    // Creates and fully wires the App (notification channel, HTTP stack,
    // JS platform, telemetry). On any failure, everything allocated so far
    // is released through the errdefer chain. Caller owns the returned
    // pointer and must call deinit().
    pub fn init(allocator: Allocator, config: Config) !*App {
        const app = try allocator.create(App);
        errdefer allocator.destroy(app);

        const notification = try Notification.init(allocator, null);
        errdefer notification.deinit();

        var http = try Http.init(allocator, .{
            .max_host_open = config.http_max_host_open orelse 4,
            .max_concurrent = config.http_max_concurrent orelse 10,
            .timeout_ms = config.http_timeout_ms orelse 5000,
            .connect_timeout_ms = config.http_connect_timeout_ms orelse 0,
            .http_proxy = config.http_proxy,
            .tls_verify_host = config.tls_verify_host,
            .proxy_bearer_token = config.proxy_bearer_token,
        });
        errdefer http.deinit();

        const platform = try Platform.init();
        errdefer platform.deinit();

        const app_dir_path = getAndMakeAppDir(allocator);
        // Fix: the path was previously leaked when a later init step
        // (Telemetry.init / register) failed.
        errdefer if (app_dir_path) |p| allocator.free(p);

        app.* = .{
            .http = http,
            .allocator = allocator,
            // Telemetry needs a stable *App, so it is filled in just below.
            .telemetry = undefined,
            .platform = platform,
            .app_dir_path = app_dir_path,
            .notification = notification,
            .config = config,
        };

        app.telemetry = try Telemetry.init(app, config.run_mode);
        errdefer app.telemetry.deinit();

        try app.telemetry.register(app.notification);

        return app;
    }

    // Releases everything owned by the App, including the App itself.
    pub fn deinit(self: *App) void {
        const allocator = self.allocator;
        if (self.app_dir_path) |app_dir_path| {
            allocator.free(app_dir_path);
        }
        self.telemetry.deinit();
        self.notification.deinit();
        self.http.deinit();
        self.platform.deinit();
        allocator.destroy(self);
    }
};
|
||||||
|
|
||||||
|
// Resolves (and creates if necessary) the per-user application data
// directory. Returns null — after logging a warning — when the directory
// cannot be determined or created. Caller owns the returned path and must
// free it with `allocator`.
fn getAndMakeAppDir(allocator: Allocator) ?[]const u8 {
    if (@import("builtin").is_test) {
        return allocator.dupe(u8, "/tmp") catch unreachable;
    }
    const app_dir_path = std.fs.getAppDataDir(allocator, "lightpanda") catch |err| {
        log.warn(.app, "get data dir", .{ .err = err });
        return null;
    };

    std.fs.cwd().makePath(app_dir_path) catch |err| switch (err) {
        error.PathAlreadyExists => return app_dir_path,
        else => {
            // Fix: log BEFORE freeing — the original freed app_dir_path and
            // then passed the freed slice to the logger (use-after-free).
            log.warn(.app, "create data dir", .{ .err = err, .path = app_dir_path });
            allocator.free(app_dir_path);
            return null;
        },
    };
    return app_dir_path;
}
|
||||||
@@ -1,114 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");

const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;

const js = @import("js/js.zig");
const log = @import("../log.zig");
const App = @import("../App.zig");
const HttpClient = @import("../http/Client.zig");
const Notification = @import("../Notification.zig");

const IS_DEBUG = @import("builtin").mode == .Debug;

const Session = @import("Session.zig");

// Browser is an instance of the browser.
// You can create multiple browser instances.
// A browser contains only one session.
const Browser = @This();

env: js.Env,
app: *App,
session: ?Session,
allocator: Allocator,
http_client: *HttpClient,
// Scratch arenas with distinct lifetimes: per call, per page, per session,
// and per transfer.
call_arena: ArenaAllocator,
page_arena: ArenaAllocator,
session_arena: ArenaAllocator,
transfer_arena: ArenaAllocator,
notification: *Notification,

// Creates a Browser bound to `app`: its own JS environment plus a
// notification channel chained to the app-level one.
pub fn init(app: *App) !Browser {
    const allocator = app.allocator;

    var env = try js.Env.init(allocator, &app.platform, &app.snapshot);
    errdefer env.deinit();

    const notification = try Notification.init(allocator, app.notification);
    errdefer notification.deinit();

    app.http.client.notification = notification;
    app.http.client.next_request_id = 0; // Should we track ids in CDP only?

    return .{
        .app = app,
        .env = env,
        .session = null,
        .allocator = allocator,
        .notification = notification,
        .http_client = app.http.client,
        .call_arena = ArenaAllocator.init(allocator),
        .page_arena = ArenaAllocator.init(allocator),
        .session_arena = ArenaAllocator.init(allocator),
        .transfer_arena = ArenaAllocator.init(allocator),
    };
}

pub fn deinit(self: *Browser) void {
    self.closeSession();
    self.env.deinit();
    self.call_arena.deinit();
    self.page_arena.deinit();
    self.session_arena.deinit();
    self.transfer_arena.deinit();
    // Detach from the HTTP client before destroying the notification so the
    // client never holds a dangling pointer.
    self.http_client.notification = null;
    self.notification.deinit();
}

// Replaces any existing session with a freshly initialized one.
pub fn newSession(self: *Browser) !*Session {
    self.closeSession();
    // Initialize in place: Session.init fills a caller-provided slot so the
    // session pointer stays stable.
    self.session = @as(Session, undefined);
    const session = &self.session.?;
    try Session.init(session, self);
    return session;
}

pub fn closeSession(self: *Browser) void {
    if (self.session) |*session| {
        session.deinit();
        self.session = null;
        // Keep up to 1MB around for the next session; hint V8 to shed memory.
        _ = self.session_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });
        self.env.lowMemoryNotification();
    }
}

pub fn runMicrotasks(self: *const Browser) void {
    self.env.runMicrotasks();
}

// Drains the V8 message loop, then runs idle tasks.
pub fn runMessageLoop(self: *const Browser) void {
    while (self.env.pumpMessageLoop()) {
        if (comptime IS_DEBUG) {
            log.debug(.browser, "pumpMessageLoop", .{});
        }
    }
    self.env.runIdleTasks();
}
|
|
||||||
52
src/browser/DataURI.zig
Normal file
52
src/browser/DataURI.zig
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
const std = @import("std");
const Allocator = std.mem.Allocator;

// Parses data:[<media-type>][;base64],<data>
// Returns null when `src` is not a data: URI. For base64 payloads the
// returned slice is newly allocated (caller owns it); otherwise it aliases
// `src` directly.
pub fn parse(allocator: Allocator, src: []const u8) !?[]const u8 {
    if (!std.mem.startsWith(u8, src, "data:")) {
        return null;
    }

    const uri = src["data:".len..];
    // A data URI without a comma separator is malformed.
    const comma = std.mem.indexOfScalar(u8, uri, ',') orelse return null;

    const metadata = uri[0..comma];
    const payload = uri[comma + 1 ..];

    // Only base64-encoded payloads need decoding; anything else is raw text.
    if (!std.mem.endsWith(u8, metadata, ";base64")) {
        return payload;
    }

    const decoder = std.base64.standard.Decoder;
    const decoded_size = try decoder.calcSizeForSlice(payload);

    const buffer = try allocator.alloc(u8, decoded_size);
    errdefer allocator.free(buffer);

    try decoder.decode(buffer, payload);
    return buffer;
}

const testing = @import("../testing.zig");
test "DataURI: parse valid" {
    try test_valid("data:text/javascript; charset=utf-8;base64,Zm9v", "foo");
    try test_valid("data:text/javascript; charset=utf-8;,foo", "foo");
    try test_valid("data:,foo", "foo");
}

test "DataURI: parse invalid" {
    try test_cannot_parse("atad:,foo");
    try test_cannot_parse("data:foo");
    try test_cannot_parse("data:");
}

fn test_valid(uri: []const u8, expected: []const u8) !void {
    defer testing.reset();
    const data_uri = try parse(testing.arena_allocator, uri) orelse return error.TestFailed;
    try testing.expectEqual(expected, data_uri);
}

fn test_cannot_parse(uri: []const u8) !void {
    try testing.expectEqual(null, parse(undefined, uri));
}
|
||||||
@@ -1,516 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
const builtin = @import("builtin");

const log = @import("../log.zig");
const String = @import("../string.zig").String;

const js = @import("js/js.zig");
const Page = @import("Page.zig");

const Node = @import("webapi/Node.zig");
const Event = @import("webapi/Event.zig");
const EventTarget = @import("webapi/EventTarget.zig");

const Allocator = std.mem.Allocator;

const IS_DEBUG = builtin.mode == .Debug;

pub const EventManager = @This();

page: *Page,
arena: Allocator,
// Pools amortize per-listener / per-list allocations; both draw from the
// page arena.
listener_pool: std.heap.MemoryPool(Listener),
list_pool: std.heap.MemoryPool(std.DoublyLinkedList),
// Target pointer (as usize) -> list of listeners registered on that target.
lookup: std.AutoHashMapUnmanaged(usize, *std.DoublyLinkedList),
// Presumably tracks nested dispatches so removals can be deferred (see
// deferred_removals) — usage is outside this view.
dispatch_depth: usize,
deferred_removals: std.ArrayList(struct { list: *std.DoublyLinkedList, listener: *Listener }),

// Creates an event manager bound to `page`; all allocations use the page arena.
pub fn init(page: *Page) EventManager {
    return .{
        .page = page,
        .lookup = .{},
        .arena = page.arena,
        .list_pool = std.heap.MemoryPool(std.DoublyLinkedList).init(page.arena),
        .listener_pool = std.heap.MemoryPool(Listener).init(page.arena),
        .dispatch_depth = 0,
        .deferred_removals = .{},
    };
}
|
|
||||||
|
|
||||||
// Options accepted when registering a listener (the subset of the DOM
// addEventListener options this engine supports).
pub const RegisterOptions = struct {
    once: bool = false,
    capture: bool = false,
    passive: bool = false,
    signal: ?*@import("webapi/AbortSignal.zig") = null,
};
|
|
||||||
|
|
||||||
// A listener callback: either a plain JS function, or a JS object
// (presumably invoked through its handleEvent — confirm in dispatch code).
pub const Callback = union(enum) {
    function: js.Function,
    object: js.Object,
};
|
|
||||||
|
|
||||||
// Registers `callback` for events of type `typ` on `target`. Silently does
// nothing when an identical (type, callback, capture) listener already
// exists, or when the provided abort signal has already fired.
pub fn register(self: *EventManager, target: *EventTarget, typ: []const u8, callback: Callback, opts: RegisterOptions) !void {
    if (comptime IS_DEBUG) {
        log.debug(.event, "eventManager.register", .{ .type = typ, .capture = opts.capture, .once = opts.once, .target = target });
    }

    // An already-aborted signal means the listener could never fire.
    if (opts.signal) |signal| {
        if (signal.getAborted()) {
            return;
        }
    }

    const gop = try self.lookup.getOrPut(self.arena, @intFromPtr(target));
    if (gop.found_existing) {
        // A (type, callback, capture) triple is registered at most once.
        var it = gop.value_ptr.*.first;
        while (it) |node| {
            const existing: *Listener = @alignCast(@fieldParentPtr("node", node));
            if (existing.typ.eqlSlice(typ)) {
                const same_callback = switch (callback) {
                    .object => |obj| existing.function.eqlObject(obj),
                    .function => |func| existing.function.eqlFunction(func),
                };
                if (same_callback and existing.capture == opts.capture) {
                    return;
                }
            }
            it = node.next;
        }
    } else {
        gop.value_ptr.* = try self.list_pool.create();
        gop.value_ptr.*.* = .{};
    }

    const func: Function = switch (callback) {
        .function => |f| .{ .value = f },
        .object => |o| .{ .object = o },
    };

    const listener = try self.listener_pool.create();
    listener.* = .{
        .node = .{},
        .once = opts.once,
        .capture = opts.capture,
        .passive = opts.passive,
        .function = func,
        .signal = opts.signal,
        .typ = try String.init(self.arena, typ, .{}),
    };
    // Append the listener to this target's list.
    gop.value_ptr.*.append(&listener.node);
}
|
|
||||||
|
|
||||||
// Removes the listener matching (type, callback, capture) from `target`,
// if one is registered; otherwise does nothing.
pub fn remove(self: *EventManager, target: *EventTarget, typ: []const u8, callback: Callback, use_capture: bool) void {
    const list = self.lookup.get(@intFromPtr(target)) orelse return;
    const listener = findListener(list, typ, callback, use_capture) orelse return;
    self.removeListener(list, listener);
}
|
|
||||||
|
|
||||||
// Dispatches `event` at `target`. Node targets get full DOM tree propagation
// (capture / target / bubble); every other target type just has its own
// listener list invoked.
pub fn dispatch(self: *EventManager, target: *EventTarget, event: *Event) !void {
    if (comptime IS_DEBUG) {
        log.debug(.event, "eventManager.dispatch", .{ .type = event._type_string.str(), .bubbles = event._bubbles });
    }

    event._target = target;
    event._dispatch_target = target; // composedPath() needs the original target

    var was_handled = false;
    // Handlers may queue microtasks (e.g. promise reactions); flush them once
    // the dispatch completes.
    defer if (was_handled) {
        self.page.js.runMicrotasks();
    };

    switch (target._type) {
        .node => |node| try self.dispatchNode(node, event, &was_handled),
        // Non-node targets have no tree to propagate through.
        .xhr,
        .window,
        .abort_signal,
        .media_query_list,
        .message_port,
        .text_track_cue,
        .navigation,
        .screen,
        .screen_orientation,
        .generic,
        => {
            const list = self.lookup.get(@intFromPtr(target)) orelse return;
            try self.dispatchAll(list, target, event, &was_handled);
        },
    }
}
|
|
||||||
|
|
||||||
// Many events can be attached either via addEventListener or as a property
// (XHR events, window.onload, ...). The property is NOT a shortcut for
// addEventListener: a property-assigned handler cannot be removed by
// removeEventListener, and when both are set, both execute.
const DispatchWithFunctionOptions = struct {
    // Label used in debug/warn logs for this dispatch site.
    context: []const u8,
    // When true, stamp the event's target fields before dispatching.
    inject_target: bool = true,
};
|
|
||||||
// Dispatches `event` first to an optional property-style handler
// (`function_`), then to any listeners registered via addEventListener —
// both fire when both exist.
pub fn dispatchWithFunction(self: *EventManager, target: *EventTarget, event: *Event, function_: ?js.Function, comptime opts: DispatchWithFunctionOptions) !void {
    if (comptime IS_DEBUG) {
        log.debug(.event, "dispatchWithFunction", .{ .type = event._type_string.str(), .context = opts.context, .has_function = function_ != null });
    }

    if (comptime opts.inject_target) {
        event._target = target;
        event._dispatch_target = target; // composedPath() needs the original target
    }

    var was_dispatched = false;
    // Flush any microtasks queued by the handlers once dispatch completes.
    defer if (was_dispatched) {
        self.page.js.runMicrotasks();
    };

    if (function_) |func| {
        event._current_target = target;
        if (func.callWithThis(void, target, .{event})) {
            was_dispatched = true;
        } else |err| {
            // a non-JS error
            log.warn(.event, opts.context, .{ .err = err });
        }
    }

    const list = self.lookup.get(@intFromPtr(target)) orelse return;
    try self.dispatchAll(list, target, event, &was_dispatched);
}
|
|
||||||
|
|
||||||
// Dispatches `event` through the DOM tree: capture (root → target), at
// target, then bubble (target → root, only when the event bubbles). Shadow
// roots mark the event for retargeting; non-composed events stop at the
// shadow boundary.
fn dispatchNode(self: *EventManager, target: *Node, event: *Event, was_handled: *bool) !void {
    const ShadowRoot = @import("webapi/ShadowRoot.zig");

    // Runs even on early return: resets the event phase and fires the
    // default action (click / keydown) unless preventDefault() was called.
    defer {
        event._event_phase = .none;

        if (event._prevent_default) {
            // can't return in a defer (╯°□°)╯︵ ┻━┻
        } else if (event._type_string.eqlSlice("click")) {
            self.page.handleClick(target) catch |err| {
                log.warn(.event, "page.click", .{ .err = err });
            };
        } else if (event._type_string.eqlSlice("keydown")) {
            self.page.handleKeydown(target, event) catch |err| {
                log.warn(.event, "page.keydown", .{ .err = err });
            };
        }
    }

    // Build the propagation path from the target up toward the root. The
    // fixed-size buffer bounds the path; deeper ancestors are dropped.
    var path_buffer: [128]*EventTarget = undefined;
    var path_len: usize = 0;

    var walker: ?*Node = target;
    while (walker) |cur| {
        if (path_len >= path_buffer.len) break;
        path_buffer[path_len] = cur.asEventTarget();
        path_len += 1;

        if (cur.is(ShadowRoot)) |shadow| {
            event._needs_retargeting = true;

            // Non-composed events never cross a shadow boundary.
            if (!event._composed) {
                break;
            }

            // Composed events continue the walk from the shadow host.
            walker = shadow._host.asNode();
            continue;
        }

        walker = cur._parent;
    }

    // The window is not part of the DOM, but events still propagate through
    // it. NOTE(review): this also appends the window when the walk stopped at
    // a shadow boundary — confirm that's intended for non-composed events.
    if (path_len < path_buffer.len) {
        path_buffer[path_len] = self.page.window.asEventTarget();
        path_len += 1;
    }

    const path = path_buffer[0..path_len];

    // Phase 1: capturing (root → target, target excluded). Happens for all
    // events, whether or not they bubble.
    event._event_phase = .capturing_phase;
    var idx: usize = path_len;
    while (idx > 1) {
        idx -= 1;
        const current_target = path[idx];
        if (self.lookup.get(@intFromPtr(current_target))) |list| {
            try self.dispatchPhase(list, current_target, event, was_handled, true);
            if (event._stop_propagation) {
                return;
            }
        }
    }

    // Phase 2: at the target itself (both capture and bubble listeners fire).
    event._event_phase = .at_target;
    const target_et = target.asEventTarget();
    if (self.lookup.get(@intFromPtr(target_et))) |list| {
        try self.dispatchPhase(list, target_et, event, was_handled, null);
        if (event._stop_propagation) {
            return;
        }
    }

    // Phase 3: bubbling (target → root, target excluded), bubbling events only.
    if (event._bubbles) {
        event._event_phase = .bubbling_phase;
        for (path[1..]) |current_target| {
            if (self.lookup.get(@intFromPtr(current_target))) |list| {
                try self.dispatchPhase(list, current_target, event, was_handled, false);
                if (event._stop_propagation) {
                    break;
                }
            }
        }
    }
}
|
|
||||||
|
|
||||||
fn dispatchPhase(self: *EventManager, list: *std.DoublyLinkedList, current_target: *EventTarget, event: *Event, was_handled: *bool, comptime capture_only: ?bool) !void {
|
|
||||||
const page = self.page;
|
|
||||||
const typ = event._type_string;
|
|
||||||
|
|
||||||
// Track dispatch depth for deferred removal
|
|
||||||
self.dispatch_depth += 1;
|
|
||||||
defer {
|
|
||||||
const dispatch_depth = self.dispatch_depth;
|
|
||||||
// Only destroy deferred listeners when we exit the outermost dispatch
|
|
||||||
if (dispatch_depth == 1) {
|
|
||||||
for (self.deferred_removals.items) |removal| {
|
|
||||||
removal.list.remove(&removal.listener.node);
|
|
||||||
self.listener_pool.destroy(removal.listener);
|
|
||||||
}
|
|
||||||
self.deferred_removals.clearRetainingCapacity();
|
|
||||||
} else {
|
|
||||||
self.dispatch_depth = dispatch_depth - 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use the last listener in the list as sentinel - listeners added during dispatch will be after it
|
|
||||||
const last_node = list.last orelse return;
|
|
||||||
const last_listener: *Listener = @alignCast(@fieldParentPtr("node", last_node));
|
|
||||||
|
|
||||||
// Iterate through the list, stopping after we've encountered the last_listener
|
|
||||||
var node = list.first;
|
|
||||||
var is_done = false;
|
|
||||||
while (node) |n| {
|
|
||||||
if (is_done) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
const listener: *Listener = @alignCast(@fieldParentPtr("node", n));
|
|
||||||
is_done = (listener == last_listener);
|
|
||||||
node = n.next;
|
|
||||||
|
|
||||||
// Skip non-matching listeners
|
|
||||||
if (!listener.typ.eql(typ)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (comptime capture_only) |capture| {
|
|
||||||
if (listener.capture != capture) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Skip removed listeners
|
|
||||||
if (listener.removed) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the listener has an aborted signal, remove it and skip
|
|
||||||
if (listener.signal) |signal| {
|
|
||||||
if (signal.getAborted()) {
|
|
||||||
self.removeListener(list, listener);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove "once" listeners BEFORE calling them so nested dispatches don't see them
|
|
||||||
if (listener.once) {
|
|
||||||
self.removeListener(list, listener);
|
|
||||||
}
|
|
||||||
|
|
||||||
was_handled.* = true;
|
|
||||||
event._current_target = current_target;
|
|
||||||
|
|
||||||
// Compute adjusted target for shadow DOM retargeting (only if needed)
|
|
||||||
const original_target = event._target;
|
|
||||||
if (event._needs_retargeting) {
|
|
||||||
event._target = getAdjustedTarget(original_target, current_target);
|
|
||||||
}
|
|
||||||
|
|
||||||
switch (listener.function) {
|
|
||||||
.value => |value| try value.callWithThis(void, current_target, .{event}),
|
|
||||||
.string => |string| {
|
|
||||||
const str = try page.call_arena.dupeZ(u8, string.str());
|
|
||||||
try self.page.js.eval(str, null);
|
|
||||||
},
|
|
||||||
.object => |obj| {
|
|
||||||
if (try obj.getFunction("handleEvent")) |handleEvent| {
|
|
||||||
try handleEvent.callWithThis(void, obj, .{event});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
// Restore original target (only if we changed it)
|
|
||||||
if (event._needs_retargeting) {
|
|
||||||
event._target = original_target;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (event._stop_immediate_propagation) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Non-Node dispatching (XHR, Window without propagation)
|
|
||||||
fn dispatchAll(self: *EventManager, list: *std.DoublyLinkedList, current_target: *EventTarget, event: *Event, was_handled: *bool) !void {
|
|
||||||
return self.dispatchPhase(list, current_target, event, was_handled, null);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn removeListener(self: *EventManager, list: *std.DoublyLinkedList, listener: *Listener) void {
|
|
||||||
// If we're in a dispatch, defer removal to avoid invalidating iteration
|
|
||||||
if (self.dispatch_depth > 0) {
|
|
||||||
listener.removed = true;
|
|
||||||
self.deferred_removals.append(self.arena, .{ .list = list, .listener = listener }) catch unreachable;
|
|
||||||
} else {
|
|
||||||
// Outside dispatch, remove immediately
|
|
||||||
list.remove(&listener.node);
|
|
||||||
self.listener_pool.destroy(listener);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn findListener(list: *const std.DoublyLinkedList, typ: []const u8, callback: Callback, capture: bool) ?*Listener {
|
|
||||||
var node = list.first;
|
|
||||||
while (node) |n| {
|
|
||||||
node = n.next;
|
|
||||||
const listener: *Listener = @alignCast(@fieldParentPtr("node", n));
|
|
||||||
const matches = switch (callback) {
|
|
||||||
.object => |obj| listener.function.eqlObject(obj),
|
|
||||||
.function => |func| listener.function.eqlFunction(func),
|
|
||||||
};
|
|
||||||
if (!matches) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (listener.capture != capture) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (!listener.typ.eqlSlice(typ)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
return listener;
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const Listener = struct {
|
|
||||||
typ: String,
|
|
||||||
once: bool,
|
|
||||||
capture: bool,
|
|
||||||
passive: bool,
|
|
||||||
function: Function,
|
|
||||||
signal: ?*@import("webapi/AbortSignal.zig") = null,
|
|
||||||
node: std.DoublyLinkedList.Node,
|
|
||||||
removed: bool = false,
|
|
||||||
};
|
|
||||||
|
|
||||||
const Function = union(enum) {
|
|
||||||
value: js.Function,
|
|
||||||
string: String,
|
|
||||||
object: js.Object,
|
|
||||||
|
|
||||||
fn eqlFunction(self: Function, func: js.Function) bool {
|
|
||||||
return switch (self) {
|
|
||||||
.value => |v| return v.id == func.id,
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn eqlObject(self: Function, obj: js.Object) bool {
|
|
||||||
return switch (self) {
|
|
||||||
.object => |o| return o.getId() == obj.getId(),
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Computes the adjusted target for shadow DOM event retargeting
|
|
||||||
// Returns the lowest shadow-including ancestor of original_target that is
|
|
||||||
// also an ancestor-or-self of current_target
|
|
||||||
fn getAdjustedTarget(original_target: ?*EventTarget, current_target: *EventTarget) ?*EventTarget {
|
|
||||||
const ShadowRoot = @import("webapi/ShadowRoot.zig");
|
|
||||||
|
|
||||||
const orig_node = switch ((original_target orelse return null)._type) {
|
|
||||||
.node => |n| n,
|
|
||||||
else => return original_target,
|
|
||||||
};
|
|
||||||
const curr_node = switch (current_target._type) {
|
|
||||||
.node => |n| n,
|
|
||||||
else => return original_target,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Walk up from original target, checking if we can reach current target
|
|
||||||
var node: ?*Node = orig_node;
|
|
||||||
while (node) |n| {
|
|
||||||
// Check if current_target is an ancestor of n (or n itself)
|
|
||||||
if (isAncestorOrSelf(curr_node, n)) {
|
|
||||||
return n.asEventTarget();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Cross shadow boundary if needed
|
|
||||||
if (n.is(ShadowRoot)) |shadow| {
|
|
||||||
node = shadow._host.asNode();
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
node = n._parent;
|
|
||||||
}
|
|
||||||
|
|
||||||
return original_target;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if ancestor is an ancestor of (or the same as) node
|
|
||||||
// WITHOUT crossing shadow boundaries (just regular DOM tree)
|
|
||||||
fn isAncestorOrSelf(ancestor: *Node, node: *Node) bool {
|
|
||||||
if (ancestor == node) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
var current: ?*Node = node._parent;
|
|
||||||
while (current) |n| {
|
|
||||||
if (n == ancestor) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
current = n._parent;
|
|
||||||
}
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
@@ -1,488 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
|
|
||||||
const assert = std.debug.assert;
|
|
||||||
const builtin = @import("builtin");
|
|
||||||
const reflect = @import("reflect.zig");
|
|
||||||
const IS_DEBUG = builtin.mode == .Debug;
|
|
||||||
|
|
||||||
const log = @import("../log.zig");
|
|
||||||
const String = @import("../string.zig").String;
|
|
||||||
|
|
||||||
const SlabAllocator = @import("../slab.zig").SlabAllocator;
|
|
||||||
|
|
||||||
const Page = @import("Page.zig");
|
|
||||||
const Node = @import("webapi/Node.zig");
|
|
||||||
const Event = @import("webapi/Event.zig");
|
|
||||||
const UIEvent = @import("webapi/event/UIEvent.zig");
|
|
||||||
const Element = @import("webapi/Element.zig");
|
|
||||||
const Document = @import("webapi/Document.zig");
|
|
||||||
const EventTarget = @import("webapi/EventTarget.zig");
|
|
||||||
const XMLHttpRequestEventTarget = @import("webapi/net/XMLHttpRequestEventTarget.zig");
|
|
||||||
const Blob = @import("webapi/Blob.zig");
|
|
||||||
const AbstractRange = @import("webapi/AbstractRange.zig");
|
|
||||||
|
|
||||||
const Factory = @This();
|
|
||||||
_page: *Page,
|
|
||||||
_slab: SlabAllocator,
|
|
||||||
|
|
||||||
fn PrototypeChain(comptime types: []const type) type {
|
|
||||||
return struct {
|
|
||||||
const Self = @This();
|
|
||||||
memory: []u8,
|
|
||||||
|
|
||||||
fn totalSize() usize {
|
|
||||||
var size: usize = 0;
|
|
||||||
for (types) |T| {
|
|
||||||
size = std.mem.alignForward(usize, size, @alignOf(T));
|
|
||||||
size += @sizeOf(T);
|
|
||||||
}
|
|
||||||
return size;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn maxAlign() std.mem.Alignment {
|
|
||||||
var alignment: std.mem.Alignment = .@"1";
|
|
||||||
|
|
||||||
for (types) |T| {
|
|
||||||
alignment = std.mem.Alignment.max(alignment, std.mem.Alignment.of(T));
|
|
||||||
}
|
|
||||||
|
|
||||||
return alignment;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn getType(comptime index: usize) type {
|
|
||||||
return types[index];
|
|
||||||
}
|
|
||||||
|
|
||||||
fn allocate(allocator: std.mem.Allocator) !Self {
|
|
||||||
const size = comptime Self.totalSize();
|
|
||||||
const alignment = comptime Self.maxAlign();
|
|
||||||
|
|
||||||
const memory = try allocator.alignedAlloc(u8, alignment, size);
|
|
||||||
return .{ .memory = memory };
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get(self: *const Self, comptime index: usize) *getType(index) {
|
|
||||||
var offset: usize = 0;
|
|
||||||
inline for (types, 0..) |T, i| {
|
|
||||||
offset = std.mem.alignForward(usize, offset, @alignOf(T));
|
|
||||||
|
|
||||||
if (i == index) {
|
|
||||||
return @as(*T, @ptrCast(@alignCast(self.memory.ptr + offset)));
|
|
||||||
}
|
|
||||||
offset += @sizeOf(T);
|
|
||||||
}
|
|
||||||
unreachable;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set(self: *const Self, comptime index: usize, value: getType(index)) void {
|
|
||||||
const ptr = self.get(index);
|
|
||||||
ptr.* = value;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn setRoot(self: *const Self, comptime T: type) void {
|
|
||||||
const ptr = self.get(0);
|
|
||||||
ptr.* = .{ ._type = unionInit(T, self.get(1)) };
|
|
||||||
}
|
|
||||||
|
|
||||||
fn setMiddle(self: *const Self, comptime index: usize, comptime T: type) void {
|
|
||||||
assert(index >= 1);
|
|
||||||
assert(index < types.len);
|
|
||||||
|
|
||||||
const ptr = self.get(index);
|
|
||||||
ptr.* = .{ ._proto = self.get(index - 1), ._type = unionInit(T, self.get(index + 1)) };
|
|
||||||
}
|
|
||||||
|
|
||||||
fn setMiddleWithValue(self: *const Self, comptime index: usize, comptime T: type, value: anytype) void {
|
|
||||||
assert(index >= 1);
|
|
||||||
|
|
||||||
const ptr = self.get(index);
|
|
||||||
ptr.* = .{ ._proto = self.get(index - 1), ._type = unionInit(T, value) };
|
|
||||||
}
|
|
||||||
|
|
||||||
fn setLeaf(self: *const Self, comptime index: usize, value: anytype) void {
|
|
||||||
assert(index >= 1);
|
|
||||||
|
|
||||||
const ptr = self.get(index);
|
|
||||||
ptr.* = value;
|
|
||||||
ptr._proto = self.get(index - 1);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn AutoPrototypeChain(comptime types: []const type) type {
|
|
||||||
return struct {
|
|
||||||
fn create(allocator: std.mem.Allocator, leaf_value: anytype) !*@TypeOf(leaf_value) {
|
|
||||||
const chain = try PrototypeChain(types).allocate(allocator);
|
|
||||||
|
|
||||||
const RootType = types[0];
|
|
||||||
chain.setRoot(RootType.Type);
|
|
||||||
|
|
||||||
inline for (1..types.len - 1) |i| {
|
|
||||||
const MiddleType = types[i];
|
|
||||||
chain.setMiddle(i, MiddleType.Type);
|
|
||||||
}
|
|
||||||
|
|
||||||
chain.setLeaf(types.len - 1, leaf_value);
|
|
||||||
return chain.get(types.len - 1);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn init(page: *Page) Factory {
|
|
||||||
return .{
|
|
||||||
._page = page,
|
|
||||||
._slab = SlabAllocator.init(page.arena, 128),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// this is a root object
|
|
||||||
pub fn eventTarget(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
const chain = try PrototypeChain(
|
|
||||||
&.{ EventTarget, @TypeOf(child) },
|
|
||||||
).allocate(allocator);
|
|
||||||
|
|
||||||
const event_ptr = chain.get(0);
|
|
||||||
event_ptr.* = .{
|
|
||||||
._type = unionInit(EventTarget.Type, chain.get(1)),
|
|
||||||
};
|
|
||||||
chain.setLeaf(1, child);
|
|
||||||
|
|
||||||
return chain.get(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn eventInit(typ: []const u8, value: anytype, page: *Page) !Event {
|
|
||||||
// Round to 2ms for privacy (browsers do this)
|
|
||||||
const raw_timestamp = @import("../datetime.zig").milliTimestamp(.monotonic);
|
|
||||||
const time_stamp = (raw_timestamp / 2) * 2;
|
|
||||||
|
|
||||||
return .{
|
|
||||||
._type = unionInit(Event.Type, value),
|
|
||||||
._type_string = try String.init(page.arena, typ, .{}),
|
|
||||||
._time_stamp = time_stamp,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// this is a root object
|
|
||||||
pub fn event(self: *Factory, typ: []const u8, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
|
|
||||||
const chain = try PrototypeChain(
|
|
||||||
&.{ Event, @TypeOf(child) },
|
|
||||||
).allocate(allocator);
|
|
||||||
|
|
||||||
// Special case: Event has a _type_string field, so we need manual setup
|
|
||||||
const event_ptr = chain.get(0);
|
|
||||||
event_ptr.* = try eventInit(typ, chain.get(1), self._page);
|
|
||||||
chain.setLeaf(1, child);
|
|
||||||
|
|
||||||
return chain.get(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn uiEvent(self: *Factory, typ: []const u8, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
|
|
||||||
const chain = try PrototypeChain(
|
|
||||||
&.{ Event, UIEvent, @TypeOf(child) },
|
|
||||||
).allocate(allocator);
|
|
||||||
|
|
||||||
// Special case: Event has a _type_string field, so we need manual setup
|
|
||||||
const event_ptr = chain.get(0);
|
|
||||||
event_ptr.* = try eventInit(typ, chain.get(1), self._page);
|
|
||||||
chain.setMiddle(1, UIEvent.Type);
|
|
||||||
chain.setLeaf(2, child);
|
|
||||||
|
|
||||||
return chain.get(2);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn blob(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
|
|
||||||
// Special case: Blob has slice and mime fields, so we need manual setup
|
|
||||||
const chain = try PrototypeChain(
|
|
||||||
&.{ Blob, @TypeOf(child) },
|
|
||||||
).allocate(allocator);
|
|
||||||
|
|
||||||
const blob_ptr = chain.get(0);
|
|
||||||
blob_ptr.* = .{
|
|
||||||
._type = unionInit(Blob.Type, chain.get(1)),
|
|
||||||
._slice = "",
|
|
||||||
._mime = "",
|
|
||||||
};
|
|
||||||
chain.setLeaf(1, child);
|
|
||||||
|
|
||||||
return chain.get(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn abstractRange(self: *Factory, child: anytype, page: *Page) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
const chain = try PrototypeChain(&.{ AbstractRange, @TypeOf(child) }).allocate(allocator);
|
|
||||||
|
|
||||||
const doc = page.document.asNode();
|
|
||||||
chain.set(0, AbstractRange{
|
|
||||||
._type = unionInit(AbstractRange.Type, chain.get(1)),
|
|
||||||
._end_offset = 0,
|
|
||||||
._start_offset = 0,
|
|
||||||
._end_container = doc,
|
|
||||||
._start_container = doc,
|
|
||||||
});
|
|
||||||
chain.setLeaf(1, child);
|
|
||||||
return chain.get(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn node(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
return try AutoPrototypeChain(
|
|
||||||
&.{ EventTarget, Node, @TypeOf(child) },
|
|
||||||
).create(allocator, child);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn document(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
return try AutoPrototypeChain(
|
|
||||||
&.{ EventTarget, Node, Document, @TypeOf(child) },
|
|
||||||
).create(allocator, child);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn documentFragment(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
return try AutoPrototypeChain(
|
|
||||||
&.{ EventTarget, Node, Node.DocumentFragment, @TypeOf(child) },
|
|
||||||
).create(allocator, child);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn element(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
return try AutoPrototypeChain(
|
|
||||||
&.{ EventTarget, Node, Element, @TypeOf(child) },
|
|
||||||
).create(allocator, child);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn htmlElement(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
return try AutoPrototypeChain(
|
|
||||||
&.{ EventTarget, Node, Element, Element.Html, @TypeOf(child) },
|
|
||||||
).create(allocator, child);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn htmlMediaElement(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
return try AutoPrototypeChain(
|
|
||||||
&.{ EventTarget, Node, Element, Element.Html, Element.Html.Media, @TypeOf(child) },
|
|
||||||
).create(allocator, child);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn svgElement(self: *Factory, tag_name: []const u8, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
const ChildT = @TypeOf(child);
|
|
||||||
|
|
||||||
if (ChildT == Element.Svg) {
|
|
||||||
return self.element(child);
|
|
||||||
}
|
|
||||||
|
|
||||||
const chain = try PrototypeChain(
|
|
||||||
&.{ EventTarget, Node, Element, Element.Svg, ChildT },
|
|
||||||
).allocate(allocator);
|
|
||||||
|
|
||||||
chain.setRoot(EventTarget.Type);
|
|
||||||
chain.setMiddle(1, Node.Type);
|
|
||||||
chain.setMiddle(2, Element.Type);
|
|
||||||
|
|
||||||
// will never allocate, can't fail
|
|
||||||
const tag_name_str = String.init(self._page.arena, tag_name, .{}) catch unreachable;
|
|
||||||
|
|
||||||
// Manually set Element.Svg with the tag_name
|
|
||||||
chain.set(3, .{
|
|
||||||
._proto = chain.get(2),
|
|
||||||
._tag_name = tag_name_str,
|
|
||||||
._type = unionInit(Element.Svg.Type, chain.get(4)),
|
|
||||||
});
|
|
||||||
|
|
||||||
chain.setLeaf(4, child);
|
|
||||||
return chain.get(4);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn xhrEventTarget(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
|
|
||||||
return try AutoPrototypeChain(
|
|
||||||
&.{ EventTarget, XMLHttpRequestEventTarget, @TypeOf(child) },
|
|
||||||
).create(allocator, child);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn textTrackCue(self: *Factory, child: anytype) !*@TypeOf(child) {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
const TextTrackCue = @import("webapi/media/TextTrackCue.zig");
|
|
||||||
|
|
||||||
return try AutoPrototypeChain(
|
|
||||||
&.{ EventTarget, TextTrackCue, @TypeOf(child) },
|
|
||||||
).create(allocator, child);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn hasChainRoot(comptime T: type) bool {
|
|
||||||
// Check if this is a root
|
|
||||||
if (@hasDecl(T, "_prototype_root")) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If no _proto field, we're at the top but not a recognized root
|
|
||||||
if (!@hasField(T, "_proto")) return false;
|
|
||||||
|
|
||||||
// Get the _proto field's type and recurse
|
|
||||||
const fields = @typeInfo(T).@"struct".fields;
|
|
||||||
inline for (fields) |field| {
|
|
||||||
if (std.mem.eql(u8, field.name, "_proto")) {
|
|
||||||
const ProtoType = reflect.Struct(field.type);
|
|
||||||
return hasChainRoot(ProtoType);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn isChainType(comptime T: type) bool {
|
|
||||||
if (@hasField(T, "_proto")) return false;
|
|
||||||
return comptime hasChainRoot(T);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn destroy(self: *Factory, value: anytype) void {
|
|
||||||
const S = reflect.Struct(@TypeOf(value));
|
|
||||||
|
|
||||||
if (comptime IS_DEBUG) {
|
|
||||||
// We should always destroy from the leaf down.
|
|
||||||
if (@hasDecl(S, "_prototype_root")) {
|
|
||||||
// A Event{._type == .generic} (or any other similar types)
|
|
||||||
// _should_ be destoyed directly. The _type = .generic is a pseudo
|
|
||||||
// child
|
|
||||||
if (S != Event or value._type != .generic) {
|
|
||||||
log.fatal(.bug, "factory.destroy.event", .{ .type = @typeName(S) });
|
|
||||||
unreachable;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (comptime isChainType(S)) {
|
|
||||||
self.destroyChain(value, true, 0, std.mem.Alignment.@"1");
|
|
||||||
} else {
|
|
||||||
self.destroyStandalone(value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn destroyStandalone(self: *Factory, value: anytype) void {
|
|
||||||
const S = reflect.Struct(@TypeOf(value));
|
|
||||||
assert(!@hasDecl(S, "_prototype_root"));
|
|
||||||
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
|
|
||||||
if (@hasDecl(S, "deinit")) {
|
|
||||||
// And it has a deinit, we'll call it
|
|
||||||
switch (@typeInfo(@TypeOf(S.deinit)).@"fn".params.len) {
|
|
||||||
1 => value.deinit(),
|
|
||||||
2 => value.deinit(self._page),
|
|
||||||
else => @compileLog(@typeName(S) ++ " has an invalid deinit function"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
allocator.destroy(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn destroyChain(
|
|
||||||
self: *Factory,
|
|
||||||
value: anytype,
|
|
||||||
comptime first: bool,
|
|
||||||
old_size: usize,
|
|
||||||
old_align: std.mem.Alignment,
|
|
||||||
) void {
|
|
||||||
const S = reflect.Struct(@TypeOf(value));
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
|
|
||||||
// aligns the old size to the alignment of this element
|
|
||||||
const current_size = std.mem.alignForward(usize, old_size, @alignOf(S));
|
|
||||||
const alignment = std.mem.Alignment.fromByteUnits(@alignOf(S));
|
|
||||||
|
|
||||||
const new_align = std.mem.Alignment.max(old_align, alignment);
|
|
||||||
const new_size = current_size + @sizeOf(S);
|
|
||||||
|
|
||||||
// This is initially called from a deinit. We don't want to call that
|
|
||||||
// same deinit. So when this is the first time destroyChain is called
|
|
||||||
// we don't call deinit (because we're in that deinit)
|
|
||||||
if (!comptime first) {
|
|
||||||
// But if it isn't the first time
|
|
||||||
if (@hasDecl(S, "deinit")) {
|
|
||||||
// And it has a deinit, we'll call it
|
|
||||||
switch (@typeInfo(@TypeOf(S.deinit)).@"fn".params.len) {
|
|
||||||
1 => value.deinit(),
|
|
||||||
2 => value.deinit(self._page),
|
|
||||||
else => @compileLog(@typeName(S) ++ " has an invalid deinit function"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (@hasField(S, "_proto")) {
|
|
||||||
self.destroyChain(value._proto, false, new_size, new_align);
|
|
||||||
} else if (@hasDecl(S, "JsApi")) {
|
|
||||||
// Doesn't have a _proto, but has a JsApi.
|
|
||||||
if (self._page.js.removeTaggedMapping(@intFromPtr(value))) |tagged| {
|
|
||||||
allocator.destroy(tagged);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// no proto so this is the head of the chain.
|
|
||||||
// we use this as the ptr to the start of the chain.
|
|
||||||
// and we have summed up the length.
|
|
||||||
assert(@hasDecl(S, "_prototype_root"));
|
|
||||||
|
|
||||||
const memory_ptr: [*]const u8 = @ptrCast(value);
|
|
||||||
const len = std.mem.alignForward(usize, new_size, new_align.toByteUnits());
|
|
||||||
allocator.free(memory_ptr[0..len]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn createT(self: *Factory, comptime T: type) !*T {
|
|
||||||
const allocator = self._slab.allocator();
|
|
||||||
return try allocator.create(T);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn create(self: *Factory, value: anytype) !*@TypeOf(value) {
|
|
||||||
const ptr = try self.createT(@TypeOf(value));
|
|
||||||
ptr.* = value;
|
|
||||||
return ptr;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn unionInit(comptime T: type, value: anytype) T {
|
|
||||||
const V = @TypeOf(value);
|
|
||||||
const field_name = comptime unionFieldName(T, V);
|
|
||||||
return @unionInit(T, field_name, value);
|
|
||||||
}
|
|
||||||
|
|
||||||
// There can be friction between comptime and runtime. Comptime has to
|
|
||||||
// account for all possible types, even if some runtime flow makes certain
|
|
||||||
// cases impossible. At runtime, we always call `unionFieldName` with the
|
|
||||||
// correct struct or pointer type. But at comptime time, `unionFieldName`
|
|
||||||
// is called with both variants (S and *S). So we use reflect.Struct().
|
|
||||||
// This only works because we never have a union with a field S and another
|
|
||||||
// field *S.
|
|
||||||
fn unionFieldName(comptime T: type, comptime V: type) []const u8 {
|
|
||||||
inline for (@typeInfo(T).@"union".fields) |field| {
|
|
||||||
if (reflect.Struct(field.type) == reflect.Struct(V)) {
|
|
||||||
return field.name;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@compileError(@typeName(V) ++ " is not a valid type for " ++ @typeName(T) ++ ".type");
|
|
||||||
}
|
|
||||||
@@ -1,530 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
|
|
||||||
|
|
||||||
const Mime = @This();
|
|
||||||
content_type: ContentType,
|
|
||||||
params: []const u8 = "",
|
|
||||||
// IANA defines max. charset value length as 40.
|
|
||||||
// We keep 41 for null-termination since HTML parser expects in this format.
|
|
||||||
charset: [41]u8 = default_charset,
|
|
||||||
charset_len: usize = 5,
|
|
||||||
|
|
||||||
/// String "UTF-8" continued by null characters.
|
|
||||||
pub const default_charset = .{ 'U', 'T', 'F', '-', '8' } ++ .{0} ** 36;
|
|
||||||
|
|
||||||
/// Mime with unknown Content-Type, empty params and empty charset.
|
|
||||||
pub const unknown = Mime{ .content_type = .{ .unknown = {} } };
|
|
||||||
|
|
||||||
pub const ContentTypeEnum = enum {
|
|
||||||
text_xml,
|
|
||||||
text_html,
|
|
||||||
text_javascript,
|
|
||||||
text_plain,
|
|
||||||
text_css,
|
|
||||||
application_json,
|
|
||||||
unknown,
|
|
||||||
other,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const ContentType = union(ContentTypeEnum) {
|
|
||||||
text_xml: void,
|
|
||||||
text_html: void,
|
|
||||||
text_javascript: void,
|
|
||||||
text_plain: void,
|
|
||||||
text_css: void,
|
|
||||||
application_json: void,
|
|
||||||
unknown: void,
|
|
||||||
other: struct { type: []const u8, sub_type: []const u8 },
|
|
||||||
};
|
|
||||||
|
|
||||||
pub fn contentTypeString(mime: *const Mime) []const u8 {
|
|
||||||
return switch (mime.content_type) {
|
|
||||||
.text_xml => "text/xml",
|
|
||||||
.text_html => "text/html",
|
|
||||||
.text_javascript => "application/javascript",
|
|
||||||
.text_plain => "text/plain",
|
|
||||||
.text_css => "text/css",
|
|
||||||
.application_json => "application/json",
|
|
||||||
else => "",
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the null-terminated charset value.
|
|
||||||
pub fn charsetStringZ(mime: *const Mime) [:0]const u8 {
|
|
||||||
return mime.charset[0..mime.charset_len :0];
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn charsetString(mime: *const Mime) []const u8 {
|
|
||||||
return mime.charset[0..mime.charset_len];
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Removes quotes of value if quotes are given.
|
|
||||||
///
|
|
||||||
/// Currently we don't validate the charset.
|
|
||||||
/// See section 2.3 Naming Requirements:
|
|
||||||
/// https://datatracker.ietf.org/doc/rfc2978/
|
|
||||||
fn parseCharset(value: []const u8) error{ CharsetTooBig, Invalid }![]const u8 {
|
|
||||||
// Cannot be larger than 40.
|
|
||||||
// https://datatracker.ietf.org/doc/rfc2978/
|
|
||||||
if (value.len > 40) return error.CharsetTooBig;
|
|
||||||
|
|
||||||
// If the first char is a quote, look for a pair.
|
|
||||||
if (value[0] == '"') {
|
|
||||||
if (value.len < 3 or value[value.len - 1] != '"') {
|
|
||||||
return error.Invalid;
|
|
||||||
}
|
|
||||||
|
|
||||||
return value[1 .. value.len - 1];
|
|
||||||
}
|
|
||||||
|
|
||||||
// No quotes.
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parse(input: []u8) !Mime {
|
|
||||||
if (input.len > 255) {
|
|
||||||
return error.TooBig;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Zig's trim API is broken. The return type is always `[]const u8`,
|
|
||||||
// even if the input type is `[]u8`. @constCast is safe here.
|
|
||||||
var normalized = @constCast(std.mem.trim(u8, input, &std.ascii.whitespace));
|
|
||||||
_ = std.ascii.lowerString(normalized, normalized);
|
|
||||||
|
|
||||||
const content_type, const type_len = try parseContentType(normalized);
|
|
||||||
if (type_len >= normalized.len) {
|
|
||||||
return .{ .content_type = content_type };
|
|
||||||
}
|
|
||||||
|
|
||||||
const params = trimLeft(normalized[type_len..]);
|
|
||||||
|
|
||||||
var charset: [41]u8 = undefined;
|
|
||||||
var charset_len: usize = undefined;
|
|
||||||
|
|
||||||
var it = std.mem.splitScalar(u8, params, ';');
|
|
||||||
while (it.next()) |attr| {
|
|
||||||
const i = std.mem.indexOfScalarPos(u8, attr, 0, '=') orelse return error.Invalid;
|
|
||||||
const name = trimLeft(attr[0..i]);
|
|
||||||
|
|
||||||
const value = trimRight(attr[i + 1 ..]);
|
|
||||||
if (value.len == 0) {
|
|
||||||
return error.Invalid;
|
|
||||||
}
|
|
||||||
|
|
||||||
const attribute_name = std.meta.stringToEnum(enum {
|
|
||||||
charset,
|
|
||||||
}, name) orelse continue;
|
|
||||||
|
|
||||||
switch (attribute_name) {
|
|
||||||
.charset => {
|
|
||||||
if (value.len == 0) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
const attribute_value = try parseCharset(value);
|
|
||||||
@memcpy(charset[0..attribute_value.len], attribute_value);
|
|
||||||
// Null-terminate right after attribute value.
|
|
||||||
charset[attribute_value.len] = 0;
|
|
||||||
charset_len = attribute_value.len;
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return .{
|
|
||||||
.params = params,
|
|
||||||
.charset = charset,
|
|
||||||
.charset_len = charset_len,
|
|
||||||
.content_type = content_type,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn sniff(body: []const u8) ?Mime {
|
|
||||||
// 0x0C is form feed
|
|
||||||
const content = std.mem.trimLeft(u8, body, &.{ ' ', '\t', '\n', '\r', 0x0C });
|
|
||||||
if (content.len == 0) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (content[0] != '<') {
|
|
||||||
if (std.mem.startsWith(u8, content, &.{ 0xEF, 0xBB, 0xBF })) {
|
|
||||||
// UTF-8 BOM
|
|
||||||
return .{ .content_type = .{ .text_plain = {} } };
|
|
||||||
}
|
|
||||||
if (std.mem.startsWith(u8, content, &.{ 0xFE, 0xFF })) {
|
|
||||||
// UTF-16 big-endian BOM
|
|
||||||
return .{ .content_type = .{ .text_plain = {} } };
|
|
||||||
}
|
|
||||||
if (std.mem.startsWith(u8, content, &.{ 0xFF, 0xFE })) {
|
|
||||||
// UTF-16 little-endian BOM
|
|
||||||
return .{ .content_type = .{ .text_plain = {} } };
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// The longest prefix we have is "<!DOCTYPE HTML ", 15 bytes. If we're
|
|
||||||
// here, we already know content[0] == '<', so we can skip that. So 14
|
|
||||||
// bytes.
|
|
||||||
|
|
||||||
// +1 because we don't need the leading '<'
|
|
||||||
var buf: [14]u8 = undefined;
|
|
||||||
|
|
||||||
const stripped = content[1..];
|
|
||||||
const prefix_len = @min(stripped.len, buf.len);
|
|
||||||
const prefix = std.ascii.lowerString(&buf, stripped[0..prefix_len]);
|
|
||||||
|
|
||||||
// we already know it starts with a <
|
|
||||||
const known_prefixes = [_]struct { []const u8, ContentType }{
|
|
||||||
.{ "!doctype html", .{ .text_html = {} } },
|
|
||||||
.{ "html", .{ .text_html = {} } },
|
|
||||||
.{ "script", .{ .text_html = {} } },
|
|
||||||
.{ "iframe", .{ .text_html = {} } },
|
|
||||||
.{ "h1", .{ .text_html = {} } },
|
|
||||||
.{ "div", .{ .text_html = {} } },
|
|
||||||
.{ "font", .{ .text_html = {} } },
|
|
||||||
.{ "table", .{ .text_html = {} } },
|
|
||||||
.{ "a", .{ .text_html = {} } },
|
|
||||||
.{ "style", .{ .text_html = {} } },
|
|
||||||
.{ "title", .{ .text_html = {} } },
|
|
||||||
.{ "b", .{ .text_html = {} } },
|
|
||||||
.{ "body", .{ .text_html = {} } },
|
|
||||||
.{ "br", .{ .text_html = {} } },
|
|
||||||
.{ "p", .{ .text_html = {} } },
|
|
||||||
.{ "!--", .{ .text_html = {} } },
|
|
||||||
.{ "xml", .{ .text_xml = {} } },
|
|
||||||
};
|
|
||||||
inline for (known_prefixes) |kp| {
|
|
||||||
const known_prefix = kp.@"0";
|
|
||||||
if (std.mem.startsWith(u8, prefix, known_prefix) and prefix.len > known_prefix.len) {
|
|
||||||
const next = prefix[known_prefix.len];
|
|
||||||
// a "tag-terminating-byte"
|
|
||||||
if (next == ' ' or next == '>') {
|
|
||||||
return .{ .content_type = kp.@"1" };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn isHTML(self: *const Mime) bool {
|
|
||||||
return self.content_type == .text_html;
|
|
||||||
}
|
|
||||||
|
|
||||||
// we expect value to be lowercase
|
|
||||||
fn parseContentType(value: []const u8) !struct { ContentType, usize } {
|
|
||||||
const end = std.mem.indexOfScalarPos(u8, value, 0, ';') orelse value.len;
|
|
||||||
const type_name = trimRight(value[0..end]);
|
|
||||||
const attribute_start = end + 1;
|
|
||||||
|
|
||||||
if (std.meta.stringToEnum(enum {
|
|
||||||
@"text/xml",
|
|
||||||
@"text/html",
|
|
||||||
@"text/css",
|
|
||||||
@"text/plain",
|
|
||||||
|
|
||||||
@"text/javascript",
|
|
||||||
@"application/javascript",
|
|
||||||
@"application/x-javascript",
|
|
||||||
|
|
||||||
@"application/json",
|
|
||||||
}, type_name)) |known_type| {
|
|
||||||
const ct: ContentType = switch (known_type) {
|
|
||||||
.@"text/xml" => .{ .text_xml = {} },
|
|
||||||
.@"text/html" => .{ .text_html = {} },
|
|
||||||
.@"text/javascript", .@"application/javascript", .@"application/x-javascript" => .{ .text_javascript = {} },
|
|
||||||
.@"text/plain" => .{ .text_plain = {} },
|
|
||||||
.@"text/css" => .{ .text_css = {} },
|
|
||||||
.@"application/json" => .{ .application_json = {} },
|
|
||||||
};
|
|
||||||
return .{ ct, attribute_start };
|
|
||||||
}
|
|
||||||
|
|
||||||
const separator = std.mem.indexOfScalarPos(u8, type_name, 0, '/') orelse return error.Invalid;
|
|
||||||
|
|
||||||
const main_type = value[0..separator];
|
|
||||||
const sub_type = trimRight(value[separator + 1 .. end]);
|
|
||||||
|
|
||||||
if (main_type.len == 0 or validType(main_type) == false) {
|
|
||||||
return error.Invalid;
|
|
||||||
}
|
|
||||||
if (sub_type.len == 0 or validType(sub_type) == false) {
|
|
||||||
return error.Invalid;
|
|
||||||
}
|
|
||||||
|
|
||||||
return .{ .{ .other = .{
|
|
||||||
.type = main_type,
|
|
||||||
.sub_type = sub_type,
|
|
||||||
} }, attribute_start };
|
|
||||||
}
|
|
||||||
|
|
||||||
const VALID_CODEPOINTS = blk: {
|
|
||||||
var v: [256]bool = undefined;
|
|
||||||
for (0..256) |i| {
|
|
||||||
v[i] = std.ascii.isAlphanumeric(i);
|
|
||||||
}
|
|
||||||
for ("!#$%&\\*+-.^'_`|~") |b| {
|
|
||||||
v[b] = true;
|
|
||||||
}
|
|
||||||
break :blk v;
|
|
||||||
};
|
|
||||||
|
|
||||||
fn validType(value: []const u8) bool {
|
|
||||||
for (value) |b| {
|
|
||||||
if (VALID_CODEPOINTS[b] == false) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn trimLeft(s: []const u8) []const u8 {
|
|
||||||
return std.mem.trimLeft(u8, s, &std.ascii.whitespace);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn trimRight(s: []const u8) []const u8 {
|
|
||||||
return std.mem.trimRight(u8, s, &std.ascii.whitespace);
|
|
||||||
}
|
|
||||||
|
|
||||||
const testing = @import("../testing.zig");
|
|
||||||
test "Mime: invalid" {
|
|
||||||
defer testing.reset();
|
|
||||||
|
|
||||||
const invalids = [_][]const u8{
|
|
||||||
"",
|
|
||||||
"text",
|
|
||||||
"text /html",
|
|
||||||
"text/ html",
|
|
||||||
"text / html",
|
|
||||||
"text/html other",
|
|
||||||
"text/html; x",
|
|
||||||
"text/html; x=",
|
|
||||||
"text/html; x= ",
|
|
||||||
"text/html; = ",
|
|
||||||
"text/html;=",
|
|
||||||
"text/html; charset=\"\"",
|
|
||||||
"text/html; charset=\"",
|
|
||||||
"text/html; charset=\"\\",
|
|
||||||
};
|
|
||||||
|
|
||||||
for (invalids) |invalid| {
|
|
||||||
const mutable_input = try testing.arena_allocator.dupe(u8, invalid);
|
|
||||||
try testing.expectError(error.Invalid, Mime.parse(mutable_input));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
test "Mime: parse common" {
|
|
||||||
defer testing.reset();
|
|
||||||
|
|
||||||
try expect(.{ .content_type = .{ .text_xml = {} } }, "text/xml");
|
|
||||||
try expect(.{ .content_type = .{ .text_html = {} } }, "text/html");
|
|
||||||
try expect(.{ .content_type = .{ .text_plain = {} } }, "text/plain");
|
|
||||||
|
|
||||||
try expect(.{ .content_type = .{ .text_xml = {} } }, "text/xml;");
|
|
||||||
try expect(.{ .content_type = .{ .text_html = {} } }, "text/html;");
|
|
||||||
try expect(.{ .content_type = .{ .text_plain = {} } }, "text/plain;");
|
|
||||||
|
|
||||||
try expect(.{ .content_type = .{ .text_xml = {} } }, " \ttext/xml");
|
|
||||||
try expect(.{ .content_type = .{ .text_html = {} } }, "text/html ");
|
|
||||||
try expect(.{ .content_type = .{ .text_plain = {} } }, "text/plain \t\t");
|
|
||||||
|
|
||||||
try expect(.{ .content_type = .{ .text_xml = {} } }, "TEXT/xml");
|
|
||||||
try expect(.{ .content_type = .{ .text_html = {} } }, "text/Html");
|
|
||||||
try expect(.{ .content_type = .{ .text_plain = {} } }, "TEXT/PLAIN");
|
|
||||||
|
|
||||||
try expect(.{ .content_type = .{ .text_xml = {} } }, " TeXT/xml");
|
|
||||||
try expect(.{ .content_type = .{ .text_html = {} } }, "teXt/HtML ;");
|
|
||||||
try expect(.{ .content_type = .{ .text_plain = {} } }, "tExT/PlAiN;");
|
|
||||||
|
|
||||||
try expect(.{ .content_type = .{ .text_javascript = {} } }, "text/javascript");
|
|
||||||
try expect(.{ .content_type = .{ .text_javascript = {} } }, "Application/JavaScript");
|
|
||||||
try expect(.{ .content_type = .{ .text_javascript = {} } }, "application/x-javascript");
|
|
||||||
|
|
||||||
try expect(.{ .content_type = .{ .application_json = {} } }, "application/json");
|
|
||||||
try expect(.{ .content_type = .{ .text_css = {} } }, "text/css");
|
|
||||||
}
|
|
||||||
|
|
||||||
test "Mime: parse uncommon" {
|
|
||||||
defer testing.reset();
|
|
||||||
|
|
||||||
const text_csv = Expectation{
|
|
||||||
.content_type = .{ .other = .{ .type = "text", .sub_type = "csv" } },
|
|
||||||
};
|
|
||||||
try expect(text_csv, "text/csv");
|
|
||||||
try expect(text_csv, "text/csv;");
|
|
||||||
try expect(text_csv, " text/csv\t ");
|
|
||||||
try expect(text_csv, " text/csv\t ;");
|
|
||||||
|
|
||||||
try expect(
|
|
||||||
.{ .content_type = .{ .other = .{ .type = "text", .sub_type = "csv" } } },
|
|
||||||
"Text/CSV",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "Mime: parse charset" {
|
|
||||||
defer testing.reset();
|
|
||||||
|
|
||||||
try expect(.{
|
|
||||||
.content_type = .{ .text_xml = {} },
|
|
||||||
.charset = "utf-8",
|
|
||||||
.params = "charset=utf-8",
|
|
||||||
}, "text/xml; charset=utf-8");
|
|
||||||
|
|
||||||
try expect(.{
|
|
||||||
.content_type = .{ .text_xml = {} },
|
|
||||||
.charset = "utf-8",
|
|
||||||
.params = "charset=\"utf-8\"",
|
|
||||||
}, "text/xml;charset=\"UTF-8\"");
|
|
||||||
|
|
||||||
try expect(.{
|
|
||||||
.content_type = .{ .text_html = {} },
|
|
||||||
.charset = "iso-8859-1",
|
|
||||||
.params = "charset=\"iso-8859-1\"",
|
|
||||||
}, "text/html; charset=\"iso-8859-1\"");
|
|
||||||
|
|
||||||
try expect(.{
|
|
||||||
.content_type = .{ .text_html = {} },
|
|
||||||
.charset = "iso-8859-1",
|
|
||||||
.params = "charset=\"iso-8859-1\"",
|
|
||||||
}, "text/html; charset=\"ISO-8859-1\"");
|
|
||||||
|
|
||||||
try expect(.{
|
|
||||||
.content_type = .{ .text_xml = {} },
|
|
||||||
.charset = "custom-non-standard-charset-value",
|
|
||||||
.params = "charset=\"custom-non-standard-charset-value\"",
|
|
||||||
}, "text/xml;charset=\"custom-non-standard-charset-value\"");
|
|
||||||
}
|
|
||||||
|
|
||||||
test "Mime: isHTML" {
|
|
||||||
defer testing.reset();
|
|
||||||
|
|
||||||
const assert = struct {
|
|
||||||
fn assert(expected: bool, input: []const u8) !void {
|
|
||||||
const mutable_input = try testing.arena_allocator.dupe(u8, input);
|
|
||||||
var mime = try Mime.parse(mutable_input);
|
|
||||||
try testing.expectEqual(expected, mime.isHTML());
|
|
||||||
}
|
|
||||||
}.assert;
|
|
||||||
try assert(true, "text/html");
|
|
||||||
try assert(true, "text/html;");
|
|
||||||
try assert(true, "text/html; charset=utf-8");
|
|
||||||
try assert(false, "text/htm"); // htm not html
|
|
||||||
try assert(false, "text/plain");
|
|
||||||
try assert(false, "over/9000");
|
|
||||||
}
|
|
||||||
|
|
||||||
test "Mime: sniff" {
|
|
||||||
try testing.expectEqual(null, Mime.sniff(""));
|
|
||||||
try testing.expectEqual(null, Mime.sniff("<htm"));
|
|
||||||
try testing.expectEqual(null, Mime.sniff("<html!"));
|
|
||||||
try testing.expectEqual(null, Mime.sniff("<a_"));
|
|
||||||
try testing.expectEqual(null, Mime.sniff("<!doctype html"));
|
|
||||||
try testing.expectEqual(null, Mime.sniff("<!doctype html>"));
|
|
||||||
try testing.expectEqual(null, Mime.sniff("\n <!doctype html>"));
|
|
||||||
try testing.expectEqual(null, Mime.sniff("\n \t <font/>"));
|
|
||||||
|
|
||||||
const expectHTML = struct {
|
|
||||||
fn expect(input: []const u8) !void {
|
|
||||||
try testing.expectEqual(.text_html, std.meta.activeTag(Mime.sniff(input).?.content_type));
|
|
||||||
}
|
|
||||||
}.expect;
|
|
||||||
|
|
||||||
try expectHTML("<!doctype html ");
|
|
||||||
try expectHTML("\n \t <!DOCTYPE HTML ");
|
|
||||||
|
|
||||||
try expectHTML("<html ");
|
|
||||||
try expectHTML("\n \t <HtmL> even more stufff");
|
|
||||||
|
|
||||||
try expectHTML("<script>");
|
|
||||||
try expectHTML("\n \t <SCRIpt >alert(document.cookies)</script>");
|
|
||||||
|
|
||||||
try expectHTML("<iframe>");
|
|
||||||
try expectHTML(" \t <ifRAME >");
|
|
||||||
|
|
||||||
try expectHTML("<h1>");
|
|
||||||
try expectHTML(" <H1>");
|
|
||||||
|
|
||||||
try expectHTML("<div>");
|
|
||||||
try expectHTML("\n\r\r <DiV>");
|
|
||||||
|
|
||||||
try expectHTML("<font>");
|
|
||||||
try expectHTML(" <fonT>");
|
|
||||||
|
|
||||||
try expectHTML("<table>");
|
|
||||||
try expectHTML("\t\t<TAblE>");
|
|
||||||
|
|
||||||
try expectHTML("<a>");
|
|
||||||
try expectHTML("\n\n<A>");
|
|
||||||
|
|
||||||
try expectHTML("<style>");
|
|
||||||
try expectHTML(" \n\t <STyLE>");
|
|
||||||
|
|
||||||
try expectHTML("<title>");
|
|
||||||
try expectHTML(" \n\t <TITLE>");
|
|
||||||
|
|
||||||
try expectHTML("<b>");
|
|
||||||
try expectHTML(" \n\t <B>");
|
|
||||||
|
|
||||||
try expectHTML("<body>");
|
|
||||||
try expectHTML(" \n\t <BODY>");
|
|
||||||
|
|
||||||
try expectHTML("<br>");
|
|
||||||
try expectHTML(" \n\t <BR>");
|
|
||||||
|
|
||||||
try expectHTML("<p>");
|
|
||||||
try expectHTML(" \n\t <P>");
|
|
||||||
|
|
||||||
try expectHTML("<!-->");
|
|
||||||
try expectHTML(" \n\t <!-->");
|
|
||||||
}
|
|
||||||
|
|
||||||
const Expectation = struct {
|
|
||||||
content_type: Mime.ContentType,
|
|
||||||
params: []const u8 = "",
|
|
||||||
charset: ?[]const u8 = null,
|
|
||||||
};
|
|
||||||
|
|
||||||
fn expect(expected: Expectation, input: []const u8) !void {
|
|
||||||
const mutable_input = try testing.arena_allocator.dupe(u8, input);
|
|
||||||
|
|
||||||
const actual = try Mime.parse(mutable_input);
|
|
||||||
try testing.expectEqual(
|
|
||||||
std.meta.activeTag(expected.content_type),
|
|
||||||
std.meta.activeTag(actual.content_type),
|
|
||||||
);
|
|
||||||
|
|
||||||
switch (expected.content_type) {
|
|
||||||
.other => |e| {
|
|
||||||
const a = actual.content_type.other;
|
|
||||||
try testing.expectEqual(e.type, a.type);
|
|
||||||
try testing.expectEqual(e.sub_type, a.sub_type);
|
|
||||||
},
|
|
||||||
else => {}, // already asserted above
|
|
||||||
}
|
|
||||||
|
|
||||||
try testing.expectEqual(expected.params, actual.params);
|
|
||||||
|
|
||||||
if (expected.charset) |ec| {
|
|
||||||
// We remove the null characters for testing purposes here.
|
|
||||||
try testing.expectEqual(ec, actual.charsetString());
|
|
||||||
} else {
|
|
||||||
const m: Mime = .unknown;
|
|
||||||
try testing.expectEqual(m.charsetStringZ(), actual.charsetStringZ());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
2970
src/browser/Page.zig
2970
src/browser/Page.zig
File diff suppressed because it is too large
Load Diff
@@ -17,98 +17,146 @@
|
|||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
const builtin = @import("builtin");
|
|
||||||
|
|
||||||
const log = @import("../log.zig");
|
const log = @import("../log.zig");
|
||||||
const milliTimestamp = @import("../datetime.zig").milliTimestamp;
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
const IS_DEBUG = builtin.mode == .Debug;
|
|
||||||
|
|
||||||
const Queue = std.PriorityQueue(Task, void, struct {
|
|
||||||
fn compare(_: void, a: Task, b: Task) std.math.Order {
|
|
||||||
const time_order = std.math.order(a.run_at, b.run_at);
|
|
||||||
if (time_order != .eq) return time_order;
|
|
||||||
// Break ties with sequence number to maintain FIFO order
|
|
||||||
return std.math.order(a.sequence, b.sequence);
|
|
||||||
}
|
|
||||||
}.compare);
|
|
||||||
|
|
||||||
const Scheduler = @This();
|
const Scheduler = @This();
|
||||||
|
|
||||||
_sequence: u64,
|
|
||||||
low_priority: Queue,
|
|
||||||
high_priority: Queue,
|
high_priority: Queue,
|
||||||
|
|
||||||
pub fn init(allocator: std.mem.Allocator) Scheduler {
|
// For repeating tasks. We only want to run these if there are other things to
|
||||||
|
// do. We don't, for example, want a window.setInterval or the page.runMicrotasks
|
||||||
|
// to block the page.wait.
|
||||||
|
low_priority: Queue,
|
||||||
|
|
||||||
|
// we expect allocator to be the page arena, hence we never call high_priority.deinit
|
||||||
|
pub fn init(allocator: Allocator) Scheduler {
|
||||||
return .{
|
return .{
|
||||||
._sequence = 0,
|
|
||||||
.low_priority = Queue.init(allocator, {}),
|
|
||||||
.high_priority = Queue.init(allocator, {}),
|
.high_priority = Queue.init(allocator, {}),
|
||||||
|
.low_priority = Queue.init(allocator, {}),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn reset(self: *Scheduler) void {
|
||||||
|
self.high_priority.clearRetainingCapacity();
|
||||||
|
self.low_priority.clearRetainingCapacity();
|
||||||
|
}
|
||||||
|
|
||||||
const AddOpts = struct {
|
const AddOpts = struct {
|
||||||
name: []const u8 = "",
|
name: []const u8 = "",
|
||||||
low_priority: bool = false,
|
low_priority: bool = false,
|
||||||
};
|
};
|
||||||
pub fn add(self: *Scheduler, ctx: *anyopaque, cb: Callback, run_in_ms: u32, opts: AddOpts) !void {
|
pub fn add(self: *Scheduler, ctx: *anyopaque, func: Task.Func, ms: u32, opts: AddOpts) !void {
|
||||||
if (comptime IS_DEBUG) {
|
var low_priority = opts.low_priority;
|
||||||
log.debug(.scheduler, "scheduler.add", .{ .name = opts.name, .run_in_ms = run_in_ms, .low_priority = opts.low_priority });
|
if (ms > 5_000) {
|
||||||
|
// we don't want tasks in the far future to block page.wait from
|
||||||
|
// completing. However, if page.wait is called multiple times (maybe
|
||||||
|
// a CDP driver is wait for something to happen), then we do want
|
||||||
|
// to [eventually] run these when their time is up.
|
||||||
|
low_priority = true;
|
||||||
}
|
}
|
||||||
var queue = if (opts.low_priority) &self.low_priority else &self.high_priority;
|
|
||||||
const seq = self._sequence + 1;
|
var q = if (low_priority) &self.low_priority else &self.high_priority;
|
||||||
self._sequence = seq;
|
return q.add(.{
|
||||||
return queue.add(.{
|
.ms = std.time.milliTimestamp() + ms,
|
||||||
.ctx = ctx,
|
.ctx = ctx,
|
||||||
.callback = cb,
|
.func = func,
|
||||||
.sequence = seq,
|
|
||||||
.name = opts.name,
|
.name = opts.name,
|
||||||
.run_at = milliTimestamp(.monotonic) + run_in_ms,
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn run(self: *Scheduler) !?u64 {
|
pub fn run(self: *Scheduler) !?i32 {
|
||||||
_ = try self.runQueue(&self.low_priority);
|
_ = try self.runQueue(&self.low_priority);
|
||||||
return self.runQueue(&self.high_priority);
|
return self.runQueue(&self.high_priority);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn runQueue(self: *Scheduler, queue: *Queue) !?u64 {
|
fn runQueue(self: *Scheduler, queue: *Queue) !?i32 {
|
||||||
|
// this is O(1)
|
||||||
if (queue.count() == 0) {
|
if (queue.count() == 0) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const now = milliTimestamp(.monotonic);
|
const now = std.time.milliTimestamp();
|
||||||
|
|
||||||
while (queue.peek()) |*task_| {
|
var next = queue.peek();
|
||||||
if (task_.run_at > now) {
|
while (next) |task| {
|
||||||
return @intCast(task_.run_at - now);
|
const time_to_next = task.ms - now;
|
||||||
}
|
if (time_to_next > 0) {
|
||||||
var task = queue.remove();
|
// @intCast is petty safe since we limit tasks to just 5 seconds
|
||||||
if (comptime IS_DEBUG) {
|
// in the future
|
||||||
log.debug(.scheduler, "scheduler.runTask", .{ .name = task.name });
|
return @intCast(time_to_next);
|
||||||
}
|
}
|
||||||
|
|
||||||
const repeat_in_ms = task.callback(task.ctx) catch |err| {
|
if (task.func(task.ctx)) |repeat_delay| {
|
||||||
log.warn(.scheduler, "task.callback", .{ .name = task.name, .err = err });
|
// if we do (now + 0) then our WHILE loop will run endlessly.
|
||||||
continue;
|
// no task should ever return 0
|
||||||
};
|
std.debug.assert(repeat_delay != 0);
|
||||||
|
|
||||||
if (repeat_in_ms) |ms| {
|
var copy = task;
|
||||||
// Task cannot be repeated immediately, and they should know that
|
copy.ms = now + repeat_delay;
|
||||||
std.debug.assert(ms != 0);
|
try self.low_priority.add(copy);
|
||||||
task.run_at = now + ms;
|
|
||||||
try self.low_priority.add(task);
|
|
||||||
}
|
}
|
||||||
|
_ = queue.remove();
|
||||||
|
next = queue.peek();
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const Task = struct {
|
const Task = struct {
|
||||||
run_at: u64,
|
ms: i64,
|
||||||
sequence: u64,
|
func: Func,
|
||||||
ctx: *anyopaque,
|
ctx: *anyopaque,
|
||||||
name: []const u8,
|
name: []const u8,
|
||||||
callback: Callback,
|
|
||||||
|
const Func = *const fn (ctx: *anyopaque) ?u32;
|
||||||
};
|
};
|
||||||
|
|
||||||
const Callback = *const fn (ctx: *anyopaque) anyerror!?u32;
|
const Queue = std.PriorityQueue(Task, void, struct {
|
||||||
|
fn compare(_: void, a: Task, b: Task) std.math.Order {
|
||||||
|
return std.math.order(a.ms, b.ms);
|
||||||
|
}
|
||||||
|
}.compare);
|
||||||
|
|
||||||
|
const testing = @import("../testing.zig");
|
||||||
|
test "Scheduler" {
|
||||||
|
defer testing.reset();
|
||||||
|
|
||||||
|
var task = TestTask{ .allocator = testing.arena_allocator };
|
||||||
|
|
||||||
|
var s = Scheduler.init(testing.arena_allocator);
|
||||||
|
try testing.expectEqual(null, s.run());
|
||||||
|
try testing.expectEqual(0, task.calls.items.len);
|
||||||
|
|
||||||
|
try s.add(&task, TestTask.run1, 3, .{});
|
||||||
|
|
||||||
|
try testing.expectDelta(3, try s.run(), 1);
|
||||||
|
try testing.expectEqual(0, task.calls.items.len);
|
||||||
|
|
||||||
|
std.Thread.sleep(std.time.ns_per_ms * 5);
|
||||||
|
try testing.expectEqual(null, s.run());
|
||||||
|
try testing.expectEqualSlices(u32, &.{1}, task.calls.items);
|
||||||
|
|
||||||
|
try s.add(&task, TestTask.run2, 3, .{});
|
||||||
|
try s.add(&task, TestTask.run1, 2, .{});
|
||||||
|
|
||||||
|
std.Thread.sleep(std.time.ns_per_ms * 5);
|
||||||
|
try testing.expectDelta(null, try s.run(), 1);
|
||||||
|
try testing.expectEqualSlices(u32, &.{ 1, 1, 2 }, task.calls.items);
|
||||||
|
}
|
||||||
|
|
||||||
|
const TestTask = struct {
|
||||||
|
allocator: Allocator,
|
||||||
|
calls: std.ArrayListUnmanaged(u32) = .{},
|
||||||
|
|
||||||
|
fn run1(ctx: *anyopaque) ?u32 {
|
||||||
|
var self: *TestTask = @ptrCast(@alignCast(ctx));
|
||||||
|
self.calls.append(self.allocator, 1) catch unreachable;
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run2(ctx: *anyopaque) ?u32 {
|
||||||
|
var self: *TestTask = @ptrCast(@alignCast(ctx));
|
||||||
|
self.calls.append(self.allocator, 2) catch unreachable;
|
||||||
|
return 2;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,176 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
|
|
||||||
|
|
||||||
const log = @import("../log.zig");
|
|
||||||
|
|
||||||
const js = @import("js/js.zig");
|
|
||||||
const storage = @import("webapi/storage/storage.zig");
|
|
||||||
const Navigation = @import("webapi/navigation/Navigation.zig");
|
|
||||||
const History = @import("webapi/History.zig");
|
|
||||||
|
|
||||||
const Page = @import("Page.zig");
|
|
||||||
const Browser = @import("Browser.zig");
|
|
||||||
|
|
||||||
const Allocator = std.mem.Allocator;
|
|
||||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
|
||||||
|
|
||||||
// Session is like a browser's tab.
|
|
||||||
// It owns the js env and the loader for all the pages of the session.
|
|
||||||
// You can create successively multiple pages for a session, but you must
|
|
||||||
// deinit a page before running another one.
|
|
||||||
const Session = @This();
|
|
||||||
|
|
||||||
browser: *Browser,
|
|
||||||
|
|
||||||
// Used to create our Inspector and in the BrowserContext.
|
|
||||||
arena: Allocator,
|
|
||||||
|
|
||||||
// The page's arena is unsuitable for data that has to existing while
|
|
||||||
// navigating from one page to another. For example, if we're clicking
|
|
||||||
// on an HREF, the URL exists in the original page (where the click
|
|
||||||
// originated) but also has to exist in the new page.
|
|
||||||
// While we could use the Session's arena, this could accumulate a lot of
|
|
||||||
// memory if we do many navigation events. The `transfer_arena` is meant to
|
|
||||||
// bridge the gap: existing long enough to store any data needed to end one
|
|
||||||
// page and start another.
|
|
||||||
transfer_arena: Allocator,
|
|
||||||
|
|
||||||
executor: js.ExecutionWorld,
|
|
||||||
cookie_jar: storage.Cookie.Jar,
|
|
||||||
storage_shed: storage.Shed,
|
|
||||||
|
|
||||||
history: History,
|
|
||||||
navigation: Navigation,
|
|
||||||
|
|
||||||
page: ?*Page = null,
|
|
||||||
|
|
||||||
pub fn init(self: *Session, browser: *Browser) !void {
|
|
||||||
var executor = try browser.env.newExecutionWorld();
|
|
||||||
errdefer executor.deinit();
|
|
||||||
|
|
||||||
const allocator = browser.app.allocator;
|
|
||||||
const session_allocator = browser.session_arena.allocator();
|
|
||||||
|
|
||||||
self.* = .{
|
|
||||||
.browser = browser,
|
|
||||||
.executor = executor,
|
|
||||||
.storage_shed = .{},
|
|
||||||
.arena = session_allocator,
|
|
||||||
.cookie_jar = storage.Cookie.Jar.init(allocator),
|
|
||||||
.navigation = .{},
|
|
||||||
.history = .{},
|
|
||||||
.transfer_arena = browser.transfer_arena.allocator(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deinit(self: *Session) void {
|
|
||||||
if (self.page != null) {
|
|
||||||
self.removePage();
|
|
||||||
}
|
|
||||||
self.cookie_jar.deinit();
|
|
||||||
self.storage_shed.deinit(self.browser.app.allocator);
|
|
||||||
self.executor.deinit();
|
|
||||||
}
|
|
||||||
|
|
||||||
// NOTE: the caller is not the owner of the returned value,
|
|
||||||
// the pointer on Page is just returned as a convenience
|
|
||||||
pub fn createPage(self: *Session) !*Page {
|
|
||||||
std.debug.assert(self.page == null);
|
|
||||||
|
|
||||||
const page_arena = &self.browser.page_arena;
|
|
||||||
_ = page_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });
|
|
||||||
|
|
||||||
self.page = try Page.init(page_arena.allocator(), self.browser.call_arena.allocator(), self);
|
|
||||||
const page = self.page.?;
|
|
||||||
|
|
||||||
// Creates a new NavigationEventTarget for this page.
|
|
||||||
try self.navigation.onNewPage(page);
|
|
||||||
|
|
||||||
if (comptime IS_DEBUG) {
|
|
||||||
log.debug(.browser, "create page", .{});
|
|
||||||
}
|
|
||||||
// start JS env
|
|
||||||
// Inform CDP the main page has been created such that additional context for other Worlds can be created as well
|
|
||||||
self.browser.notification.dispatch(.page_created, page);
|
|
||||||
|
|
||||||
return page;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn removePage(self: *Session) void {
|
|
||||||
// Inform CDP the page is going to be removed, allowing other worlds to remove themselves before the main one
|
|
||||||
self.browser.notification.dispatch(.page_remove, .{});
|
|
||||||
|
|
||||||
std.debug.assert(self.page != null);
|
|
||||||
|
|
||||||
self.page.?.deinit();
|
|
||||||
self.page = null;
|
|
||||||
|
|
||||||
self.navigation.onRemovePage();
|
|
||||||
|
|
||||||
if (comptime IS_DEBUG) {
|
|
||||||
log.debug(.browser, "remove page", .{});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn currentPage(self: *Session) ?*Page {
|
|
||||||
return self.page orelse return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const WaitResult = enum {
|
|
||||||
done,
|
|
||||||
no_page,
|
|
||||||
cdp_socket,
|
|
||||||
navigate,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Pumps the current page via `page.wait(wait_ms)` until a terminal result
/// occurs. A `.navigate` result means the page queued a navigation: the old
/// page is swapped for a new one and the loop waits again on the new page.
pub fn wait(self: *Session, wait_ms: u32) WaitResult {
    while (true) {
        const page = self.page orelse return .no_page;
        switch (page.wait(wait_ms)) {
            // On navigation failure we report .done rather than surfacing
            // the error (it is already logged inside the call).
            .navigate => self.processScheduledNavigation() catch return .done,
            else => |result| return result,
        }
        // if we've successfully navigated, we'll give the new page another
        // page.wait(wait_ms)
    }
}
|
|
||||||
|
|
||||||
/// Replaces the current page with a fresh one and navigates it to the
/// page's queued navigation target. Assumes both `self.page` and its
/// `_queued_navigation` are set (i.e. page.wait() returned `.navigate`).
fn processScheduledNavigation(self: *Session) !void {
    const qn = self.page.?._queued_navigation.?;
    // NOTE(review): resetting transfer_arena here suggests `qn` (url/opts)
    // was allocated from it — confirm before moving this defer.
    defer _ = self.browser.transfer_arena.reset(.{ .retain_with_limit = 8 * 1024 });

    // This was already aborted on the page, but it would be pretty
    // bad if old requests went to the new page, so let's make double sure
    self.browser.http_client.abort();
    self.removePage();

    const page = self.createPage() catch |err| {
        log.err(.browser, "queued navigation page error", .{
            .err = err,
            .url = qn.url,
        });
        return err;
    };

    page.navigate(qn.url, qn.opts) catch |err| {
        log.err(.browser, "queued navigation error", .{ .err = err, .url = qn.url });
        return err;
    };
}
|
|
||||||
78
src/browser/State.zig
Normal file
78
src/browser/State.zig
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
// Sometimes we need to extend libdom. For example, its HTMLDocument doesn't
|
||||||
|
// have a readyState. We have a couple different options, such as making the
|
||||||
|
// correction in libdom directly. Another option stems from the fact that every
|
||||||
|
// libdom node has an opaque embedder_data field. This is the struct that we
|
||||||
|
// lazily load into that field.
|
||||||
|
//
|
||||||
|
// It didn't originally start off as a collection of every single extension, but
|
||||||
|
// this quickly proved necessary, since different fields are needed on the same
|
||||||
|
// data at different levels of the prototype chain. This isn't memory efficient.
|
||||||
|
|
||||||
|
const Env = @import("env.zig").Env;
const parser = @import("netsurf.zig");
const DataSet = @import("html/DataSet.zig");
const ShadowRoot = @import("dom/shadow_root.zig").ShadowRoot;
const StyleSheet = @import("cssom/StyleSheet.zig");
// NOTE(review): CSSStyleSheet appears unused by the fields below — confirm
// before removing the import.
const CSSStyleSheet = @import("cssom/CSSStyleSheet.zig");
const CSSStyleDeclaration = @import("cssom/CSSStyleDeclaration.zig");

// for HTMLScript (but probably needs to be added to more)
onload: ?Env.Function = null,
onerror: ?Env.Function = null,

// for HTMLElement
// Lazily-populated inline style declaration.
style: CSSStyleDeclaration = .empty,
// Lazily-populated dataset (data-* attributes).
dataset: ?DataSet = null,
// Content fragment of a <template> element.
template_content: ?*parser.DocumentFragment = null,

// For dom/element
shadow_root: ?*ShadowRoot = null,

// for html/document
// libdom's HTMLDocument has no readyState; we track it here.
ready_state: ReadyState = .loading,

// for html/HTMLStyleElement
style_sheet: ?*StyleSheet = null,

// for dom/document
active_element: ?*parser.Element = null,
adopted_style_sheets: ?Env.JsObject = null,

// for HTMLSelectElement
// By default, if no option is explicitly selected, the first option should
// be selected. However, libdom doesn't do this, and it sets the
// selectedIndex to -1, which is a valid value for "nothing selected".
// Therefore, when libdom says the selectedIndex == -1, we don't know if
// it means that nothing is selected, or if the first option is selected by
// default.
// There are cases where this won't work, but when selectedIndex is
// explicitly set, we set this boolean flag. Then, when we're getting the
// selectedIndex, if this flag is == false, which is to say that if
// selectedIndex hasn't been explicitly set AND if we have at least 1 option
// AND if it isn't a multi select, we can make the 1st item selected by
// default (by returning selectedIndex == 0).
explicit_index_set: bool = false,

// Mirrors the DOM document readyState values.
const ReadyState = enum {
    loading,
    interactive,
    complete,
};
|
||||||
@@ -1,758 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
|
|
||||||
const Allocator = std.mem.Allocator;
|
|
||||||
|
|
||||||
/// Options for `resolve`.
const ResolveOpts = struct {
    // When true, the result is always freshly allocated, even in cases
    // where `resolve` could have returned `base` or `path` unchanged.
    always_dupe: bool = false,
};
|
|
||||||
// path is anytype, so that it can be used with both []const u8 and [:0]const u8
/// Resolves `path` against `base`, RFC-3986-style: handles absolute URLs,
/// query-only ("?..."), fragment-only ("#..."), network-path ("//..."),
/// absolute-path ("/...") and relative references, including "./" and
/// "../" normalization. The allocator is expected to be an arena: the
/// normalization step shrinks an allocation in place, so the result may
/// not be individually freeable.
pub fn resolve(allocator: Allocator, base: [:0]const u8, path: anytype, comptime opts: ResolveOpts) ![:0]const u8 {
    const PT = @TypeOf(path);

    // Already absolute (or no base at all): the path wins as-is.
    if (base.len == 0 or isCompleteHTTPUrl(path)) {
        if (comptime opts.always_dupe or !isNullTerminated(PT)) {
            return allocator.dupeZ(u8, path);
        }
        return path;
    }

    // Empty path resolves to the base itself.
    if (path.len == 0) {
        if (comptime opts.always_dupe) {
            return allocator.dupeZ(u8, base);
        }
        return base;
    }

    // Query-only reference: replace base's query (and fragment).
    if (path[0] == '?') {
        const base_path_end = std.mem.indexOfAny(u8, base, "?#") orelse base.len;
        return std.mem.joinZ(allocator, "", &.{ base[0..base_path_end], path });
    }
    // Fragment-only reference: replace base's fragment.
    if (path[0] == '#') {
        const base_fragment_start = std.mem.indexOfScalar(u8, base, '#') orelse base.len;
        return std.mem.joinZ(allocator, "", &.{ base[0..base_fragment_start], path });
    }

    if (std.mem.startsWith(u8, path, "//")) {
        // network-path reference: keep only the base's scheme.
        const index = std.mem.indexOfScalar(u8, base, ':') orelse {
            if (comptime isNullTerminated(PT)) {
                return path;
            }
            return allocator.dupeZ(u8, path);
        };
        const protocol = base[0 .. index + 1];
        return std.mem.joinZ(allocator, "", &.{ protocol, path });
    }

    // Locate where the base's path begins (after "scheme://authority").
    const scheme_end = std.mem.indexOf(u8, base, "://");
    const authority_start = if (scheme_end) |end| end + 3 else 0;
    const path_start = std.mem.indexOfScalarPos(u8, base, authority_start, '/') orelse base.len;

    // Absolute-path reference: replace base's entire path.
    if (path[0] == '/') {
        return std.mem.joinZ(allocator, "", &.{ base[0..path_start], path });
    }

    // Relative reference: drop the base's last path segment.
    var normalized_base: []const u8 = base[0..path_start];
    if (path_start < base.len) {
        if (std.mem.lastIndexOfScalar(u8, base[path_start + 1 ..], '/')) |pos| {
            normalized_base = base[0 .. path_start + 1 + pos];
        }
    }

    // trailing space so that we always have space to append the null terminator
    var out = try std.mem.join(allocator, "", &.{ normalized_base, "/", path, " " });
    const end = out.len - 1;

    // Index of the first character after the authority's trailing '/'.
    const path_marker = path_start + 1;

    // Strip out ./ and ../. This is done in-place, because doing so can
    // only ever make `out` smaller. After this, `out` cannot be freed by
    // an allocator, which is ok, because we expect allocator to be an arena.
    var in_i: usize = 0;
    var out_i: usize = 0;
    while (in_i < end) {
        if (std.mem.startsWith(u8, out[in_i..], "./")) {
            in_i += 2;
            continue;
        }

        if (std.mem.startsWith(u8, out[in_i..], "../")) {
            std.debug.assert(out[out_i - 1] == '/');

            if (out_i > path_marker) {
                // go back before the /
                out_i -= 2;
                while (out_i > 1 and out[out_i - 1] != '/') {
                    out_i -= 1;
                }
            } else {
                // if out_i == path_marker, than we've reached the start of
                // the path. We can't ../ any more. E.g.:
                // http://www.example.com/../hello.
                // You might think that's an error, but, at least with
                // new URL('../hello', 'http://www.example.com/')
                // it just ignores the extra ../
            }
            in_i += 3;
            continue;
        }

        out[out_i] = out[in_i];
        in_i += 1;
        out_i += 1;
    }

    // we always have an extra space
    out[out_i] = 0;
    return out[0..out_i :0];
}
|
|
||||||
|
|
||||||
/// True when `value` is a sentinel(0)-terminated pointer/slice type
/// (e.g. `[:0]const u8`), false for plain slices.
fn isNullTerminated(comptime value: type) bool {
    // A slice/pointer type is null-terminated exactly when its type info
    // carries a sentinel.
    const ptr_info = @typeInfo(value).pointer;
    return ptr_info.sentinel_ptr != null;
}
|
|
||||||
|
|
||||||
/// Returns true when `url` is an absolute URL of the form "scheme://...".
/// Despite the name, any syntactically valid scheme is accepted (the unit
/// tests expect "ftp://..." to return true as well).
pub fn isCompleteHTTPUrl(url: []const u8) bool {
    // Minimum is "x://" — 1-char scheme + "://" = 4 bytes. (The previous
    // check used `< 3`, contradicting its own comment; 3-byte inputs were
    // always rejected by the later checks anyway, so this bound tightening
    // preserves behavior.)
    if (url.len < 4) {
        return false;
    }

    // very common case: a root-relative path
    if (url[0] == '/') {
        return false;
    }

    // A non-empty scheme terminated by ':' must be present.
    const colon_pos = std.mem.indexOfScalar(u8, url, ':') orelse return false;
    if (colon_pos == 0) {
        return false;
    }

    // The colon must be followed by "//".
    if (colon_pos + 2 >= url.len or url[colon_pos + 1] != '/' or url[colon_pos + 2] != '/') {
        return false;
    }

    // A scheme starts with a letter and contains only letters, digits,
    // '+', '-' and '.' (RFC 3986 §3.1).
    const scheme = url[0..colon_pos];
    if (!std.ascii.isAlphabetic(scheme[0])) {
        return false;
    }
    for (scheme[1..]) |c| {
        if (!std.ascii.isAlphanumeric(c) and c != '+' and c != '-' and c != '.') {
            return false;
        }
    }

    return true;
}
|
|
||||||
|
|
||||||
/// Returns the username portion of the URL's user-info, or "" when the
/// URL has no user-info.
pub fn getUsername(raw: [:0]const u8) []const u8 {
    if (getUserInfo(raw)) |user_info| {
        // user-info is "username[:password]"
        if (std.mem.indexOfScalar(u8, user_info, ':')) |colon| {
            return user_info[0..colon];
        }
        return user_info;
    }
    return "";
}
|
|
||||||
|
|
||||||
/// Returns the password portion of the URL's user-info, or "" when the
/// URL has no user-info or the user-info has no ':' separator.
pub fn getPassword(raw: [:0]const u8) []const u8 {
    if (getUserInfo(raw)) |user_info| {
        if (std.mem.indexOfScalar(u8, user_info, ':')) |colon| {
            return user_info[colon + 1 ..];
        }
    }
    return "";
}
|
|
||||||
|
|
||||||
/// Returns the path component of `raw` (without query or fragment).
/// Absolute URLs with no explicit path yield "/"; scheme-less strings
/// with no '/' yield "".
pub fn getPathname(raw: [:0]const u8) []const u8 {
    // Find the scheme separator once and reuse it for both decisions below.
    const scheme_sep = std.mem.indexOf(u8, raw, "://");
    const search_from: usize = if (scheme_sep) |sep| (if (sep > 0) sep + 3 else 0) else 0;

    const path_start = std.mem.indexOfScalarPos(u8, raw, search_from, '/') orelse raw.len;
    const path_end = std.mem.indexOfAnyPos(u8, raw, path_start, "?#") orelse raw.len;

    if (path_start < path_end) {
        return raw[path_start..path_end];
    }

    // No explicit path: absolute URLs normalize to "/", relative ones to "".
    return if (scheme_sep != null) "/" else "";
}
|
|
||||||
|
|
||||||
/// Returns the scheme including its trailing ':' (e.g. "https:"),
/// or "" when `raw` contains no ':'.
pub fn getProtocol(raw: [:0]const u8) []const u8 {
    if (std.mem.indexOfScalar(u8, raw, ':')) |colon| {
        return raw[0 .. colon + 1];
    }
    return "";
}
|
|
||||||
|
|
||||||
/// True when the URL's scheme is exactly "https".
pub fn isHTTPS(raw: [:0]const u8) bool {
    const prefix = "https:";
    if (raw.len < prefix.len) {
        return false;
    }
    return std.mem.eql(u8, raw[0..prefix.len], prefix);
}
|
|
||||||
|
|
||||||
/// Returns the host without any trailing ":port" suffix.
pub fn getHostname(raw: [:0]const u8) []const u8 {
    const host = getHost(raw);
    if (std.mem.lastIndexOfScalar(u8, host, ':')) |colon| {
        return host[0..colon];
    }
    return host;
}
|
|
||||||
|
|
||||||
/// Returns the port digits from the host, or "" when no port is present
/// or the text after the last ':' is not all digits (e.g. IPv6).
pub fn getPort(raw: [:0]const u8) []const u8 {
    const host = getHost(raw);
    const colon = std.mem.lastIndexOfScalar(u8, host, ':') orelse return "";

    const candidate = host[colon + 1 ..];
    if (candidate.len == 0) {
        return "";
    }

    // A port is digits only; anything else means the ':' was not a
    // port separator.
    for (candidate) |c| {
        if (!std.ascii.isDigit(c)) {
            return "";
        }
    }

    return candidate;
}
|
|
||||||
|
|
||||||
/// Returns the query string including the leading '?', with any trailing
/// fragment stripped; "" when the URL has no query.
pub fn getSearch(raw: [:0]const u8) []const u8 {
    const q = std.mem.indexOfScalar(u8, raw, '?') orelse return "";
    // The query runs until the fragment (or end of string).
    const end = std.mem.indexOfScalarPos(u8, raw, q, '#') orelse raw.len;
    return raw[q..end];
}
|
|
||||||
|
|
||||||
/// Returns the fragment including the leading '#', or "" when absent.
pub fn getHash(raw: [:0]const u8) []const u8 {
    if (std.mem.indexOfScalar(u8, raw, '#')) |start| {
        return raw[start..];
    }
    return "";
}
|
|
||||||
|
|
||||||
/// Returns the origin ("scheme://host[:port]") of `raw`, or null for
/// non-http(s) URLs. Default ports (80/443) and user-info are excluded.
/// May return a slice of `raw` (zero allocation) or an allocated string,
/// so the result must be treated as arena-owned — do not free it directly.
pub fn getOrigin(allocator: Allocator, raw: [:0]const u8) !?[]const u8 {
    const scheme_end = std.mem.indexOf(u8, raw, "://") orelse return null;

    // Only HTTP and HTTPS schemes have origins
    const protocol = raw[0 .. scheme_end + 1];
    if (!std.mem.eql(u8, protocol, "http:") and !std.mem.eql(u8, protocol, "https:")) {
        return null;
    }

    var authority_start = scheme_end + 3;
    // NOTE(review): this '@' search scans to the end of the URL, not just
    // the authority; an '@' appearing in the path/query would shift
    // authority_start incorrectly — confirm against getHost/getUserInfo.
    const has_user_info = if (std.mem.indexOf(u8, raw[authority_start..], "@")) |pos| blk: {
        authority_start += pos + 1;
        break :blk true;
    } else false;

    // Find end of authority (start of path/query/fragment or end of string)
    const authority_end_relative = std.mem.indexOfAny(u8, raw[authority_start..], "/?#");
    const authority_end = if (authority_end_relative) |end|
        authority_start + end
    else
        raw.len;

    // Check for port in the host:port section
    const host_part = raw[authority_start..authority_end];
    if (std.mem.lastIndexOfScalar(u8, host_part, ':')) |colon_pos_in_host| {
        const port = host_part[colon_pos_in_host + 1 ..];

        // Validate it's actually a port (all digits)
        for (port) |c| {
            if (c < '0' or c > '9') {
                // Not a port (probably IPv6)
                if (has_user_info) {
                    // Need to allocate to exclude user info
                    return try std.fmt.allocPrint(allocator, "{s}//{s}", .{ raw[0 .. scheme_end + 1], host_part });
                }
                // Can return a slice
                return raw[0..authority_end];
            }
        }

        // Check if it's a default port that should be excluded from origin
        const is_default =
            (std.mem.eql(u8, protocol, "http:") and std.mem.eql(u8, port, "80")) or
            (std.mem.eql(u8, protocol, "https:") and std.mem.eql(u8, port, "443"));

        if (is_default or has_user_info) {
            // Need to allocate to build origin without default port and/or user info
            const hostname = host_part[0..colon_pos_in_host];
            if (is_default) {
                return try std.fmt.allocPrint(allocator, "{s}//{s}", .{ protocol, hostname });
            } else {
                return try std.fmt.allocPrint(allocator, "{s}//{s}", .{ protocol, host_part });
            }
        }
    } else if (has_user_info) {
        // No port, but has user info - need to allocate
        return try std.fmt.allocPrint(allocator, "{s}//{s}", .{ raw[0 .. scheme_end + 1], host_part });
    }

    // Common case: no user info, no default port - return slice (zero allocation!)
    return raw[0..authority_end];
}
|
|
||||||
|
|
||||||
/// Returns the user-info ("user[:password]") of `raw`, or null when the
/// authority contains no '@'.
fn getUserInfo(raw: [:0]const u8) ?[]const u8 {
    const scheme_end = std.mem.indexOf(u8, raw, "://") orelse return null;
    const authority_start = scheme_end + 3;

    // The authority ends at the first '/', '?' or '#'. The previous code
    // only considered '/', so an '@' appearing after '?' or '#' (e.g.
    // "https://host?a@b") was misparsed as a user-info delimiter.
    const authority_end = std.mem.indexOfAnyPos(u8, raw, authority_start, "/?#") orelse raw.len;

    // Only an '@' inside the authority delimits user-info.
    const at = std.mem.indexOfScalar(u8, raw[authority_start..authority_end], '@') orelse return null;
    return raw[authority_start .. authority_start + at];
}
|
|
||||||
|
|
||||||
/// Returns the host (including any ":port") of `raw`, or "" when `raw`
/// has no "://" scheme separator. Any user-info prefix is stripped.
pub fn getHost(raw: [:0]const u8) []const u8 {
    const scheme_end = std.mem.indexOf(u8, raw, "://") orelse return "";
    var authority_start = scheme_end + 3;

    // The authority ends at the first '/', '?' or '#'.
    const authority_end = std.mem.indexOfAnyPos(u8, raw, authority_start, "/?#") orelse raw.len;

    // Only honor an '@' that appears inside the authority. The previous
    // code searched the whole remainder of the URL, so an '@' in the path
    // (e.g. "https://host/p@th") wrongly shifted the host start.
    if (std.mem.indexOfScalar(u8, raw[authority_start..authority_end], '@')) |pos| {
        authority_start += pos + 1;
    }

    return raw[authority_start..authority_end];
}
|
|
||||||
|
|
||||||
/// Returns true when both URLs identify the same document: everything
/// before the fragment ('#') must match exactly; the fragment itself is
/// ignored since it never changes which document is addressed.
pub fn eqlDocument(first: [:0]const u8, second: [:0]const u8) bool {
    const first_end = std.mem.indexOfScalar(u8, first, '#') orelse first.len;
    const second_end = std.mem.indexOfScalar(u8, second, '#') orelse second.len;
    return std.mem.eql(u8, first[0..first_end], second[0..second_end]);
}
|
|
||||||
|
|
||||||
// Helper function to build a URL from components.
// Callers are expected to pass components with their delimiters already
// attached: `protocol` ends with ':', `pathname` starts with '/',
// `search` with '?' and `hash` with '#' (see the set* functions below,
// which normalize their inputs accordingly). Returns a null-terminated
// string owned by `allocator`.
pub fn buildUrl(
    allocator: Allocator,
    protocol: []const u8,
    host: []const u8,
    pathname: []const u8,
    search: []const u8,
    hash: []const u8,
) ![:0]const u8 {
    return std.fmt.allocPrintSentinel(allocator, "{s}//{s}{s}{s}{s}", .{
        protocol,
        host,
        pathname,
        search,
        hash,
    }, 0);
}
|
|
||||||
|
|
||||||
/// Returns a new URL equal to `current` with its scheme replaced by
/// `value`. A missing trailing ':' is added. The result is allocated
/// from `allocator`.
pub fn setProtocol(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
    // Normalize: a non-empty protocol always carries its trailing ':'.
    var protocol = value;
    if (value.len > 0 and value[value.len - 1] != ':') {
        protocol = try std.fmt.allocPrint(allocator, "{s}:", .{value});
    }

    return buildUrl(
        allocator,
        protocol,
        getHost(current),
        getPathname(current),
        getSearch(current),
        getHash(current),
    );
}
|
|
||||||
|
|
||||||
/// Returns a new URL equal to `current` with its host (and optional port)
/// replaced by `value`. A default port (80 for http, 443 for https) in
/// `value` is dropped. The result is allocated from `allocator`.
pub fn setHost(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
    const protocol = getProtocol(current);
    const pathname = getPathname(current);
    const search = getSearch(current);
    const hash = getHash(current);

    // Strip a default port from the incoming host, if one is present.
    var clean_host = value;
    if (std.mem.lastIndexOfScalar(u8, value, ':')) |pos| {
        const port_str = value[pos + 1 ..];
        const is_default =
            (std.mem.eql(u8, protocol, "https:") and std.mem.eql(u8, port_str, "443")) or
            (std.mem.eql(u8, protocol, "http:") and std.mem.eql(u8, port_str, "80"));
        if (is_default) {
            clean_host = value[0..pos];
        }
    }

    return buildUrl(allocator, protocol, clean_host, pathname, search, hash);
}
|
|
||||||
|
|
||||||
/// Returns a new URL equal to `current` with its hostname replaced by
/// `value`, preserving any existing port. The result is allocated from
/// `allocator`.
pub fn setHostname(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
    const current_port = getPort(current);

    // No port to carry over: the new hostname is the whole host.
    if (current_port.len == 0) {
        return setHost(current, value, allocator);
    }

    const host_with_port = try std.fmt.allocPrint(allocator, "{s}:{s}", .{ value, current_port });
    return setHost(current, host_with_port, allocator);
}
|
|
||||||
|
|
||||||
/// Returns a new URL equal to `current` with its port replaced by `value`.
/// Passing null, "", or the protocol's default port (80/443) removes the
/// port. The result is allocated from `allocator`.
pub fn setPort(current: [:0]const u8, value: ?[]const u8, allocator: Allocator) ![:0]const u8 {
    const hostname = getHostname(current);
    const protocol = getProtocol(current);

    var new_host: []const u8 = hostname;
    if (value) |port_str| {
        // Only append the port when it is non-empty and not the default
        // for the current protocol.
        const keep = port_str.len > 0 and
            !(std.mem.eql(u8, protocol, "https:") and std.mem.eql(u8, port_str, "443")) and
            !(std.mem.eql(u8, protocol, "http:") and std.mem.eql(u8, port_str, "80"));
        if (keep) {
            new_host = try std.fmt.allocPrint(allocator, "{s}:{s}", .{ hostname, port_str });
        }
    }

    return setHost(current, new_host, allocator);
}
|
|
||||||
|
|
||||||
/// Returns a new URL equal to `current` with its path replaced by `value`.
/// A missing leading '/' is added. The result is allocated from `allocator`.
pub fn setPathname(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
    // Normalize: a non-empty path always starts with '/'.
    var pathname = value;
    if (value.len > 0 and value[0] != '/') {
        pathname = try std.fmt.allocPrint(allocator, "/{s}", .{value});
    }

    return buildUrl(
        allocator,
        getProtocol(current),
        getHost(current),
        pathname,
        getSearch(current),
        getHash(current),
    );
}
|
|
||||||
|
|
||||||
/// Returns a new URL equal to `current` with its query replaced by
/// `value`. A missing leading '?' is added. The result is allocated from
/// `allocator`.
pub fn setSearch(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
    // Normalize: a non-empty query always starts with '?'.
    var search = value;
    if (value.len > 0 and value[0] != '?') {
        search = try std.fmt.allocPrint(allocator, "?{s}", .{value});
    }

    return buildUrl(
        allocator,
        getProtocol(current),
        getHost(current),
        getPathname(current),
        search,
        getHash(current),
    );
}
|
|
||||||
|
|
||||||
/// Returns a new URL equal to `current` with its fragment replaced by
/// `value`. A missing leading '#' is added. The result is allocated from
/// `allocator`.
pub fn setHash(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
    // Normalize: a non-empty fragment always starts with '#'.
    var hash = value;
    if (value.len > 0 and value[0] != '#') {
        hash = try std.fmt.allocPrint(allocator, "#{s}", .{value});
    }

    return buildUrl(
        allocator,
        getProtocol(current),
        getHost(current),
        getPathname(current),
        getSearch(current),
        hash,
    );
}
|
|
||||||
|
|
||||||
/// Appends `query_string` to `url`, inserting '?' when the URL has no
/// query yet, or '&' when it already has one (unless the URL already ends
/// in '?' or '&'). Always returns a freshly allocated, null-terminated
/// string owned by `arena`.
pub fn concatQueryString(arena: Allocator, url: []const u8, query_string: []const u8) ![:0]const u8 {
    if (query_string.len == 0) {
        return arena.dupeZ(u8, url);
    }

    // Pick the separator to place between the url and the query string.
    const separator: []const u8 = blk: {
        const q_index = std.mem.indexOfScalar(u8, url, '?') orelse break :blk "?";
        const last_index = url.len - 1;
        // "...?x" or "...?x&" need no extra separator when they already
        // end in '?' or '&'.
        if (q_index != last_index and url[last_index] != '&') {
            break :blk "&";
        }
        break :blk "";
    };

    return std.mem.joinZ(arena, "", &.{ url, separator, query_string });
}
|
|
||||||
|
|
||||||
const testing = @import("../testing.zig");
|
|
||||||
// Note: despite the function's name, any syntactically valid scheme is
// accepted, hence the "ftp://" expectation below.
test "URL: isCompleteHTTPUrl" {
    try testing.expectEqual(true, isCompleteHTTPUrl("http://example.com/about"));
    try testing.expectEqual(true, isCompleteHTTPUrl("HttP://example.com/about"));
    try testing.expectEqual(true, isCompleteHTTPUrl("httpS://example.com/about"));
    try testing.expectEqual(true, isCompleteHTTPUrl("HTTPs://example.com/about"));
    try testing.expectEqual(true, isCompleteHTTPUrl("ftp://example.com/about"));

    try testing.expectEqual(false, isCompleteHTTPUrl("/example.com"));
    try testing.expectEqual(false, isCompleteHTTPUrl("../../about"));
    try testing.expectEqual(false, isCompleteHTTPUrl("about"));
}
|
|
||||||
|
|
||||||
// Regression: "../" from a file at the authority root must collapse into
// the root, not corrupt the authority (issue #1093).
test "URL: resolve regression (#1093)" {
    defer testing.reset();

    const Case = struct {
        base: [:0]const u8,
        path: [:0]const u8,
        expected: [:0]const u8,
    };

    const cases = [_]Case{
        .{
            .base = "https://alas.aws.amazon.com/alas2.html",
            .path = "../static/bootstrap.min.css",
            .expected = "https://alas.aws.amazon.com/static/bootstrap.min.css",
        },
    };

    for (cases) |case| {
        const result = try resolve(testing.arena_allocator, case.base, case.path, .{});
        try testing.expectString(case.expected, result);
    }
}
|
|
||||||
|
|
||||||
// Table-driven coverage of resolve(): relative, absolute-path, absolute,
// empty, "./" / "../" normalization, scheme-less bases, and network-path
// ("//...") references.
test "URL: resolve" {
    defer testing.reset();

    const Case = struct {
        base: [:0]const u8,
        path: [:0]const u8,
        expected: [:0]const u8,
    };

    const cases = [_]Case{
        .{
            .base = "https://example/xyz/abc/123",
            .path = "something.js",
            .expected = "https://example/xyz/abc/something.js",
        },
        .{
            .base = "https://example/xyz/abc/123",
            .path = "/something.js",
            .expected = "https://example/something.js",
        },
        .{
            .base = "https://example/",
            .path = "something.js",
            .expected = "https://example/something.js",
        },
        .{
            .base = "https://example/",
            .path = "/something.js",
            .expected = "https://example/something.js",
        },
        .{
            .base = "https://example",
            .path = "something.js",
            .expected = "https://example/something.js",
        },
        .{
            .base = "https://example",
            .path = "abc/something.js",
            .expected = "https://example/abc/something.js",
        },
        .{
            .base = "https://example/nested",
            .path = "abc/something.js",
            .expected = "https://example/abc/something.js",
        },
        .{
            .base = "https://example/nested/",
            .path = "abc/something.js",
            .expected = "https://example/nested/abc/something.js",
        },
        .{
            .base = "https://example/nested/",
            .path = "/abc/something.js",
            .expected = "https://example/abc/something.js",
        },
        .{
            .base = "https://example/nested/",
            .path = "http://www.github.com/example/",
            .expected = "http://www.github.com/example/",
        },
        .{
            .base = "https://example/nested/",
            .path = "",
            .expected = "https://example/nested/",
        },
        .{
            .base = "https://example/abc/aaa",
            .path = "./hello/./world",
            .expected = "https://example/abc/hello/world",
        },
        .{
            .base = "https://example/abc/aaa/",
            .path = "../hello",
            .expected = "https://example/abc/hello",
        },
        .{
            .base = "https://example/abc/aaa",
            .path = "../hello",
            .expected = "https://example/hello",
        },
        .{
            .base = "https://example/abc/aaa/",
            .path = "./.././.././hello",
            .expected = "https://example/hello",
        },
        .{
            .base = "some/page",
            .path = "hello",
            .expected = "some/hello",
        },
        .{
            .base = "some/page/",
            .path = "hello",
            .expected = "some/page/hello",
        },
        .{
            .base = "some/page/other",
            .path = ".././hello",
            .expected = "some/hello",
        },
        .{
            .base = "https://www.example.com/hello/world",
            .path = "//example/about",
            .expected = "https://example/about",
        },
        .{
            .base = "http:",
            .path = "//example.com/over/9000",
            .expected = "http://example.com/over/9000",
        },
        .{
            .base = "https://example.com/",
            .path = "../hello",
            .expected = "https://example.com/hello",
        },
        .{
            .base = "https://www.example.com/hello/world/",
            .path = "../../../../example/about",
            .expected = "https://www.example.com/example/about",
        },
    };

    for (cases) |case| {
        const result = try resolve(testing.arena_allocator, case.base, case.path, .{});
        try testing.expectString(case.expected, result);
    }
}
|
|
||||||
|
|
||||||
test "URL: eqlDocument" {
|
|
||||||
defer testing.reset();
|
|
||||||
{
|
|
||||||
const url = "https://lightpanda.io/about";
|
|
||||||
try testing.expectEqual(true, eqlDocument(url, url));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about";
|
|
||||||
const url2 = "http://lightpanda.io/about";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about";
|
|
||||||
const url2 = "https://example.com/about";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io:8080/about";
|
|
||||||
const url2 = "https://lightpanda.io:9090/about";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about";
|
|
||||||
const url2 = "https://lightpanda.io/contact";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about?foo=bar";
|
|
||||||
const url2 = "https://lightpanda.io/about?baz=qux";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about#section1";
|
|
||||||
const url2 = "https://lightpanda.io/about#section2";
|
|
||||||
try testing.expectEqual(true, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about";
|
|
||||||
const url2 = "https://lightpanda.io/about/";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about?foo=bar";
|
|
||||||
const url2 = "https://lightpanda.io/about";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about";
|
|
||||||
const url2 = "https://lightpanda.io/about?foo=bar";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about?foo=bar";
|
|
||||||
const url2 = "https://lightpanda.io/about?foo=bar";
|
|
||||||
try testing.expectEqual(true, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://lightpanda.io/about?";
|
|
||||||
const url2 = "https://lightpanda.io/about";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const url1 = "https://duckduckgo.com/";
|
|
||||||
const url2 = "https://duckduckgo.com/?q=lightpanda";
|
|
||||||
try testing.expectEqual(false, eqlDocument(url1, url2));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
test "URL: concatQueryString" {
|
|
||||||
defer testing.reset();
|
|
||||||
const arena = testing.arena_allocator;
|
|
||||||
|
|
||||||
{
|
|
||||||
const url = try concatQueryString(arena, "https://www.lightpanda.io/", "");
|
|
||||||
try testing.expectEqual("https://www.lightpanda.io/", url);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
const url = try concatQueryString(arena, "https://www.lightpanda.io/index?", "");
|
|
||||||
try testing.expectEqual("https://www.lightpanda.io/index?", url);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
const url = try concatQueryString(arena, "https://www.lightpanda.io/index?", "a=b");
|
|
||||||
try testing.expectEqual("https://www.lightpanda.io/index?a=b", url);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
const url = try concatQueryString(arena, "https://www.lightpanda.io/index?1=2", "a=b");
|
|
||||||
try testing.expectEqual("https://www.lightpanda.io/index?1=2&a=b", url);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
const url = try concatQueryString(arena, "https://www.lightpanda.io/index?1=2&", "a=b");
|
|
||||||
try testing.expectEqual("https://www.lightpanda.io/index?1=2&a=b", url);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
116
src/browser/browser.zig
Normal file
116
src/browser/browser.zig
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||||
|
|
||||||
|
const State = @import("State.zig");
|
||||||
|
const Env = @import("env.zig").Env;
|
||||||
|
const App = @import("../app.zig").App;
|
||||||
|
const Session = @import("session.zig").Session;
|
||||||
|
const Notification = @import("../notification.zig").Notification;
|
||||||
|
|
||||||
|
const log = @import("../log.zig");
|
||||||
|
const HttpClient = @import("../http/Client.zig");
|
||||||
|
|
||||||
|
// Browser is an instance of the browser.
|
||||||
|
// You can create multiple browser instances.
|
||||||
|
// A browser contains only one session.
|
||||||
|
pub const Browser = struct {
|
||||||
|
env: *Env,
|
||||||
|
app: *App,
|
||||||
|
session: ?Session,
|
||||||
|
allocator: Allocator,
|
||||||
|
http_client: *HttpClient,
|
||||||
|
page_arena: ArenaAllocator,
|
||||||
|
session_arena: ArenaAllocator,
|
||||||
|
transfer_arena: ArenaAllocator,
|
||||||
|
notification: *Notification,
|
||||||
|
state_pool: std.heap.MemoryPool(State),
|
||||||
|
|
||||||
|
pub fn init(app: *App) !Browser {
|
||||||
|
const allocator = app.allocator;
|
||||||
|
|
||||||
|
const env = try Env.init(allocator, &app.platform, .{});
|
||||||
|
errdefer env.deinit();
|
||||||
|
|
||||||
|
const notification = try Notification.init(allocator, app.notification);
|
||||||
|
app.http.client.notification = notification;
|
||||||
|
app.http.client.next_request_id = 0; // Should we track ids in CDP only?
|
||||||
|
errdefer notification.deinit();
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.app = app,
|
||||||
|
.env = env,
|
||||||
|
.session = null,
|
||||||
|
.allocator = allocator,
|
||||||
|
.notification = notification,
|
||||||
|
.http_client = app.http.client,
|
||||||
|
.page_arena = ArenaAllocator.init(allocator),
|
||||||
|
.session_arena = ArenaAllocator.init(allocator),
|
||||||
|
.transfer_arena = ArenaAllocator.init(allocator),
|
||||||
|
.state_pool = std.heap.MemoryPool(State).init(allocator),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn deinit(self: *Browser) void {
|
||||||
|
self.closeSession();
|
||||||
|
self.env.deinit();
|
||||||
|
self.page_arena.deinit();
|
||||||
|
self.session_arena.deinit();
|
||||||
|
self.transfer_arena.deinit();
|
||||||
|
self.http_client.notification = null;
|
||||||
|
self.notification.deinit();
|
||||||
|
self.state_pool.deinit();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn newSession(self: *Browser) !*Session {
|
||||||
|
self.closeSession();
|
||||||
|
self.session = @as(Session, undefined);
|
||||||
|
const session = &self.session.?;
|
||||||
|
try Session.init(session, self);
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn closeSession(self: *Browser) void {
|
||||||
|
if (self.session) |*session| {
|
||||||
|
session.deinit();
|
||||||
|
self.session = null;
|
||||||
|
_ = self.session_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });
|
||||||
|
self.env.lowMemoryNotification();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn runMicrotasks(self: *const Browser) void {
|
||||||
|
self.env.runMicrotasks();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn runMessageLoop(self: *const Browser) void {
|
||||||
|
while (self.env.pumpMessageLoop()) {
|
||||||
|
log.debug(.browser, "pumpMessageLoop", .{});
|
||||||
|
}
|
||||||
|
self.env.runIdleTasks();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../testing.zig");
|
||||||
|
test "Browser" {
|
||||||
|
try testing.htmlRunner("browser.html");
|
||||||
|
}
|
||||||
168
src/browser/console/console.zig
Normal file
168
src/browser/console/console.zig
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const builtin = @import("builtin");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const JsObject = @import("../env.zig").Env.JsObject;
|
||||||
|
|
||||||
|
pub const Console = struct {
|
||||||
|
// TODO: configurable writer
|
||||||
|
timers: std.StringHashMapUnmanaged(u32) = .{},
|
||||||
|
counts: std.StringHashMapUnmanaged(u32) = .{},
|
||||||
|
|
||||||
|
pub fn _lp(values: []JsObject, page: *Page) !void {
|
||||||
|
if (values.len == 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
log.fatal(.console, "lightpanda", .{ .args = try serializeValues(values, page) });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _log(values: []JsObject, page: *Page) !void {
|
||||||
|
if (values.len == 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
log.info(.console, "info", .{ .args = try serializeValues(values, page) });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _info(values: []JsObject, page: *Page) !void {
|
||||||
|
return _log(values, page);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _debug(values: []JsObject, page: *Page) !void {
|
||||||
|
if (values.len == 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
log.debug(.console, "debug", .{ .args = try serializeValues(values, page) });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _warn(values: []JsObject, page: *Page) !void {
|
||||||
|
if (values.len == 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
log.warn(.console, "warn", .{ .args = try serializeValues(values, page) });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _error(values: []JsObject, page: *Page) !void {
|
||||||
|
if (values.len == 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
log.warn(.console, "error", .{
|
||||||
|
.args = try serializeValues(values, page),
|
||||||
|
.stack = page.stackTrace() catch "???",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _clear() void {}
|
||||||
|
|
||||||
|
pub fn _count(self: *Console, label_: ?[]const u8, page: *Page) !void {
|
||||||
|
const label = label_ orelse "default";
|
||||||
|
const gop = try self.counts.getOrPut(page.arena, label);
|
||||||
|
|
||||||
|
var current: u32 = 0;
|
||||||
|
if (gop.found_existing) {
|
||||||
|
current = gop.value_ptr.*;
|
||||||
|
} else {
|
||||||
|
gop.key_ptr.* = try page.arena.dupe(u8, label);
|
||||||
|
}
|
||||||
|
|
||||||
|
const count = current + 1;
|
||||||
|
gop.value_ptr.* = count;
|
||||||
|
|
||||||
|
log.info(.console, "count", .{ .label = label, .count = count });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _countReset(self: *Console, label_: ?[]const u8) !void {
|
||||||
|
const label = label_ orelse "default";
|
||||||
|
const kv = self.counts.fetchRemove(label) orelse {
|
||||||
|
log.info(.console, "invalid counter", .{ .label = label });
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
log.info(.console, "count reset", .{ .label = label, .count = kv.value });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _time(self: *Console, label_: ?[]const u8, page: *Page) !void {
|
||||||
|
const label = label_ orelse "default";
|
||||||
|
const gop = try self.timers.getOrPut(page.arena, label);
|
||||||
|
|
||||||
|
if (gop.found_existing) {
|
||||||
|
log.info(.console, "duplicate timer", .{ .label = label });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
gop.key_ptr.* = try page.arena.dupe(u8, label);
|
||||||
|
gop.value_ptr.* = timestamp();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _timeLog(self: *Console, label_: ?[]const u8) void {
|
||||||
|
const elapsed = timestamp();
|
||||||
|
const label = label_ orelse "default";
|
||||||
|
const start = self.timers.get(label) orelse {
|
||||||
|
log.info(.console, "invalid timer", .{ .label = label });
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
log.info(.console, "timer", .{ .label = label, .elapsed = elapsed - start });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _timeStop(self: *Console, label_: ?[]const u8) void {
|
||||||
|
const elapsed = timestamp();
|
||||||
|
const label = label_ orelse "default";
|
||||||
|
const kv = self.timers.fetchRemove(label) orelse {
|
||||||
|
log.info(.console, "invalid timer", .{ .label = label });
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
log.warn(.console, "timer stop", .{ .label = label, .elapsed = elapsed - kv.value });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _assert(assertion: JsObject, values: []JsObject, page: *Page) !void {
|
||||||
|
if (assertion.isTruthy()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
var serialized_values: []const u8 = "";
|
||||||
|
if (values.len > 0) {
|
||||||
|
serialized_values = try serializeValues(values, page);
|
||||||
|
}
|
||||||
|
log.info(.console, "assertion failed", .{ .values = serialized_values });
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serializeValues(values: []JsObject, page: *Page) ![]const u8 {
|
||||||
|
if (values.len == 0) {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
|
||||||
|
const arena = page.call_arena;
|
||||||
|
const separator = log.separator();
|
||||||
|
var arr: std.ArrayListUnmanaged(u8) = .{};
|
||||||
|
|
||||||
|
for (values, 1..) |value, i| {
|
||||||
|
try arr.appendSlice(arena, separator);
|
||||||
|
try arr.writer(arena).print("{d}: ", .{i});
|
||||||
|
const serialized = if (builtin.mode == .Debug) value.toDetailString() else value.toString();
|
||||||
|
try arr.appendSlice(arena, try serialized);
|
||||||
|
}
|
||||||
|
return arr.items;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
fn timestamp() u32 {
|
||||||
|
return @import("../../datetime.zig").timestamp();
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
//
|
//
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
@@ -17,30 +17,29 @@
|
|||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
const js = @import("../js/js.zig");
|
const Env = @import("../env.zig").Env;
|
||||||
|
const uuidv4 = @import("../../id.zig").uuidv4;
|
||||||
|
|
||||||
const Crypto = @This();
|
// https://w3c.github.io/webcrypto/#crypto-interface
|
||||||
_pad: bool = false,
|
pub const Crypto = struct {
|
||||||
|
_not_empty: bool = true,
|
||||||
|
|
||||||
pub const init: Crypto = .{};
|
pub fn _getRandomValues(_: *const Crypto, js_obj: Env.JsObject) !Env.JsObject {
|
||||||
|
var into = try js_obj.toZig(Crypto, "getRandomValues", RandomValues);
|
||||||
// We take a js.Value, because we want to return the same instance, not a new
|
|
||||||
// TypedArray
|
|
||||||
pub fn getRandomValues(_: *const Crypto, js_obj: js.Object) !js.Object {
|
|
||||||
var into = try js_obj.toZig(RandomValues);
|
|
||||||
const buf = into.asBuffer();
|
const buf = into.asBuffer();
|
||||||
if (buf.len > 65_536) {
|
if (buf.len > 65_536) {
|
||||||
return error.QuotaExceededError;
|
return error.QuotaExceededError;
|
||||||
}
|
}
|
||||||
std.crypto.random.bytes(buf);
|
std.crypto.random.bytes(buf);
|
||||||
return js_obj;
|
return js_obj;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn randomUUID(_: *const Crypto) ![36]u8 {
|
pub fn _randomUUID(_: *const Crypto) [36]u8 {
|
||||||
var hex: [36]u8 = undefined;
|
var hex: [36]u8 = undefined;
|
||||||
@import("../../id.zig").uuidv4(&hex);
|
uuidv4(&hex);
|
||||||
return hex;
|
return hex;
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
const RandomValues = union(enum) {
|
const RandomValues = union(enum) {
|
||||||
int8: []i8,
|
int8: []i8,
|
||||||
@@ -66,21 +65,7 @@ const RandomValues = union(enum) {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const JsApi = struct {
|
|
||||||
pub const bridge = js.Bridge(Crypto);
|
|
||||||
|
|
||||||
pub const Meta = struct {
|
|
||||||
pub const name = "Crypto";
|
|
||||||
pub const prototype_chain = bridge.prototypeChain();
|
|
||||||
pub var class_id: bridge.ClassId = undefined;
|
|
||||||
pub const empty_with_no_proto = true;
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const getRandomValues = bridge.function(Crypto.getRandomValues, .{});
|
|
||||||
pub const randomUUID = bridge.function(Crypto.randomUUID, .{});
|
|
||||||
};
|
|
||||||
|
|
||||||
const testing = @import("../../testing.zig");
|
const testing = @import("../../testing.zig");
|
||||||
test "WebApi: Crypto" {
|
test "Browser: Crypto" {
|
||||||
try testing.htmlRunner("crypto.html", .{});
|
try testing.htmlRunner("crypto.html");
|
||||||
}
|
}
|
||||||
@@ -1,295 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
|
||||||
//
|
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
|
||||||
//
|
|
||||||
// This program is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Affero General Public License as
|
|
||||||
// published by the Free Software Foundation, either version 3 of the
|
|
||||||
// License, or (at your option) any later version.
|
|
||||||
//
|
|
||||||
// This program is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Affero General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Affero General Public License
|
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
const std = @import("std");
|
|
||||||
const Tokenizer = @import("Tokenizer.zig");
|
|
||||||
|
|
||||||
pub const Declaration = struct {
|
|
||||||
name: []const u8,
|
|
||||||
value: []const u8,
|
|
||||||
important: bool,
|
|
||||||
};
|
|
||||||
|
|
||||||
const TokenSpan = struct {
|
|
||||||
token: Tokenizer.Token,
|
|
||||||
start: usize,
|
|
||||||
end: usize,
|
|
||||||
};
|
|
||||||
|
|
||||||
const TokenStream = struct {
|
|
||||||
tokenizer: Tokenizer,
|
|
||||||
peeked: ?TokenSpan = null,
|
|
||||||
|
|
||||||
fn init(input: []const u8) TokenStream {
|
|
||||||
return .{ .tokenizer = .{ .input = input } };
|
|
||||||
}
|
|
||||||
|
|
||||||
fn nextRaw(self: *TokenStream) ?TokenSpan {
|
|
||||||
const start = self.tokenizer.position;
|
|
||||||
const token = self.tokenizer.next() orelse return null;
|
|
||||||
const end = self.tokenizer.position;
|
|
||||||
return .{ .token = token, .start = start, .end = end };
|
|
||||||
}
|
|
||||||
|
|
||||||
fn next(self: *TokenStream) ?TokenSpan {
|
|
||||||
if (self.peeked) |token| {
|
|
||||||
self.peeked = null;
|
|
||||||
return token;
|
|
||||||
}
|
|
||||||
return self.nextRaw();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn peek(self: *TokenStream) ?TokenSpan {
|
|
||||||
if (self.peeked == null) {
|
|
||||||
self.peeked = self.nextRaw();
|
|
||||||
}
|
|
||||||
return self.peeked;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
pub fn parseDeclarationsList(input: []const u8) DeclarationsIterator {
|
|
||||||
return DeclarationsIterator.init(input);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const DeclarationsIterator = struct {
|
|
||||||
input: []const u8,
|
|
||||||
stream: TokenStream,
|
|
||||||
|
|
||||||
pub fn init(input: []const u8) DeclarationsIterator {
|
|
||||||
return .{
|
|
||||||
.input = input,
|
|
||||||
.stream = TokenStream.init(input),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn next(self: *DeclarationsIterator) ?Declaration {
|
|
||||||
while (true) {
|
|
||||||
self.skipTriviaAndSemicolons();
|
|
||||||
const peeked = self.stream.peek() orelse return null;
|
|
||||||
|
|
||||||
switch (peeked.token) {
|
|
||||||
.at_keyword => {
|
|
||||||
_ = self.stream.next();
|
|
||||||
self.skipAtRule();
|
|
||||||
},
|
|
||||||
.ident => |name| {
|
|
||||||
_ = self.stream.next();
|
|
||||||
if (self.consumeDeclaration(name)) |declaration| {
|
|
||||||
return declaration;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
else => {
|
|
||||||
_ = self.stream.next();
|
|
||||||
self.skipInvalidDeclaration();
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn consumeDeclaration(self: *DeclarationsIterator, name: []const u8) ?Declaration {
|
|
||||||
self.skipTrivia();
|
|
||||||
|
|
||||||
const colon = self.stream.next() orelse return null;
|
|
||||||
if (!isColon(colon.token)) {
|
|
||||||
self.skipInvalidDeclaration();
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const value = self.consumeValue() orelse return null;
|
|
||||||
return .{
|
|
||||||
.name = name,
|
|
||||||
.value = value.value,
|
|
||||||
.important = value.important,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const ValueResult = struct {
|
|
||||||
value: []const u8,
|
|
||||||
important: bool,
|
|
||||||
};
|
|
||||||
|
|
||||||
fn consumeValue(self: *DeclarationsIterator) ?ValueResult {
|
|
||||||
self.skipTrivia();
|
|
||||||
|
|
||||||
var depth: usize = 0;
|
|
||||||
var start: ?usize = null;
|
|
||||||
var last_sig: ?TokenSpan = null;
|
|
||||||
var prev_sig: ?TokenSpan = null;
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
const peeked = self.stream.peek() orelse break;
|
|
||||||
if (isSemicolon(peeked.token) and depth == 0) {
|
|
||||||
_ = self.stream.next();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
const span = self.stream.next() orelse break;
|
|
||||||
if (isWhitespaceOrComment(span.token)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (start == null) start = span.start;
|
|
||||||
prev_sig = last_sig;
|
|
||||||
last_sig = span;
|
|
||||||
updateDepth(span.token, &depth);
|
|
||||||
}
|
|
||||||
|
|
||||||
const value_start = start orelse return null;
|
|
||||||
const last = last_sig orelse return null;
|
|
||||||
|
|
||||||
var important = false;
|
|
||||||
var end_pos = last.end;
|
|
||||||
|
|
||||||
if (isImportantPair(prev_sig, last)) {
|
|
||||||
important = true;
|
|
||||||
const bang = prev_sig orelse return null;
|
|
||||||
if (value_start >= bang.start) return null;
|
|
||||||
end_pos = bang.start;
|
|
||||||
}
|
|
||||||
|
|
||||||
var value_slice = self.input[value_start..end_pos];
|
|
||||||
value_slice = std.mem.trim(u8, value_slice, &std.ascii.whitespace);
|
|
||||||
if (value_slice.len == 0) return null;
|
|
||||||
|
|
||||||
return .{ .value = value_slice, .important = important };
|
|
||||||
}
|
|
||||||
|
|
||||||
fn skipTrivia(self: *DeclarationsIterator) void {
|
|
||||||
while (self.stream.peek()) |peeked| {
|
|
||||||
if (!isWhitespaceOrComment(peeked.token)) break;
|
|
||||||
_ = self.stream.next();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn skipTriviaAndSemicolons(self: *DeclarationsIterator) void {
|
|
||||||
while (self.stream.peek()) |peeked| {
|
|
||||||
if (isWhitespaceOrComment(peeked.token) or isSemicolon(peeked.token)) {
|
|
||||||
_ = self.stream.next();
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn skipAtRule(self: *DeclarationsIterator) void {
|
|
||||||
var depth: usize = 0;
|
|
||||||
var saw_block = false;
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
const peeked = self.stream.peek() orelse return;
|
|
||||||
if (!saw_block and isSemicolon(peeked.token) and depth == 0) {
|
|
||||||
_ = self.stream.next();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const span = self.stream.next() orelse return;
|
|
||||||
if (isWhitespaceOrComment(span.token)) continue;
|
|
||||||
|
|
||||||
if (isBlockStart(span.token)) {
|
|
||||||
depth += 1;
|
|
||||||
saw_block = true;
|
|
||||||
} else if (isBlockEnd(span.token)) {
|
|
||||||
if (depth > 0) depth -= 1;
|
|
||||||
if (saw_block and depth == 0) return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn skipInvalidDeclaration(self: *DeclarationsIterator) void {
|
|
||||||
var depth: usize = 0;
|
|
||||||
|
|
||||||
while (self.stream.peek()) |peeked| {
|
|
||||||
if (isSemicolon(peeked.token) and depth == 0) {
|
|
||||||
_ = self.stream.next();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const span = self.stream.next() orelse return;
|
|
||||||
if (isWhitespaceOrComment(span.token)) continue;
|
|
||||||
updateDepth(span.token, &depth);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
fn isWhitespaceOrComment(token: Tokenizer.Token) bool {
|
|
||||||
return switch (token) {
|
|
||||||
.white_space, .comment => true,
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn isSemicolon(token: Tokenizer.Token) bool {
|
|
||||||
return switch (token) {
|
|
||||||
.semicolon => true,
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn isColon(token: Tokenizer.Token) bool {
|
|
||||||
return switch (token) {
|
|
||||||
.colon => true,
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn isBlockStart(token: Tokenizer.Token) bool {
|
|
||||||
return switch (token) {
|
|
||||||
.curly_bracket_block, .square_bracket_block, .parenthesis_block, .function => true,
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn isBlockEnd(token: Tokenizer.Token) bool {
|
|
||||||
return switch (token) {
|
|
||||||
.close_curly_bracket, .close_parenthesis, .close_square_bracket => true,
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn updateDepth(token: Tokenizer.Token, depth: *usize) void {
|
|
||||||
if (isBlockStart(token)) {
|
|
||||||
depth.* += 1;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isBlockEnd(token)) {
|
|
||||||
if (depth.* > 0) depth.* -= 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn isImportantPair(prev_sig: ?TokenSpan, last_sig: TokenSpan) bool {
|
|
||||||
if (!isIdentImportant(last_sig.token)) return false;
|
|
||||||
const prev = prev_sig orelse return false;
|
|
||||||
return isBang(prev.token);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn isIdentImportant(token: Tokenizer.Token) bool {
|
|
||||||
return switch (token) {
|
|
||||||
.ident => |name| std.ascii.eqlIgnoreCase(name, "important"),
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn isBang(token: Tokenizer.Token) bool {
|
|
||||||
return switch (token) {
|
|
||||||
.delim => |c| c == '!',
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
218
src/browser/css/README.md
Normal file
218
src/browser/css/README.md
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
# css
|
||||||
|
|
||||||
|
Lightpanda css implements CSS selectors parsing and matching in Zig.
|
||||||
|
This package is a port of the Go lib [andybalholm/cascadia](https://github.com/andybalholm/cascadia).
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Query parser
|
||||||
|
|
||||||
|
```zig
|
||||||
|
const css = @import("css.zig");
|
||||||
|
|
||||||
|
const selector = try css.parse(alloc, "h1", .{});
|
||||||
|
defer selector.deinit(alloc);
|
||||||
|
```
|
||||||
|
|
||||||
|
### DOM tree match
|
||||||
|
|
||||||
|
The lib expects a `Node` interface implementation to match your DOM tree.
|
||||||
|
|
||||||
|
```zig
|
||||||
|
pub const Node = struct {
|
||||||
|
pub fn firstChild(_: Node) !?Node {
|
||||||
|
return error.TODO;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lastChild(_: Node) !?Node {
|
||||||
|
return error.TODO;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn nextSibling(_: Node) !?Node {
|
||||||
|
return error.TODO;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn prevSibling(_: Node) !?Node {
|
||||||
|
return error.TODO;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parent(_: Node) !?Node {
|
||||||
|
return error.TODO;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isElement(_: Node) bool {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isDocument(_: Node) bool {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isComment(_: Node) bool {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isText(_: Node) bool {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isEmptyText(_: Node) !bool {
|
||||||
|
return error.TODO;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn tag(_: Node) ![]const u8 {
|
||||||
|
return error.TODO;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn attr(_: Node, _: []const u8) !?[]const u8 {
|
||||||
|
return error.TODO;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn eql(_: Node, _: Node) bool {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
You also need do define a `Matcher` implementing a `match` function to
|
||||||
|
accumulate the results.
|
||||||
|
|
||||||
|
```zig
|
||||||
|
const Matcher = struct {
|
||||||
|
const Nodes = std.ArrayList(Node);
|
||||||
|
|
||||||
|
nodes: Nodes,
|
||||||
|
|
||||||
|
fn init(alloc: std.mem.Allocator) Matcher {
|
||||||
|
return .{ .nodes = Nodes.init(alloc) };
|
||||||
|
}
|
||||||
|
|
||||||
|
fn deinit(m: *Matcher) void {
|
||||||
|
m.nodes.deinit();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn match(m: *Matcher, n: Node) !void {
|
||||||
|
try m.nodes.append(n);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Then you can use the lib itself.
|
||||||
|
|
||||||
|
```zig
|
||||||
|
var matcher = Matcher.init(alloc);
|
||||||
|
defer matcher.deinit();
|
||||||
|
|
||||||
|
try css.matchAll(selector, node, &matcher);
|
||||||
|
_ = try css.matchFirst(selector, node, &matcher); // returns true if a node matched.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
* [x] parse query selector
|
||||||
|
* [x] `matchAll`
|
||||||
|
* [x] `matchFirst`
|
||||||
|
* [ ] specificity
|
||||||
|
|
||||||
|
### Selectors implemented
|
||||||
|
|
||||||
|
#### Selectors
|
||||||
|
|
||||||
|
* [x] Class selectors
|
||||||
|
* [x] Id selectors
|
||||||
|
* [x] Type selectors
|
||||||
|
* [x] Universal selectors
|
||||||
|
* [ ] Nesting selectors
|
||||||
|
|
||||||
|
#### Combinators
|
||||||
|
|
||||||
|
* [x] Child combinator
|
||||||
|
* [ ] Column combinator
|
||||||
|
* [x] Descendant combinator
|
||||||
|
* [ ] Namespace combinator
|
||||||
|
* [x] Next-sibling combinator
|
||||||
|
* [x] Selector list combinator
|
||||||
|
* [x] Subsequent-sibling combinator
|
||||||
|
|
||||||
|
#### Attribute
|
||||||
|
|
||||||
|
* [x] `[attr]`
|
||||||
|
* [x] `[attr=value]`
|
||||||
|
* [x] `[attr|=value]`
|
||||||
|
* [x] `[attr^=value]`
|
||||||
|
* [x] `[attr$=value]`
|
||||||
|
* [ ] `[attr*=value]`
|
||||||
|
* [x] `[attr operator value i]`
|
||||||
|
* [ ] `[attr operator value s]`
|
||||||
|
|
||||||
|
#### Pseudo classes
|
||||||
|
|
||||||
|
* [ ] `:active`
|
||||||
|
* [ ] `:any-link`
|
||||||
|
* [ ] `:autofill`
|
||||||
|
* [ ] `:blank Experimental`
|
||||||
|
* [x] `:checked`
|
||||||
|
* [ ] `:current Experimental`
|
||||||
|
* [ ] `:default`
|
||||||
|
* [ ] `:defined`
|
||||||
|
* [ ] `:dir() Experimental`
|
||||||
|
* [x] `:disabled`
|
||||||
|
* [x] `:empty`
|
||||||
|
* [x] `:enabled`
|
||||||
|
* [ ] `:first`
|
||||||
|
* [x] `:first-child`
|
||||||
|
* [x] `:first-of-type`
|
||||||
|
* [ ] `:focus`
|
||||||
|
* [ ] `:focus-visible`
|
||||||
|
* [ ] `:focus-within`
|
||||||
|
* [ ] `:fullscreen`
|
||||||
|
* [ ] `:future Experimental`
|
||||||
|
* [x] `:has() Experimental`
|
||||||
|
* [ ] `:host`
|
||||||
|
* [ ] `:host()`
|
||||||
|
* [ ] `:host-context() Experimental`
|
||||||
|
* [ ] `:hover`
|
||||||
|
* [ ] `:indeterminate`
|
||||||
|
* [ ] `:in-range`
|
||||||
|
* [ ] `:invalid`
|
||||||
|
* [ ] `:is()`
|
||||||
|
* [x] `:lang()`
|
||||||
|
* [x] `:last-child`
|
||||||
|
* [x] `:last-of-type`
|
||||||
|
* [ ] `:left`
|
||||||
|
* [x] `:link`
|
||||||
|
* [ ] `:local-link Experimental`
|
||||||
|
* [ ] `:modal`
|
||||||
|
* [x] `:not()`
|
||||||
|
* [x] `:nth-child()`
|
||||||
|
* [x] `:nth-last-child()`
|
||||||
|
* [x] `:nth-last-of-type()`
|
||||||
|
* [x] `:nth-of-type()`
|
||||||
|
* [x] `:only-child`
|
||||||
|
* [x] `:only-of-type`
|
||||||
|
* [ ] `:optional`
|
||||||
|
* [ ] `:out-of-range`
|
||||||
|
* [ ] `:past Experimental`
|
||||||
|
* [ ] `:paused`
|
||||||
|
* [ ] `:picture-in-picture`
|
||||||
|
* [ ] `:placeholder-shown`
|
||||||
|
* [ ] `:playing`
|
||||||
|
* [ ] `:read-only`
|
||||||
|
* [ ] `:read-write`
|
||||||
|
* [ ] `:required`
|
||||||
|
* [ ] `:right`
|
||||||
|
* [x] `:root`
|
||||||
|
* [ ] `:scope`
|
||||||
|
* [ ] `:state() Experimental`
|
||||||
|
* [ ] `:target`
|
||||||
|
* [ ] `:target-within Experimental`
|
||||||
|
* [ ] `:user-invalid Experimental`
|
||||||
|
* [ ] `:valid`
|
||||||
|
* [ ] `:visited`
|
||||||
|
* [ ] `:where()`
|
||||||
|
* [ ] `:contains()`
|
||||||
|
* [ ] `:containsown()`
|
||||||
|
* [ ] `:matched()`
|
||||||
|
* [ ] `:matchesown()`
|
||||||
|
* [x] `:root`
|
||||||
|
|
||||||
@@ -1,824 +0,0 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

//! This file implements the tokenization step defined in the CSS Syntax Module Level 3 specification.
//!
//! The algorithm accepts a valid UTF-8 string and returns a stream of tokens.
//! The tokenization step never fails, even for complete gibberish.
//! Validity must then be checked by the parser.
//!
//! NOTE: The tokenizer is not thread-safe and does not own any memory, and does not check the validity of utf8.
//!
//! See spec for more info: https://drafts.csswg.org/css-syntax/#tokenization

const std = @import("std");
const builtin = @import("builtin");
const assert = std.debug.assert;

const Tokenizer = @This();

pub const Token = union(enum) {
    /// A `<ident-token>`
    ident: []const u8,

    /// A `<function-token>`
    ///
    /// The value (name) does not include the `(` marker.
    function: []const u8,

    /// A `<at-keyword-token>`
    ///
    /// The value does not include the `@` marker.
    at_keyword: []const u8,

    /// A `<hash-token>` with the type flag set to "id"
    ///
    /// The value does not include the `#` marker.
    id_hash: []const u8, // Hash that is a valid ID selector.

    /// A `<hash-token>` with the type flag set to "unrestricted"
    ///
    /// The value does not include the `#` marker.
    unrestricted_hash: []const u8,

    /// A `<string-token>`
    ///
    /// The value does not include the quotes.
    string: []const u8,

    /// A `<bad-string-token>`
    ///
    /// This token always indicates a parse error.
    bad_string: []const u8,

    /// A `<url-token>`
    ///
    /// The value does not include the `url(` `)` markers. Note that `url( <string-token> )` is represented by a
    /// `Function` token.
    url: []const u8,

    /// A `<bad-url-token>`
    ///
    /// This token always indicates a parse error.
    bad_url: []const u8,

    /// A `<delim-token>`
    delim: u8,

    /// A `<number-token>`
    number: struct {
        /// Whether the number had a `+` or `-` sign.
        ///
        /// This is used in some cases like the <An+B> micro syntax. (See the `parse_nth` function.)
        has_sign: bool,

        /// If the origin source did not include a fractional part, the value as an integer.
        int_value: ?i32,

        /// The value as a float
        value: f32,
    },

    /// A `<percentage-token>`
    percentage: struct {
        /// Whether the number had a `+` or `-` sign.
        has_sign: bool,

        /// If the origin source did not include a fractional part, the value as an integer.
        /// It is **not** divided by 100.
        int_value: ?i32,

        /// The value as a float, divided by 100 so that the nominal range is 0.0 to 1.0.
        unit_value: f32,
    },

    /// A `<dimension-token>`
    dimension: struct {
        /// Whether the number had a `+` or `-` sign.
        ///
        /// This is used in some cases like the <An+B> micro syntax. (See the `parse_nth` function.)
        has_sign: bool,

        /// If the origin source did not include a fractional part, the value as an integer.
        int_value: ?i32,

        /// The value as a float
        value: f32,

        /// The unit, e.g. "px" in `12px`
        unit: []const u8,
    },

    /// A `<unicode-range-token>`
    ///
    /// NOTE(review): `end` is `i32` while `bgn` is `u32`; this looks unintentional,
    /// but the field types are part of the public interface, so they are left
    /// unchanged here — confirm before switching `end` to `u32`.
    unicode_range: struct { bgn: u32, end: i32 },

    /// A `<whitespace-token>`
    white_space: []const u8,

    /// A `<!--` `<CDO-token>`
    cdo,

    /// A `-->` `<CDC-token>`
    cdc,

    /// A `:` `<colon-token>`
    colon, // :

    /// A `;` `<semicolon-token>`
    semicolon, // ;

    /// A `,` `<comma-token>`
    comma, // ,

    /// A `<[-token>`
    square_bracket_block,

    /// A `<]-token>`
    ///
    /// When obtained from one of the `Parser::next*` methods,
    /// this token is always unmatched and indicates a parse error.
    close_square_bracket,

    /// A `<(-token>`
    parenthesis_block,

    /// A `<)-token>`
    ///
    /// When obtained from one of the `Parser::next*` methods,
    /// this token is always unmatched and indicates a parse error.
    close_parenthesis,

    /// A `<{-token>`
    curly_bracket_block,

    /// A `<}-token>`
    ///
    /// When obtained from one of the `Parser::next*` methods,
    /// this token is always unmatched and indicates a parse error.
    close_curly_bracket,

    /// A comment.
    ///
    /// The CSS Syntax spec does not generate tokens for comments,
    /// But we do for simplicity of the interface.
    ///
    /// The value does not include the `/*` `*/` markers.
    comment: []const u8,
};

input: []const u8,

/// Counted in bytes, not code points. From 0.
position: usize = 0,

// If true, the input has at least `n` bytes left *after* the current one.
// That is, `Tokenizer.byteAt(n)` will not panic.
fn hasAtLeast(self: *const Tokenizer, n: usize) bool {
    return self.position + n < self.input.len;
}

fn isEof(self: *const Tokenizer) bool {
    return !self.hasAtLeast(0);
}

fn byteAt(self: *const Tokenizer, offset: usize) u8 {
    return self.input[self.position + offset];
}

// Assumes non-EOF
fn nextByteUnchecked(self: *const Tokenizer) u8 {
    return self.byteAt(0);
}

fn nextByte(self: *const Tokenizer) ?u8 {
    return if (self.isEof())
        null
    else
        self.input[self.position];
}

fn startsWith(self: *const Tokenizer, needle: []const u8) bool {
    return std.mem.startsWith(u8, self.input[self.position..], needle);
}

fn slice(self: *const Tokenizer, start: usize, end: usize) []const u8 {
    return self.input[start..end];
}

fn sliceFrom(self: *const Tokenizer, start_pos: usize) []const u8 {
    return self.slice(start_pos, self.position);
}

// Advance over N bytes in the input. This function can advance
// over ASCII bytes (excluding newlines), or UTF-8 sequence
// leaders (excluding leaders for 4-byte sequences).
fn advance(self: *Tokenizer, n: usize) void {
    if (builtin.mode == .Debug) {
        // Each byte must either be an ASCII byte or a sequence leader,
        // but not a 4-byte leader; also newlines are rejected.
        for (0..n) |i| {
            const b = self.byteAt(i);
            assert(b != '\r' and b != '\n' and b != '\x0C');
            assert(b <= 0x7F or (b & 0xF0 != 0xF0 and b & 0xC0 != 0x80));
        }
    }
    self.position += n;
}

// True if the byte at `offset` exists and is a CSS newline (LF, CR or FF).
fn hasNewlineAt(self: *const Tokenizer, offset: usize) bool {
    if (!self.hasAtLeast(offset)) return false;

    return switch (self.byteAt(offset)) {
        '\n', '\r', '\x0C' => true,
        else => false,
    };
}

// True if a complete, valid UTF-8 sequence starting at `offset` decodes to a
// non-ASCII ident code point.
fn hasNonAsciiAt(self: *const Tokenizer, offset: usize) bool {
    if (!self.hasAtLeast(offset)) return false;

    const byte = self.byteAt(offset);
    const len_utf8 = std.unicode.utf8ByteSequenceLength(byte) catch return false;

    if (!self.hasAtLeast(offset + len_utf8 - 1)) return false;

    const start = self.position + offset;
    const bytes = self.slice(start, start + len_utf8);

    const codepoint = std.unicode.utf8Decode(bytes) catch return false;

    // https://drafts.csswg.org/css-syntax/#non-ascii-ident-code-point
    return switch (codepoint) {
        '\u{00B7}', '\u{200C}', '\u{200D}', '\u{203F}', '\u{2040}' => true,
        '\u{00C0}'...'\u{00D6}' => true,
        '\u{00D8}'...'\u{00F6}' => true,
        '\u{00F8}'...'\u{037D}' => true,
        '\u{037F}'...'\u{1FFF}' => true,
        '\u{2070}'...'\u{218F}' => true,
        '\u{2C00}'...'\u{2FEF}' => true,
        '\u{3001}'...'\u{D7FF}' => true,
        '\u{F900}'...'\u{FDCF}' => true,
        '\u{FDF0}'...'\u{FFFD}' => true,
        else => codepoint >= '\u{10000}',
    };
}

// "Would start an identifier" check (https://drafts.csswg.org/css-syntax/#would-start-an-identifier)
// applied at the current position. Does not consume anything.
fn isIdentStart(self: *Tokenizer) bool {
    if (self.isEof()) return false;

    // When the first byte is '-', the decision is made on the byte after it,
    // so all further look-aheads must be shifted by `offset`.
    var offset: usize = 0;
    var b = self.nextByteUnchecked();
    if (b == '-') {
        if (!self.hasAtLeast(1)) return false;
        offset = 1;
        b = self.byteAt(1);
    }

    return switch (b) {
        // 0x0 is treated as U+FFFD by the spec, which is an ident code point.
        'a'...'z', 'A'...'Z', '_', 0x0 => true,
        // A backslash starts an identifier only if it is a valid escape,
        // i.e. not followed by a newline. The check is relative to the
        // backslash, hence `offset + 1` (the original checked offset 1
        // unconditionally, which was wrong after a leading '-').
        '\\' => !self.hasNewlineAt(offset + 1),
        else => b > 0x7F, // not is ascii
    };
}

// Advance over one UTF-8 code point (falls back to one byte on a bad leader).
fn consumeChar(self: *Tokenizer) void {
    const byte = self.nextByteUnchecked();
    const len_utf8 = std.unicode.utf8ByteSequenceLength(byte) catch 1;
    self.position += len_utf8;
}

// Given that a newline has been seen, advance over the newline
// and update the state. A CRLF pair is consumed as a single newline.
fn consumeNewline(self: *Tokenizer) void {
    const byte = self.nextByteUnchecked();
    assert(byte == '\r' or byte == '\n' or byte == '\x0C');

    self.position += 1;
    if (byte == '\r' and self.nextByte() == '\n') {
        self.position += 1;
    }
}

// Consume as much whitespace as possible starting at the current byte.
// `newline` tells whether the current byte is a newline (and must be
// consumed via consumeNewline to handle CRLF).
fn consumeWhiteSpace(self: *Tokenizer, newline: bool) Token {
    const start_position = self.position;
    if (newline) {
        self.consumeNewline();
    } else {
        self.advance(1);
    }
    while (!self.isEof()) {
        const b = self.nextByteUnchecked();
        switch (b) {
            ' ', '\t' => {
                self.advance(1);
            },
            '\n', '\x0C', '\r' => {
                self.consumeNewline();
            },
            else => break,
        }
    }
    return .{ .white_space = self.sliceFrom(start_position) };
}

// Consume a comment. Assumes the input starts with "/*".
// Returns the comment body without the markers; an unterminated comment
// runs to EOF.
fn consumeComment(self: *Tokenizer) []const u8 {
    self.advance(2); // consume "/*"
    const start_position = self.position;
    while (!self.isEof()) {
        switch (self.nextByteUnchecked()) {
            '*' => {
                const end_position = self.position;
                self.advance(1);
                if (self.nextByte() == '/') {
                    self.advance(1);
                    return self.slice(start_position, end_position);
                }
            },
            '\n', '\x0C', '\r' => {
                self.consumeNewline();
            },
            0x0 => self.advance(1),
            else => self.consumeChar(),
        }
    }
    return self.sliceFrom(start_position);
}

fn byteToHexDigit(b: u8) ?u32 {
    return switch (b) {
        '0'...'9' => b - '0',
        'a'...'f' => b - 'a' + 10,
        'A'...'F' => b - 'A' + 10,
        else => null,
    };
}

fn byteToDecimalDigit(b: u8) ?u32 {
    return if (std.ascii.isDigit(b)) b - '0' else null;
}

// Skip up to 6 hex digits (the `\XXXXXX` escape form).
// The escaped value itself is not decoded here: the parser handles
// escape expansion, so only the digits are consumed.
fn consumeHexDigits(self: *Tokenizer) void {
    var digits: u32 = 0;
    while (digits < 6 and !self.isEof()) {
        if (byteToHexDigit(self.nextByteUnchecked()) == null) break;
        digits += 1;
        self.advance(1);
    }
}

// Assumes that the U+005C REVERSE SOLIDUS (\) has already been consumed
// and that the next input character has already been verified
// to not be a newline.
fn consumeEscape(self: *Tokenizer) void {
    if (self.isEof())
        return; // Escaped EOF

    switch (self.nextByteUnchecked()) {
        '0'...'9', 'A'...'F', 'a'...'f' => {
            self.consumeHexDigits();

            // A hex escape may be terminated by a single whitespace
            // character, which belongs to the escape.
            if (!self.isEof()) {
                switch (self.nextByteUnchecked()) {
                    ' ', '\t' => {
                        self.advance(1);
                    },
                    '\n', '\x0C', '\r' => {
                        self.consumeNewline();
                    },
                    else => {},
                }
            }
        },
        else => self.consumeChar(),
    }
}

/// https://drafts.csswg.org/css-syntax/#consume-string-token
fn consumeString(self: *Tokenizer, single_quote: bool) Token {
    self.advance(1); // Skip the initial quote

    // start_pos is at code point boundary, after " or '
    const start_pos = self.position;

    while (!self.isEof()) {
        switch (self.nextByteUnchecked()) {
            '"' => {
                if (!single_quote) {
                    const value = self.sliceFrom(start_pos);
                    self.advance(1);
                    return .{ .string = value };
                }
                self.advance(1);
            },
            '\'' => {
                if (single_quote) {
                    const value = self.sliceFrom(start_pos);
                    self.advance(1);
                    return .{ .string = value };
                }
                self.advance(1);
            },
            // A raw newline inside a string is a parse error; the newline is
            // left unconsumed (the spec reconsumes it).
            '\n', '\r', '\x0C' => {
                return .{ .bad_string = self.sliceFrom(start_pos) };
            },
            '\\' => {
                self.advance(1);
                if (self.isEof())
                    continue; // escaped EOF, do nothing.

                switch (self.nextByteUnchecked()) {
                    // Escaped newline
                    '\n', '\x0C', '\r' => self.consumeNewline(),

                    // Spec calls for replacing escape sequences with characters,
                    // but this would require allocating a new string.
                    // Therefore, we leave it as is and let the parser handle the escaping.
                    else => self.consumeEscape(),
                }
            },
            else => self.consumeChar(),
        }
    }

    return .{ .string = self.sliceFrom(start_pos) };
}

// Consume an ident sequence and return it as a slice of the input.
// Escape sequences are kept verbatim (see consumeString's note).
fn consumeName(self: *Tokenizer) []const u8 {
    // start_pos is the end of the previous token, therefore at a code point boundary
    const start_pos = self.position;

    while (!self.isEof()) {
        switch (self.nextByteUnchecked()) {
            'a'...'z', 'A'...'Z', '0'...'9', '_', '-' => self.advance(1),
            '\\' => {
                if (self.hasNewlineAt(1)) {
                    break;
                }

                self.advance(1);
                self.consumeEscape();
            },
            0x0 => self.advance(1),
            '\x80'...'\xBF', '\xC0'...'\xEF', '\xF0'...'\xFF' => {
                // This byte *is* part of a multi-byte code point,
                // we’ll end up copying the whole code point before this loop does something else.
                self.advance(1);
            },
            else => {
                if (self.hasNonAsciiAt(0)) {
                    self.consumeChar();
                } else {
                    break; // ASCII
                }
            },
        }
    }

    return self.sliceFrom(start_pos);
}

// Consume a single-byte punctuation token. Assumes the current byte is one
// of the marks listed below.
fn consumeMark(self: *Tokenizer) Token {
    const byte = self.nextByteUnchecked();
    self.advance(1);
    return switch (byte) {
        ',' => .comma,
        ':' => .colon,
        ';' => .semicolon,
        '(' => .parenthesis_block,
        ')' => .close_parenthesis,
        '{' => .curly_bracket_block,
        '}' => .close_curly_bracket,
        '[' => .square_bracket_block,
        ']' => .close_square_bracket,
        else => unreachable,
    };
}

fn consumeNumeric(self: *Tokenizer) Token {
    // Parse [+-]?\d*(\.\d+)?([eE][+-]?\d+)?
    // But this is always called so that there is at least one digit in \d*(\.\d+)?

    // Do all the math in f64 so that large numbers overflow to +/-inf
    // and i32::{MIN, MAX} are within range.

    var sign: f64 = 1.0;
    var has_sign = false;
    switch (self.nextByteUnchecked()) {
        '+' => {
            has_sign = true;
        },
        '-' => {
            has_sign = true;
            sign = -1.0;
        },
        else => {},
    }
    if (has_sign) {
        self.advance(1);
    }

    var is_integer = true;
    var integral_part: f64 = 0.0;
    var fractional_part: f64 = 0.0;

    while (!self.isEof()) {
        if (byteToDecimalDigit(self.nextByteUnchecked())) |digit| {
            integral_part = integral_part * 10.0 + @as(f64, @floatFromInt(digit));
            self.advance(1);
        } else {
            break;
        }
    }

    if (self.hasAtLeast(1) and self.nextByteUnchecked() == '.' and std.ascii.isDigit(self.byteAt(1))) {
        is_integer = false;
        self.advance(1); // Consume '.'

        var factor: f64 = 0.1;
        while (!self.isEof()) {
            if (byteToDecimalDigit(self.nextByteUnchecked())) |digit| {
                fractional_part += @as(f64, @floatFromInt(digit)) * factor;
                factor *= 0.1;
                self.advance(1);
            } else {
                break;
            }
        }
    }

    var value = sign * (integral_part + fractional_part);

    // Scientific notation: [eE][+-]?\d+
    blk: {
        const e = self.nextByte() orelse break :blk;
        if (e != 'e' and e != 'E') break :blk;

        var mul: f64 = 1.0;

        if (self.hasAtLeast(2) and (self.byteAt(1) == '+' or self.byteAt(1) == '-') and std.ascii.isDigit(self.byteAt(2))) {
            mul = switch (self.byteAt(1)) {
                '-' => -1.0,
                '+' => 1.0,
                else => unreachable,
            };

            self.advance(2);
        } else if (self.hasAtLeast(1) and std.ascii.isDigit(self.byteAt(1))) {
            // Unsigned exponent ("1e2"). The digit right after the marker is
            // at offset 1 (the original read byteAt(2) here, one past the
            // hasAtLeast(1) guard: wrong byte, and out of bounds at EOF).
            self.advance(1);
        } else {
            break :blk;
        }

        is_integer = false;

        var exponent: f64 = 0.0;
        while (!self.isEof()) {
            if (byteToDecimalDigit(self.nextByteUnchecked())) |digit| {
                exponent = exponent * 10.0 + @as(f64, @floatFromInt(digit));
                self.advance(1);
            } else {
                break;
            }
        }
        value *= std.math.pow(f64, 10.0, mul * exponent);
    }

    // Clamp to i32 range instead of invoking checked-cast UB.
    const int_value: ?i32 = if (is_integer) blk: {
        if (value >= std.math.maxInt(i32)) {
            break :blk std.math.maxInt(i32);
        }

        if (value <= std.math.minInt(i32)) {
            break :blk std.math.minInt(i32);
        }

        break :blk @as(i32, @intFromFloat(value));
    } else null;

    if (!self.isEof() and self.nextByteUnchecked() == '%') {
        self.advance(1);

        return .{ .percentage = .{
            .has_sign = has_sign,
            .int_value = int_value,
            .unit_value = @as(f32, @floatCast(value / 100.0)),
        } };
    }

    if (self.isIdentStart()) {
        return .{ .dimension = .{
            .has_sign = has_sign,
            .int_value = int_value,
            .value = @as(f32, @floatCast(value)),
            .unit = self.consumeName(),
        } };
    }

    return .{ .number = .{
        .has_sign = has_sign,
        .int_value = int_value,
        .value = @as(f32, @floatCast(value)),
    } };
}

fn consumeUnquotedUrl(self: *Tokenizer) ?Token {
    // TODO: true url parser
    if (self.nextByte()) |it| {
        return self.consumeString(it == '\'');
    }

    return null;
}

// Consume an ident-like token: <ident-token>, <function-token> or a url form.
fn consumeIdentLike(self: *Tokenizer) Token {
    const value = self.consumeName();

    if (!self.isEof() and self.nextByteUnchecked() == '(') {
        self.advance(1);

        if (std.ascii.eqlIgnoreCase(value, "url")) {
            if (self.consumeUnquotedUrl()) |result| {
                return result;
            }
        }

        return .{ .function = value };
    }

    return .{ .ident = value };
}

/// Return the next token, or null at end of input. Never fails: any byte
/// sequence produces some token (possibly a `delim` or a `bad_*` token).
pub fn next(self: *Tokenizer) ?Token {
    if (self.isEof()) {
        return null;
    }

    const b = self.nextByteUnchecked();
    return switch (b) {
        // Consume comments
        '/' => {
            if (self.startsWith("/*")) {
                return .{ .comment = self.consumeComment() };
            } else {
                self.advance(1);
                return .{ .delim = '/' };
            }
        },

        // Consume marks
        '(', ')', '{', '}', '[', ']', ',', ':', ';' => {
            return self.consumeMark();
        },

        // Consume as much whitespace as possible. Return a <whitespace-token>.
        ' ', '\t' => self.consumeWhiteSpace(false),
        '\n', '\x0C', '\r' => self.consumeWhiteSpace(true),

        // Consume a string token and return it.
        '"' => self.consumeString(false),
        '\'' => self.consumeString(true),

        '0'...'9' => self.consumeNumeric(),
        'a'...'z', 'A'...'Z', '_', 0x0 => self.consumeIdentLike(),

        '+' => {
            if ((self.hasAtLeast(1) and std.ascii.isDigit(self.byteAt(1))) or
                (self.hasAtLeast(2) and self.byteAt(1) == '.' and std.ascii.isDigit(self.byteAt(2))))
            {
                return self.consumeNumeric();
            }
            self.advance(1);
            return .{ .delim = '+' };
        },
        '-' => {
            if ((self.hasAtLeast(1) and std.ascii.isDigit(self.byteAt(1))) or
                (self.hasAtLeast(2) and self.byteAt(1) == '.' and std.ascii.isDigit(self.byteAt(2))))
            {
                return self.consumeNumeric();
            }

            if (self.startsWith("-->")) {
                self.advance(3);
                return .cdc;
            }

            if (self.isIdentStart()) {
                return self.consumeIdentLike();
            }

            self.advance(1);
            return .{ .delim = '-' };
        },
        '.' => {
            if (self.hasAtLeast(1) and std.ascii.isDigit(self.byteAt(1))) {
                return self.consumeNumeric();
            }
            self.advance(1);
            return .{ .delim = '.' };
        },

        // Consume hash token
        '#' => {
            self.advance(1);
            if (self.isIdentStart()) {
                return .{ .id_hash = self.consumeName() };
            }
            if (self.nextByte()) |it| {
                switch (it) {
                    // Any other valid case here already resulted in IDHash.
                    '0'...'9', '-' => return .{ .unrestricted_hash = self.consumeName() },
                    else => {},
                }
            }
            return .{ .delim = '#' };
        },

        // Consume at-rules
        '@' => {
            self.advance(1);
            return if (self.isIdentStart())
                .{ .at_keyword = self.consumeName() }
            else
                .{ .delim = '@' };
        },

        '<' => {
            if (self.startsWith("<!--")) {
                self.advance(4);
                return .cdo;
            } else {
                self.advance(1);
                return .{ .delim = '<' };
            }
        },

        '\\' => {
            if (!self.hasNewlineAt(1)) {
                return self.consumeIdentLike();
            }

            self.advance(1);
            return .{ .delim = '\\' };
        },

        else => {
            if (b > 0x7F) { // not is ascii
                return self.consumeIdentLike();
            }

            self.advance(1);
            return .{ .delim = b };
        },
    };
}

const testing = std.testing;

fn expectTokensEqual(input: []const u8, tokens: []const Token) !void {
    var lexer = Tokenizer{ .input = input };

    var i: usize = 0;
    while (lexer.next()) |token| : (i += 1) {
        assert(i < tokens.len);
        try testing.expectEqualDeep(tokens[i], token);
    }

    try testing.expectEqual(i, tokens.len);
    try testing.expectEqualDeep(null, lexer.next());
}

test "smoke" {
    try expectTokensEqual(
        \\.lightpanda {color:red;}
    , &.{
        .{ .delim = '.' },
        .{ .ident = "lightpanda" },
        .{ .white_space = " " },
        .curly_bracket_block,
        .{ .ident = "color" },
        .colon,
        .{ .ident = "red" },
        .semicolon,
        .close_curly_bracket,
    });
}
|
|
||||||
195
src/browser/css/css.zig
Normal file
195
src/browser/css/css.zig
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
// CSS Selector parser and query
|
||||||
|
// This package is a rewrite in Zig of Cascadia CSS Selector parser.
|
||||||
|
// see https://github.com/andybalholm/cascadia
|
||||||
|
const std = @import("std");
|
||||||
|
const Selector = @import("selector.zig").Selector;
|
||||||
|
const parser = @import("parser.zig");
|
||||||
|
|
||||||
|
pub const Interfaces = .{
|
||||||
|
Css,
|
||||||
|
};
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/CSS
|
||||||
|
pub const Css = struct {
|
||||||
|
_not_empty: bool = true,
|
||||||
|
|
||||||
|
pub fn _supports(_: *Css, _: []const u8, _: ?[]const u8) bool {
|
||||||
|
// TODO: Actually respond with which CSS features we support.
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// parse parse a selector string and returns the parsed result or an error.
|
||||||
|
pub fn parse(alloc: std.mem.Allocator, s: []const u8, opts: parser.ParseOptions) parser.ParseError!Selector {
|
||||||
|
var p = parser.Parser{ .s = s, .i = 0, .opts = opts };
|
||||||
|
return p.parse(alloc);
|
||||||
|
}
|
||||||
|
|
||||||
|
// matchFirst call m.match with the first node that matches the selector s, from the
|
||||||
|
// descendants of n and returns true. If none matches, it returns false.
|
||||||
|
pub fn matchFirst(s: *const Selector, node: anytype, m: anytype) !bool {
|
||||||
|
var c = try node.firstChild();
|
||||||
|
while (true) {
|
||||||
|
if (c == null) break;
|
||||||
|
|
||||||
|
if (try s.match(c.?)) {
|
||||||
|
try m.match(c.?);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (try matchFirst(s, c.?, m)) return true;
|
||||||
|
c = try c.?.nextSibling();
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// matchAll call m.match with the all the nodes that matches the selector s, from the
|
||||||
|
// descendants of n.
|
||||||
|
pub fn matchAll(s: *const Selector, node: anytype, m: anytype) !void {
|
||||||
|
var c = try node.firstChild();
|
||||||
|
while (true) {
|
||||||
|
if (c == null) break;
|
||||||
|
|
||||||
|
if (try s.match(c.?)) try m.match(c.?);
|
||||||
|
try matchAll(s, c.?, m);
|
||||||
|
c = try c.?.nextSibling();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
test "parse" {
|
||||||
|
const alloc = std.testing.allocator;
|
||||||
|
|
||||||
|
const testcases = [_][]const u8{
|
||||||
|
"address",
|
||||||
|
"*",
|
||||||
|
"#foo",
|
||||||
|
"li#t1",
|
||||||
|
"*#t4",
|
||||||
|
".t1",
|
||||||
|
"p.t1",
|
||||||
|
"div.teST",
|
||||||
|
".t1.fail",
|
||||||
|
"p.t1.t2",
|
||||||
|
"p.--t1",
|
||||||
|
"p.--t1.--t2",
|
||||||
|
"p[title]",
|
||||||
|
"div[class=\"red\" i]",
|
||||||
|
"address[title=\"foo\"]",
|
||||||
|
"address[title=\"FoOIgnoRECaSe\" i]",
|
||||||
|
"address[title!=\"foo\"]",
|
||||||
|
"address[title!=\"foo\" i]",
|
||||||
|
"p[title!=\"FooBarUFoo\" i]",
|
||||||
|
"[ \t title ~= foo ]",
|
||||||
|
"p[title~=\"FOO\" i]",
|
||||||
|
"p[title~=toofoo i]",
|
||||||
|
"[title~=\"hello world\"]",
|
||||||
|
"[title~=\"hello\" i]",
|
||||||
|
"[title~=\"hello\" I]",
|
||||||
|
"[lang|=\"en\"]",
|
||||||
|
"[lang|=\"EN\" i]",
|
||||||
|
"[lang|=\"EN\" i]",
|
||||||
|
"[title^=\"foo\"]",
|
||||||
|
"[title^=\"foo\" i]",
|
||||||
|
"[title$=\"bar\"]",
|
||||||
|
"[title$=\"BAR\" i]",
|
||||||
|
"[title*=\"bar\"]",
|
||||||
|
"[title*=\"BaRu\" i]",
|
||||||
|
"[title*=\"BaRu\" I]",
|
||||||
|
"p[class$=\" \"]",
|
||||||
|
"p[class$=\"\"]",
|
||||||
|
"p[class^=\" \"]",
|
||||||
|
"p[class^=\"\"]",
|
||||||
|
"p[class*=\" \"]",
|
||||||
|
"p[class*=\"\"]",
|
||||||
|
"input[name=Sex][value=F]",
|
||||||
|
"table[border=\"0\"][cellpadding=\"0\"][cellspacing=\"0\"]",
|
||||||
|
".t1:not(.t2)",
|
||||||
|
"div:not(.t1)",
|
||||||
|
"div:not([class=\"t2\"])",
|
||||||
|
"li:nth-child(odd)",
|
||||||
|
"li:nth-child(even)",
|
||||||
|
"li:nth-child(-n+2)",
|
||||||
|
"li:nth-child(3n+1)",
|
||||||
|
"li:nth-last-child(odd)",
|
||||||
|
"li:nth-last-child(even)",
|
||||||
|
"li:nth-last-child(-n+2)",
|
||||||
|
"li:nth-last-child(3n+1)",
|
||||||
|
"span:first-child",
|
||||||
|
"span:last-child",
|
||||||
|
"p:nth-of-type(2)",
|
||||||
|
"p:nth-last-of-type(2)",
|
||||||
|
"p:last-of-type",
|
||||||
|
"p:first-of-type",
|
||||||
|
"p:only-child",
|
||||||
|
"p:only-of-type",
|
||||||
|
":empty",
|
||||||
|
"div p",
|
||||||
|
"div table p",
|
||||||
|
"div > p",
|
||||||
|
"p ~ p",
|
||||||
|
"p + p",
|
||||||
|
"li, p",
|
||||||
|
"p +/*This is a comment*/ p",
|
||||||
|
"p:contains(\"that wraps\")",
|
||||||
|
"p:containsOwn(\"that wraps\")",
|
||||||
|
":containsOwn(\"inner\")",
|
||||||
|
"p:containsOwn(\"block\")",
|
||||||
|
"div:has(#p1)",
|
||||||
|
"div:has(:containsOwn(\"2\"))",
|
||||||
|
"body :has(:containsOwn(\"2\"))",
|
||||||
|
"body :haschild(:containsOwn(\"2\"))",
|
||||||
|
"p:matches([\\d])",
|
||||||
|
"p:matches([a-z])",
|
||||||
|
"p:matches([a-zA-Z])",
|
||||||
|
"p:matches([^\\d])",
|
||||||
|
"p:matches(^(0|a))",
|
||||||
|
"p:matches(^\\d+$)",
|
||||||
|
"p:not(:matches(^\\d+$))",
|
||||||
|
"div :matchesOwn(^\\d+$)",
|
||||||
|
"[href#=(fina)]:not([href#=(\\/\\/[^\\/]+untrusted)])",
|
||||||
|
"[href#=(^https:\\/\\/[^\\/]*\\/?news)]",
|
||||||
|
":input",
|
||||||
|
":root",
|
||||||
|
"*:root",
|
||||||
|
"html:nth-child(1)",
|
||||||
|
"*:root:first-child",
|
||||||
|
"*:root:nth-child(1)",
|
||||||
|
"a:not(:root)",
|
||||||
|
"body > *:nth-child(3n+2)",
|
||||||
|
"input:disabled",
|
||||||
|
":disabled",
|
||||||
|
":enabled",
|
||||||
|
"div.class1, div.class2",
|
||||||
|
};
|
||||||
|
|
||||||
|
for (testcases) |tc| {
|
||||||
|
const s = parse(alloc, tc, .{}) catch |e| {
|
||||||
|
std.debug.print("query {s}", .{tc});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
defer s.deinit(alloc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: CSS" {
|
||||||
|
try testing.htmlRunner("css.html");
|
||||||
|
}
|
||||||
427
src/browser/css/libdom.zig
Normal file
427
src/browser/css/libdom.zig
Normal file
@@ -0,0 +1,427 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const css = @import("css.zig");
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
|
// Node implementation with Netsurf Libdom C lib.
|
||||||
|
pub const Node = struct {
|
||||||
|
node: *parser.Node,
|
||||||
|
|
||||||
|
pub fn firstChild(n: Node) !?Node {
|
||||||
|
const c = try parser.nodeFirstChild(n.node);
|
||||||
|
if (c) |cc| return .{ .node = cc };
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lastChild(n: Node) !?Node {
|
||||||
|
const c = try parser.nodeLastChild(n.node);
|
||||||
|
if (c) |cc| return .{ .node = cc };
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn nextSibling(n: Node) !?Node {
|
||||||
|
const c = try parser.nodeNextSibling(n.node);
|
||||||
|
if (c) |cc| return .{ .node = cc };
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn prevSibling(n: Node) !?Node {
|
||||||
|
const c = try parser.nodePreviousSibling(n.node);
|
||||||
|
if (c) |cc| return .{ .node = cc };
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parent(n: Node) !?Node {
|
||||||
|
const c = try parser.nodeParentNode(n.node);
|
||||||
|
if (c) |cc| return .{ .node = cc };
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isElement(n: Node) bool {
|
||||||
|
const t = parser.nodeType(n.node) catch return false;
|
||||||
|
return t == .element;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isDocument(n: Node) bool {
|
||||||
|
const t = parser.nodeType(n.node) catch return false;
|
||||||
|
return t == .document;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isComment(n: Node) bool {
|
||||||
|
const t = parser.nodeType(n.node) catch return false;
|
||||||
|
return t == .comment;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isText(n: Node) bool {
|
||||||
|
const t = parser.nodeType(n.node) catch return false;
|
||||||
|
return t == .text;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn text(n: Node) !?[]const u8 {
|
||||||
|
const data = try parser.nodeTextContent(n.node);
|
||||||
|
if (data == null) return null;
|
||||||
|
if (data.?.len == 0) return null;
|
||||||
|
|
||||||
|
return std.mem.trim(u8, data.?, &std.ascii.whitespace);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn isEmptyText(n: Node) !bool {
|
||||||
|
const data = try parser.nodeTextContent(n.node);
|
||||||
|
if (data == null) return true;
|
||||||
|
if (data.?.len == 0) return true;
|
||||||
|
|
||||||
|
return std.mem.trim(u8, data.?, &std.ascii.whitespace).len == 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn tag(n: Node) ![]const u8 {
|
||||||
|
return try parser.nodeName(n.node);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn attr(n: Node, key: []const u8) !?[]const u8 {
|
||||||
|
if (!n.isElement()) return null;
|
||||||
|
return try parser.elementGetAttribute(parser.nodeToElement(n.node), key);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn eql(a: Node, b: Node) bool {
|
||||||
|
return a.node == b.node;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const MatcherTest = struct {
|
||||||
|
const Nodes = std.ArrayListUnmanaged(Node);
|
||||||
|
|
||||||
|
nodes: Nodes,
|
||||||
|
allocator: Allocator,
|
||||||
|
|
||||||
|
fn init(allocator: Allocator) MatcherTest {
|
||||||
|
return .{
|
||||||
|
.nodes = .empty,
|
||||||
|
.allocator = allocator,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn deinit(m: *MatcherTest) void {
|
||||||
|
m.nodes.deinit(m.allocator);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn reset(m: *MatcherTest) void {
|
||||||
|
m.nodes.clearRetainingCapacity();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn match(m: *MatcherTest, n: Node) !void {
|
||||||
|
try m.nodes.append(m.allocator, n);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
test "Browser.CSS.Libdom: matchFirst" {
|
||||||
|
const alloc = std.testing.allocator;
|
||||||
|
|
||||||
|
try parser.init();
|
||||||
|
defer parser.deinit();
|
||||||
|
|
||||||
|
var matcher = MatcherTest.init(alloc);
|
||||||
|
defer matcher.deinit();
|
||||||
|
|
||||||
|
const testcases = [_]struct {
|
||||||
|
q: []const u8,
|
||||||
|
html: []const u8,
|
||||||
|
exp: usize,
|
||||||
|
}{
|
||||||
|
.{ .q = "address", .html = "<body><address>This address...</address></body>", .exp = 1 },
|
||||||
|
.{ .q = "*", .html = "<!-- comment --><html><head></head><body>text</body></html>", .exp = 1 },
|
||||||
|
.{ .q = "*", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "#foo", .html = "<p id=\"foo\"><p id=\"bar\">", .exp = 1 },
|
||||||
|
.{ .q = "li#t1", .html = "<ul><li id=\"t1\"><p id=\"t1\">", .exp = 1 },
|
||||||
|
.{ .q = ".t3", .html = "<ul><li class=\"t1\"><li class=\"t2 t3\">", .exp = 1 },
|
||||||
|
.{ .q = "*#t4", .html = "<ol><li id=\"t4\"><li id=\"t44\">", .exp = 1 },
|
||||||
|
.{ .q = ".t1", .html = "<ul><li class=\"t1\"><li class=\"t2\">", .exp = 1 },
|
||||||
|
.{ .q = "p.t1", .html = "<p class=\"t1 t2\">", .exp = 1 },
|
||||||
|
.{ .q = "div.teST", .html = "<div class=\"test\">", .exp = 0 },
|
||||||
|
.{ .q = ".t1.fail", .html = "<p class=\"t1 t2\">", .exp = 0 },
|
||||||
|
.{ .q = "p.t1.t2", .html = "<p class=\"t1 t2\">", .exp = 1 },
|
||||||
|
.{ .q = "p.--t1", .html = "<p class=\"--t1 --t2\">", .exp = 1 },
|
||||||
|
.{ .q = "p.--t1.--t2", .html = "<p class=\"--t1 --t2\">", .exp = 1 },
|
||||||
|
.{ .q = "p[title]", .html = "<p><p title=\"title\">", .exp = 1 },
|
||||||
|
.{ .q = "div[class=\"red\" i]", .html = "<div><div class=\"Red\">", .exp = 1 },
|
||||||
|
.{ .q = "address[title=\"foo\"]", .html = "<address><address title=\"foo\"><address title=\"bar\">", .exp = 1 },
|
||||||
|
.{ .q = "address[title=\"FoOIgnoRECaSe\" i]", .html = "<address><address title=\"fooIgnoreCase\"><address title=\"bar\">", .exp = 1 },
|
||||||
|
.{ .q = "address[title!=\"foo\"]", .html = "<address><address title=\"foo\"><address title=\"bar\">", .exp = 1 },
|
||||||
|
.{ .q = "address[title!=\"foo\" i]", .html = "<address><address title=\"FOO\"><address title=\"bar\">", .exp = 1 },
|
||||||
|
.{ .q = "p[title!=\"FooBarUFoo\" i]", .html = "<p title=\"fooBARuFOO\"><p title=\"varfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[ title ~= foo ]", .html = "<p title=\"tot foo bar\">", .exp = 1 },
|
||||||
|
.{ .q = "p[title~=\"FOO\" i]", .html = "<p title=\"tot foo bar\">", .exp = 1 },
|
||||||
|
.{ .q = "p[title~=toofoo i]", .html = "<p title=\"tot foo bar\">", .exp = 0 },
|
||||||
|
.{ .q = "[title~=\"hello world\"]", .html = "<p title=\"hello world\">", .exp = 0 },
|
||||||
|
.{ .q = "[title~=\"hello\" i]", .html = "<p title=\"HELLO world\">", .exp = 1 },
|
||||||
|
.{ .q = "[title~=\"hello\" I]", .html = "<p title=\"HELLO world\">", .exp = 1 },
|
||||||
|
.{ .q = "[lang|=\"en\"]", .html = "<p lang=\"en\"><p lang=\"en-gb\"><p lang=\"enough\"><p lang=\"fr-en\">", .exp = 1 },
|
||||||
|
.{ .q = "[lang|=\"EN\" i]", .html = "<p lang=\"en\"><p lang=\"En-gb\"><p lang=\"enough\"><p lang=\"fr-en\">", .exp = 1 },
|
||||||
|
.{ .q = "[lang|=\"EN\" i]", .html = "<p lang=\"en\"><p lang=\"En-gb\"><p lang=\"enough\"><p lang=\"fr-en\">", .exp = 1 },
|
||||||
|
.{ .q = "[title^=\"foo\"]", .html = "<p title=\"foobar\"><p title=\"barfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title^=\"foo\" i]", .html = "<p title=\"FooBAR\"><p title=\"barfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title$=\"bar\"]", .html = "<p title=\"foobar\"><p title=\"barfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title$=\"BAR\" i]", .html = "<p title=\"foobar\"><p title=\"barfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title*=\"bar\"]", .html = "<p title=\"foobarufoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title*=\"BaRu\" i]", .html = "<p title=\"foobarufoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title*=\"BaRu\" I]", .html = "<p title=\"foobarufoo\">", .exp = 1 },
|
||||||
|
.{ .q = "p[class$=\" \"]", .html = "<p class=\" \">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class$=\"\"]", .html = "<p class=\"\">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class^=\" \"]", .html = "<p class=\" \">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class^=\"\"]", .html = "<p class=\"\">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class*=\" \"]", .html = "<p class=\" \">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class*=\"\"]", .html = "<p class=\"\">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "input[name=Sex][value=F]", .html = "<input type=\"radio\" name=\"Sex\" value=\"F\"/>", .exp = 1 },
|
||||||
|
.{ .q = "table[border=\"0\"][cellpadding=\"0\"][cellspacing=\"0\"]", .html = "<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" style=\"table-layout: fixed; width: 100%; border: 0 dashed; border-color: #FFFFFF\"><tr style=\"height:64px\">aaa</tr></table>", .exp = 1 },
|
||||||
|
.{ .q = ".t1:not(.t2)", .html = "<p class=\"t1 t2\">", .exp = 0 },
|
||||||
|
.{ .q = "div:not(.t1)", .html = "<div class=\"t3\">", .exp = 1 },
|
||||||
|
.{ .q = "div:not([class=\"t2\"])", .html = "<div><div class=\"t2\"><div class=\"t3\">", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-child(odd)", .html = "<ol><li id=1><li id=2><li id=3></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-child(even)", .html = "<ol><li id=1><li id=2><li id=3></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-child(-n+2)", .html = "<ol><li id=1><li id=2><li id=3></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-child(3n+1)", .html = "<ol><li id=1><li id=2><li id=3></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-last-child(odd)", .html = "<ol><li id=1><li id=2><li id=3><li id=4></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-last-child(even)", .html = "<ol><li id=1><li id=2><li id=3><li id=4></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-last-child(-n+2)", .html = "<ol><li id=1><li id=2><li id=3><li id=4></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-last-child(3n+1)", .html = "<ol><li id=1><li id=2><li id=3><li id=4></ol>", .exp = 1 },
|
||||||
|
.{ .q = "span:first-child", .html = "<p>some text <span id=\"1\">and a span</span><span id=\"2\"> and another</span></p>", .exp = 1 },
|
||||||
|
.{ .q = "span:last-child", .html = "<span>a span</span> and some text", .exp = 1 },
|
||||||
|
.{ .q = "p:nth-of-type(2)", .html = "<address></address><p id=1><p id=2>", .exp = 1 },
|
||||||
|
.{ .q = "p:nth-last-of-type(2)", .html = "<address></address><p id=1><p id=2></p><a>", .exp = 1 },
|
||||||
|
.{ .q = "p:last-of-type", .html = "<address></address><p id=1><p id=2></p><a>", .exp = 1 },
|
||||||
|
.{ .q = "p:first-of-type", .html = "<address></address><p id=1><p id=2></p><a>", .exp = 1 },
|
||||||
|
.{ .q = "p:only-child", .html = "<div><p id=\"1\"></p><a></a></div><div><p id=\"2\"></p></div>", .exp = 1 },
|
||||||
|
.{ .q = "p:only-of-type", .html = "<div><p id=\"1\"></p><a></a></div><div><p id=\"2\"></p><p id=\"3\"></p></div>", .exp = 1 },
|
||||||
|
.{ .q = ":empty", .html = "<p id=\"1\"><!-- --><p id=\"2\">Hello<p id=\"3\"><span>", .exp = 1 },
|
||||||
|
.{ .q = "div p", .html = "<div><p id=\"1\"><table><tr><td><p id=\"2\"></table></div><p id=\"3\">", .exp = 1 },
|
||||||
|
.{ .q = "div table p", .html = "<div><p id=\"1\"><table><tr><td><p id=\"2\"></table></div><p id=\"3\">", .exp = 1 },
|
||||||
|
.{ .q = "div > p", .html = "<div><p id=\"1\"><div><p id=\"2\"></div><table><tr><td><p id=\"3\"></table></div>", .exp = 1 },
|
||||||
|
.{ .q = "p ~ p", .html = "<p id=\"1\"><p id=\"2\"></p><address></address><p id=\"3\">", .exp = 1 },
|
||||||
|
.{ .q = "p + p", .html = "<p id=\"1\"></p> <!--comment--> <p id=\"2\"></p><address></address><p id=\"3\">", .exp = 1 },
|
||||||
|
.{ .q = "li, p", .html = "<ul><li></li><li></li></ul><p>", .exp = 1 },
|
||||||
|
.{ .q = "p +/*This is a comment*/ p", .html = "<p id=\"1\"><p id=\"2\"></p><address></address><p id=\"3\">", .exp = 1 },
|
||||||
|
// .{ .q = "p:contains(\"that wraps\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 1 },
|
||||||
|
.{ .q = "p:containsOwn(\"that wraps\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 0 },
|
||||||
|
.{ .q = ":containsOwn(\"inner\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 1 },
|
||||||
|
.{ .q = ":containsOwn(\"Inner\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 0 },
|
||||||
|
.{ .q = "p:containsOwn(\"block\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 1 },
|
||||||
|
// .{ .q = "div:has(#p1)", .html = "<div id=\"d1\"><p id=\"p1\"><span>text content</span></p></div><div id=\"d2\"/>", .exp = 1 },
|
||||||
|
.{ .q = "div:has(:containsOwn(\"2\"))", .html = "<div id=\"d1\"><p id=\"p1\"><span>contents 1</span></p></div> <div id=\"d2\"><p>contents <em>2</em></p></div>", .exp = 1 },
|
||||||
|
.{ .q = "body :has(:containsOwn(\"2\"))", .html = "<body><div id=\"d1\"><p id=\"p1\"><span>contents 1</span></p></div> <div id=\"d2\"><p id=\"p2\">contents <em>2</em></p></div></body>", .exp = 1 },
|
||||||
|
.{ .q = "body :haschild(:containsOwn(\"2\"))", .html = "<body><div id=\"d1\"><p id=\"p1\"><span>contents 1</span></p></div> <div id=\"d2\"><p id=\"p2\">contents <em>2</em></p></div></body>", .exp = 1 },
|
||||||
|
// .{ .q = "p:matches([\\d])", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "p:matches([a-z])", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "p:matches([a-zA-Z])", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "p:matches([^\\d])", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "p:matches(^(0|a))", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "p:matches(^\\d+$)", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "p:not(:matches(^\\d+$))", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "div :matchesOwn(^\\d+$)", .html = "<div><p id=\"p1\">01234<em>567</em>89</p><div>", .exp = 1 },
|
||||||
|
// .{ .q = "[href#=(fina)]:not([href#=(\\/\\/[^\\/]+untrusted)])", .html = "<ul> <li><a id=\"a1\" href=\"http://www.google.com/finance\"></a> <li><a id=\"a2\" href=\"http://finance.yahoo.com/\"></a> <li><a id=\"a2\" href=\"http://finance.untrusted.com/\"/> <li><a id=\"a3\" href=\"https://www.google.com/news\"/> <li><a id=\"a4\" href=\"http://news.yahoo.com\"/> </ul>", .exp = 1 },
|
||||||
|
// .{ .q = "[href#=(^https:\\/\\/[^\\/]*\\/?news)]", .html = "<ul> <li><a id=\"a1\" href=\"http://www.google.com/finance\"/> <li><a id=\"a2\" href=\"http://finance.yahoo.com/\"/> <li><a id=\"a3\" href=\"https://www.google.com/news\"></a> <li><a id=\"a4\" href=\"http://news.yahoo.com\"/> </ul>", .exp = 1 },
|
||||||
|
.{ .q = ":input", .html = "<form> <label>Username <input type=\"text\" name=\"username\" /></label> <label>Password <input type=\"password\" name=\"password\" /></label> <label>Country <select name=\"country\"> <option value=\"ca\">Canada</option> <option value=\"us\">United States</option> </select> </label> <label>Bio <textarea name=\"bio\"></textarea></label> <button>Sign up</button> </form>", .exp = 1 },
|
||||||
|
.{ .q = ":root", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "*:root", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "html:nth-child(1)", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "*:root:first-child", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "*:root:nth-child(1)", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "a:not(:root)", .html = "<html><head></head><body><a href=\"http://www.foo.com\"></a></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "body > *:nth-child(3n+2)", .html = "<html><head></head><body><p></p><div></div><span></span><a></a><form></form></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "input:disabled", .html = "<html><head></head><body><fieldset disabled><legend id=\"1\"><input id=\"i1\"/></legend><legend id=\"2\"><input id=\"i2\"/></legend></fieldset></body></html>", .exp = 1 },
|
||||||
|
.{ .q = ":disabled", .html = "<html><head></head><body><fieldset disabled></fieldset></body></html>", .exp = 1 },
|
||||||
|
.{ .q = ":enabled", .html = "<html><head></head><body><fieldset></fieldset></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "div.class1, div.class2", .html = "<div class=class1></div><div class=class2></div><div class=class3></div>", .exp = 1 },
|
||||||
|
};
|
||||||
|
|
||||||
|
for (testcases) |tc| {
|
||||||
|
matcher.reset();
|
||||||
|
|
||||||
|
const doc = try parser.documentHTMLParseFromStr(tc.html);
|
||||||
|
defer parser.documentHTMLClose(doc) catch {};
|
||||||
|
|
||||||
|
const s = css.parse(alloc, tc.q, .{}) catch |e| {
|
||||||
|
std.debug.print("parse, query: {s}\n", .{tc.q});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
|
||||||
|
defer s.deinit(alloc);
|
||||||
|
|
||||||
|
const node = Node{ .node = parser.documentHTMLToNode(doc) };
|
||||||
|
|
||||||
|
_ = css.matchFirst(&s, node, &matcher) catch |e| {
|
||||||
|
std.debug.print("match, query: {s}\n", .{tc.q});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
std.testing.expectEqual(tc.exp, matcher.nodes.items.len) catch |e| {
|
||||||
|
std.debug.print("expectation, query: {s}\n", .{tc.q});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser.CSS.Libdom: matchAll" {
|
||||||
|
const alloc = std.testing.allocator;
|
||||||
|
|
||||||
|
try parser.init();
|
||||||
|
defer parser.deinit();
|
||||||
|
|
||||||
|
var matcher = MatcherTest.init(alloc);
|
||||||
|
defer matcher.deinit();
|
||||||
|
|
||||||
|
const testcases = [_]struct {
|
||||||
|
q: []const u8,
|
||||||
|
html: []const u8,
|
||||||
|
exp: usize,
|
||||||
|
}{
|
||||||
|
.{ .q = "address", .html = "<body><address>This address...</address></body>", .exp = 1 },
|
||||||
|
.{ .q = "*", .html = "<!-- comment --><html><head></head><body>text</body></html>", .exp = 3 },
|
||||||
|
.{ .q = "*", .html = "<html><head></head><body></body></html>", .exp = 3 },
|
||||||
|
.{ .q = "#foo", .html = "<p id=\"foo\"><p id=\"bar\">", .exp = 1 },
|
||||||
|
.{ .q = "li#t1", .html = "<ul><li id=\"t1\"><p id=\"t1\">", .exp = 1 },
|
||||||
|
.{ .q = ".t3", .html = "<ul><li class=\"t1\"><li class=\"t2 t3\">", .exp = 1 },
|
||||||
|
.{ .q = "*#t4", .html = "<ol><li id=\"t4\"><li id=\"t44\">", .exp = 1 },
|
||||||
|
.{ .q = ".t1", .html = "<ul><li class=\"t1\"><li class=\"t2\">", .exp = 1 },
|
||||||
|
.{ .q = "p.t1", .html = "<p class=\"t1 t2\">", .exp = 1 },
|
||||||
|
.{ .q = "div.teST", .html = "<div class=\"test\">", .exp = 0 },
|
||||||
|
.{ .q = ".t1.fail", .html = "<p class=\"t1 t2\">", .exp = 0 },
|
||||||
|
.{ .q = "p.t1.t2", .html = "<p class=\"t1 t2\">", .exp = 1 },
|
||||||
|
.{ .q = "p.--t1", .html = "<p class=\"--t1 --t2\">", .exp = 1 },
|
||||||
|
.{ .q = "p.--t1.--t2", .html = "<p class=\"--t1 --t2\">", .exp = 1 },
|
||||||
|
.{ .q = "p[title]", .html = "<p><p title=\"title\">", .exp = 1 },
|
||||||
|
.{ .q = "div[class=\"red\" i]", .html = "<div><div class=\"Red\">", .exp = 1 },
|
||||||
|
.{ .q = "address[title=\"foo\"]", .html = "<address><address title=\"foo\"><address title=\"bar\">", .exp = 1 },
|
||||||
|
.{ .q = "address[title=\"FoOIgnoRECaSe\" i]", .html = "<address><address title=\"fooIgnoreCase\"><address title=\"bar\">", .exp = 1 },
|
||||||
|
.{ .q = "address[title!=\"foo\"]", .html = "<address><address title=\"foo\"><address title=\"bar\">", .exp = 2 },
|
||||||
|
.{ .q = "address[title!=\"foo\" i]", .html = "<address><address title=\"FOO\"><address title=\"bar\">", .exp = 2 },
|
||||||
|
.{ .q = "p[title!=\"FooBarUFoo\" i]", .html = "<p title=\"fooBARuFOO\"><p title=\"varfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[ title ~= foo ]", .html = "<p title=\"tot foo bar\">", .exp = 1 },
|
||||||
|
.{ .q = "p[title~=\"FOO\" i]", .html = "<p title=\"tot foo bar\">", .exp = 1 },
|
||||||
|
.{ .q = "p[title~=toofoo i]", .html = "<p title=\"tot foo bar\">", .exp = 0 },
|
||||||
|
.{ .q = "[title~=\"hello world\"]", .html = "<p title=\"hello world\">", .exp = 0 },
|
||||||
|
.{ .q = "[title~=\"hello\" i]", .html = "<p title=\"HELLO world\">", .exp = 1 },
|
||||||
|
.{ .q = "[title~=\"hello\" I]", .html = "<p title=\"HELLO world\">", .exp = 1 },
|
||||||
|
.{ .q = "[lang|=\"en\"]", .html = "<p lang=\"en\"><p lang=\"en-gb\"><p lang=\"enough\"><p lang=\"fr-en\">", .exp = 2 },
|
||||||
|
.{ .q = "[lang|=\"EN\" i]", .html = "<p lang=\"en\"><p lang=\"En-gb\"><p lang=\"enough\"><p lang=\"fr-en\">", .exp = 2 },
|
||||||
|
.{ .q = "[lang|=\"EN\" i]", .html = "<p lang=\"en\"><p lang=\"En-gb\"><p lang=\"enough\"><p lang=\"fr-en\">", .exp = 2 },
|
||||||
|
.{ .q = "[title^=\"foo\"]", .html = "<p title=\"foobar\"><p title=\"barfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title^=\"foo\" i]", .html = "<p title=\"FooBAR\"><p title=\"barfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title$=\"bar\"]", .html = "<p title=\"foobar\"><p title=\"barfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title$=\"BAR\" i]", .html = "<p title=\"foobar\"><p title=\"barfoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title*=\"bar\"]", .html = "<p title=\"foobarufoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title*=\"BaRu\" i]", .html = "<p title=\"foobarufoo\">", .exp = 1 },
|
||||||
|
.{ .q = "[title*=\"BaRu\" I]", .html = "<p title=\"foobarufoo\">", .exp = 1 },
|
||||||
|
.{ .q = "p[class$=\" \"]", .html = "<p class=\" \">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class$=\"\"]", .html = "<p class=\"\">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class^=\" \"]", .html = "<p class=\" \">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class^=\"\"]", .html = "<p class=\"\">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class*=\" \"]", .html = "<p class=\" \">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "p[class*=\"\"]", .html = "<p class=\"\">This text should be green.</p><p>This text should be green.</p>", .exp = 0 },
|
||||||
|
.{ .q = "input[name=Sex][value=F]", .html = "<input type=\"radio\" name=\"Sex\" value=\"F\"/>", .exp = 1 },
|
||||||
|
.{ .q = "table[border=\"0\"][cellpadding=\"0\"][cellspacing=\"0\"]", .html = "<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" style=\"table-layout: fixed; width: 100%; border: 0 dashed; border-color: #FFFFFF\"><tr style=\"height:64px\">aaa</tr></table>", .exp = 1 },
|
||||||
|
.{ .q = ".t1:not(.t2)", .html = "<p class=\"t1 t2\">", .exp = 0 },
|
||||||
|
.{ .q = "div:not(.t1)", .html = "<div class=\"t3\">", .exp = 1 },
|
||||||
|
.{ .q = "div:not([class=\"t2\"])", .html = "<div><div class=\"t2\"><div class=\"t3\">", .exp = 2 },
|
||||||
|
.{ .q = "li:nth-child(odd)", .html = "<ol><li id=1><li id=2><li id=3></ol>", .exp = 2 },
|
||||||
|
.{ .q = "li:nth-child(even)", .html = "<ol><li id=1><li id=2><li id=3></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-child(-n+2)", .html = "<ol><li id=1><li id=2><li id=3></ol>", .exp = 2 },
|
||||||
|
.{ .q = "li:nth-child(3n+1)", .html = "<ol><li id=1><li id=2><li id=3></ol>", .exp = 1 },
|
||||||
|
.{ .q = "li:nth-last-child(odd)", .html = "<ol><li id=1><li id=2><li id=3><li id=4></ol>", .exp = 2 },
|
||||||
|
.{ .q = "li:nth-last-child(even)", .html = "<ol><li id=1><li id=2><li id=3><li id=4></ol>", .exp = 2 },
|
||||||
|
.{ .q = "li:nth-last-child(-n+2)", .html = "<ol><li id=1><li id=2><li id=3><li id=4></ol>", .exp = 2 },
|
||||||
|
.{ .q = "li:nth-last-child(3n+1)", .html = "<ol><li id=1><li id=2><li id=3><li id=4></ol>", .exp = 2 },
|
||||||
|
.{ .q = "span:first-child", .html = "<p>some text <span id=\"1\">and a span</span><span id=\"2\"> and another</span></p>", .exp = 1 },
|
||||||
|
.{ .q = "span:last-child", .html = "<span>a span</span> and some text", .exp = 1 },
|
||||||
|
.{ .q = "p:nth-of-type(2)", .html = "<address></address><p id=1><p id=2>", .exp = 1 },
|
||||||
|
.{ .q = "p:nth-last-of-type(2)", .html = "<address></address><p id=1><p id=2></p><a>", .exp = 1 },
|
||||||
|
.{ .q = "p:last-of-type", .html = "<address></address><p id=1><p id=2></p><a>", .exp = 1 },
|
||||||
|
.{ .q = "p:first-of-type", .html = "<address></address><p id=1><p id=2></p><a>", .exp = 1 },
|
||||||
|
.{ .q = "p:only-child", .html = "<div><p id=\"1\"></p><a></a></div><div><p id=\"2\"></p></div>", .exp = 1 },
|
||||||
|
.{ .q = "p:only-of-type", .html = "<div><p id=\"1\"></p><a></a></div><div><p id=\"2\"></p><p id=\"3\"></p></div>", .exp = 1 },
|
||||||
|
.{ .q = ":empty", .html = "<p id=\"1\"><!-- --><p id=\"2\">Hello<p id=\"3\"><span>", .exp = 3 },
|
||||||
|
.{ .q = "div p", .html = "<div><p id=\"1\"><table><tr><td><p id=\"2\"></table></div><p id=\"3\">", .exp = 2 },
|
||||||
|
.{ .q = "div table p", .html = "<div><p id=\"1\"><table><tr><td><p id=\"2\"></table></div><p id=\"3\">", .exp = 1 },
|
||||||
|
.{ .q = "div > p", .html = "<div><p id=\"1\"><div><p id=\"2\"></div><table><tr><td><p id=\"3\"></table></div>", .exp = 2 },
|
||||||
|
.{ .q = "p ~ p", .html = "<p id=\"1\"><p id=\"2\"></p><address></address><p id=\"3\">", .exp = 2 },
|
||||||
|
.{ .q = "p + p", .html = "<p id=\"1\"></p> <!--comment--> <p id=\"2\"></p><address></address><p id=\"3\">", .exp = 1 },
|
||||||
|
.{ .q = "li, p", .html = "<ul><li></li><li></li></ul><p>", .exp = 3 },
|
||||||
|
.{ .q = "p +/*This is a comment*/ p", .html = "<p id=\"1\"><p id=\"2\"></p><address></address><p id=\"3\">", .exp = 1 },
|
||||||
|
// .{ .q = "p:contains(\"that wraps\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 1 },
|
||||||
|
.{ .q = "p:containsOwn(\"that wraps\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 0 },
|
||||||
|
.{ .q = ":containsOwn(\"inner\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 1 },
|
||||||
|
.{ .q = ":containsOwn(\"Inner\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 0 },
|
||||||
|
.{ .q = "p:containsOwn(\"block\")", .html = "<p>Text block that <span>wraps inner text</span> and continues</p>", .exp = 1 },
|
||||||
|
.{ .q = "div:has(#p1)", .html = "<div id=\"d1\"><p id=\"p1\"><span>text content</span></p></div><div id=\"d2\"/>", .exp = 1 },
|
||||||
|
.{ .q = "div:has(:containsOwn(\"2\"))", .html = "<div id=\"d1\"><p id=\"p1\"><span>contents 1</span></p></div> <div id=\"d2\"><p>contents <em>2</em></p></div>", .exp = 1 },
|
||||||
|
.{ .q = "body :has(:containsOwn(\"2\"))", .html = "<body><div id=\"d1\"><p id=\"p1\"><span>contents 1</span></p></div> <div id=\"d2\"><p id=\"p2\">contents <em>2</em></p></div></body>", .exp = 2 },
|
||||||
|
.{ .q = "body :haschild(:containsOwn(\"2\"))", .html = "<body><div id=\"d1\"><p id=\"p1\"><span>contents 1</span></p></div> <div id=\"d2\"><p id=\"p2\">contents <em>2</em></p></div></body>", .exp = 1 },
|
||||||
|
// .{ .q = "p:matches([\\d])", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 2 },
|
||||||
|
// .{ .q = "p:matches([a-z])", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "p:matches([a-zA-Z])", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 2 },
|
||||||
|
// .{ .q = "p:matches([^\\d])", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 2 },
|
||||||
|
// .{ .q = "p:matches(^(0|a))", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 3 },
|
||||||
|
// .{ .q = "p:matches(^\\d+$)", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 1 },
|
||||||
|
// .{ .q = "p:not(:matches(^\\d+$))", .html = "<p id=\"p1\">0123456789</p><p id=\"p2\">abcdef</p><p id=\"p3\">0123ABCD</p>", .exp = 2 },
|
||||||
|
// .{ .q = "div :matchesOwn(^\\d+$)", .html = "<div><p id=\"p1\">01234<em>567</em>89</p><div>", .exp = 2 },
|
||||||
|
// .{ .q = "[href#=(fina)]:not([href#=(\\/\\/[^\\/]+untrusted)])", .html = "<ul> <li><a id=\"a1\" href=\"http://www.google.com/finance\"></a> <li><a id=\"a2\" href=\"http://finance.yahoo.com/\"></a> <li><a id=\"a2\" href=\"http://finance.untrusted.com/\"/> <li><a id=\"a3\" href=\"https://www.google.com/news\"/> <li><a id=\"a4\" href=\"http://news.yahoo.com\"/> </ul>", .exp = 2 },
|
||||||
|
// .{ .q = "[href#=(^https:\\/\\/[^\\/]*\\/?news)]", .html = "<ul> <li><a id=\"a1\" href=\"http://www.google.com/finance\"/> <li><a id=\"a2\" href=\"http://finance.yahoo.com/\"/> <li><a id=\"a3\" href=\"https://www.google.com/news\"></a> <li><a id=\"a4\" href=\"http://news.yahoo.com\"/> </ul>", .exp = 1 },
|
||||||
|
.{ .q = ":input", .html = "<form> <label>Username <input type=\"text\" name=\"username\" /></label> <label>Password <input type=\"password\" name=\"password\" /></label> <label>Country <select name=\"country\"> <option value=\"ca\">Canada</option> <option value=\"us\">United States</option> </select> </label> <label>Bio <textarea name=\"bio\"></textarea></label> <button>Sign up</button> </form>", .exp = 5 },
|
||||||
|
.{ .q = ":root", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "*:root", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "html:nth-child(1)", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "*:root:first-child", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "*:root:nth-child(1)", .html = "<html><head></head><body></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "a:not(:root)", .html = "<html><head></head><body><a href=\"http://www.foo.com\"></a></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "body > *:nth-child(3n+2)", .html = "<html><head></head><body><p></p><div></div><span></span><a></a><form></form></body></html>", .exp = 2 },
|
||||||
|
.{ .q = "input:disabled", .html = "<html><head></head><body><fieldset disabled><legend id=\"1\"><input id=\"i1\"/></legend><legend id=\"2\"><input id=\"i2\"/></legend></fieldset></body></html>", .exp = 1 },
|
||||||
|
.{ .q = ":disabled", .html = "<html><head></head><body><fieldset disabled></fieldset></body></html>", .exp = 1 },
|
||||||
|
.{ .q = ":enabled", .html = "<html><head></head><body><fieldset></fieldset></body></html>", .exp = 1 },
|
||||||
|
.{ .q = "div.class1, div.class2", .html = "<div class=class1></div><div class=class2></div><div class=class3></div>", .exp = 2 },
|
||||||
|
};
|
||||||
|
|
||||||
|
for (testcases) |tc| {
|
||||||
|
matcher.reset();
|
||||||
|
|
||||||
|
const doc = try parser.documentHTMLParseFromStr(tc.html);
|
||||||
|
defer parser.documentHTMLClose(doc) catch {};
|
||||||
|
|
||||||
|
const s = css.parse(alloc, tc.q, .{}) catch |e| {
|
||||||
|
std.debug.print("parse, query: {s}\n", .{tc.q});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
defer s.deinit(alloc);
|
||||||
|
|
||||||
|
const node = Node{ .node = parser.documentHTMLToNode(doc) };
|
||||||
|
|
||||||
|
_ = css.matchAll(&s, node, &matcher) catch |e| {
|
||||||
|
std.debug.print("match, query: {s}\n", .{tc.q});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
std.testing.expectEqual(tc.exp, matcher.nodes.items.len) catch |e| {
|
||||||
|
std.debug.print("expectation, query: {s}\n", .{tc.q});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
994
src/browser/css/parser.zig
Normal file
994
src/browser/css/parser.zig
Normal file
@@ -0,0 +1,994 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
// CSS Selector parser
|
||||||
|
// This file is a rewrite in Zig of Cascadia CSS Selector parser.
|
||||||
|
// see https://github.com/andybalholm/cascadia
|
||||||
|
// see https://github.com/andybalholm/cascadia/blob/master/parser.go
|
||||||
|
const std = @import("std");
|
||||||
|
const ascii = std.ascii;
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
|
const selector = @import("selector.zig");
|
||||||
|
const Selector = selector.Selector;
|
||||||
|
const PseudoClass = selector.PseudoClass;
|
||||||
|
const AttributeOP = selector.AttributeOP;
|
||||||
|
const Combinator = selector.Combinator;
|
||||||
|
|
||||||
|
// U+FFFD REPLACEMENT CHARACTER encoded as UTF-8 (0xEF 0xBF 0xBD); written
// out for invalid escapes and NUL bytes during tokenization.
const REPLACEMENT_CHARACTER = &.{ 239, 191, 189 };
|
||||||
|
|
||||||
|
// All errors the CSS selector parser can produce, merged with the error
// sets of PseudoClass and Combinator parsing plus allocator failure.
pub const ParseError = error{
    ExpectedSelector,
    ExpectedIdentifier,
    ExpectedName,
    ExpectedIDSelector,
    ExpectedClassSelector,
    ExpectedAttributeSelector,
    ExpectedString,
    ExpectedRegexp,
    ExpectedPseudoClassSelector,
    ExpectedParenthesis,
    ExpectedParenthesisClose,
    ExpectedNthExpression,
    ExpectedInteger,
    InvalidEscape,
    EscapeLineEndingOutsideString,
    InvalidUnicode,
    UnicodeIsNotHandled,
    WriteError,
    PseudoElementNotAtSelectorEnd,
    PseudoElementNotUnique,
    PseudoElementDisabled,
    InvalidAttributeOperator,
    InvalidAttributeSelector,
    InvalidString,
    InvalidRegexp,
    InvalidPseudoClassSelector,
    EmptyPseudoClassSelector,
    InvalidPseudoClass,
    InvalidPseudoElement,
    UnmatchParenthesis,
    NotHandled,
    UnknownPseudoSelector,
    InvalidNthExpression,
} || PseudoClass.Error || Combinator.Error || std.mem.Allocator.Error;
|
||||||
|
|
||||||
|
// Options controlling selector parsing.
pub const ParseOptions = struct {
    // When false, selectors containing a pseudo-element (e.g. ::before)
    // are rejected with ParseError.PseudoElementDisabled.
    accept_pseudo_elts: bool = true,
};
|
||||||
|
|
||||||
|
// Recursive-descent parser over a selector string.
// `i` is a byte cursor into `s`; methods advance it as they consume input.
pub const Parser = struct {
    s: []const u8, // string to parse
    i: usize = 0, // current position (byte index into s)

    // parse-time options (e.g. whether pseudo-elements are accepted)
    opts: ParseOptions,
|
||||||
|
|
||||||
|
// Parses the whole input as a comma-separated selector group.
// Caller owns the returned Selector tree and must deinit it with the
// same allocator.
pub fn parse(p: *Parser, allocator: Allocator) ParseError!Selector {
    return p.parseSelectorGroup(allocator);
}
|
||||||
|
|
||||||
|
// skipWhitespace consumes whitespace characters and comments.
|
||||||
|
// It returns true if there was actually anything to skip.
|
||||||
|
// Consumes runs of whitespace and /*...*/ comments starting at the cursor.
// Returns true if the cursor actually advanced. An unterminated comment
// is left in place for the caller to reject.
fn skipWhitespace(p: *Parser) bool {
    const from = p.i;
    var pos = p.i;

    scan: while (pos < p.s.len) {
        if (ascii.isWhitespace(p.s[pos])) {
            pos += 1;
            continue :scan;
        }
        if (std.mem.startsWith(u8, p.s[pos..], "/*")) {
            if (std.mem.indexOf(u8, p.s[pos..], "*/")) |end| {
                pos += end + "*/".len;
                continue :scan;
            }
        }
        break :scan;
    }

    if (pos == from) return false;
    p.i = pos;
    return true;
}
|
||||||
|
|
||||||
|
// parseSimpleSelectorSequence parses a selector sequence that applies to
|
||||||
|
// a single element.
|
||||||
|
// parseSimpleSelectorSequence parses a sequence of simple selectors that
// all apply to a single element, e.g. `div.foo#bar[attr]:hover`.
// Returns a single selector unwrapped, or a .compound wrapping several.
fn parseSimpleSelectorSequence(p: *Parser, allocator: Allocator) ParseError!Selector {
    if (p.i >= p.s.len) {
        return ParseError.ExpectedSelector;
    }

    var buf: std.ArrayListUnmanaged(Selector) = .empty;
    defer buf.deinit(allocator);
    // Fix: release already-appended sub-selectors when a later one fails;
    // buf.deinit alone only frees the list, not its elements.
    errdefer for (buf.items) |it| it.deinit(allocator);

    switch (p.s[p.i]) {
        '*' => {
            // It's the universal selector. Just skip over it, since it
            // doesn't affect the meaning.
            p.i += 1;

            // Namespaced form of the universal selector: `*|*`.
            // Fix: use <= so the form is recognized when `|*` ends the
            // input (slicing p.s[p.i .. p.i + 2] needs p.i + 2 <= len).
            if (p.i + 2 <= p.s.len and std.mem.eql(u8, "|*", p.s[p.i .. p.i + 2])) {
                p.i += 2;
            }
        },
        '#', '.', '[', ':' => {
            // There's no type selector. Wait to process the other till the
            // main loop.
        },
        else => try buf.append(allocator, try p.parseTypeSelector(allocator)),
    }

    var pseudo_elt: ?PseudoClass = null;

    loop: while (p.i < p.s.len) {
        var ns: Selector = switch (p.s[p.i]) {
            '#' => try p.parseIDSelector(allocator),
            '.' => try p.parseClassSelector(allocator),
            '[' => try p.parseAttributeSelector(allocator),
            ':' => try p.parsePseudoclassSelector(allocator),
            else => break :loop,
        };
        errdefer ns.deinit(allocator);

        // From https://drafts.csswg.org/selectors-3/#pseudo-elements :
        // "Only one pseudo-element may appear per selector, and if present
        // it must appear after the sequence of simple selectors that
        // represents the subjects of the selector.""
        switch (ns) {
            .pseudo_element => |e| {
                // We found a pseudo-element.
                // Only one pseudo-element is accepted per selector.
                if (pseudo_elt != null) return ParseError.PseudoElementNotUnique;
                if (!p.opts.accept_pseudo_elts) return ParseError.PseudoElementDisabled;

                pseudo_elt = e;
                ns.deinit(allocator);
            },
            else => {
                if (pseudo_elt != null) return ParseError.PseudoElementNotAtSelectorEnd;
                try buf.append(allocator, ns);
            },
        }
    }

    // no need wrap the selectors in compoundSelector
    if (buf.items.len == 1 and pseudo_elt == null) {
        return buf.items[0];
    }

    return .{
        .compound = .{ .selectors = try buf.toOwnedSlice(allocator), .pseudo_elt = pseudo_elt },
    };
}
|
||||||
|
|
||||||
|
// parseTypeSelector parses a type selector (one that matches by tag name).
|
||||||
|
// Parses a type selector (one that matches by tag name) into a .tag
// Selector. Caller owns the returned tag string.
fn parseTypeSelector(p: *Parser, allocator: Allocator) ParseError!Selector {
    var name_buf: std.ArrayListUnmanaged(u8) = .empty;
    defer name_buf.deinit(allocator);

    try p.parseIdentifier(name_buf.writer(allocator));
    return .{ .tag = try name_buf.toOwnedSlice(allocator) };
}
|
||||||
|
|
||||||
|
// parseIdentifier parses an identifier.
|
||||||
|
// Parses an identifier: optional leading '-' characters followed by a
// name whose first character must be a name-start character or the
// beginning of an escape sequence. The decoded identifier is written to w.
fn parseIdentifier(p: *Parser, w: anytype) ParseError!void {
    var dash_count: usize = 0;
    while (p.i < p.s.len and p.s[p.i] == '-') : (p.i += 1) {
        dash_count += 1;
    }

    if (p.i >= p.s.len) {
        return ParseError.ExpectedSelector;
    }

    const first = p.s[p.i];
    if (!nameStart(first) and first != '\\') {
        return ParseError.ExpectedSelector;
    }

    for (0..dash_count) |_| {
        w.writeByte('-') catch return ParseError.WriteError;
    }
    try parseName(p, w);
}
|
||||||
|
|
||||||
|
// parseName parses a name (which is like an identifier, but doesn't have
|
||||||
|
// extra restrictions on the first character).
|
||||||
|
// parseName parses a name (which is like an identifier, but doesn't have
// extra restrictions on the first character).
// Writes the decoded name to `w` and advances p.i; errors with
// ExpectedName if nothing valid was consumed.
fn parseName(p: *Parser, w: anytype) ParseError!void {
    const sel = p.s;
    const sel_len = sel.len;

    var i = p.i;
    // Set once at least one name character (or escape) has been written.
    var ok = false;

    while (i < sel_len) {
        const c = sel[i];

        if (nameChar(c)) {
            // Copy a whole run of plain name characters in one write.
            const start = i;
            while (i < sel_len and nameChar(sel[i])) i += 1;
            w.writeAll(sel[start..i]) catch return ParseError.WriteError;
            ok = true;
        } else if (c == '\\') {
            // Backslash escape: parseEscape reads from and advances p.i,
            // so sync the local cursor across the call.
            p.i = i;
            try p.parseEscape(w);
            i = p.i;
            ok = true;
        } else if (c == 0) {
            // NUL byte: emit U+FFFD.
            w.writeAll(REPLACEMENT_CHARACTER) catch return ParseError.WriteError;
            i += 1;
            // NOTE(review): only a NUL that ends the input marks the name
            // as valid — confirm this asymmetry is intentional.
            if (i == sel_len) {
                ok = true;
            }
        } else {
            // default:
            break;
        }
    }

    if (!ok) return ParseError.ExpectedName;
    p.i = i;
}
|
||||||
|
|
||||||
|
// parseEscape parses a backslash escape.
|
||||||
|
// The returned string is owned by the caller.
|
||||||
|
// parseEscape decodes a backslash escape starting at p.i, writes the
// decoded bytes to `w`, and advances p.i past the escape.
// Invalid escapes produce U+FFFD instead of an error, per CSS tokenization.
fn parseEscape(p: *Parser, w: anytype) ParseError!void {
    const sel = p.s;
    const sel_len = sel.len;

    // A lone backslash at end of input (or a cursor not on '\') is
    // invalid: emit the replacement character and consume one byte.
    if (sel_len < p.i + 2 or sel[p.i] != '\\') {
        p.i += 1;
        w.writeAll(REPLACEMENT_CHARACTER) catch return ParseError.WriteError;
        return;
    }

    const start = p.i + 1;
    const c = sel[start];

    // Unicode escape: 1 to 6 hex digits, optionally terminated by a
    // single whitespace character.
    if (ascii.isHex(c)) {
        var i: usize = start;
        while (i < start + 6 and i < sel_len and ascii.isHex(sel[i])) {
            i += 1;
        }

        const v = std.fmt.parseUnsigned(u21, sel[start..i], 16) catch {
            p.i = i;
            w.writeAll(REPLACEMENT_CHARACTER) catch return ParseError.WriteError;
            return;
        };

        // Fix: the previous `if (sel_len >= i)` guard was always true
        // (the digit loop bounds i by sel_len), so the nesting was dead;
        // flattened without changing behavior.
        // Consume one whitespace character terminating the escape;
        // CRLF counts as a single line ending.
        if (sel_len > i) {
            switch (sel[i]) {
                '\r' => {
                    i += 1;
                    if (sel_len > i and sel[i] == '\n') i += 1;
                },
                ' ', '\t', '\n', std.ascii.control_code.ff => i += 1,
                else => {},
            }
        }
        p.i = i;

        // NUL and unencodable code points map to U+FFFD.
        if (v == 0) {
            w.writeAll(REPLACEMENT_CHARACTER) catch return ParseError.WriteError;
            return;
        }
        var utf8_buf: [4]u8 = undefined;
        const ln = std.unicode.utf8Encode(v, &utf8_buf) catch {
            w.writeAll(REPLACEMENT_CHARACTER) catch return ParseError.WriteError;
            return;
        };
        w.writeAll(utf8_buf[0..ln]) catch return ParseError.WriteError;
        return;
    }

    // Otherwise the escape stands for the literal character after the
    // backslash.
    p.i += 2;
    w.writeByte(sel[start]) catch return ParseError.WriteError;
}
|
||||||
|
|
||||||
|
// parseIDSelector parses a selector that matches by id attribute.
|
||||||
|
// Parses an ID selector (`#name`) into a .id Selector.
// Caller owns the returned id string.
fn parseIDSelector(p: *Parser, allocator: Allocator) ParseError!Selector {
    if (p.i >= p.s.len or p.s[p.i] != '#') {
        return ParseError.ExpectedIDSelector;
    }
    p.i += 1;

    var id_buf: std.ArrayListUnmanaged(u8) = .empty;
    defer id_buf.deinit(allocator);

    try p.parseName(id_buf.writer(allocator));
    return .{ .id = try id_buf.toOwnedSlice(allocator) };
}
|
||||||
|
|
||||||
|
// parseClassSelector parses a selector that matches by class attribute.
|
||||||
|
// Parses a class selector (`.ident`) into a .class Selector.
// Caller owns the returned class string.
fn parseClassSelector(p: *Parser, allocator: Allocator) ParseError!Selector {
    if (p.i >= p.s.len or p.s[p.i] != '.') {
        return ParseError.ExpectedClassSelector;
    }
    p.i += 1;

    var class_buf: std.ArrayListUnmanaged(u8) = .empty;
    defer class_buf.deinit(allocator);

    try p.parseIdentifier(class_buf.writer(allocator));
    return .{ .class = try class_buf.toOwnedSlice(allocator) };
}
|
||||||
|
|
||||||
|
// parseAttributeSelector parses a selector that matches by attribute value.
|
||||||
|
// parseAttributeSelector parses a selector that matches by attribute
// value, e.g. [key], [key=val], [key^="val"], with an optional trailing
// `i`/`I` case-insensitivity flag. Caller owns the returned strings.
fn parseAttributeSelector(p: *Parser, allocator: Allocator) ParseError!Selector {
    if (p.i >= p.s.len) return ParseError.ExpectedAttributeSelector;
    if (p.s[p.i] != '[') return ParseError.ExpectedAttributeSelector;

    p.i += 1;
    _ = p.skipWhitespace();

    var buf: std.ArrayListUnmanaged(u8) = .empty;
    defer buf.deinit(allocator);

    // Attribute name.
    try p.parseIdentifier(buf.writer(allocator));
    const key = try buf.toOwnedSlice(allocator);
    errdefer allocator.free(key);

    // Attribute names compare case-insensitively: normalize to lowercase.
    lowerstr(key);

    _ = p.skipWhitespace();
    if (p.i >= p.s.len) return ParseError.ExpectedAttributeSelector;
    // Existence-only form: [key]
    if (p.s[p.i] == ']') {
        p.i += 1;
        return .{ .attribute = .{ .key = key } };
    }

    if (p.i + 2 >= p.s.len) return ParseError.ExpectedAttributeSelector;

    // Operator is decoded from the next two bytes; op.len() gives how
    // many of them it actually used (1 or 2).
    const op = try parseAttributeOP(p.s[p.i .. p.i + 2]);
    p.i += op.len();

    _ = p.skipWhitespace();
    if (p.i >= p.s.len) return ParseError.ExpectedAttributeSelector;

    // Reuse the scratch buffer for the value or regexp.
    buf.clearRetainingCapacity();
    var is_val: bool = undefined; // true => plain value, false => regexp
    if (op == .regexp) {
        is_val = false;
        try p.parseRegex(buf.writer(allocator));
    } else {
        is_val = true;
        switch (p.s[p.i]) {
            '\'', '"' => try p.parseString(buf.writer(allocator)),
            else => try p.parseIdentifier(buf.writer(allocator)),
        }
    }

    _ = p.skipWhitespace();
    if (p.i >= p.s.len) return ParseError.ExpectedAttributeSelector;

    // check if the attribute contains an ignore case flag
    var ci = false;
    if (p.s[p.i] == 'i' or p.s[p.i] == 'I') {
        ci = true;
        p.i += 1;
    }

    _ = p.skipWhitespace();
    if (p.i >= p.s.len) return ParseError.ExpectedAttributeSelector;

    if (p.s[p.i] != ']') return ParseError.InvalidAttributeSelector;
    p.i += 1;

    return .{ .attribute = .{
        .key = key,
        .val = if (is_val) try buf.toOwnedSlice(allocator) else null,
        .regexp = if (!is_val) try buf.toOwnedSlice(allocator) else null,
        .op = op,
        .ci = ci,
    } };
}
|
||||||
|
|
||||||
|
// parseString parses a single- or double-quoted string.
|
||||||
|
// parseString parses a single- or double-quoted string, decoding escapes,
// and writes the value to `writer`. Unescaped line endings inside the
// string are invalid per CSS; escaped line endings produce nothing.
fn parseString(p: *Parser, writer: anytype) ParseError!void {
    const sel = p.s;
    const sel_len = sel.len;

    var i = p.i;
    if (sel_len < i + 2) return ParseError.ExpectedString;

    const quote = sel[i];
    i += 1;

    loop: while (i < sel_len) {
        switch (sel[i]) {
            '\\' => {
                if (sel_len > i + 1) {
                    const c = sel[i + 1];
                    switch (c) {
                        // Escaped line ending is consumed without output;
                        // CRLF counts as a single line ending.
                        '\r' => {
                            if (sel_len > i + 2 and sel[i + 2] == '\n') {
                                i += 3;
                                continue :loop;
                            }
                            i += 2;
                            continue :loop;
                        },
                        '\n', std.ascii.control_code.ff => {
                            i += 2;
                            continue :loop;
                        },
                        else => {},
                    }
                }
                p.i = i;
                try p.parseEscape(writer);
                i = p.i;
            },
            '\r', '\n', std.ascii.control_code.ff => return ParseError.InvalidString,
            else => |c| {
                if (c == quote) break :loop;
                // Copy a run of plain characters up to the next character
                // needing special handling.
                const start = i;
                while (i < sel_len) {
                    const cc = sel[i];
                    // Fix: terminate the run on the *current* character
                    // `cc` (the old code tested `c`, the first character
                    // of the run, so unescaped line endings inside a run
                    // were copied into the value instead of reaching the
                    // InvalidString arm of the outer switch).
                    if (cc == quote or cc == '\\' or cc == '\r' or cc == '\n' or cc == std.ascii.control_code.ff) break;
                    i += 1;
                }
                writer.writeAll(sel[start..i]) catch return ParseError.WriteError;
            },
        }
    }

    // Reaching end of input without the closing quote is an error.
    if (i >= sel_len) return ParseError.InvalidString;

    // Consume the final quote.
    i += 1;
    p.i = i;
}
|
||||||
|
|
||||||
|
// parseRegex parses a regular expression; the end is defined by encountering an
|
||||||
|
// unmatched closing ')' or ']' which is not consumed
|
||||||
|
// parseRegex copies a regular expression to `writer`. The expression ends
// at the first unmatched ')' or ']', which is not consumed.
fn parseRegex(p: *Parser, writer: anytype) ParseError!void {
    const begin = p.i;
    if (p.s.len < begin + 2) return ParseError.ExpectedRegexp;

    // Depth of nested parens/brackets; a close that would make it
    // negative terminates the regexp.
    var depth: isize = 0;

    var end = begin;
    scan: while (end < p.s.len) : (end += 1) {
        const c = p.s[end];
        if (c == '(' or c == '[') {
            depth += 1;
        } else if (c == ')' or c == ']') {
            depth -= 1;
            if (depth < 0) break :scan;
        }
    }

    if (end >= p.s.len) return ParseError.InvalidRegexp;
    writer.writeAll(p.s[begin..end]) catch return ParseError.WriteError;
    p.i = end;
}
|
||||||
|
|
||||||
|
// parsePseudoclassSelector parses a pseudoclass selector like :not(p) or a pseudo-element
|
||||||
|
// For backwards compatibility, both ':' and '::' prefix are allowed for pseudo-elements.
|
||||||
|
// https://drafts.csswg.org/selectors-3/#pseudo-elements
|
||||||
|
// parsePseudoclassSelector parses a pseudoclass selector like :not(p) or a pseudo-element
// For backwards compatibility, both ':' and '::' prefix are allowed for pseudo-elements.
// https://drafts.csswg.org/selectors-3/#pseudo-elements
fn parsePseudoclassSelector(p: *Parser, allocator: Allocator) ParseError!Selector {
    if (p.i >= p.s.len) return ParseError.ExpectedPseudoClassSelector;
    if (p.s[p.i] != ':') return ParseError.ExpectedPseudoClassSelector;

    p.i += 1;

    var must_pseudo_elt: bool = false;
    if (p.i >= p.s.len) return ParseError.EmptyPseudoClassSelector;
    if (p.s[p.i] == ':') { // we found a pseudo-element
        must_pseudo_elt = true;
        p.i += 1;
    }

    var buf: std.ArrayListUnmanaged(u8) = .empty;
    defer buf.deinit(allocator);

    try p.parseIdentifier(buf.writer(allocator));

    const pseudo_class = try PseudoClass.parse(buf.items);

    // reset the buffer to reuse it.
    buf.clearRetainingCapacity();

    // A '::' prefix is only valid for actual pseudo-elements.
    if (must_pseudo_elt and !pseudo_class.isPseudoElement()) return ParseError.InvalidPseudoElement;

    switch (pseudo_class) {
        .not, .has, .haschild => {
            if (!p.consumeParenthesis()) return ParseError.ExpectedParenthesis;

            const sel = try p.parseSelectorGroup(allocator);
            if (!p.consumeClosingParenthesis()) return ParseError.ExpectedParenthesisClose;

            const s = try allocator.create(Selector);
            errdefer allocator.destroy(s);
            s.* = sel;

            return .{ .pseudo_class_relative = .{ .pseudo_class = pseudo_class, .match = s } };
        },
        .contains, .containsown => {
            if (!p.consumeParenthesis()) return ParseError.ExpectedParenthesis;
            if (p.i == p.s.len) return ParseError.UnmatchParenthesis;

            // The argument may be a quoted string or a bare identifier.
            // Fix: the unquoted branch previously called parseString too,
            // rejecting bare-word arguments like :contains(foo); it must
            // parse an identifier (same as parseAttributeSelector).
            switch (p.s[p.i]) {
                '\'', '"' => try p.parseString(buf.writer(allocator)),
                else => try p.parseIdentifier(buf.writer(allocator)),
            }

            _ = p.skipWhitespace();
            if (p.i >= p.s.len) return ParseError.InvalidPseudoClass;
            if (!p.consumeClosingParenthesis()) return ParseError.ExpectedParenthesisClose;

            const val = try buf.toOwnedSlice(allocator);
            errdefer allocator.free(val);

            return .{ .pseudo_class_contains = .{ .own = pseudo_class == .containsown, .val = val } };
        },
        .matches, .matchesown => {
            if (!p.consumeParenthesis()) return ParseError.ExpectedParenthesis;

            try p.parseRegex(buf.writer(allocator));
            if (p.i >= p.s.len) return ParseError.InvalidPseudoClassSelector;
            if (!p.consumeClosingParenthesis()) return ParseError.ExpectedParenthesisClose;

            return .{ .pseudo_class_regexp = .{ .own = pseudo_class == .matchesown, .regexp = try buf.toOwnedSlice(allocator) } };
        },
        .nth_child, .nth_last_child, .nth_of_type, .nth_last_of_type => {
            if (!p.consumeParenthesis()) return ParseError.ExpectedParenthesis;
            const nth = try p.parseNth(allocator);
            if (!p.consumeClosingParenthesis()) return ParseError.ExpectedParenthesisClose;

            const last = pseudo_class == .nth_last_child or pseudo_class == .nth_last_of_type;
            const of_type = pseudo_class == .nth_of_type or pseudo_class == .nth_last_of_type;
            return .{ .pseudo_class_nth = .{ .a = nth[0], .b = nth[1], .of_type = of_type, .last = last } };
        },
        // The positional pseudo-classes are sugar for nth expressions.
        .first_child => return .{ .pseudo_class_nth = .{ .a = 0, .b = 1, .of_type = false, .last = false } },
        .last_child => return .{ .pseudo_class_nth = .{ .a = 0, .b = 1, .of_type = false, .last = true } },
        .first_of_type => return .{ .pseudo_class_nth = .{ .a = 0, .b = 1, .of_type = true, .last = false } },
        .last_of_type => return .{ .pseudo_class_nth = .{ .a = 0, .b = 1, .of_type = true, .last = true } },
        .only_child => return .{ .pseudo_class_only_child = false },
        .only_of_type => return .{ .pseudo_class_only_child = true },
        .input, .empty, .root, .link => return .{ .pseudo_class = pseudo_class },
        .enabled, .disabled, .checked => return .{ .pseudo_class = pseudo_class },
        .visible => return .{ .pseudo_class = pseudo_class },
        .lang => {
            if (!p.consumeParenthesis()) return ParseError.ExpectedParenthesis;
            if (p.i == p.s.len) return ParseError.UnmatchParenthesis;

            try p.parseIdentifier(buf.writer(allocator));

            _ = p.skipWhitespace();
            if (p.i >= p.s.len) return ParseError.InvalidPseudoClass;
            if (!p.consumeClosingParenthesis()) return ParseError.ExpectedParenthesisClose;

            const val = try buf.toOwnedSlice(allocator);
            errdefer allocator.free(val);
            // Language tags match case-insensitively.
            lowerstr(val);

            return .{ .pseudo_class_lang = val };
        },
        .visited, .hover, .active, .focus, .target => {
            // Not applicable in a static context: never match.
            return .{ .never_match = pseudo_class };
        },
        .after, .backdrop, .before, .cue, .first_letter => return .{ .pseudo_element = pseudo_class },
        .first_line, .grammar_error, .marker, .placeholder => return .{ .pseudo_element = pseudo_class },
        .selection, .spelling_error => return .{ .pseudo_element = pseudo_class },
        .modal, .popover_open => return .{ .pseudo_element = pseudo_class },
    }
}
|
||||||
|
|
||||||
|
// consumeParenthesis consumes an opening parenthesis and any following
|
||||||
|
// whitespace. It returns true if there was actually a parenthesis to skip.
|
||||||
|
// consumeParenthesis consumes an opening parenthesis and any following
// whitespace. Returns true only if a '(' was actually present.
fn consumeParenthesis(p: *Parser) bool {
    if (p.i >= p.s.len) return false;
    if (p.s[p.i] != '(') return false;

    p.i += 1;
    _ = p.skipWhitespace();
    return true;
}
|
||||||
|
|
||||||
|
// parseSelectorGroup parses a group of selectors, separated by commas.
|
||||||
|
// parseSelectorGroup parses a group of selectors, separated by commas.
// A group of one is returned unwrapped; otherwise a .group owns them all.
fn parseSelectorGroup(p: *Parser, allocator: Allocator) ParseError!Selector {
    var buf: std.ArrayListUnmanaged(Selector) = .empty;
    defer buf.deinit(allocator);
    // Fix: release already-parsed group members when a later selector
    // fails; buf.deinit alone only frees the list, not its elements.
    errdefer for (buf.items) |it| it.deinit(allocator);

    // Reserve before parsing so a failed append can never leak the
    // just-parsed selector.
    try buf.ensureUnusedCapacity(allocator, 1);
    buf.appendAssumeCapacity(try p.parseSelector(allocator));

    while (p.i < p.s.len) {
        if (p.s[p.i] != ',') break;
        p.i += 1;
        try buf.ensureUnusedCapacity(allocator, 1);
        buf.appendAssumeCapacity(try p.parseSelector(allocator));
    }

    if (buf.items.len == 1) {
        return buf.items[0];
    }

    return .{ .group = try buf.toOwnedSlice(allocator) };
}
|
||||||
|
|
||||||
|
// parseSelector parses a selector that may include combinators.
|
||||||
|
// parseSelector parses a selector that may include combinators:
// descendant (whitespace), child '>', adjacent '+', sibling '~'.
// Combined selectors are built left-associatively on the heap.
fn parseSelector(p: *Parser, allocator: Allocator) ParseError!Selector {
    _ = p.skipWhitespace();
    var s = try p.parseSimpleSelectorSequence(allocator);

    while (true) {
        var combinator: Combinator = .empty;
        // Whitespace between sequences implies the descendant combinator
        // unless an explicit combinator character follows.
        if (p.skipWhitespace()) {
            combinator = .descendant;
        }
        if (p.i >= p.s.len) {
            return s;
        }

        switch (p.s[p.i]) {
            '+', '>', '~' => {
                combinator = try Combinator.parse(p.s[p.i]);
                p.i += 1;
                _ = p.skipWhitespace();
            },
            // These characters can't begin a selector, but they can legally occur after one.
            ',', ')' => {
                return s;
            },
            else => {},
        }

        // No combinator means the selector ends here.
        if (combinator == .empty) {
            return s;
        }

        // Parse the right-hand side, then combine with the left.
        const c = try p.parseSimpleSelectorSequence(allocator);

        const first = try allocator.create(Selector);
        errdefer allocator.destroy(first);
        first.* = s;

        const second = try allocator.create(Selector);
        errdefer allocator.destroy(second);
        second.* = c;

        s = Selector{ .combined = .{
            .first = first,
            .second = second,
            .combinator = combinator,
        } };
    }

    // NOTE(review): every exit from the loop above returns, so this line
    // appears unreachable — confirm before removing.
    return s;
}
|
||||||
|
|
||||||
|
// consumeClosingParenthesis consumes a closing parenthesis and any preceding
|
||||||
|
// whitespace. It returns true if there was actually a parenthesis to skip.
|
||||||
|
fn consumeClosingParenthesis(p: *Parser) bool {
|
||||||
|
const i = p.i;
|
||||||
|
_ = p.skipWhitespace();
|
||||||
|
if (p.i < p.s.len and p.s[p.i] == ')') {
|
||||||
|
p.i += 1;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
p.i = i;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseInteger parses a decimal integer.
|
||||||
|
fn parseInteger(p: *Parser) ParseError!isize {
|
||||||
|
var i = p.i;
|
||||||
|
const start = i;
|
||||||
|
while (i < p.s.len and '0' <= p.s[i] and p.s[i] <= '9') i += 1;
|
||||||
|
if (i == start) return ParseError.ExpectedInteger;
|
||||||
|
p.i = i;
|
||||||
|
|
||||||
|
return std.fmt.parseUnsigned(isize, p.s[start..i], 10) catch ParseError.ExpectedInteger;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parseNthReadN(p: *Parser, a: isize) ParseError![2]isize {
|
||||||
|
_ = p.skipWhitespace();
|
||||||
|
if (p.i >= p.s.len) return ParseError.ExpectedNthExpression;
|
||||||
|
|
||||||
|
return switch (p.s[p.i]) {
|
||||||
|
'+' => {
|
||||||
|
p.i += 1;
|
||||||
|
_ = p.skipWhitespace();
|
||||||
|
const b = try p.parseInteger();
|
||||||
|
return .{ a, b };
|
||||||
|
},
|
||||||
|
'-' => {
|
||||||
|
p.i += 1;
|
||||||
|
_ = p.skipWhitespace();
|
||||||
|
const b = try p.parseInteger();
|
||||||
|
return .{ a, -b };
|
||||||
|
},
|
||||||
|
else => .{ a, 0 },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parseNthReadA(p: *Parser, a: isize) ParseError![2]isize {
|
||||||
|
if (p.i >= p.s.len) return ParseError.ExpectedNthExpression;
|
||||||
|
return switch (p.s[p.i]) {
|
||||||
|
'n', 'N' => {
|
||||||
|
p.i += 1;
|
||||||
|
return p.parseNthReadN(a);
|
||||||
|
},
|
||||||
|
else => .{ 0, a },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parseNthNegativeA(p: *Parser) ParseError![2]isize {
|
||||||
|
if (p.i >= p.s.len) return ParseError.ExpectedNthExpression;
|
||||||
|
const c = p.s[p.i];
|
||||||
|
if (std.ascii.isDigit(c)) {
|
||||||
|
const a = try p.parseInteger() * -1;
|
||||||
|
return p.parseNthReadA(a);
|
||||||
|
}
|
||||||
|
if (c == 'n' or c == 'N') {
|
||||||
|
p.i += 1;
|
||||||
|
return p.parseNthReadN(-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return ParseError.InvalidNthExpression;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parseNthPositiveA(p: *Parser) ParseError![2]isize {
|
||||||
|
if (p.i >= p.s.len) return ParseError.ExpectedNthExpression;
|
||||||
|
const c = p.s[p.i];
|
||||||
|
if (std.ascii.isDigit(c)) {
|
||||||
|
const a = try p.parseInteger();
|
||||||
|
return p.parseNthReadA(a);
|
||||||
|
}
|
||||||
|
if (c == 'n' or c == 'N') {
|
||||||
|
p.i += 1;
|
||||||
|
return p.parseNthReadN(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return ParseError.InvalidNthExpression;
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseNth parses the argument for :nth-child (normally of the form an+b).
|
||||||
|
fn parseNth(p: *Parser, allocator: Allocator) ParseError![2]isize {
|
||||||
|
// initial state
|
||||||
|
if (p.i >= p.s.len) return ParseError.ExpectedNthExpression;
|
||||||
|
return switch (p.s[p.i]) {
|
||||||
|
'-' => {
|
||||||
|
p.i += 1;
|
||||||
|
return p.parseNthNegativeA();
|
||||||
|
},
|
||||||
|
'+' => {
|
||||||
|
p.i += 1;
|
||||||
|
return p.parseNthPositiveA();
|
||||||
|
},
|
||||||
|
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => p.parseNthPositiveA(),
|
||||||
|
'n', 'N' => {
|
||||||
|
p.i += 1;
|
||||||
|
return p.parseNthReadN(1);
|
||||||
|
},
|
||||||
|
'o', 'O', 'e', 'E' => {
|
||||||
|
var buf: std.ArrayListUnmanaged(u8) = .empty;
|
||||||
|
defer buf.deinit(allocator);
|
||||||
|
|
||||||
|
try p.parseName(buf.writer(allocator));
|
||||||
|
|
||||||
|
if (std.ascii.eqlIgnoreCase("odd", buf.items)) return .{ 2, 1 };
|
||||||
|
if (std.ascii.eqlIgnoreCase("even", buf.items)) return .{ 2, 0 };
|
||||||
|
|
||||||
|
return ParseError.InvalidNthExpression;
|
||||||
|
},
|
||||||
|
else => ParseError.InvalidNthExpression,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// nameStart returns whether c can be the first character of an identifier
|
||||||
|
// (not counting an initial hyphen, or an escape sequence).
|
||||||
|
fn nameStart(c: u8) bool {
|
||||||
|
return 'a' <= c and c <= 'z' or 'A' <= c and c <= 'Z' or c == '_' or c > 127;
|
||||||
|
}
|
||||||
|
|
||||||
|
// nameChar returns whether c can be a character within an identifier
|
||||||
|
// (not counting an escape sequence).
|
||||||
|
fn nameChar(c: u8) bool {
|
||||||
|
return 'a' <= c and c <= 'z' or 'A' <= c and c <= 'Z' or c == '_' or c > 127 or
|
||||||
|
c == '-' or '0' <= c and c <= '9';
|
||||||
|
}
|
||||||
|
|
||||||
|
fn lowerstr(str: []u8) void {
|
||||||
|
for (str, 0..) |c, i| {
|
||||||
|
str[i] = std.ascii.toLower(c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseAttributeOP parses an AttributeOP from a string of 1 or 2 bytes.
|
||||||
|
fn parseAttributeOP(s: []const u8) ParseError!AttributeOP {
|
||||||
|
if (s.len < 1 or s.len > 2) return ParseError.InvalidAttributeOperator;
|
||||||
|
|
||||||
|
// if the first sign is equal, we don't check anything else.
|
||||||
|
if (s[0] == '=') return .eql;
|
||||||
|
|
||||||
|
if (s.len != 2 or s[1] != '=') return ParseError.InvalidAttributeOperator;
|
||||||
|
|
||||||
|
return switch (s[0]) {
|
||||||
|
'=' => .eql,
|
||||||
|
'!' => .not_eql,
|
||||||
|
'~' => .one_of,
|
||||||
|
'|' => .prefix_hyphen,
|
||||||
|
'^' => .prefix,
|
||||||
|
'$' => .suffix,
|
||||||
|
'*' => .contains,
|
||||||
|
'#' => .regexp,
|
||||||
|
else => ParseError.InvalidAttributeOperator,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
test "parser.skipWhitespace" {
|
||||||
|
const testcases = [_]struct {
|
||||||
|
s: []const u8,
|
||||||
|
i: usize,
|
||||||
|
r: bool,
|
||||||
|
}{
|
||||||
|
.{ .s = "", .i = 0, .r = false },
|
||||||
|
.{ .s = "foo", .i = 0, .r = false },
|
||||||
|
.{ .s = " ", .i = 1, .r = true },
|
||||||
|
.{ .s = " foo", .i = 1, .r = true },
|
||||||
|
.{ .s = "/* foo */ bar", .i = 10, .r = true },
|
||||||
|
.{ .s = "/* foo", .i = 0, .r = false },
|
||||||
|
};
|
||||||
|
|
||||||
|
for (testcases) |tc| {
|
||||||
|
var p = Parser{ .s = tc.s, .opts = .{} };
|
||||||
|
const res = p.skipWhitespace();
|
||||||
|
try std.testing.expectEqual(tc.r, res);
|
||||||
|
try std.testing.expectEqual(tc.i, p.i);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
test "parser.parseIdentifier" {
|
||||||
|
const allocator = std.testing.allocator;
|
||||||
|
|
||||||
|
const testcases = [_]struct {
|
||||||
|
s: []const u8, // given value
|
||||||
|
exp: []const u8, // expected value
|
||||||
|
err: bool = false,
|
||||||
|
}{
|
||||||
|
.{ .s = "x", .exp = "x" },
|
||||||
|
.{ .s = "96", .exp = "", .err = true },
|
||||||
|
.{ .s = "-x", .exp = "-x" },
|
||||||
|
.{ .s = "r\\e9 sumé", .exp = "résumé" },
|
||||||
|
.{ .s = "r\\0000e9 sumé", .exp = "résumé" },
|
||||||
|
.{ .s = "r\\0000e9sumé", .exp = "résumé" },
|
||||||
|
.{ .s = "a\\\"b", .exp = "a\"b" },
|
||||||
|
};
|
||||||
|
|
||||||
|
var buf: std.ArrayListUnmanaged(u8) = .empty;
|
||||||
|
defer buf.deinit(allocator);
|
||||||
|
|
||||||
|
for (testcases) |tc| {
|
||||||
|
buf.clearRetainingCapacity();
|
||||||
|
|
||||||
|
var p = Parser{ .s = tc.s, .opts = .{} };
|
||||||
|
p.parseIdentifier(buf.writer(allocator)) catch |e| {
|
||||||
|
// if error was expected, continue.
|
||||||
|
if (tc.err) continue;
|
||||||
|
|
||||||
|
std.debug.print("test case {s}\n", .{tc.s});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
std.testing.expectEqualDeep(tc.exp, buf.items) catch |e| {
|
||||||
|
std.debug.print("test case {s} : {s}\n", .{ tc.s, buf.items });
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
test "parser.parseString" {
|
||||||
|
const allocator = std.testing.allocator;
|
||||||
|
|
||||||
|
const testcases = [_]struct {
|
||||||
|
s: []const u8, // given value
|
||||||
|
exp: []const u8, // expected value
|
||||||
|
err: bool = false,
|
||||||
|
}{
|
||||||
|
.{ .s = "\"x\"", .exp = "x" },
|
||||||
|
.{ .s = "'x'", .exp = "x" },
|
||||||
|
.{ .s = "'x", .exp = "", .err = true },
|
||||||
|
.{ .s = "'x\\\r\nx'", .exp = "xx" },
|
||||||
|
.{ .s = "\"r\\e9 sumé\"", .exp = "résumé" },
|
||||||
|
.{ .s = "\"r\\0000e9 sumé\"", .exp = "résumé" },
|
||||||
|
.{ .s = "\"r\\0000e9sumé\"", .exp = "résumé" },
|
||||||
|
.{ .s = "\"a\\\"b\"", .exp = "a\"b" },
|
||||||
|
.{ .s = "\"\\\n\"", .exp = "" },
|
||||||
|
.{ .s = "\"hello world\"", .exp = "hello world" },
|
||||||
|
};
|
||||||
|
|
||||||
|
var buf: std.ArrayListUnmanaged(u8) = .empty;
|
||||||
|
defer buf.deinit(allocator);
|
||||||
|
|
||||||
|
for (testcases) |tc| {
|
||||||
|
buf.clearRetainingCapacity();
|
||||||
|
|
||||||
|
var p = Parser{ .s = tc.s, .opts = .{} };
|
||||||
|
p.parseString(buf.writer(allocator)) catch |e| {
|
||||||
|
// if error was expected, continue.
|
||||||
|
if (tc.err) continue;
|
||||||
|
|
||||||
|
std.debug.print("test case {s}\n", .{tc.s});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
std.testing.expectEqualDeep(tc.exp, buf.items) catch |e| {
|
||||||
|
std.debug.print("test case {s} : {s}\n", .{ tc.s, buf.items });
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
test "parser.parse" {
|
||||||
|
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
|
||||||
|
defer arena.deinit();
|
||||||
|
const allocator = arena.allocator();
|
||||||
|
|
||||||
|
const testcases = [_]struct {
|
||||||
|
s: []const u8, // given value
|
||||||
|
exp: Selector, // expected value
|
||||||
|
err: bool = false,
|
||||||
|
}{
|
||||||
|
.{ .s = "root", .exp = .{ .tag = "root" } },
|
||||||
|
.{ .s = ".root", .exp = .{ .class = "root" } },
|
||||||
|
.{ .s = ":root", .exp = .{ .pseudo_class = .root } },
|
||||||
|
.{ .s = ".\\:bar", .exp = .{ .class = ":bar" } },
|
||||||
|
.{ .s = ".foo\\:bar", .exp = .{ .class = "foo:bar" } },
|
||||||
|
};
|
||||||
|
|
||||||
|
for (testcases) |tc| {
|
||||||
|
var p = Parser{ .s = tc.s, .opts = .{} };
|
||||||
|
const sel = p.parse(allocator) catch |e| {
|
||||||
|
// if error was expected, continue.
|
||||||
|
if (tc.err) continue;
|
||||||
|
|
||||||
|
std.debug.print("test case {s}\n", .{tc.s});
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
std.testing.expectEqualDeep(tc.exp, sel) catch |e| {
|
||||||
|
std.debug.print("test case {s} : {}\n", .{ tc.s, sel });
|
||||||
|
return e;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
1424
src/browser/css/selector.zig
Normal file
1424
src/browser/css/selector.zig
Normal file
File diff suppressed because it is too large
Load Diff
289
src/browser/cssom/CSSParser.zig
Normal file
289
src/browser/cssom/CSSParser.zig
Normal file
@@ -0,0 +1,289 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
|
const CSSConstants = struct {
|
||||||
|
const IMPORTANT = "!important";
|
||||||
|
const URL_PREFIX = "url(";
|
||||||
|
};
|
||||||
|
|
||||||
|
const CSSParserState = enum {
|
||||||
|
seek_name,
|
||||||
|
in_name,
|
||||||
|
seek_colon,
|
||||||
|
seek_value,
|
||||||
|
in_value,
|
||||||
|
in_quoted_value,
|
||||||
|
in_single_quoted_value,
|
||||||
|
in_url,
|
||||||
|
in_important,
|
||||||
|
};
|
||||||
|
|
||||||
|
const CSSDeclaration = struct {
|
||||||
|
name: []const u8,
|
||||||
|
value: []const u8,
|
||||||
|
is_important: bool,
|
||||||
|
};
|
||||||
|
|
||||||
|
const CSSParser = @This();
|
||||||
|
state: CSSParserState,
|
||||||
|
name_start: usize,
|
||||||
|
name_end: usize,
|
||||||
|
value_start: usize,
|
||||||
|
position: usize,
|
||||||
|
paren_depth: usize,
|
||||||
|
escape_next: bool,
|
||||||
|
|
||||||
|
pub fn init() CSSParser {
|
||||||
|
return .{
|
||||||
|
.state = .seek_name,
|
||||||
|
.name_start = 0,
|
||||||
|
.name_end = 0,
|
||||||
|
.value_start = 0,
|
||||||
|
.position = 0,
|
||||||
|
.paren_depth = 0,
|
||||||
|
.escape_next = false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parseDeclarations(arena: Allocator, text: []const u8) ![]CSSDeclaration {
|
||||||
|
var parser = init();
|
||||||
|
var declarations: std.ArrayListUnmanaged(CSSDeclaration) = .empty;
|
||||||
|
|
||||||
|
while (parser.position < text.len) {
|
||||||
|
const c = text[parser.position];
|
||||||
|
|
||||||
|
switch (parser.state) {
|
||||||
|
.seek_name => {
|
||||||
|
if (!std.ascii.isWhitespace(c)) {
|
||||||
|
parser.name_start = parser.position;
|
||||||
|
parser.state = .in_name;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.in_name => {
|
||||||
|
if (c == ':') {
|
||||||
|
parser.name_end = parser.position;
|
||||||
|
parser.state = .seek_value;
|
||||||
|
} else if (std.ascii.isWhitespace(c)) {
|
||||||
|
parser.name_end = parser.position;
|
||||||
|
parser.state = .seek_colon;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.seek_colon => {
|
||||||
|
if (c == ':') {
|
||||||
|
parser.state = .seek_value;
|
||||||
|
} else if (!std.ascii.isWhitespace(c)) {
|
||||||
|
parser.state = .seek_name;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.seek_value => {
|
||||||
|
if (!std.ascii.isWhitespace(c)) {
|
||||||
|
parser.value_start = parser.position;
|
||||||
|
if (c == '"') {
|
||||||
|
parser.state = .in_quoted_value;
|
||||||
|
} else if (c == '\'') {
|
||||||
|
parser.state = .in_single_quoted_value;
|
||||||
|
} else if (c == 'u' and parser.position + CSSConstants.URL_PREFIX.len <= text.len and std.mem.startsWith(u8, text[parser.position..], CSSConstants.URL_PREFIX)) {
|
||||||
|
parser.state = .in_url;
|
||||||
|
parser.paren_depth = 1;
|
||||||
|
parser.position += 3;
|
||||||
|
} else {
|
||||||
|
parser.state = .in_value;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.in_value => {
|
||||||
|
if (parser.escape_next) {
|
||||||
|
parser.escape_next = false;
|
||||||
|
} else if (c == '\\') {
|
||||||
|
parser.escape_next = true;
|
||||||
|
} else if (c == '(') {
|
||||||
|
parser.paren_depth += 1;
|
||||||
|
} else if (c == ')' and parser.paren_depth > 0) {
|
||||||
|
parser.paren_depth -= 1;
|
||||||
|
} else if (c == ';' and parser.paren_depth == 0) {
|
||||||
|
try parser.finishDeclaration(arena, &declarations, text);
|
||||||
|
parser.state = .seek_name;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.in_quoted_value => {
|
||||||
|
if (parser.escape_next) {
|
||||||
|
parser.escape_next = false;
|
||||||
|
} else if (c == '\\') {
|
||||||
|
parser.escape_next = true;
|
||||||
|
} else if (c == '"') {
|
||||||
|
parser.state = .in_value;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.in_single_quoted_value => {
|
||||||
|
if (parser.escape_next) {
|
||||||
|
parser.escape_next = false;
|
||||||
|
} else if (c == '\\') {
|
||||||
|
parser.escape_next = true;
|
||||||
|
} else if (c == '\'') {
|
||||||
|
parser.state = .in_value;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.in_url => {
|
||||||
|
if (parser.escape_next) {
|
||||||
|
parser.escape_next = false;
|
||||||
|
} else if (c == '\\') {
|
||||||
|
parser.escape_next = true;
|
||||||
|
} else if (c == '(') {
|
||||||
|
parser.paren_depth += 1;
|
||||||
|
} else if (c == ')') {
|
||||||
|
parser.paren_depth -= 1;
|
||||||
|
if (parser.paren_depth == 0) {
|
||||||
|
parser.state = .in_value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.in_important => {},
|
||||||
|
}
|
||||||
|
|
||||||
|
parser.position += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
try parser.finalize(arena, &declarations, text);
|
||||||
|
|
||||||
|
return declarations.items;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn finishDeclaration(self: *CSSParser, arena: Allocator, declarations: *std.ArrayListUnmanaged(CSSDeclaration), text: []const u8) !void {
|
||||||
|
const name = std.mem.trim(u8, text[self.name_start..self.name_end], &std.ascii.whitespace);
|
||||||
|
if (name.len == 0) return;
|
||||||
|
|
||||||
|
const raw_value = text[self.value_start..self.position];
|
||||||
|
const value = std.mem.trim(u8, raw_value, &std.ascii.whitespace);
|
||||||
|
|
||||||
|
var final_value = value;
|
||||||
|
var is_important = false;
|
||||||
|
|
||||||
|
if (std.mem.endsWith(u8, value, CSSConstants.IMPORTANT)) {
|
||||||
|
is_important = true;
|
||||||
|
final_value = std.mem.trimRight(u8, value[0 .. value.len - CSSConstants.IMPORTANT.len], &std.ascii.whitespace);
|
||||||
|
}
|
||||||
|
|
||||||
|
try declarations.append(arena, .{
|
||||||
|
.name = name,
|
||||||
|
.value = final_value,
|
||||||
|
.is_important = is_important,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn finalize(self: *CSSParser, arena: Allocator, declarations: *std.ArrayListUnmanaged(CSSDeclaration), text: []const u8) !void {
|
||||||
|
if (self.state != .in_value) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return self.finishDeclaration(arena, declarations, text);
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: CSS.Parser - Simple property" {
|
||||||
|
defer testing.reset();
|
||||||
|
|
||||||
|
const text = "color: red;";
|
||||||
|
const allocator = testing.arena_allocator;
|
||||||
|
|
||||||
|
const declarations = try CSSParser.parseDeclarations(allocator, text);
|
||||||
|
|
||||||
|
try testing.expectEqual(1, declarations.len);
|
||||||
|
try testing.expectEqual("color", declarations[0].name);
|
||||||
|
try testing.expectEqual("red", declarations[0].value);
|
||||||
|
try testing.expectEqual(false, declarations[0].is_important);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser: CSS.Parser - Property with !important" {
|
||||||
|
defer testing.reset();
|
||||||
|
const text = "margin: 10px !important;";
|
||||||
|
const allocator = testing.arena_allocator;
|
||||||
|
|
||||||
|
const declarations = try CSSParser.parseDeclarations(allocator, text);
|
||||||
|
|
||||||
|
try testing.expectEqual(1, declarations.len);
|
||||||
|
try testing.expectEqual("margin", declarations[0].name);
|
||||||
|
try testing.expectEqual("10px", declarations[0].value);
|
||||||
|
try testing.expectEqual(true, declarations[0].is_important);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser: CSS.Parser - Multiple properties" {
|
||||||
|
defer testing.reset();
|
||||||
|
const text = "color: red; font-size: 12px; margin: 5px !important;";
|
||||||
|
const allocator = testing.arena_allocator;
|
||||||
|
|
||||||
|
const declarations = try CSSParser.parseDeclarations(allocator, text);
|
||||||
|
|
||||||
|
try testing.expect(declarations.len == 3);
|
||||||
|
|
||||||
|
try testing.expectEqual("color", declarations[0].name);
|
||||||
|
try testing.expectEqual("red", declarations[0].value);
|
||||||
|
try testing.expectEqual(false, declarations[0].is_important);
|
||||||
|
|
||||||
|
try testing.expectEqual("font-size", declarations[1].name);
|
||||||
|
try testing.expectEqual("12px", declarations[1].value);
|
||||||
|
try testing.expectEqual(false, declarations[1].is_important);
|
||||||
|
|
||||||
|
try testing.expectEqual("margin", declarations[2].name);
|
||||||
|
try testing.expectEqual("5px", declarations[2].value);
|
||||||
|
try testing.expectEqual(true, declarations[2].is_important);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser: CSS.Parser - Quoted value with semicolon" {
|
||||||
|
defer testing.reset();
|
||||||
|
const text = "content: \"Hello; world!\";";
|
||||||
|
const allocator = testing.arena_allocator;
|
||||||
|
|
||||||
|
const declarations = try CSSParser.parseDeclarations(allocator, text);
|
||||||
|
|
||||||
|
try testing.expectEqual(1, declarations.len);
|
||||||
|
try testing.expectEqual("content", declarations[0].name);
|
||||||
|
try testing.expectEqual("\"Hello; world!\"", declarations[0].value);
|
||||||
|
try testing.expectEqual(false, declarations[0].is_important);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser: CSS.Parser - URL value" {
|
||||||
|
defer testing.reset();
|
||||||
|
const text = "background-image: url(\"test.png\");";
|
||||||
|
const allocator = testing.arena_allocator;
|
||||||
|
|
||||||
|
const declarations = try CSSParser.parseDeclarations(allocator, text);
|
||||||
|
|
||||||
|
try testing.expectEqual(1, declarations.len);
|
||||||
|
try testing.expectEqual("background-image", declarations[0].name);
|
||||||
|
try testing.expectEqual("url(\"test.png\")", declarations[0].value);
|
||||||
|
try testing.expectEqual(false, declarations[0].is_important);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser: CSS.Parser - Whitespace handling" {
|
||||||
|
defer testing.reset();
|
||||||
|
const text = " color : purple ; margin : 10px ; ";
|
||||||
|
const allocator = testing.arena_allocator;
|
||||||
|
|
||||||
|
const declarations = try CSSParser.parseDeclarations(allocator, text);
|
||||||
|
|
||||||
|
try testing.expectEqual(2, declarations.len);
|
||||||
|
try testing.expectEqual("color", declarations[0].name);
|
||||||
|
try testing.expectEqual("purple", declarations[0].value);
|
||||||
|
try testing.expectEqual("margin", declarations[1].name);
|
||||||
|
try testing.expectEqual("10px", declarations[1].value);
|
||||||
|
}
|
||||||
41
src/browser/cssom/CSSRule.zig
Normal file
41
src/browser/cssom/CSSRule.zig
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const CSSStyleSheet = @import("CSSStyleSheet.zig");
|
||||||
|
|
||||||
|
pub const Interfaces = .{
|
||||||
|
CSSRule,
|
||||||
|
CSSImportRule,
|
||||||
|
};
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/CSSRule
|
||||||
|
const CSSRule = @This();
|
||||||
|
css_text: []const u8,
|
||||||
|
parent_rule: ?*CSSRule = null,
|
||||||
|
parent_stylesheet: ?*CSSStyleSheet = null,
|
||||||
|
|
||||||
|
pub const CSSImportRule = struct {
|
||||||
|
pub const prototype = *CSSRule;
|
||||||
|
href: []const u8,
|
||||||
|
layer_name: ?[]const u8,
|
||||||
|
media: void,
|
||||||
|
style_sheet: CSSStyleSheet,
|
||||||
|
supports_text: ?[]const u8,
|
||||||
|
};
|
||||||
52
src/browser/cssom/CSSRuleList.zig
Normal file
52
src/browser/cssom/CSSRuleList.zig
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const CSSRule = @import("CSSRule.zig");
|
||||||
|
const StyleSheet = @import("StyleSheet.zig").StyleSheet;
|
||||||
|
|
||||||
|
const CSSImportRule = CSSRule.CSSImportRule;
|
||||||
|
|
||||||
|
const CSSRuleList = @This();
|
||||||
|
list: std.ArrayListUnmanaged([]const u8),
|
||||||
|
|
||||||
|
pub fn constructor() CSSRuleList {
|
||||||
|
return .{ .list = .empty };
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _item(self: *CSSRuleList, _index: u32) ?CSSRule {
|
||||||
|
const index: usize = @intCast(_index);
|
||||||
|
|
||||||
|
if (index > self.list.items.len) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// todo: for now, just return null.
|
||||||
|
// this depends on properly parsing CSSRule
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_length(self: *CSSRuleList) u32 {
|
||||||
|
return @intCast(self.list.items.len);
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: CSS.CSSRuleList" {
|
||||||
|
try testing.htmlRunner("cssom/css_rule_list.html");
|
||||||
|
}
|
||||||
958
src/browser/cssom/CSSStyleDeclaration.zig
Normal file
958
src/browser/cssom/CSSStyleDeclaration.zig
Normal file
@@ -0,0 +1,958 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const CSSRule = @import("CSSRule.zig");
|
||||||
|
const CSSParser = @import("CSSParser.zig");
|
||||||
|
|
||||||
|
const Property = struct {
|
||||||
|
value: []const u8,
|
||||||
|
priority: bool,
|
||||||
|
};
|
||||||
|
|
||||||
|
const CSSStyleDeclaration = @This();
|
||||||
|
|
||||||
|
properties: std.StringArrayHashMapUnmanaged(Property),
|
||||||
|
|
||||||
|
pub const empty: CSSStyleDeclaration = .{
|
||||||
|
.properties = .empty,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn get_cssFloat(self: *const CSSStyleDeclaration) []const u8 {
|
||||||
|
return self._getPropertyValue("float");
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_cssFloat(self: *CSSStyleDeclaration, value: ?[]const u8, page: *Page) !void {
|
||||||
|
const final_value = value orelse "";
|
||||||
|
return self._setProperty("float", final_value, null, page);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_cssText(self: *const CSSStyleDeclaration, page: *Page) ![]const u8 {
|
||||||
|
var buffer: std.ArrayListUnmanaged(u8) = .empty;
|
||||||
|
const writer = buffer.writer(page.call_arena);
|
||||||
|
var it = self.properties.iterator();
|
||||||
|
while (it.next()) |entry| {
|
||||||
|
const name = entry.key_ptr.*;
|
||||||
|
const property = entry.value_ptr;
|
||||||
|
const escaped = try escapeCSSValue(page.call_arena, property.value);
|
||||||
|
try writer.print("{s}: {s}", .{ name, escaped });
|
||||||
|
if (property.priority) {
|
||||||
|
try writer.writeAll(" !important; ");
|
||||||
|
} else {
|
||||||
|
try writer.writeAll("; ");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return buffer.items;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO Propagate also upward to parent node
|
||||||
|
pub fn set_cssText(self: *CSSStyleDeclaration, text: []const u8, page: *Page) !void {
|
||||||
|
self.properties.clearRetainingCapacity();
|
||||||
|
|
||||||
|
// call_arena is safe here, because _setProperty will dupe the name
|
||||||
|
// using the page's longer-living arena.
|
||||||
|
const declarations = try CSSParser.parseDeclarations(page.call_arena, text);
|
||||||
|
|
||||||
|
for (declarations) |decl| {
|
||||||
|
if (!isValidPropertyName(decl.name)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const priority: ?[]const u8 = if (decl.is_important) "important" else null;
|
||||||
|
try self._setProperty(decl.name, decl.value, priority, page);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_length(self: *const CSSStyleDeclaration) usize {
|
||||||
|
return self.properties.count();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_parentRule(_: *const CSSStyleDeclaration) ?CSSRule {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getPropertyPriority(self: *const CSSStyleDeclaration, name: []const u8) []const u8 {
|
||||||
|
const property = self.properties.getPtr(name) orelse return "";
|
||||||
|
return if (property.priority) "important" else "";
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO should handle properly shorthand properties and canonical forms
|
||||||
|
pub fn _getPropertyValue(self: *const CSSStyleDeclaration, name: []const u8) []const u8 {
|
||||||
|
if (self.properties.getPtr(name)) |property| {
|
||||||
|
return property.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// default to everything being visible (unless it's been explicitly set)
|
||||||
|
if (std.mem.eql(u8, name, "visibility")) {
|
||||||
|
return "visible";
|
||||||
|
}
|
||||||
|
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _item(self: *const CSSStyleDeclaration, index: usize) []const u8 {
|
||||||
|
const values = self.properties.entries.items(.key);
|
||||||
|
if (index >= values.len) {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
return values[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _removeProperty(self: *CSSStyleDeclaration, name: []const u8) ![]const u8 {
|
||||||
|
const property = self.properties.fetchOrderedRemove(name) orelse return "";
|
||||||
|
return property.value.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// CSSOM `setProperty`: inserts or overwrites `name`. The name and value are
/// duplicated into the page arena, so callers may pass transient slices.
/// Any priority string equal (case-insensitively) to "important" sets the
/// important flag; anything else (including null) clears it.
pub fn _setProperty(self: *CSSStyleDeclaration, name: []const u8, value: []const u8, priority: ?[]const u8, page: *Page) !void {
    const gop = try self.properties.getOrPut(page.arena, name);
    if (!gop.found_existing) {
        // New entry: the map must own a stable copy of the key.
        gop.key_ptr.* = try page.arena.dupe(u8, name);
    }

    const important = if (priority) |p| std.ascii.eqlIgnoreCase(p, "important") else false;
    gop.value_ptr.* = .{
        .value = try page.arena.dupe(u8, value),
        .priority = important,
    };
}
|
||||||
|
|
||||||
|
/// Named-property getter (JS `style.color`): delegates to getPropertyValue.
pub fn named_get(self: *const CSSStyleDeclaration, name: []const u8, _: *bool) []const u8 {
    return self._getPropertyValue(name);
}
|
||||||
|
|
||||||
|
/// Named-property setter (JS `style.color = "red"`): delegates to
/// setProperty with no priority.
pub fn named_set(self: *CSSStyleDeclaration, name: []const u8, value: []const u8, _: *bool, page: *Page) !void {
    return self._setProperty(name, value, null, page);
}
|
||||||
|
|
||||||
|
/// Heuristic check that `value` is a CSS <number> optionally followed by a
/// known unit (e.g. "10px", "-3.14e10px", "12.5%"). Bare numbers ("10") are
/// accepted; leading/trailing whitespace is not.
fn isNumericWithUnit(value: []const u8) bool {
    if (value.len == 0) {
        return false;
    }

    const first = value[0];

    // A number may start with a digit, a sign, or a leading decimal point.
    if (!std.ascii.isDigit(first) and first != '+' and first != '-' and first != '.') {
        return false;
    }

    var i: usize = 0;
    var has_digit = false;
    var decimal_point = false;

    // Scan the numeric part; `i` ends at the first character of the unit.
    while (i < value.len) : (i += 1) {
        const c = value[i];
        if (std.ascii.isDigit(c)) {
            has_digit = true;
        } else if (c == '.' and !decimal_point) {
            // Only one decimal point allowed; a second "." ends the number.
            decimal_point = true;
        } else if ((c == 'e' or c == 'E') and has_digit) {
            // Exponent: requires at least one digit after an optional sign.
            // A trailing bare "e"/"e+" invalidates the whole value...
            if (i + 1 >= value.len) return false;
            // ...but "1epx" just ends the number at 'e' (unit "epx" then fails).
            if (value[i + 1] != '+' and value[i + 1] != '-' and !std.ascii.isDigit(value[i + 1])) break;
            i += 1;
            if (value[i] == '+' or value[i] == '-') {
                i += 1;
            }
            var has_exp_digits = false;
            while (i < value.len and std.ascii.isDigit(value[i])) : (i += 1) {
                has_exp_digits = true;
            }
            if (!has_exp_digits) return false;
            // The exponent terminates the number; the rest is the unit.
            break;
        } else if (c != '-' and c != '+') {
            break;
        }
        // NOTE(review): '+'/'-' characters are tolerated anywhere inside the
        // numeric part (e.g. "1-2px" scans as numeric) — presumably a
        // deliberate leniency; confirm before tightening.
    }

    if (!has_digit) {
        return false;
    }

    // Number with no trailing unit, e.g. "10".
    if (i == value.len) {
        return true;
    }

    const unit = value[i..];
    return CSSKeywords.isValidUnit(unit);
}
|
||||||
|
|
||||||
|
/// True if `value` is a hex color: '#' followed by exactly 3, 6, or 8
/// hexadecimal digits (#rgb, #rrggbb, #rrggbbaa).
fn isHexColor(value: []const u8) bool {
    if (value.len == 0 or value[0] != '#') {
        return false;
    }

    const digits = value[1..];
    switch (digits.len) {
        3, 6, 8 => {},
        else => return false,
    }

    for (digits) |c| {
        if (!std.ascii.isHex(c)) return false;
    }
    return true;
}
|
||||||
|
|
||||||
|
/// True if `value` looks like a space-separated multi-value shorthand
/// (e.g. "10px 20px", "solid red"): at least two non-empty tokens, every
/// one of which is a number+unit, hex color, known keyword, or function.
fn isMultiValueProperty(value: []const u8) bool {
    var tokens = std.mem.splitAny(u8, value, " ");
    var token_count: usize = 0;

    while (tokens.next()) |token| {
        if (token.len == 0) continue;
        token_count += 1;

        const recognized = isNumericWithUnit(token) or
            isHexColor(token) or
            CSSKeywords.isKnownKeyword(token) or
            CSSKeywords.startsWithFunction(token);
        if (!recognized) return false;
    }

    return token_count >= 2;
}
|
||||||
|
|
||||||
|
/// True if `value` both starts and ends with the same quote character
/// (either double or single) and is long enough to hold a pair.
fn isAlreadyQuoted(value: []const u8) bool {
    if (value.len < 2) return false;
    const head = value[0];
    const tail = value[value.len - 1];
    return (head == '"' and tail == '"') or (head == '\'' and tail == '\'');
}
|
||||||
|
|
||||||
|
/// Validates a CSS property name.
/// Accepts custom properties ("--" + [A-Za-z0-9_-]+), vendor prefixes
/// ("-" + letter + [A-Za-z0-9-]*), and standard names
/// (letter + [A-Za-z0-9-]*). Underscores are only allowed in custom props.
fn isValidPropertyName(name: []const u8) bool {
    if (name.len == 0) return false;

    // Custom property: "--name".
    if (std.mem.startsWith(u8, name, "--")) {
        if (name.len == 2) return false;
        for (name[2..]) |c| {
            if (!std.ascii.isAlphanumeric(c) and c != '-' and c != '_') {
                return false;
            }
        }
        return true;
    }

    const head = name[0];

    // Vendor-prefixed property: "-webkit-...", "-moz-...", etc.
    if (head == '-') {
        if (name.len < 2 or !std.ascii.isAlphabetic(name[1])) {
            return false;
        }
        for (name[2..]) |c| {
            if (!std.ascii.isAlphanumeric(c) and c != '-') {
                return false;
            }
        }
        return true;
    }

    // Standard property: must start with a letter.
    if (!std.ascii.isAlphabetic(head)) return false;
    for (name[1..]) |c| {
        if (!std.ascii.isAlphanumeric(c) and c != '-') {
            return false;
        }
    }
    return true;
}
|
||||||
|
|
||||||
|
/// Trims `value` and strips a trailing "!important" (exact lowercase match,
/// per the tests). Returns the cleaned value plus whether the flag was set.
fn extractImportant(value: []const u8) struct { value: []const u8, is_important: bool } {
    const suffix = "!important";
    const trimmed = std.mem.trim(u8, value, &std.ascii.whitespace);

    if (!std.mem.endsWith(u8, trimmed, suffix)) {
        return .{ .value = trimmed, .is_important = false };
    }

    const head = trimmed[0 .. trimmed.len - suffix.len];
    return .{
        .value = std.mem.trimRight(u8, head, &std.ascii.whitespace),
        .is_important = true,
    };
}
|
||||||
|
|
||||||
|
/// Decides whether a CSS value must be wrapped in quotes when serialized.
/// Empty values and values containing special characters need quotes;
/// already-quoted values, single tokens, urls, functions, and recognized
/// multi-value shorthands do not.
fn needsQuotes(value: []const u8) bool {
    if (value.len == 0) return true;
    if (isAlreadyQuoted(value)) return false;
    if (CSSKeywords.containsSpecialChar(value)) return true;

    // A single token (no spaces) never needs quoting at this point.
    if (std.mem.indexOfScalar(u8, value, ' ') == null) return false;

    // Spaces are fine inside urls, function calls, and known shorthands.
    if (std.mem.startsWith(u8, value, "url(")) return false;
    if (CSSKeywords.startsWithFunction(value)) return false;
    return !isMultiValueProperty(value);
}
|
||||||
|
|
||||||
|
/// Serializes `value` for CSS output. If no quoting is required the input
/// slice is returned as-is (no copy); otherwise a new double-quoted,
/// escaped string is allocated from `arena`. Caller does not free (arena).
fn escapeCSSValue(arena: std.mem.Allocator, value: []const u8) ![]const u8 {
    if (!needsQuotes(value)) {
        return value;
    }
    var out: std.ArrayListUnmanaged(u8) = .empty;

    // We'll need at least this much space, +2 for the quotes
    try out.ensureTotalCapacity(arena, value.len + 2);
    const writer = out.writer(arena);

    try writer.writeByte('"');

    for (value, 0..) |c, i| {
        switch (c) {
            '"' => try writer.writeAll("\\\""),
            '\\' => try writer.writeAll("\\\\"),
            // CSS hex escapes for whitespace controls end with a space so a
            // following hex digit isn't absorbed into the escape.
            '\n' => try writer.writeAll("\\A "),
            '\r' => try writer.writeAll("\\D "),
            '\t' => try writer.writeAll("\\9 "),
            // Remaining C0 controls and DEL: hex escape, with a trailing
            // space only when the next character is itself a hex digit.
            0...8, 11, 12, 14...31, 127 => {
                try writer.print("\\{x}", .{c});
                if (i + 1 < value.len and std.ascii.isHex(value[i + 1])) {
                    try writer.writeByte(' ');
                }
            },
            else => try writer.writeByte(c),
        }
    }

    try writer.writeByte('"');
    return out.items;
}
|
||||||
|
|
||||||
|
/// Convenience wrapper; see CSSKeywords.isKnownKeyword.
fn isKnownKeyword(value: []const u8) bool {
    return CSSKeywords.isKnownKeyword(value);
}
|
||||||
|
|
||||||
|
/// Convenience wrapper; see CSSKeywords.containsSpecialChar.
fn containsSpecialChar(value: []const u8) bool {
    return CSSKeywords.containsSpecialChar(value);
}
|
||||||
|
|
||||||
|
/// Static CSS vocabulary used by the value-classification helpers:
/// keyword tables, unit tables, special characters, and function-name
/// detection. All lookups are ASCII case-insensitive where noted.
const CSSKeywords = struct {
    const BORDER_STYLES = [_][]const u8{
        "none", "solid", "dotted", "dashed", "double", "groove", "ridge", "inset", "outset",
    };

    const COLOR_NAMES = [_][]const u8{
        "black", "white", "red", "green", "blue", "yellow", "purple", "gray", "transparent",
        "currentColor", "inherit",
    };

    const POSITION_KEYWORDS = [_][]const u8{
        "auto", "center", "left", "right", "top", "bottom",
    };

    const BACKGROUND_REPEAT = [_][]const u8{
        "repeat", "no-repeat", "repeat-x", "repeat-y", "space", "round",
    };

    const FONT_STYLES = [_][]const u8{
        "normal", "italic", "oblique", "bold", "bolder", "lighter",
    };

    const FONT_SIZES = [_][]const u8{
        "xx-small", "x-small", "small", "medium", "large", "x-large", "xx-large",
        "smaller", "larger",
    };

    const FONT_FAMILIES = [_][]const u8{
        "serif", "sans-serif", "monospace", "cursive", "fantasy", "system-ui",
    };

    const CSS_GLOBAL = [_][]const u8{
        "initial", "inherit", "unset", "revert",
    };

    const DISPLAY_VALUES = [_][]const u8{
        "block", "inline", "inline-block", "flex", "grid", "none",
    };

    const UNITS = [_][]const u8{
        // LENGTH
        "px", "em", "rem", "vw", "vh", "vmin", "vmax", "%", "pt", "pc", "in", "cm", "mm",
        "ex", "ch", "fr",

        // ANGLE
        "deg", "rad", "grad", "turn",

        // TIME
        "s", "ms",

        // FREQUENCY
        "hz", "khz",

        // RESOLUTION
        "dpi", "dpcm",
        "dppx",
    };

    const SPECIAL_CHARS = [_]u8{
        '"', '\'', ';', '{', '}', '\\', '<', '>', '/', '\n', '\t', '\r', '\x00', '\x7F',
    };

    const FUNCTIONS = [_][]const u8{
        "rgb(", "rgba(", "hsl(", "hsla(", "url(", "calc(", "var(", "attr(",
        "linear-gradient(", "radial-gradient(", "conic-gradient(", "translate(", "rotate(", "scale(", "skew(", "matrix(",
    };

    const KEYWORDS = BORDER_STYLES ++ COLOR_NAMES ++ POSITION_KEYWORDS ++
        BACKGROUND_REPEAT ++ FONT_STYLES ++ FONT_SIZES ++ FONT_FAMILIES ++
        CSS_GLOBAL ++ DISPLAY_VALUES;

    const MAX_KEYWORD_LEN = lengthOfLongestValue(&KEYWORDS);

    /// True if `value` matches any known CSS value keyword (case-insensitive).
    pub fn isKnownKeyword(value: []const u8) bool {
        // Anything longer than the longest keyword can't match.
        if (value.len > MAX_KEYWORD_LEN) {
            return false;
        }

        // eqlIgnoreCase already normalizes case; the previous lowercase
        // buffer copy was redundant work.
        for (KEYWORDS) |keyword| {
            if (std.ascii.eqlIgnoreCase(value, keyword)) {
                return true;
            }
        }

        return false;
    }

    /// True if `value` contains any character that forces quoting
    /// (quotes, braces, separators, control characters).
    pub fn containsSpecialChar(value: []const u8) bool {
        return std.mem.indexOfAny(u8, value, &SPECIAL_CHARS) != null;
    }

    const MAX_UNIT_LEN = lengthOfLongestValue(&UNITS);

    /// True if `unit` is a recognized CSS unit (case-insensitive).
    pub fn isValidUnit(unit: []const u8) bool {
        if (unit.len > MAX_UNIT_LEN) {
            return false;
        }

        // UNITS entries are all lowercase, so a case-insensitive compare is
        // equivalent to the former lowercase-then-eql sequence.
        for (UNITS) |u| {
            if (std.ascii.eqlIgnoreCase(unit, u)) {
                return true;
            }
        }
        return false;
    }

    /// True if `value` starts with a syntactically plausible function call:
    /// a valid identifier, an opening paren (not at position 0), and a
    /// closing paren somewhere after it. Any identifier is accepted, not
    /// only the names in FUNCTIONS.
    pub fn startsWithFunction(value: []const u8) bool {
        const pos = std.mem.indexOfScalar(u8, value, '(') orelse return false;
        if (pos == 0) return false;

        if (std.mem.indexOfScalarPos(u8, value, pos, ')') == null) {
            return false;
        }
        const function_name = value[0..pos];
        return isValidFunctionName(function_name);
    }

    /// CSS-ish identifier check: [A-Za-z_-] followed by [A-Za-z0-9_-]*.
    fn isValidFunctionName(name: []const u8) bool {
        if (name.len == 0) return false;

        const first = name[0];
        if (!std.ascii.isAlphabetic(first) and first != '_' and first != '-') {
            return false;
        }

        for (name[1..]) |c| {
            if (!std.ascii.isAlphanumeric(c) and c != '_' and c != '-') {
                return false;
            }
        }

        return true;
    }
};
|
||||||
|
|
||||||
|
/// Length of the longest string in `values`; 0 for an empty slice.
/// Evaluated at comptime to size the keyword/unit fast-reject bounds.
fn lengthOfLongestValue(values: []const []const u8) usize {
    var longest: usize = 0;
    for (values) |entry| {
        longest = @max(longest, entry.len);
    }
    return longest;
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
// Integration test: exercises the declaration end-to-end through an HTML page.
test "Browser: CSS.StyleDeclaration" {
    try testing.htmlRunner("cssom/css_style_declaration.html");
}

// Plain numbers, signs, leading decimal points, and common units.
test "Browser: CSS.StyleDeclaration: isNumericWithUnit - valid numbers with units" {
    try testing.expect(isNumericWithUnit("10px"));
    try testing.expect(isNumericWithUnit("3.14em"));
    try testing.expect(isNumericWithUnit("-5rem"));
    try testing.expect(isNumericWithUnit("+12.5%"));
    try testing.expect(isNumericWithUnit("0vh"));
    try testing.expect(isNumericWithUnit(".5vw"));
}

// Exponent notation (e/E with optional sign) is part of a CSS <number>.
test "Browser: CSS.StyleDeclaration: isNumericWithUnit - scientific notation" {
    try testing.expect(isNumericWithUnit("1e5px"));
    try testing.expect(isNumericWithUnit("2.5E-3em"));
    try testing.expect(isNumericWithUnit("1e+2rem"));
    try testing.expect(isNumericWithUnit("-3.14e10px"));
}

test "Browser: CSS.StyleDeclaration: isNumericWithUnit - edge cases and invalid inputs" {
    try testing.expect(!isNumericWithUnit(""));

    // a unit with no numeric part is rejected
    try testing.expect(!isNumericWithUnit("px"));
    try testing.expect(!isNumericWithUnit("--px"));
    try testing.expect(!isNumericWithUnit(".px"));

    // incomplete exponents are rejected
    try testing.expect(!isNumericWithUnit("1e"));
    try testing.expect(!isNumericWithUnit("1epx"));
    try testing.expect(!isNumericWithUnit("1e+"));
    try testing.expect(!isNumericWithUnit("1e+px"));

    // only one decimal point is allowed
    try testing.expect(!isNumericWithUnit("1.2.3px"));

    // unknown units are rejected
    try testing.expect(!isNumericWithUnit("10xyz"));
    try testing.expect(!isNumericWithUnit("5invalid"));

    // bare numbers (no unit) are accepted
    try testing.expect(isNumericWithUnit("10"));
    try testing.expect(isNumericWithUnit("3.14"));
    try testing.expect(isNumericWithUnit("-5"));
}

// 3-, 6-, and 8-digit hex colors are valid.
test "Browser: CSS.StyleDeclaration: isHexColor - valid hex colors" {
    try testing.expect(isHexColor("#000"));
    try testing.expect(isHexColor("#fff"));
    try testing.expect(isHexColor("#123456"));
    try testing.expect(isHexColor("#abcdef"));
    try testing.expect(isHexColor("#ABCDEF"));
    try testing.expect(isHexColor("#12345678"));
}

// Wrong lengths, missing '#', and non-hex digits are all rejected.
test "Browser: CSS.StyleDeclaration: isHexColor - invalid hex colors" {
    try testing.expect(!isHexColor(""));
    try testing.expect(!isHexColor("#"));
    try testing.expect(!isHexColor("000"));
    try testing.expect(!isHexColor("#00"));
    try testing.expect(!isHexColor("#0000"));
    try testing.expect(!isHexColor("#00000"));
    try testing.expect(!isHexColor("#0000000"));
    try testing.expect(!isHexColor("#000000000"));
    try testing.expect(!isHexColor("#gggggg"));
    try testing.expect(!isHexColor("#123xyz"));
}

// Two or more recognized tokens form a multi-value shorthand.
test "Browser: CSS.StyleDeclaration: isMultiValueProperty - valid multi-value properties" {
    try testing.expect(isMultiValueProperty("10px 20px"));
    try testing.expect(isMultiValueProperty("solid red"));
    try testing.expect(isMultiValueProperty("#fff black"));
    try testing.expect(isMultiValueProperty("1em 2em 3em 4em"));
    try testing.expect(isMultiValueProperty("rgb(255,0,0) solid"));
}

// Single tokens, unknown tokens, and all-whitespace values are rejected.
test "Browser: CSS.StyleDeclaration: isMultiValueProperty - invalid multi-value properties" {
    try testing.expect(!isMultiValueProperty(""));
    try testing.expect(!isMultiValueProperty("10px"));
    try testing.expect(!isMultiValueProperty("invalid unknown"));
    try testing.expect(!isMultiValueProperty("10px invalid"));
    try testing.expect(!isMultiValueProperty(" "));
}
|
||||||
|
|
||||||
|
// Matched quote pairs (of either kind) count as quoted; mismatched or
// unterminated quotes do not.
test "Browser: CSS.StyleDeclaration: isAlreadyQuoted - various quoting scenarios" {
    try testing.expect(isAlreadyQuoted("\"hello\""));
    try testing.expect(isAlreadyQuoted("'world'"));
    try testing.expect(isAlreadyQuoted("\"\""));
    try testing.expect(isAlreadyQuoted("''"));

    try testing.expect(!isAlreadyQuoted(""));
    try testing.expect(!isAlreadyQuoted("hello"));
    try testing.expect(!isAlreadyQuoted("\""));
    try testing.expect(!isAlreadyQuoted("'"));
    try testing.expect(!isAlreadyQuoted("\"hello'"));
    try testing.expect(!isAlreadyQuoted("'hello\""));
    try testing.expect(!isAlreadyQuoted("\"hello"));
    try testing.expect(!isAlreadyQuoted("hello\""));
}

// Standard and vendor-prefixed names are accepted.
test "Browser: CSS.StyleDeclaration: isValidPropertyName - valid property names" {
    try testing.expect(isValidPropertyName("color"));
    try testing.expect(isValidPropertyName("background-color"));
    try testing.expect(isValidPropertyName("-webkit-transform"));
    try testing.expect(isValidPropertyName("font-size"));
    try testing.expect(isValidPropertyName("margin-top"));
    try testing.expect(isValidPropertyName("z-index"));
    try testing.expect(isValidPropertyName("line-height"));
}

// Leading digits, punctuation, spaces, and underscores are rejected
// (underscores are only valid in custom properties).
test "Browser: CSS.StyleDeclaration: isValidPropertyName - invalid property names" {
    try testing.expect(!isValidPropertyName(""));
    try testing.expect(!isValidPropertyName("123color"));
    try testing.expect(!isValidPropertyName("color!"));
    try testing.expect(!isValidPropertyName("color space"));
    try testing.expect(!isValidPropertyName("@color"));
    try testing.expect(!isValidPropertyName("color.test"));
    try testing.expect(!isValidPropertyName("color_test"));
}

// "!important" is stripped (with surrounding whitespace); the bare word
// "important" is just a value.
test "Browser: CSS.StyleDeclaration: extractImportant - with and without !important" {
    var result = extractImportant("red !important");
    try testing.expect(result.is_important);
    try testing.expectEqual("red", result.value);

    result = extractImportant("blue");
    try testing.expect(!result.is_important);
    try testing.expectEqual("blue", result.value);

    result = extractImportant(" green !important ");
    try testing.expect(result.is_important);
    try testing.expectEqual("green", result.value);

    result = extractImportant("!important");
    try testing.expect(result.is_important);
    try testing.expectEqual("", result.value);

    result = extractImportant("important");
    try testing.expect(!result.is_important);
    try testing.expectEqual("important", result.value);
}

// Empty values and special characters need quotes; quoted values, urls,
// functions, known shorthands, and single tokens do not.
test "Browser: CSS.StyleDeclaration: needsQuotes - various scenarios" {
    try testing.expect(needsQuotes(""));
    try testing.expect(needsQuotes("hello world"));
    try testing.expect(needsQuotes("test;"));
    try testing.expect(needsQuotes("a{b}"));
    try testing.expect(needsQuotes("test\"quote"));

    try testing.expect(!needsQuotes("\"already quoted\""));
    try testing.expect(!needsQuotes("'already quoted'"));
    try testing.expect(!needsQuotes("url(image.png)"));
    try testing.expect(!needsQuotes("rgb(255, 0, 0)"));
    try testing.expect(!needsQuotes("10px 20px"));
    try testing.expect(!needsQuotes("simple"));
}

// Values not needing quotes pass through unchanged; otherwise the result
// is double-quoted with backslash escapes.
test "Browser: CSS.StyleDeclaration: escapeCSSValue - escaping various characters" {
    const allocator = testing.arena_allocator;

    var result = try escapeCSSValue(allocator, "simple");
    try testing.expectEqual("simple", result);

    result = try escapeCSSValue(allocator, "\"already quoted\"");
    try testing.expectEqual("\"already quoted\"", result);

    result = try escapeCSSValue(allocator, "test\"quote");
    try testing.expectEqual("\"test\\\"quote\"", result);

    result = try escapeCSSValue(allocator, "test\nline");
    try testing.expectEqual("\"test\\A line\"", result);

    result = try escapeCSSValue(allocator, "test\\back");
    try testing.expectEqual("\"test\\\\back\"", result);
}

// Keyword lookup is case-insensitive.
test "Browser: CSS.StyleDeclaration: CSSKeywords.isKnownKeyword - case sensitivity" {
    try testing.expect(CSSKeywords.isKnownKeyword("red"));
    try testing.expect(CSSKeywords.isKnownKeyword("solid"));
    try testing.expect(CSSKeywords.isKnownKeyword("center"));
    try testing.expect(CSSKeywords.isKnownKeyword("inherit"));

    try testing.expect(CSSKeywords.isKnownKeyword("RED"));
    try testing.expect(CSSKeywords.isKnownKeyword("Red"));
    try testing.expect(CSSKeywords.isKnownKeyword("SOLID"));
    try testing.expect(CSSKeywords.isKnownKeyword("Center"));

    try testing.expect(!CSSKeywords.isKnownKeyword("invalid"));
    try testing.expect(!CSSKeywords.isKnownKeyword("unknown"));
    try testing.expect(!CSSKeywords.isKnownKeyword(""));
}
|
||||||
|
|
||||||
|
// Every character in SPECIAL_CHARS trips the check; plain text does not.
test "Browser: CSS.StyleDeclaration: CSSKeywords.containsSpecialChar - various special characters" {
    try testing.expect(CSSKeywords.containsSpecialChar("test\"quote"));
    try testing.expect(CSSKeywords.containsSpecialChar("test'quote"));
    try testing.expect(CSSKeywords.containsSpecialChar("test;end"));
    try testing.expect(CSSKeywords.containsSpecialChar("test{brace"));
    try testing.expect(CSSKeywords.containsSpecialChar("test}brace"));
    try testing.expect(CSSKeywords.containsSpecialChar("test\\back"));
    try testing.expect(CSSKeywords.containsSpecialChar("test<angle"));
    try testing.expect(CSSKeywords.containsSpecialChar("test>angle"));
    try testing.expect(CSSKeywords.containsSpecialChar("test/slash"));

    try testing.expect(!CSSKeywords.containsSpecialChar("normal-text"));
    try testing.expect(!CSSKeywords.containsSpecialChar("text123"));
    try testing.expect(!CSSKeywords.containsSpecialChar(""));
}

// Length, angle, and time units; lookup is case-insensitive ("PX").
test "Browser: CSS.StyleDeclaration: CSSKeywords.isValidUnit - various units" {
    try testing.expect(CSSKeywords.isValidUnit("px"));
    try testing.expect(CSSKeywords.isValidUnit("em"));
    try testing.expect(CSSKeywords.isValidUnit("rem"));
    try testing.expect(CSSKeywords.isValidUnit("%"));

    try testing.expect(CSSKeywords.isValidUnit("deg"));
    try testing.expect(CSSKeywords.isValidUnit("rad"));

    try testing.expect(CSSKeywords.isValidUnit("s"));
    try testing.expect(CSSKeywords.isValidUnit("ms"));

    try testing.expect(CSSKeywords.isValidUnit("PX"));

    try testing.expect(!CSSKeywords.isValidUnit("invalid"));
    try testing.expect(!CSSKeywords.isValidUnit(""));
}

// Any valid identifier followed by "(...)" counts as a function — the
// FUNCTIONS list is not a whitelist here.
test "Browser: CSS.StyleDeclaration: CSSKeywords.startsWithFunction - function detection" {
    try testing.expect(CSSKeywords.startsWithFunction("rgb(255, 0, 0)"));
    try testing.expect(CSSKeywords.startsWithFunction("rgba(255, 0, 0, 0.5)"));
    try testing.expect(CSSKeywords.startsWithFunction("url(image.png)"));
    try testing.expect(CSSKeywords.startsWithFunction("calc(100% - 20px)"));
    try testing.expect(CSSKeywords.startsWithFunction("var(--custom-property)"));
    try testing.expect(CSSKeywords.startsWithFunction("linear-gradient(to right, red, blue)"));

    try testing.expect(CSSKeywords.startsWithFunction("custom-function(args)"));
    try testing.expect(CSSKeywords.startsWithFunction("unknown(test)"));

    try testing.expect(!CSSKeywords.startsWithFunction("not-a-function"));
    try testing.expect(!CSSKeywords.startsWithFunction("missing-paren)"));
    try testing.expect(!CSSKeywords.startsWithFunction("missing-close("));
    try testing.expect(!CSSKeywords.startsWithFunction(""));
    try testing.expect(!CSSKeywords.startsWithFunction("rgb"));
}

// isNumericWithUnit does not trim: any whitespace invalidates the value.
test "Browser: CSS.StyleDeclaration: isNumericWithUnit - whitespace handling" {
    try testing.expect(!isNumericWithUnit(" 10px"));
    try testing.expect(!isNumericWithUnit("10 px"));
    try testing.expect(!isNumericWithUnit("10px "));
    try testing.expect(!isNumericWithUnit(" 10 px "));
}

// extractImportant trims all surrounding whitespace kinds.
test "Browser: CSS.StyleDeclaration: extractImportant - whitespace edge cases" {
    var result = extractImportant(" ");
    try testing.expect(!result.is_important);
    try testing.expectEqual("", result.value);

    result = extractImportant("\t\n\r !important\t\n");
    try testing.expect(result.is_important);
    try testing.expectEqual("", result.value);

    result = extractImportant("red\t!important");
    try testing.expect(result.is_important);
    try testing.expectEqual("red", result.value);
}

// Hex digits may be any mix of upper and lower case.
test "Browser: CSS.StyleDeclaration: isHexColor - mixed case handling" {
    try testing.expect(isHexColor("#AbC"));
    try testing.expect(isHexColor("#123aBc"));
    try testing.expect(isHexColor("#FFffFF"));
    try testing.expect(isHexColor("#000FFF"));
}

// Oversized inputs must fail fast, not overflow or crash.
test "Browser: CSS.StyleDeclaration: edge case - very long inputs" {
    const long_valid = "a" ** 1000 ++ "px";
    try testing.expect(!isNumericWithUnit(long_valid)); // not numeric

    const long_property = "a-" ** 100 ++ "property";
    try testing.expect(isValidPropertyName(long_property));

    const long_hex = "#" ++ "a" ** 20;
    try testing.expect(!isHexColor(long_hex));
}

// Zero forms, trailing decimal points, and extreme magnitudes.
test "Browser: CSS.StyleDeclaration: boundary conditions - numeric parsing" {
    try testing.expect(isNumericWithUnit("0px"));
    try testing.expect(isNumericWithUnit("0.0px"));
    try testing.expect(isNumericWithUnit(".0px"));
    try testing.expect(isNumericWithUnit("0.px"));

    try testing.expect(isNumericWithUnit("999999999px"));
    try testing.expect(isNumericWithUnit("1.7976931348623157e+308px"));

    try testing.expect(isNumericWithUnit("0.000000001px"));
    try testing.expect(isNumericWithUnit("1e-100px"));
}
}
|
||||||
|
|
||||||
|
// Only an exact lowercase, unspaced "!important" suffix is recognized.
test "Browser: CSS.StyleDeclaration: extractImportant - malformed important declarations" {
    var result = extractImportant("red ! important");
    try testing.expect(!result.is_important);
    try testing.expectEqual("red ! important", result.value);

    result = extractImportant("red !Important");
    try testing.expect(!result.is_important);
    try testing.expectEqual("red !Important", result.value);

    result = extractImportant("red !IMPORTANT");
    try testing.expect(!result.is_important);
    try testing.expectEqual("red !IMPORTANT", result.value);

    result = extractImportant("!importantred");
    try testing.expect(!result.is_important);
    try testing.expectEqual("!importantred", result.value);

    // Only one trailing "!important" is stripped per call.
    result = extractImportant("red !important !important");
    try testing.expect(result.is_important);
    try testing.expectEqual("red !important", result.value);
}

// Only plain spaces separate tokens; tabs and newlines do not.
test "Browser: CSS.StyleDeclaration: isMultiValueProperty - complex spacing scenarios" {
    try testing.expect(isMultiValueProperty("10px 20px"));
    try testing.expect(isMultiValueProperty("solid red"));

    try testing.expect(isMultiValueProperty(" 10px 20px "));

    try testing.expect(!isMultiValueProperty("10px\t20px"));
    try testing.expect(!isMultiValueProperty("10px\n20px"));

    try testing.expect(isMultiValueProperty("10px 20px 30px"));
}

// Nested/escaped quotes inside a matched outer pair still count as quoted.
test "Browser: CSS.StyleDeclaration: isAlreadyQuoted - edge cases with quotes" {
    try testing.expect(isAlreadyQuoted("\"'hello'\""));
    try testing.expect(isAlreadyQuoted("'\"hello\"'"));

    try testing.expect(isAlreadyQuoted("\"hello\\\"world\""));
    try testing.expect(isAlreadyQuoted("'hello\\'world'"));

    try testing.expect(!isAlreadyQuoted("\"hello"));
    try testing.expect(!isAlreadyQuoted("hello\""));
    try testing.expect(!isAlreadyQuoted("'hello"));
    try testing.expect(!isAlreadyQuoted("hello'"));

    try testing.expect(isAlreadyQuoted("\"a\""));
    try testing.expect(isAlreadyQuoted("'b'"));
}

// Functions and urls may contain spaces without requiring quotes; an
// unclosed function call falls back to needing quotes.
test "Browser: CSS.StyleDeclaration: needsQuotes - function and URL edge cases" {
    try testing.expect(!needsQuotes("rgb(255, 0, 0)"));
    try testing.expect(!needsQuotes("calc(100% - 20px)"));

    try testing.expect(!needsQuotes("url(path with spaces.jpg)"));

    try testing.expect(!needsQuotes("linear-gradient(to right, red, blue)"));

    try testing.expect(needsQuotes("rgb(255, 0, 0"));
}

// Control characters become CSS hex escapes; a trailing space is added
// only when the next character is itself a hex digit.
test "Browser: CSS.StyleDeclaration: escapeCSSValue - control characters and Unicode" {
    const allocator = testing.arena_allocator;

    var result = try escapeCSSValue(allocator, "test\ttab");
    try testing.expectEqual("\"test\\9 tab\"", result);

    result = try escapeCSSValue(allocator, "test\rreturn");
    try testing.expectEqual("\"test\\D return\"", result);

    result = try escapeCSSValue(allocator, "test\x00null");
    try testing.expectEqual("\"test\\0null\"", result);

    result = try escapeCSSValue(allocator, "test\x7Fdel");
    try testing.expectEqual("\"test\\7f del\"", result);

    result = try escapeCSSValue(allocator, "test\"quote\nline\\back");
    try testing.expectEqual("\"test\\\"quote\\A line\\\\back\"", result);
}

// Custom properties ("--x") and vendor prefixes ("-webkit-") are valid;
// bare dashes and "-<digit>" are not.
test "Browser: CSS.StyleDeclaration: isValidPropertyName - CSS custom properties and vendor prefixes" {
    try testing.expect(isValidPropertyName("--custom-color"));
    try testing.expect(isValidPropertyName("--my-variable"));
    try testing.expect(isValidPropertyName("--123"));

    try testing.expect(isValidPropertyName("-webkit-transform"));
    try testing.expect(isValidPropertyName("-moz-border-radius"));
    try testing.expect(isValidPropertyName("-ms-filter"));
    try testing.expect(isValidPropertyName("-o-transition"));

    try testing.expect(!isValidPropertyName("-123invalid"));
    try testing.expect(!isValidPropertyName("--"));
    try testing.expect(!isValidPropertyName("-"));
}
|
||||||
|
|
||||||
|
test "Browser: CSS.StyleDeclaration: startsWithFunction - case sensitivity and partial matches" {
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("RGB(255, 0, 0)"));
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("Rgb(255, 0, 0)"));
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("URL(image.png)"));
|
||||||
|
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("rg(something)"));
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("ur(something)"));
|
||||||
|
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("rgb(1,2,3)"));
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("rgba(1,2,3,4)"));
|
||||||
|
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("my-custom-function(args)"));
|
||||||
|
try testing.expect(CSSKeywords.startsWithFunction("function-with-dashes(test)"));
|
||||||
|
|
||||||
|
try testing.expect(!CSSKeywords.startsWithFunction("123function(test)"));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser: CSS.StyleDeclaration: isHexColor - Unicode and invalid characters" {
|
||||||
|
try testing.expect(!isHexColor("#ghijkl"));
|
||||||
|
try testing.expect(!isHexColor("#12345g"));
|
||||||
|
try testing.expect(!isHexColor("#xyz"));
|
||||||
|
|
||||||
|
try testing.expect(!isHexColor("#АВС"));
|
||||||
|
|
||||||
|
try testing.expect(!isHexColor("#1234567g"));
|
||||||
|
try testing.expect(!isHexColor("#g2345678"));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser: CSS.StyleDeclaration: complex integration scenarios" {
|
||||||
|
const allocator = testing.arena_allocator;
|
||||||
|
|
||||||
|
try testing.expect(isMultiValueProperty("rgb(255,0,0) url(bg.jpg)"));
|
||||||
|
|
||||||
|
try testing.expect(!needsQuotes("calc(100% - 20px)"));
|
||||||
|
|
||||||
|
const result = try escapeCSSValue(allocator, "fake(function with spaces");
|
||||||
|
try testing.expectEqual("\"fake(function with spaces\"", result);
|
||||||
|
|
||||||
|
const important_result = extractImportant("rgb(255,0,0) !important");
|
||||||
|
try testing.expect(important_result.is_important);
|
||||||
|
try testing.expectEqual("rgb(255,0,0)", important_result.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "Browser: CSS.StyleDeclaration: performance edge cases - empty and minimal inputs" {
|
||||||
|
try testing.expect(!isNumericWithUnit(""));
|
||||||
|
try testing.expect(!isHexColor(""));
|
||||||
|
try testing.expect(!isMultiValueProperty(""));
|
||||||
|
try testing.expect(!isAlreadyQuoted(""));
|
||||||
|
try testing.expect(!isValidPropertyName(""));
|
||||||
|
try testing.expect(needsQuotes(""));
|
||||||
|
try testing.expect(!CSSKeywords.isKnownKeyword(""));
|
||||||
|
try testing.expect(!CSSKeywords.containsSpecialChar(""));
|
||||||
|
try testing.expect(!CSSKeywords.isValidUnit(""));
|
||||||
|
try testing.expect(!CSSKeywords.startsWithFunction(""));
|
||||||
|
|
||||||
|
try testing.expect(!isNumericWithUnit("a"));
|
||||||
|
try testing.expect(!isHexColor("a"));
|
||||||
|
try testing.expect(!isMultiValueProperty("a"));
|
||||||
|
try testing.expect(!isAlreadyQuoted("a"));
|
||||||
|
try testing.expect(isValidPropertyName("a"));
|
||||||
|
try testing.expect(!needsQuotes("a"));
|
||||||
|
}
|
||||||
97
src/browser/cssom/CSSStyleSheet.zig
Normal file
97
src/browser/cssom/CSSStyleSheet.zig
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const StyleSheet = @import("StyleSheet.zig");
|
||||||
|
const CSSRuleList = @import("CSSRuleList.zig");
|
||||||
|
const CSSImportRule = @import("CSSRule.zig").CSSImportRule;
|
||||||
|
|
||||||
|
const CSSStyleSheet = @This();
|
||||||
|
pub const prototype = *StyleSheet;
|
||||||
|
|
||||||
|
proto: StyleSheet,
|
||||||
|
css_rules: CSSRuleList,
|
||||||
|
owner_rule: ?*CSSImportRule,
|
||||||
|
|
||||||
|
const CSSStyleSheetOpts = struct {
|
||||||
|
base_url: ?[]const u8 = null,
|
||||||
|
// TODO: Suupport media
|
||||||
|
disabled: bool = false,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn constructor(_opts: ?CSSStyleSheetOpts) !CSSStyleSheet {
|
||||||
|
const opts = _opts orelse CSSStyleSheetOpts{};
|
||||||
|
return .{
|
||||||
|
.proto = .{ .disabled = opts.disabled },
|
||||||
|
.css_rules = .constructor(),
|
||||||
|
.owner_rule = null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_ownerRule(_: *CSSStyleSheet) ?*CSSImportRule {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_cssRules(self: *CSSStyleSheet) *CSSRuleList {
|
||||||
|
return &self.css_rules;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _insertRule(self: *CSSStyleSheet, rule: []const u8, _index: ?usize, page: *Page) !usize {
|
||||||
|
const index = _index orelse 0;
|
||||||
|
if (index > self.css_rules.list.items.len) {
|
||||||
|
return error.IndexSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
const arena = page.arena;
|
||||||
|
try self.css_rules.list.insert(arena, index, try arena.dupe(u8, rule));
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _deleteRule(self: *CSSStyleSheet, index: usize) !void {
|
||||||
|
if (index > self.css_rules.list.items.len) {
|
||||||
|
return error.IndexSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
_ = self.css_rules.list.orderedRemove(index);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _replace(self: *CSSStyleSheet, text: []const u8, page: *Page) !Env.Promise {
|
||||||
|
_ = self;
|
||||||
|
_ = text;
|
||||||
|
// TODO: clear self.css_rules
|
||||||
|
// parse text and re-populate self.css_rules
|
||||||
|
|
||||||
|
const resolver = page.main_context.createPromiseResolver();
|
||||||
|
try resolver.resolve({});
|
||||||
|
return resolver.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _replaceSync(self: *CSSStyleSheet, text: []const u8) !void {
|
||||||
|
_ = self;
|
||||||
|
_ = text;
|
||||||
|
// TODO: clear self.css_rules
|
||||||
|
// parse text and re-populate self.css_rules
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: CSS.StyleSheet" {
|
||||||
|
try testing.htmlRunner("cssom/css_stylesheet.html");
|
||||||
|
}
|
||||||
55
src/browser/cssom/StyleSheet.zig
Normal file
55
src/browser/cssom/StyleSheet.zig
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/StyleSheet#specifications
|
||||||
|
const StyleSheet = @This();
|
||||||
|
|
||||||
|
disabled: bool = false,
|
||||||
|
href: []const u8 = "",
|
||||||
|
owner_node: ?*parser.Node = null,
|
||||||
|
parent_stylesheet: ?*StyleSheet = null,
|
||||||
|
title: []const u8 = "",
|
||||||
|
type: []const u8 = "text/css",
|
||||||
|
|
||||||
|
pub fn get_disabled(self: *const StyleSheet) bool {
|
||||||
|
return self.disabled;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_href(self: *const StyleSheet) []const u8 {
|
||||||
|
return self.href;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: media
|
||||||
|
|
||||||
|
pub fn get_ownerNode(self: *const StyleSheet) ?*parser.Node {
|
||||||
|
return self.owner_node;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_parentStyleSheet(self: *const StyleSheet) ?*StyleSheet {
|
||||||
|
return self.parent_stylesheet;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_title(self: *const StyleSheet) []const u8 {
|
||||||
|
return self.title;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_type(self: *const StyleSheet) []const u8 {
|
||||||
|
return self.type;
|
||||||
|
}
|
||||||
25
src/browser/cssom/cssom.zig
Normal file
25
src/browser/cssom/cssom.zig
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
pub const Interfaces = .{
|
||||||
|
@import("StyleSheet.zig"),
|
||||||
|
@import("CSSStyleSheet.zig"),
|
||||||
|
@import("CSSStyleDeclaration.zig"),
|
||||||
|
@import("CSSRuleList.zig"),
|
||||||
|
@import("CSSRule.zig").Interfaces,
|
||||||
|
};
|
||||||
109
src/browser/dom/Animation.zig
Normal file
109
src/browser/dom/Animation.zig
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const JsObject = @import("../env.zig").JsObject;
|
||||||
|
const Promise = @import("../env.zig").Promise;
|
||||||
|
const PromiseResolver = @import("../env.zig").PromiseResolver;
|
||||||
|
|
||||||
|
const Animation = @This();
|
||||||
|
|
||||||
|
effect: ?JsObject,
|
||||||
|
timeline: ?JsObject,
|
||||||
|
ready_resolver: ?PromiseResolver,
|
||||||
|
finished_resolver: ?PromiseResolver,
|
||||||
|
|
||||||
|
pub fn constructor(effect: ?JsObject, timeline: ?JsObject) !Animation {
|
||||||
|
return .{
|
||||||
|
.effect = if (effect) |eo| try eo.persist() else null,
|
||||||
|
.timeline = if (timeline) |to| try to.persist() else null,
|
||||||
|
.ready_resolver = null,
|
||||||
|
.finished_resolver = null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_playState(self: *const Animation) []const u8 {
|
||||||
|
_ = self;
|
||||||
|
return "finished";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_pending(self: *const Animation) bool {
|
||||||
|
_ = self;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_finished(self: *Animation, page: *Page) !Promise {
|
||||||
|
if (self.finished_resolver == null) {
|
||||||
|
const resolver = page.main_context.createPromiseResolver();
|
||||||
|
try resolver.resolve(self);
|
||||||
|
self.finished_resolver = resolver;
|
||||||
|
}
|
||||||
|
return self.finished_resolver.?.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_ready(self: *Animation, page: *Page) !Promise {
|
||||||
|
// never resolved, because we're always "finished"
|
||||||
|
if (self.ready_resolver == null) {
|
||||||
|
const resolver = page.main_context.createPromiseResolver();
|
||||||
|
self.ready_resolver = resolver;
|
||||||
|
}
|
||||||
|
return self.ready_resolver.?.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_effect(self: *const Animation) ?JsObject {
|
||||||
|
return self.effect;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_effect(self: *Animation, effect: JsObject) !void {
|
||||||
|
self.effect = try effect.persist();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_timeline(self: *const Animation) ?JsObject {
|
||||||
|
return self.timeline;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_timeline(self: *Animation, timeline: JsObject) !void {
|
||||||
|
self.timeline = try timeline.persist();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _play(self: *const Animation) void {
|
||||||
|
_ = self;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _pause(self: *const Animation) void {
|
||||||
|
_ = self;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _cancel(self: *const Animation) void {
|
||||||
|
_ = self;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _finish(self: *const Animation) void {
|
||||||
|
_ = self;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _reverse(self: *const Animation) void {
|
||||||
|
_ = self;
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.Animation" {
|
||||||
|
try testing.htmlRunner("dom/animation.html");
|
||||||
|
}
|
||||||
291
src/browser/dom/MessageChannel.zig
Normal file
291
src/browser/dom/MessageChannel.zig
Normal file
@@ -0,0 +1,291 @@
|
|||||||
|
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||||
|
const EventHandler = @import("../events/event.zig").EventHandler;
|
||||||
|
|
||||||
|
const JsObject = Env.JsObject;
|
||||||
|
const Function = Env.Function;
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
|
const MAX_QUEUE_SIZE = 10;
|
||||||
|
|
||||||
|
pub const Interfaces = .{ MessageChannel, MessagePort };
|
||||||
|
|
||||||
|
const MessageChannel = @This();
|
||||||
|
|
||||||
|
port1: *MessagePort,
|
||||||
|
port2: *MessagePort,
|
||||||
|
|
||||||
|
pub fn constructor(page: *Page) !MessageChannel {
|
||||||
|
// Why do we allocate this rather than storing directly in the struct?
|
||||||
|
// https://github.com/lightpanda-io/project/discussions/165
|
||||||
|
const port1 = try page.arena.create(MessagePort);
|
||||||
|
const port2 = try page.arena.create(MessagePort);
|
||||||
|
port1.* = .{
|
||||||
|
.pair = port2,
|
||||||
|
};
|
||||||
|
port2.* = .{
|
||||||
|
.pair = port1,
|
||||||
|
};
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.port1 = port1,
|
||||||
|
.port2 = port2,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_port1(self: *const MessageChannel) *MessagePort {
|
||||||
|
return self.port1;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_port2(self: *const MessageChannel) *MessagePort {
|
||||||
|
return self.port2;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const MessagePort = struct {
|
||||||
|
pub const prototype = *EventTarget;
|
||||||
|
|
||||||
|
proto: parser.EventTargetTBase = .{ .internal_target_type = .message_port },
|
||||||
|
|
||||||
|
pair: *MessagePort,
|
||||||
|
closed: bool = false,
|
||||||
|
started: bool = false,
|
||||||
|
onmessage_cbk: ?Function = null,
|
||||||
|
onmessageerror_cbk: ?Function = null,
|
||||||
|
// This is the queue of messages to dispatch to THIS MessagePort when the
|
||||||
|
// MessagePort is started.
|
||||||
|
queue: std.ArrayListUnmanaged(JsObject) = .empty,
|
||||||
|
|
||||||
|
pub const PostMessageOption = union(enum) {
|
||||||
|
transfer: JsObject,
|
||||||
|
options: Opts,
|
||||||
|
|
||||||
|
pub const Opts = struct {
|
||||||
|
transfer: JsObject,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn _postMessage(self: *MessagePort, obj: JsObject, opts_: ?PostMessageOption, page: *Page) !void {
|
||||||
|
if (self.closed) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (opts_ != null) {
|
||||||
|
log.warn(.web_api, "not implemented", .{ .feature = "MessagePort postMessage options" });
|
||||||
|
return error.NotImplemented;
|
||||||
|
}
|
||||||
|
|
||||||
|
try self.pair.dispatchOrQueue(obj, page.arena);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start impacts the ability to receive a message.
|
||||||
|
// Given pair1 (started) and pair2 (not started), then:
|
||||||
|
// pair2.postMessage('x'); //will be dispatched to pair1.onmessage
|
||||||
|
// pair1.postMessage('x'); // will be queued until pair2 is started
|
||||||
|
pub fn _start(self: *MessagePort) !void {
|
||||||
|
if (self.started) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
self.started = true;
|
||||||
|
for (self.queue.items) |data| {
|
||||||
|
try self.dispatch(data);
|
||||||
|
}
|
||||||
|
// we'll never use this queue again, but it's allocated with an arena
|
||||||
|
// we don't even need to clear it, but it seems a bit safer to do at
|
||||||
|
// least that
|
||||||
|
self.queue.clearRetainingCapacity();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Closing seems to stop both the publishing and receiving of messages,
|
||||||
|
// effectively rendering the channel useless. It cannot be reversed.
|
||||||
|
pub fn _close(self: *MessagePort) void {
|
||||||
|
self.closed = true;
|
||||||
|
self.pair.closed = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_onmessage(self: *MessagePort) ?Function {
|
||||||
|
return self.onmessage_cbk;
|
||||||
|
}
|
||||||
|
pub fn get_onmessageerror(self: *MessagePort) ?Function {
|
||||||
|
return self.onmessageerror_cbk;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_onmessage(self: *MessagePort, listener: EventHandler.Listener, page: *Page) !void {
|
||||||
|
if (self.onmessage_cbk) |cbk| {
|
||||||
|
try self.unregister("message", cbk.id);
|
||||||
|
}
|
||||||
|
self.onmessage_cbk = try self.register(page.arena, "message", listener);
|
||||||
|
|
||||||
|
// When onmessage is set directly, then it's like start() was called.
|
||||||
|
// If addEventListener('message') is used, the app has to call start()
|
||||||
|
// explicitly.
|
||||||
|
try self._start();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_onmessageerror(self: *MessagePort, listener: EventHandler.Listener, page: *Page) !void {
|
||||||
|
if (self.onmessageerror_cbk) |cbk| {
|
||||||
|
try self.unregister("messageerror", cbk.id);
|
||||||
|
}
|
||||||
|
self.onmessageerror_cbk = try self.register(page.arena, "messageerror", listener);
|
||||||
|
}
|
||||||
|
|
||||||
|
// called from our pair. If port1.postMessage("x") is called, then this
|
||||||
|
// will be called on port2.
|
||||||
|
fn dispatchOrQueue(self: *MessagePort, obj: JsObject, arena: Allocator) !void {
|
||||||
|
// our pair should have checked this already
|
||||||
|
std.debug.assert(self.closed == false);
|
||||||
|
|
||||||
|
if (self.started) {
|
||||||
|
return self.dispatch(try obj.persist());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (self.queue.items.len > MAX_QUEUE_SIZE) {
|
||||||
|
// This isn't part of the spec, but not putting a limit is reckless
|
||||||
|
return error.MessageQueueLimit;
|
||||||
|
}
|
||||||
|
return self.queue.append(arena, try obj.persist());
|
||||||
|
}
|
||||||
|
|
||||||
|
fn dispatch(self: *MessagePort, obj: JsObject) !void {
|
||||||
|
// obj is already persisted, don't use `MessageEvent.constructor`, but
|
||||||
|
// go directly to `init`, which assumes persisted objects.
|
||||||
|
var evt = try MessageEvent.init(.{ .data = obj });
|
||||||
|
_ = try parser.eventTargetDispatchEvent(
|
||||||
|
parser.toEventTarget(MessagePort, self),
|
||||||
|
@as(*parser.Event, @ptrCast(&evt)),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn register(
|
||||||
|
self: *MessagePort,
|
||||||
|
alloc: Allocator,
|
||||||
|
typ: []const u8,
|
||||||
|
listener: EventHandler.Listener,
|
||||||
|
) !?Function {
|
||||||
|
const target = @as(*parser.EventTarget, @ptrCast(self));
|
||||||
|
const eh = (try EventHandler.register(alloc, target, typ, listener, null)) orelse unreachable;
|
||||||
|
return eh.callback;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unregister(self: *MessagePort, typ: []const u8, cbk_id: usize) !void {
|
||||||
|
const et = @as(*parser.EventTarget, @ptrCast(self));
|
||||||
|
const lst = try parser.eventTargetHasListener(et, typ, false, cbk_id);
|
||||||
|
if (lst == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
try parser.eventTargetRemoveEventListener(et, typ, lst.?, false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const MessageEvent = struct {
|
||||||
|
const Event = @import("../events/event.zig").Event;
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
|
||||||
|
pub const prototype = *Event;
|
||||||
|
pub const Exception = DOMException;
|
||||||
|
pub const union_make_copy = true;
|
||||||
|
|
||||||
|
proto: parser.Event,
|
||||||
|
data: ?JsObject,
|
||||||
|
|
||||||
|
// You would think if port1 sends to port2, the source would be port2
|
||||||
|
// (which is how I read the documentation), but it appears to always be
|
||||||
|
// null. It can always be set explicitly via the constructor;
|
||||||
|
source: ?JsObject,
|
||||||
|
|
||||||
|
origin: []const u8,
|
||||||
|
|
||||||
|
// This is used for Server-Sent events. Appears to always be an empty
|
||||||
|
// string for MessagePort messages.
|
||||||
|
last_event_id: []const u8,
|
||||||
|
|
||||||
|
// This might be related to the "transfer" option of postMessage which
|
||||||
|
// we don't yet support. For "normal" message, it's always an empty array.
|
||||||
|
// Though it could be set explicitly via the constructor
|
||||||
|
ports: []*MessagePort,
|
||||||
|
|
||||||
|
const Options = struct {
|
||||||
|
data: ?JsObject = null,
|
||||||
|
source: ?JsObject = null,
|
||||||
|
origin: []const u8 = "",
|
||||||
|
lastEventId: []const u8 = "",
|
||||||
|
ports: []*MessagePort = &.{},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn constructor(opts: Options) !MessageEvent {
|
||||||
|
return init(.{
|
||||||
|
.data = if (opts.data) |obj| try obj.persist() else null,
|
||||||
|
.source = if (opts.source) |obj| try obj.persist() else null,
|
||||||
|
.ports = opts.ports,
|
||||||
|
.origin = opts.origin,
|
||||||
|
.lastEventId = opts.lastEventId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is like "constructor", but it assumes JsObjects have already been
|
||||||
|
// persisted. Necessary because this `new MessageEvent()` can be called
|
||||||
|
// directly from JS OR from a port.postMessage. In the latter case, data
|
||||||
|
// may have already been persisted (as it might need to be queued);
|
||||||
|
fn init(opts: Options) !MessageEvent {
|
||||||
|
const event = try parser.eventCreate();
|
||||||
|
defer parser.eventDestroy(event);
|
||||||
|
try parser.eventInit(event, "message", .{});
|
||||||
|
parser.eventSetInternalType(event, .message_event);
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.proto = event.*,
|
||||||
|
.data = opts.data,
|
||||||
|
.source = opts.source,
|
||||||
|
.ports = opts.ports,
|
||||||
|
.origin = opts.origin,
|
||||||
|
.last_event_id = opts.lastEventId,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_data(self: *const MessageEvent) !?JsObject {
|
||||||
|
return self.data;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_origin(self: *const MessageEvent) []const u8 {
|
||||||
|
return self.origin;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_source(self: *const MessageEvent) ?JsObject {
|
||||||
|
return self.source;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_ports(self: *const MessageEvent) []*MessagePort {
|
||||||
|
return self.ports;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_lastEventId(self: *const MessageEvent) []const u8 {
|
||||||
|
return self.last_event_id;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.MessageChannel" {
|
||||||
|
try testing.htmlRunner("dom/message_channel.html");
|
||||||
|
}
|
||||||
75
src/browser/dom/attribute.zig
Normal file
75
src/browser/dom/attribute.zig
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#attr
|
||||||
|
pub const Attr = struct {
|
||||||
|
pub const Self = parser.Attribute;
|
||||||
|
pub const prototype = *Node;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
pub fn get_namespaceURI(self: *parser.Attribute) !?[]const u8 {
|
||||||
|
return try parser.nodeGetNamespace(parser.attributeToNode(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_prefix(self: *parser.Attribute) !?[]const u8 {
|
||||||
|
return try parser.nodeGetPrefix(parser.attributeToNode(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_localName(self: *parser.Attribute) ![]const u8 {
|
||||||
|
return try parser.nodeLocalName(parser.attributeToNode(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_name(self: *parser.Attribute) ![]const u8 {
|
||||||
|
return try parser.attributeGetName(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_value(self: *parser.Attribute) !?[]const u8 {
|
||||||
|
return try parser.attributeGetValue(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_value(self: *parser.Attribute, v: []const u8) !?[]const u8 {
|
||||||
|
if (try parser.attributeGetOwnerElement(self)) |el| {
|
||||||
|
// if possible, go through the element, as that triggers a
|
||||||
|
// DOMAttrModified event (which MutationObserver cares about)
|
||||||
|
const name = try parser.attributeGetName(self);
|
||||||
|
try parser.elementSetAttribute(el, name, v);
|
||||||
|
} else {
|
||||||
|
try parser.attributeSetValue(self, v);
|
||||||
|
}
|
||||||
|
return v;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_ownerElement(self: *parser.Attribute) !?*parser.Element {
|
||||||
|
return try parser.attributeGetOwnerElement(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_specified(_: *parser.Attribute) bool {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Tests
|
||||||
|
// -----
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.Attribute" {
|
||||||
|
try testing.htmlRunner("dom/attribute.html");
|
||||||
|
}
|
||||||
@@ -16,23 +16,13 @@
|
|||||||
// You should have received a copy of the GNU Affero General Public License
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
const std = @import("std");
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
pub fn processMessage(cmd: anytype) !void {
|
const Text = @import("text.zig").Text;
|
||||||
const action = std.meta.stringToEnum(enum {
|
|
||||||
enable,
|
|
||||||
disable,
|
|
||||||
}, cmd.input.action) orelse return error.UnknownMethod;
|
|
||||||
|
|
||||||
switch (action) {
|
// https://dom.spec.whatwg.org/#cdatasection
|
||||||
.enable => return enable(cmd),
|
pub const CDATASection = struct {
|
||||||
.disable => return disable(cmd),
|
pub const Self = parser.CDATASection;
|
||||||
}
|
pub const prototype = *Text;
|
||||||
}
|
pub const subtype = .node;
|
||||||
fn enable(cmd: anytype) !void {
|
};
|
||||||
return cmd.sendResult(null, .{});
|
|
||||||
}
|
|
||||||
|
|
||||||
fn disable(cmd: anytype) !void {
|
|
||||||
return cmd.sendResult(null, .{});
|
|
||||||
}
|
|
||||||
134
src/browser/dom/character_data.zig
Normal file
134
src/browser/dom/character_data.zig
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
const Comment = @import("comment.zig").Comment;
|
||||||
|
const Text = @import("text.zig");
|
||||||
|
const ProcessingInstruction = @import("processing_instruction.zig").ProcessingInstruction;
|
||||||
|
const Element = @import("element.zig").Element;
|
||||||
|
const ElementUnion = @import("element.zig").Union;
|
||||||
|
|
||||||
|
// CharacterData interfaces
|
||||||
|
pub const Interfaces = .{
|
||||||
|
Comment,
|
||||||
|
Text.Text,
|
||||||
|
Text.Interfaces,
|
||||||
|
ProcessingInstruction,
|
||||||
|
};
|
||||||
|
|
||||||
|
// CharacterData implementation
|
||||||
|
pub const CharacterData = struct {
|
||||||
|
pub const Self = parser.CharacterData;
|
||||||
|
pub const prototype = *Node;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
// JS funcs
|
||||||
|
// --------
|
||||||
|
|
||||||
|
// Read attributes
|
||||||
|
|
||||||
|
pub fn get_length(self: *parser.CharacterData) !u32 {
|
||||||
|
return try parser.characterDataLength(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_nextElementSibling(self: *parser.CharacterData) !?ElementUnion {
|
||||||
|
const res = try parser.nodeNextElementSibling(parser.characterDataToNode(self));
|
||||||
|
if (res == null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return try Element.toInterface(res.?);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_previousElementSibling(self: *parser.CharacterData) !?ElementUnion {
|
||||||
|
const res = try parser.nodePreviousElementSibling(parser.characterDataToNode(self));
|
||||||
|
if (res == null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return try Element.toInterface(res.?);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read/Write attributes
|
||||||
|
|
||||||
|
pub fn get_data(self: *parser.CharacterData) ![]const u8 {
|
||||||
|
return try parser.characterDataData(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_data(self: *parser.CharacterData, data: []const u8) !void {
|
||||||
|
return try parser.characterDataSetData(self, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
// JS methods
|
||||||
|
// ----------
|
||||||
|
|
||||||
|
pub fn _appendData(self: *parser.CharacterData, data: []const u8) !void {
|
||||||
|
return try parser.characterDataAppendData(self, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _deleteData(self: *parser.CharacterData, offset: u32, count: u32) !void {
|
||||||
|
return try parser.characterDataDeleteData(self, offset, count);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _insertData(self: *parser.CharacterData, offset: u32, data: []const u8) !void {
|
||||||
|
return try parser.characterDataInsertData(self, offset, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _replaceData(self: *parser.CharacterData, offset: u32, count: u32, data: []const u8) !void {
|
||||||
|
return try parser.characterDataReplaceData(self, offset, count, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _substringData(self: *parser.CharacterData, offset: u32, count: u32) ![]const u8 {
|
||||||
|
return try parser.characterDataSubstringData(self, offset, count);
|
||||||
|
}
|
||||||
|
|
||||||
|
// netsurf's CharacterData (text, comment) doesn't implement the
|
||||||
|
// dom_node_get_attributes and thus will crash if we try to call nodeIsEqualNode.
|
||||||
|
pub fn _isEqualNode(self: *parser.CharacterData, other_node: *parser.Node) !bool {
|
||||||
|
if (try parser.nodeType(@ptrCast(@alignCast(self))) != try parser.nodeType(other_node)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const other: *parser.CharacterData = @ptrCast(other_node);
|
||||||
|
if (std.mem.eql(u8, try get_data(self), try get_data(other)) == false) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _before(self: *parser.CharacterData, nodes: []const Node.NodeOrText) !void {
|
||||||
|
const ref_node = parser.characterDataToNode(self);
|
||||||
|
return Node.before(ref_node, nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _after(self: *parser.CharacterData, nodes: []const Node.NodeOrText) !void {
|
||||||
|
const ref_node = parser.characterDataToNode(self);
|
||||||
|
return Node.after(ref_node, nodes);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Tests
|
||||||
|
// -----
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.CharacterData" {
|
||||||
|
try testing.htmlRunner("dom/character_data.html");
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
//
|
//
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
@@ -16,28 +16,30 @@
|
|||||||
// You should have received a copy of the GNU Affero General Public License
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
const js = @import("../../js/js.zig");
|
const parser = @import("../netsurf.zig");
|
||||||
const Page = @import("../../Page.zig");
|
|
||||||
|
|
||||||
const CData = @import("../CData.zig");
|
const CharacterData = @import("character_data.zig").CharacterData;
|
||||||
|
|
||||||
const Comment = @This();
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
_proto: *CData,
|
// https://dom.spec.whatwg.org/#interface-comment
|
||||||
|
pub const Comment = struct {
|
||||||
|
pub const Self = parser.Comment;
|
||||||
|
pub const prototype = *CharacterData;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
pub fn init(content: ?[]const u8, page: *Page) !*Comment {
|
pub fn constructor(data: ?[]const u8, page: *const Page) !*parser.Comment {
|
||||||
const node = try page.createComment(content orelse "");
|
return parser.documentCreateComment(
|
||||||
return node.as(Comment);
|
parser.documentHTMLToDocument(page.window.document),
|
||||||
}
|
data orelse "",
|
||||||
|
);
|
||||||
pub const JsApi = struct {
|
}
|
||||||
pub const bridge = js.Bridge(Comment);
|
|
||||||
|
|
||||||
pub const Meta = struct {
|
|
||||||
pub const name = "Comment";
|
|
||||||
pub const prototype_chain = bridge.prototypeChain();
|
|
||||||
pub var class_id: bridge.ClassId = undefined;
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const constructor = bridge.constructor(Comment.init, .{});
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Tests
|
||||||
|
// -----
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.Comment" {
|
||||||
|
try testing.htmlRunner("dom/comment.html");
|
||||||
|
}
|
||||||
80
src/browser/dom/css.zig
Normal file
80
src/browser/dom/css.zig
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const css = @import("../css/css.zig");
|
||||||
|
const Node = @import("../css/libdom.zig").Node;
|
||||||
|
const NodeList = @import("nodelist.zig").NodeList;
|
||||||
|
|
||||||
|
const MatchFirst = struct {
|
||||||
|
n: ?*parser.Node = null,
|
||||||
|
|
||||||
|
pub fn match(m: *MatchFirst, n: Node) !void {
|
||||||
|
m.n = n.node;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn querySelector(alloc: std.mem.Allocator, n: *parser.Node, selector: []const u8) !?*parser.Node {
|
||||||
|
const ps = try css.parse(alloc, selector, .{ .accept_pseudo_elts = true });
|
||||||
|
defer ps.deinit(alloc);
|
||||||
|
|
||||||
|
var m = MatchFirst{};
|
||||||
|
|
||||||
|
_ = try css.matchFirst(&ps, Node{ .node = n }, &m);
|
||||||
|
return m.n;
|
||||||
|
}
|
||||||
|
|
||||||
|
const MatchAll = struct {
|
||||||
|
alloc: std.mem.Allocator,
|
||||||
|
nl: NodeList,
|
||||||
|
|
||||||
|
fn init(alloc: std.mem.Allocator) MatchAll {
|
||||||
|
return .{
|
||||||
|
.alloc = alloc,
|
||||||
|
.nl = .{},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn deinit(m: *MatchAll) void {
|
||||||
|
m.nl.deinit(m.alloc);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn match(m: *MatchAll, n: Node) !void {
|
||||||
|
try m.nl.append(m.alloc, n.node);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn toOwnedList(m: *MatchAll) NodeList {
|
||||||
|
// reset it.
|
||||||
|
defer m.nl = .{};
|
||||||
|
return m.nl;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn querySelectorAll(alloc: std.mem.Allocator, n: *parser.Node, selector: []const u8) !NodeList {
|
||||||
|
const ps = try css.parse(alloc, selector, .{ .accept_pseudo_elts = true });
|
||||||
|
defer ps.deinit(alloc);
|
||||||
|
|
||||||
|
var m = MatchAll.init(alloc);
|
||||||
|
defer m.deinit();
|
||||||
|
|
||||||
|
try css.matchAll(&ps, Node{ .node = n }, &m);
|
||||||
|
return m.toOwnedList();
|
||||||
|
}
|
||||||
331
src/browser/dom/document.zig
Normal file
331
src/browser/dom/document.zig
Normal file
@@ -0,0 +1,331 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
const NodeList = @import("nodelist.zig").NodeList;
|
||||||
|
const NodeUnion = @import("node.zig").Union;
|
||||||
|
|
||||||
|
const collection = @import("html_collection.zig");
|
||||||
|
const css = @import("css.zig");
|
||||||
|
|
||||||
|
const Element = @import("element.zig").Element;
|
||||||
|
const ElementUnion = @import("element.zig").Union;
|
||||||
|
const TreeWalker = @import("tree_walker.zig").TreeWalker;
|
||||||
|
const CSSStyleSheet = @import("../cssom/CSSStyleSheet.zig");
|
||||||
|
const NodeIterator = @import("node_iterator.zig").NodeIterator;
|
||||||
|
const Range = @import("range.zig").Range;
|
||||||
|
|
||||||
|
const CustomEvent = @import("../events/custom_event.zig").CustomEvent;
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
|
||||||
|
const DOMImplementation = @import("implementation.zig").DOMImplementation;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#document
|
||||||
|
pub const Document = struct {
|
||||||
|
pub const Self = parser.Document;
|
||||||
|
pub const prototype = *Node;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
pub fn constructor(page: *const Page) !*parser.DocumentHTML {
|
||||||
|
const doc = try parser.documentCreateDocument(
|
||||||
|
try parser.documentHTMLGetTitle(page.window.document),
|
||||||
|
);
|
||||||
|
|
||||||
|
// we have to work w/ document instead of html document.
|
||||||
|
const ddoc = parser.documentHTMLToDocument(doc);
|
||||||
|
const ccur = parser.documentHTMLToDocument(page.window.document);
|
||||||
|
try parser.documentSetDocumentURI(ddoc, try parser.documentGetDocumentURI(ccur));
|
||||||
|
try parser.documentSetInputEncoding(ddoc, try parser.documentGetInputEncoding(ccur));
|
||||||
|
|
||||||
|
return doc;
|
||||||
|
}
|
||||||
|
|
||||||
|
// JS funcs
|
||||||
|
// --------
|
||||||
|
pub fn get_implementation(_: *parser.Document) DOMImplementation {
|
||||||
|
return DOMImplementation{};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_documentElement(self: *parser.Document) !?ElementUnion {
|
||||||
|
const e = try parser.documentGetDocumentElement(self);
|
||||||
|
if (e == null) return null;
|
||||||
|
return try Element.toInterface(e.?);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_documentURI(self: *parser.Document) ![]const u8 {
|
||||||
|
return try parser.documentGetDocumentURI(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_URL(self: *parser.Document) ![]const u8 {
|
||||||
|
return try get_documentURI(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO implement contentType
|
||||||
|
pub fn get_contentType(self: *parser.Document) []const u8 {
|
||||||
|
_ = self;
|
||||||
|
return "text/html";
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO implement compactMode
|
||||||
|
pub fn get_compatMode(self: *parser.Document) []const u8 {
|
||||||
|
_ = self;
|
||||||
|
return "CSS1Compat";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_characterSet(self: *parser.Document) ![]const u8 {
|
||||||
|
return try parser.documentGetInputEncoding(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// alias of get_characterSet
|
||||||
|
pub fn get_charset(self: *parser.Document) ![]const u8 {
|
||||||
|
return try get_characterSet(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// alias of get_characterSet
|
||||||
|
pub fn get_inputEncoding(self: *parser.Document) ![]const u8 {
|
||||||
|
return try get_characterSet(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_doctype(self: *parser.Document) !?*parser.DocumentType {
|
||||||
|
return try parser.documentGetDoctype(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createEvent(_: *parser.Document, eventCstr: []const u8) !union(enum) {
|
||||||
|
base: *parser.Event,
|
||||||
|
custom: CustomEvent,
|
||||||
|
} {
|
||||||
|
const eqlIgnoreCase = std.ascii.eqlIgnoreCase;
|
||||||
|
|
||||||
|
if (eqlIgnoreCase(eventCstr, "Event") or eqlIgnoreCase(eventCstr, "Events") or eqlIgnoreCase(eventCstr, "HTMLEvents")) {
|
||||||
|
return .{ .base = try parser.eventCreate() };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not documented in MDN but supported in Chrome.
|
||||||
|
// This is actually both instance of `Event` and `CustomEvent`.
|
||||||
|
if (std.ascii.eqlIgnoreCase(eventCstr, "CustomEvent")) {
|
||||||
|
return .{ .custom = try CustomEvent.constructor(eventCstr, null) };
|
||||||
|
}
|
||||||
|
|
||||||
|
return error.NotSupported;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getElementById(self: *parser.Document, id: []const u8) !?ElementUnion {
|
||||||
|
const e = try parser.documentGetElementById(self, id) orelse return null;
|
||||||
|
return try Element.toInterface(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createElement(self: *parser.Document, tag_name: []const u8) !ElementUnion {
|
||||||
|
// The element’s namespace is the HTML namespace when document is an HTML document
|
||||||
|
// https://dom.spec.whatwg.org/#ref-for-dom-document-createelement%E2%91%A0
|
||||||
|
const e = try parser.documentCreateElementNS(self, "http://www.w3.org/1999/xhtml", tag_name);
|
||||||
|
return Element.toInterface(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createElementNS(self: *parser.Document, ns: []const u8, tag_name: []const u8) !ElementUnion {
|
||||||
|
const e = try parser.documentCreateElementNS(self, ns, tag_name);
|
||||||
|
return try Element.toInterface(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
// We can't simply use libdom dom_document_get_elements_by_tag_name here.
|
||||||
|
// Indeed, netsurf implemented a previous dom spec when
|
||||||
|
// getElementsByTagName returned a NodeList.
|
||||||
|
// But since
|
||||||
|
// https://github.com/whatwg/dom/commit/190700b7c12ecfd3b5ebdb359ab1d6ea9cbf7749
|
||||||
|
// the spec changed to return an HTMLCollection instead.
|
||||||
|
// That's why we reimplemented getElementsByTagName by using an
|
||||||
|
// HTMLCollection in zig here.
|
||||||
|
pub fn _getElementsByTagName(
|
||||||
|
self: *parser.Document,
|
||||||
|
tag_name: []const u8,
|
||||||
|
page: *Page,
|
||||||
|
) !collection.HTMLCollection {
|
||||||
|
return try collection.HTMLCollectionByTagName(page.arena, parser.documentToNode(self), tag_name, .{
|
||||||
|
.include_root = true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getElementsByClassName(
|
||||||
|
self: *parser.Document,
|
||||||
|
classNames: []const u8,
|
||||||
|
page: *Page,
|
||||||
|
) !collection.HTMLCollection {
|
||||||
|
return try collection.HTMLCollectionByClassName(page.arena, parser.documentToNode(self), classNames, .{
|
||||||
|
.include_root = true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createDocumentFragment(self: *parser.Document) !*parser.DocumentFragment {
|
||||||
|
return try parser.documentCreateDocumentFragment(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createTextNode(self: *parser.Document, data: []const u8) !*parser.Text {
|
||||||
|
return try parser.documentCreateTextNode(self, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createCDATASection(self: *parser.Document, data: []const u8) !*parser.CDATASection {
|
||||||
|
return try parser.documentCreateCDATASection(self, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createComment(self: *parser.Document, data: []const u8) !*parser.Comment {
|
||||||
|
return try parser.documentCreateComment(self, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createProcessingInstruction(self: *parser.Document, target: []const u8, data: []const u8) !*parser.ProcessingInstruction {
|
||||||
|
return try parser.documentCreateProcessingInstruction(self, target, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _importNode(self: *parser.Document, node: *parser.Node, deep: ?bool) !NodeUnion {
|
||||||
|
const n = try parser.documentImportNode(self, node, deep orelse false);
|
||||||
|
return try Node.toInterface(n);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _adoptNode(self: *parser.Document, node: *parser.Node) !NodeUnion {
|
||||||
|
const n = try parser.documentAdoptNode(self, node);
|
||||||
|
return try Node.toInterface(n);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createAttribute(self: *parser.Document, name: []const u8) !*parser.Attribute {
|
||||||
|
return try parser.documentCreateAttribute(self, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createAttributeNS(self: *parser.Document, ns: []const u8, qname: []const u8) !*parser.Attribute {
|
||||||
|
return try parser.documentCreateAttributeNS(self, ns, qname);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParentNode
|
||||||
|
// https://dom.spec.whatwg.org/#parentnode
|
||||||
|
pub fn get_children(self: *parser.Document) !collection.HTMLCollection {
|
||||||
|
return collection.HTMLCollectionChildren(parser.documentToNode(self), .{
|
||||||
|
.include_root = false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_firstElementChild(self: *parser.Document) !?ElementUnion {
|
||||||
|
const elt = try parser.documentGetDocumentElement(self) orelse return null;
|
||||||
|
return try Element.toInterface(elt);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_lastElementChild(self: *parser.Document) !?ElementUnion {
|
||||||
|
const elt = try parser.documentGetDocumentElement(self) orelse return null;
|
||||||
|
return try Element.toInterface(elt);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_childElementCount(self: *parser.Document) !u32 {
|
||||||
|
_ = try parser.documentGetDocumentElement(self) orelse return 0;
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _querySelector(self: *parser.Document, selector: []const u8, page: *Page) !?ElementUnion {
|
||||||
|
if (selector.len == 0) return null;
|
||||||
|
|
||||||
|
const n = try css.querySelector(page.call_arena, parser.documentToNode(self), selector);
|
||||||
|
|
||||||
|
if (n == null) return null;
|
||||||
|
|
||||||
|
return try Element.toInterface(parser.nodeToElement(n.?));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _querySelectorAll(self: *parser.Document, selector: []const u8, page: *Page) !NodeList {
|
||||||
|
return css.querySelectorAll(page.arena, parser.documentToNode(self), selector);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _prepend(self: *parser.Document, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.prepend(parser.documentToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _append(self: *parser.Document, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.append(parser.documentToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _replaceChildren(self: *parser.Document, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.replaceChildren(parser.documentToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createTreeWalker(_: *parser.Document, root: *parser.Node, what_to_show: ?TreeWalker.WhatToShow, filter: ?TreeWalker.TreeWalkerOpts) !TreeWalker {
|
||||||
|
return TreeWalker.init(root, what_to_show, filter);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createNodeIterator(_: *parser.Document, root: *parser.Node, what_to_show: ?NodeIterator.WhatToShow, filter: ?NodeIterator.NodeIteratorOpts) !NodeIterator {
|
||||||
|
return NodeIterator.init(root, what_to_show, filter);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn getActiveElement(self: *parser.Document, page: *Page) !?*parser.Element {
|
||||||
|
if (page.getNodeState(@ptrCast(@alignCast(self)))) |state| {
|
||||||
|
if (state.active_element) |ae| {
|
||||||
|
return ae;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (try parser.documentHTMLBody(page.window.document)) |body| {
|
||||||
|
return @ptrCast(@alignCast(body));
|
||||||
|
}
|
||||||
|
|
||||||
|
return try parser.documentGetDocumentElement(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_activeElement(self: *parser.Document, page: *Page) !?ElementUnion {
|
||||||
|
const ae = (try getActiveElement(self, page)) orelse return null;
|
||||||
|
return try Element.toInterface(ae);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: some elements can't be focused, like if they're disabled
|
||||||
|
// but there doesn't seem to be a generic way to check this. For example
|
||||||
|
// we could look for the "disabled" attribute, but that's only meaningful
|
||||||
|
// on certain types, and libdom's vtable doesn't seem to expose this.
|
||||||
|
pub fn setFocus(self: *parser.Document, e: *parser.ElementHTML, page: *Page) !void {
|
||||||
|
const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
|
||||||
|
state.active_element = @ptrCast(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createRange(_: *parser.Document, page: *Page) Range {
|
||||||
|
return Range.constructor(page);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: dummy implementation
|
||||||
|
pub fn get_styleSheets(_: *parser.Document) []CSSStyleSheet {
|
||||||
|
return &.{};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_adoptedStyleSheets(self: *parser.Document, page: *Page) !Env.JsObject {
|
||||||
|
const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
|
||||||
|
if (state.adopted_style_sheets) |obj| {
|
||||||
|
return obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
const obj = try page.main_context.newArray(0).persist();
|
||||||
|
state.adopted_style_sheets = obj;
|
||||||
|
return obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_adoptedStyleSheets(self: *parser.Document, sheets: Env.JsObject, page: *Page) !void {
|
||||||
|
const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
|
||||||
|
state.adopted_style_sheets = try sheets.persist();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.Document" {
|
||||||
|
try testing.htmlRunner("dom/document.html");
|
||||||
|
}
|
||||||
96
src/browser/dom/document_fragment.zig
Normal file
96
src/browser/dom/document_fragment.zig
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const css = @import("css.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const NodeList = @import("nodelist.zig").NodeList;
|
||||||
|
const Element = @import("element.zig").Element;
|
||||||
|
const ElementUnion = @import("element.zig").Union;
|
||||||
|
const collection = @import("html_collection.zig");
|
||||||
|
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#documentfragment
|
||||||
|
pub const DocumentFragment = struct {
|
||||||
|
pub const Self = parser.DocumentFragment;
|
||||||
|
pub const prototype = *Node;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
pub fn constructor(page: *const Page) !*parser.DocumentFragment {
|
||||||
|
return parser.documentCreateDocumentFragment(
|
||||||
|
parser.documentHTMLToDocument(page.window.document),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _isEqualNode(self: *parser.DocumentFragment, other_node: *parser.Node) !bool {
|
||||||
|
const other_type = try parser.nodeType(other_node);
|
||||||
|
if (other_type != .document_fragment) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
_ = self;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _prepend(self: *parser.DocumentFragment, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.prepend(parser.documentFragmentToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _append(self: *parser.DocumentFragment, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.append(parser.documentFragmentToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _replaceChildren(self: *parser.DocumentFragment, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.replaceChildren(parser.documentFragmentToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _querySelector(self: *parser.DocumentFragment, selector: []const u8, page: *Page) !?ElementUnion {
|
||||||
|
if (selector.len == 0) return null;
|
||||||
|
|
||||||
|
const n = try css.querySelector(page.call_arena, parser.documentFragmentToNode(self), selector);
|
||||||
|
|
||||||
|
if (n == null) return null;
|
||||||
|
|
||||||
|
return try Element.toInterface(parser.nodeToElement(n.?));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _querySelectorAll(self: *parser.DocumentFragment, selector: []const u8, page: *Page) !NodeList {
|
||||||
|
return css.querySelectorAll(page.arena, parser.documentFragmentToNode(self), selector);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_childElementCount(self: *parser.DocumentFragment) !u32 {
|
||||||
|
var children = try get_children(self);
|
||||||
|
return children.get_length();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_children(self: *parser.DocumentFragment) !collection.HTMLCollection {
|
||||||
|
return collection.HTMLCollectionChildren(parser.documentFragmentToNode(self), .{
|
||||||
|
.include_root = false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getElementById(self: *parser.DocumentFragment, id: []const u8) !?ElementUnion {
|
||||||
|
const e = try parser.nodeGetElementById(@ptrCast(@alignCast(self)), id) orelse return null;
|
||||||
|
return try Element.toInterface(e);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.DocumentFragment" {
|
||||||
|
try testing.htmlRunner("dom/document_fragment.html");
|
||||||
|
}
|
||||||
67
src/browser/dom/document_type.zig
Normal file
67
src/browser/dom/document_type.zig
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#documenttype
|
||||||
|
pub const DocumentType = struct {
|
||||||
|
pub const Self = parser.DocumentType;
|
||||||
|
pub const prototype = *Node;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
pub fn get_name(self: *parser.DocumentType) ![]const u8 {
|
||||||
|
return try parser.documentTypeGetName(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_publicId(self: *parser.DocumentType) ![]const u8 {
|
||||||
|
return try parser.documentTypeGetPublicId(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_systemId(self: *parser.DocumentType) ![]const u8 {
|
||||||
|
return try parser.documentTypeGetSystemId(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// netsurf's DocumentType doesn't implement the dom_node_get_attributes
|
||||||
|
// and thus will crash if we try to call nodeIsEqualNode.
|
||||||
|
pub fn _isEqualNode(self: *parser.DocumentType, other_node: *parser.Node) !bool {
|
||||||
|
if (try parser.nodeType(other_node) != .document_type) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const other: *parser.DocumentType = @ptrCast(other_node);
|
||||||
|
if (std.mem.eql(u8, try get_name(self), try get_name(other)) == false) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (std.mem.eql(u8, try get_publicId(self), try get_publicId(other)) == false) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (std.mem.eql(u8, try get_systemId(self), try get_systemId(other)) == false) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.DocumentType" {
|
||||||
|
try testing.htmlRunner("dom/document_type.html");
|
||||||
|
}
|
||||||
57
src/browser/dom/dom.zig
Normal file
57
src/browser/dom/dom.zig
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
const EventTarget = @import("event_target.zig").EventTarget;
|
||||||
|
const DOMImplementation = @import("implementation.zig").DOMImplementation;
|
||||||
|
const NamedNodeMap = @import("namednodemap.zig").NamedNodeMap;
|
||||||
|
const DOMTokenList = @import("token_list.zig");
|
||||||
|
const NodeList = @import("nodelist.zig");
|
||||||
|
const Node = @import("node.zig");
|
||||||
|
const ResizeObserver = @import("resize_observer.zig");
|
||||||
|
const MutationObserver = @import("mutation_observer.zig");
|
||||||
|
const IntersectionObserver = @import("intersection_observer.zig");
|
||||||
|
const DOMParser = @import("dom_parser.zig").DOMParser;
|
||||||
|
const TreeWalker = @import("tree_walker.zig").TreeWalker;
|
||||||
|
const NodeIterator = @import("node_iterator.zig").NodeIterator;
|
||||||
|
const NodeFilter = @import("node_filter.zig").NodeFilter;
|
||||||
|
const PerformanceObserver = @import("performance_observer.zig").PerformanceObserver;
|
||||||
|
|
||||||
|
pub const Interfaces = .{
|
||||||
|
DOMException,
|
||||||
|
EventTarget,
|
||||||
|
DOMImplementation,
|
||||||
|
NamedNodeMap,
|
||||||
|
NamedNodeMap.Iterator,
|
||||||
|
DOMTokenList.Interfaces,
|
||||||
|
NodeList.Interfaces,
|
||||||
|
Node.Node,
|
||||||
|
Node.Interfaces,
|
||||||
|
ResizeObserver.Interfaces,
|
||||||
|
MutationObserver.Interfaces,
|
||||||
|
IntersectionObserver.Interfaces,
|
||||||
|
DOMParser,
|
||||||
|
TreeWalker,
|
||||||
|
NodeIterator,
|
||||||
|
NodeFilter,
|
||||||
|
@import("performance.zig").Interfaces,
|
||||||
|
PerformanceObserver,
|
||||||
|
@import("range.zig").Interfaces,
|
||||||
|
@import("Animation.zig"),
|
||||||
|
@import("MessageChannel.zig").Interfaces,
|
||||||
|
};
|
||||||
41
src/browser/dom/dom_parser.zig
Normal file
41
src/browser/dom/dom_parser.zig
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/DOMParser
|
||||||
|
pub const DOMParser = struct {
|
||||||
|
pub fn constructor() !DOMParser {
|
||||||
|
return .{};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _parseFromString(_: *DOMParser, string: []const u8, mime_type: []const u8) !*parser.DocumentHTML {
|
||||||
|
if (!std.mem.eql(u8, mime_type, "text/html")) {
|
||||||
|
// TODO: Support XML
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
return try parser.documentHTMLParseFromStr(string);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.Parser" {
|
||||||
|
try testing.htmlRunner("dom/dom_parser.html");
|
||||||
|
}
|
||||||
609
src/browser/dom/element.zig
Normal file
609
src/browser/dom/element.zig
Normal file
@@ -0,0 +1,609 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const css = @import("css.zig");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const dump = @import("../dump.zig");
|
||||||
|
const collection = @import("html_collection.zig");
|
||||||
|
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
const Walker = @import("walker.zig").WalkerDepthFirst;
|
||||||
|
const NodeList = @import("nodelist.zig").NodeList;
|
||||||
|
const HTMLElem = @import("../html/elements.zig");
|
||||||
|
const ShadowRoot = @import("../dom/shadow_root.zig").ShadowRoot;
|
||||||
|
|
||||||
|
const Animation = @import("Animation.zig");
|
||||||
|
const JsObject = @import("../env.zig").JsObject;
|
||||||
|
|
||||||
|
pub const Union = @import("../html/elements.zig").Union;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#element
|
||||||
|
pub const Element = struct {
|
||||||
|
pub const Self = parser.Element;
|
||||||
|
pub const prototype = *Node;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
pub const DOMRect = struct {
|
||||||
|
x: f64,
|
||||||
|
y: f64,
|
||||||
|
width: f64,
|
||||||
|
height: f64,
|
||||||
|
bottom: f64,
|
||||||
|
right: f64,
|
||||||
|
top: f64,
|
||||||
|
left: f64,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn toInterface(e: *parser.Element) !Union {
|
||||||
|
return toInterfaceT(Union, e);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn toInterfaceT(comptime T: type, e: *parser.Element) !T {
|
||||||
|
const tagname = try parser.elementGetTagName(e) orelse {
|
||||||
|
// If the owner's document is HTML, assume we have an HTMLElement.
|
||||||
|
const doc = try parser.nodeOwnerDocument(parser.elementToNode(e));
|
||||||
|
if (doc != null and !doc.?.is_html) {
|
||||||
|
return .{ .HTMLElement = @as(*parser.ElementHTML, @ptrCast(e)) };
|
||||||
|
}
|
||||||
|
|
||||||
|
return .{ .Element = e };
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO SVGElement and MathML are not supported yet.
|
||||||
|
|
||||||
|
const tag = parser.Tag.fromString(tagname) catch {
|
||||||
|
// If the owner's document is HTML, assume we have an HTMLElement.
|
||||||
|
const doc = try parser.nodeOwnerDocument(parser.elementToNode(e));
|
||||||
|
if (doc != null and doc.?.is_html) {
|
||||||
|
return .{ .HTMLElement = @as(*parser.ElementHTML, @ptrCast(e)) };
|
||||||
|
}
|
||||||
|
|
||||||
|
return .{ .Element = e };
|
||||||
|
};
|
||||||
|
|
||||||
|
return HTMLElem.toInterfaceFromTag(T, e, tag);
|
||||||
|
}
|
||||||
|
|
||||||
|
// JS funcs
|
||||||
|
// --------
|
||||||
|
|
||||||
|
pub fn get_namespaceURI(self: *parser.Element) !?[]const u8 {
|
||||||
|
return try parser.nodeGetNamespace(parser.elementToNode(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_prefix(self: *parser.Element) !?[]const u8 {
|
||||||
|
return try parser.nodeGetPrefix(parser.elementToNode(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_localName(self: *parser.Element) ![]const u8 {
|
||||||
|
return try parser.nodeLocalName(parser.elementToNode(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_tagName(self: *parser.Element) ![]const u8 {
|
||||||
|
return try parser.nodeName(parser.elementToNode(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_id(self: *parser.Element) ![]const u8 {
|
||||||
|
return try parser.elementGetAttribute(self, "id") orelse "";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_id(self: *parser.Element, id: []const u8) !void {
|
||||||
|
return try parser.elementSetAttribute(self, "id", id);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_className(self: *parser.Element) ![]const u8 {
|
||||||
|
return try parser.elementGetAttribute(self, "class") orelse "";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_className(self: *parser.Element, class: []const u8) !void {
|
||||||
|
return try parser.elementSetAttribute(self, "class", class);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_slot(self: *parser.Element) ![]const u8 {
|
||||||
|
return try parser.elementGetAttribute(self, "slot") orelse "";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_slot(self: *parser.Element, slot: []const u8) !void {
|
||||||
|
return try parser.elementSetAttribute(self, "slot", slot);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_classList(self: *parser.Element) !*parser.TokenList {
|
||||||
|
return try parser.tokenListCreate(self, "class");
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_attributes(self: *parser.Element) !*parser.NamedNodeMap {
|
||||||
|
// An element must have non-nil attributes.
|
||||||
|
return try parser.nodeGetAttributes(parser.elementToNode(self)) orelse unreachable;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_innerHTML(self: *parser.Element, page: *Page) ![]const u8 {
|
||||||
|
var aw = std.Io.Writer.Allocating.init(page.call_arena);
|
||||||
|
try dump.writeChildren(parser.elementToNode(self), .{}, &aw.writer);
|
||||||
|
return aw.written();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_outerHTML(self: *parser.Element, page: *Page) ![]const u8 {
|
||||||
|
var aw = std.Io.Writer.Allocating.init(page.call_arena);
|
||||||
|
try dump.writeNode(parser.elementToNode(self), .{}, &aw.writer);
|
||||||
|
return aw.written();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_innerHTML(self: *parser.Element, str: []const u8, page: *Page) !void {
|
||||||
|
const node = parser.elementToNode(self);
|
||||||
|
const doc = try parser.nodeOwnerDocument(node) orelse return parser.DOMError.WrongDocument;
|
||||||
|
// parse the fragment
|
||||||
|
const fragment = try parser.documentParseFragmentFromStr(doc, str);
|
||||||
|
|
||||||
|
// remove existing children
|
||||||
|
try Node.removeChildren(node);
|
||||||
|
|
||||||
|
const fragment_node = parser.documentFragmentToNode(fragment);
|
||||||
|
|
||||||
|
// I'm not sure what the exact behavior is supposed to be. Initially,
|
||||||
|
// we were only copying the body of the document fragment. But it seems
|
||||||
|
// like head elements should be copied too. Specifically, some sites
|
||||||
|
// create script tags via innerHTML, which we need to capture.
|
||||||
|
// If you play with this in a browser, you should notice that the
|
||||||
|
// behavior is different depending on whether you're in a blank page
|
||||||
|
// or an actual document. In a blank page, something like:
|
||||||
|
// x.innerHTML = '<script></script>';
|
||||||
|
// does _not_ create an empty script, but in a real page, it does. Weird.
|
||||||
|
const html = try parser.nodeFirstChild(fragment_node) orelse return;
|
||||||
|
const head = try parser.nodeFirstChild(html) orelse return;
|
||||||
|
const body = try parser.nodeNextSibling(head) orelse return;
|
||||||
|
|
||||||
|
if (try parser.elementTag(self) == .template) {
|
||||||
|
// HTMLElementTemplate is special. We don't append these as children
|
||||||
|
// of the template, but instead set its content as the body of the
|
||||||
|
// fragment. Simpler to do this by copying the body children into
|
||||||
|
// a new fragment
|
||||||
|
const clean = try parser.documentCreateDocumentFragment(doc);
|
||||||
|
const children = try parser.nodeGetChildNodes(body);
|
||||||
|
const ln = try parser.nodeListLength(children);
|
||||||
|
for (0..ln) |_| {
|
||||||
|
// always index 0, because nodeAppendChild moves the node out of
|
||||||
|
// the nodeList and into the new tree
|
||||||
|
const child = try parser.nodeListItem(children, 0) orelse continue;
|
||||||
|
_ = try parser.nodeAppendChild(@ptrCast(@alignCast(clean)), child);
|
||||||
|
}
|
||||||
|
|
||||||
|
const state = try page.getOrCreateNodeState(node);
|
||||||
|
state.template_content = clean;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// For any node other than a template, we copy the head and body elements
|
||||||
|
// as child nodes of the element
|
||||||
|
{
|
||||||
|
// First, copy some of the head element
|
||||||
|
const children = try parser.nodeGetChildNodes(head);
|
||||||
|
const ln = try parser.nodeListLength(children);
|
||||||
|
for (0..ln) |_| {
|
||||||
|
// always index 0, because nodeAppendChild moves the node out of
|
||||||
|
// the nodeList and into the new tree
|
||||||
|
const child = try parser.nodeListItem(children, 0) orelse continue;
|
||||||
|
_ = try parser.nodeAppendChild(node, child);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const children = try parser.nodeGetChildNodes(body);
|
||||||
|
const ln = try parser.nodeListLength(children);
|
||||||
|
for (0..ln) |_| {
|
||||||
|
// always index 0, because nodeAppendChild moves the node out of
|
||||||
|
// the nodeList and into the new tree
|
||||||
|
const child = try parser.nodeListItem(children, 0) orelse continue;
|
||||||
|
_ = try parser.nodeAppendChild(node, child);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The closest() method of the Element interface traverses the element and its parents (heading toward the document root) until it finds a node that matches the specified CSS selector.
|
||||||
|
// Returns the closest ancestor Element or itself, which matches the selectors. If there are no such element, null.
|
||||||
|
pub fn _closest(self: *parser.Element, selector: []const u8, page: *Page) !?*parser.Element {
|
||||||
|
const cssParse = @import("../css/css.zig").parse;
|
||||||
|
const CssNodeWrap = @import("../css/libdom.zig").Node;
|
||||||
|
const select = try cssParse(page.call_arena, selector, .{});
|
||||||
|
|
||||||
|
var current: CssNodeWrap = .{ .node = parser.elementToNode(self) };
|
||||||
|
while (true) {
|
||||||
|
if (try select.match(current)) {
|
||||||
|
if (!current.isElement()) {
|
||||||
|
log.err(.browser, "closest invalid type", .{ .type = try current.tag() });
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return parser.nodeToElement(current.node);
|
||||||
|
}
|
||||||
|
current = try current.parent() orelse return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// don't use parser.nodeHasAttributes(...) because that returns true/false
|
||||||
|
// based on the type, e.g. a node never as attributes, an element always has
|
||||||
|
// attributes. But, Element.hasAttributes is supposed to return true only
|
||||||
|
// if the element has at least 1 attribute.
|
||||||
|
pub fn _hasAttributes(self: *parser.Element) !bool {
|
||||||
|
// an element _must_ have at least an empty attribute
|
||||||
|
const node_map = try parser.nodeGetAttributes(parser.elementToNode(self)) orelse unreachable;
|
||||||
|
return try parser.namedNodeMapGetLength(node_map) > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getAttribute(self: *parser.Element, qname: []const u8) !?[]const u8 {
|
||||||
|
return try parser.elementGetAttribute(self, qname);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getAttributeNS(self: *parser.Element, ns: []const u8, qname: []const u8) !?[]const u8 {
|
||||||
|
return try parser.elementGetAttributeNS(self, ns, qname);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setAttribute(self: *parser.Element, qname: []const u8, value: []const u8) !void {
|
||||||
|
return try parser.elementSetAttribute(self, qname, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setAttributeNS(self: *parser.Element, ns: []const u8, qname: []const u8, value: []const u8) !void {
|
||||||
|
return try parser.elementSetAttributeNS(self, ns, qname, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _removeAttribute(self: *parser.Element, qname: []const u8) !void {
|
||||||
|
return try parser.elementRemoveAttribute(self, qname);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _removeAttributeNS(self: *parser.Element, ns: []const u8, qname: []const u8) !void {
|
||||||
|
return try parser.elementRemoveAttributeNS(self, ns, qname);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _hasAttribute(self: *parser.Element, qname: []const u8) !bool {
|
||||||
|
return try parser.elementHasAttribute(self, qname);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _hasAttributeNS(self: *parser.Element, ns: []const u8, qname: []const u8) !bool {
|
||||||
|
return try parser.elementHasAttributeNS(self, ns, qname);
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://dom.spec.whatwg.org/#dom-element-toggleattribute
|
||||||
|
pub fn _toggleAttribute(self: *parser.Element, qname: []u8, force: ?bool) !bool {
|
||||||
|
_ = std.ascii.lowerString(qname, qname);
|
||||||
|
const exists = try parser.elementHasAttribute(self, qname);
|
||||||
|
|
||||||
|
// If attribute is null, then:
|
||||||
|
if (!exists) {
|
||||||
|
// If force is not given or is true, create an attribute whose
|
||||||
|
// local name is qualifiedName, value is the empty string and node
|
||||||
|
// document is this’s node document, then append this attribute to
|
||||||
|
// this, and then return true.
|
||||||
|
if (force == null or force.?) {
|
||||||
|
try parser.elementSetAttribute(self, qname, "");
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (try parser.validateName(qname) == false) {
|
||||||
|
return parser.DOMError.InvalidCharacter;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return false.
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, if force is not given or is false, remove an attribute
|
||||||
|
// given qualifiedName and this, and then return false.
|
||||||
|
if (force == null or !force.?) {
|
||||||
|
try parser.elementRemoveAttribute(self, qname);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return true.
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getAttributeNames(self: *parser.Element, page: *Page) ![]const []const u8 {
|
||||||
|
const attributes = try parser.nodeGetAttributes(@ptrCast(self)) orelse return &.{};
|
||||||
|
const ln = try parser.namedNodeMapGetLength(attributes);
|
||||||
|
|
||||||
|
const names = try page.call_arena.alloc([]const u8, ln);
|
||||||
|
var at: usize = 0;
|
||||||
|
|
||||||
|
for (0..ln) |i| {
|
||||||
|
const attribute = try parser.namedNodeMapItem(attributes, @intCast(i)) orelse break;
|
||||||
|
names[at] = try parser.attributeGetName(attribute);
|
||||||
|
at += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return names[0..at];
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getAttributeNode(self: *parser.Element, name: []const u8) !?*parser.Attribute {
|
||||||
|
return try parser.elementGetAttributeNode(self, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getAttributeNodeNS(self: *parser.Element, ns: []const u8, name: []const u8) !?*parser.Attribute {
|
||||||
|
return try parser.elementGetAttributeNodeNS(self, ns, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setAttributeNode(self: *parser.Element, attr: *parser.Attribute) !?*parser.Attribute {
|
||||||
|
return try parser.elementSetAttributeNode(self, attr);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setAttributeNodeNS(self: *parser.Element, attr: *parser.Attribute) !?*parser.Attribute {
|
||||||
|
return try parser.elementSetAttributeNodeNS(self, attr);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _removeAttributeNode(self: *parser.Element, attr: *parser.Attribute) !*parser.Attribute {
|
||||||
|
return try parser.elementRemoveAttributeNode(self, attr);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getElementsByTagName(
|
||||||
|
self: *parser.Element,
|
||||||
|
tag_name: []const u8,
|
||||||
|
page: *Page,
|
||||||
|
) !collection.HTMLCollection {
|
||||||
|
return try collection.HTMLCollectionByTagName(
|
||||||
|
page.arena,
|
||||||
|
parser.elementToNode(self),
|
||||||
|
tag_name,
|
||||||
|
.{ .include_root = false },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _getElementsByClassName(
|
||||||
|
self: *parser.Element,
|
||||||
|
classNames: []const u8,
|
||||||
|
page: *Page,
|
||||||
|
) !collection.HTMLCollection {
|
||||||
|
return try collection.HTMLCollectionByClassName(
|
||||||
|
page.arena,
|
||||||
|
parser.elementToNode(self),
|
||||||
|
classNames,
|
||||||
|
.{ .include_root = false },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParentNode
|
||||||
|
// https://dom.spec.whatwg.org/#parentnode
|
||||||
|
pub fn get_children(self: *parser.Element) !collection.HTMLCollection {
|
||||||
|
return collection.HTMLCollectionChildren(parser.elementToNode(self), .{
|
||||||
|
.include_root = false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_firstElementChild(self: *parser.Element) !?Union {
|
||||||
|
var children = try get_children(self);
|
||||||
|
return try children._item(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_lastElementChild(self: *parser.Element) !?Union {
|
||||||
|
// TODO we could check the last child node first, if it's an element,
|
||||||
|
// we can return it directly instead of looping twice over the
|
||||||
|
// children.
|
||||||
|
var children = try get_children(self);
|
||||||
|
const ln = try children.get_length();
|
||||||
|
if (ln == 0) return null;
|
||||||
|
return try children._item(ln - 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_childElementCount(self: *parser.Element) !u32 {
|
||||||
|
var children = try get_children(self);
|
||||||
|
return try children.get_length();
|
||||||
|
}
|
||||||
|
|
||||||
|
// NonDocumentTypeChildNode
|
||||||
|
// https://dom.spec.whatwg.org/#interface-nondocumenttypechildnode
|
||||||
|
pub fn get_previousElementSibling(self: *parser.Element) !?Union {
|
||||||
|
const res = try parser.nodePreviousElementSibling(parser.elementToNode(self));
|
||||||
|
if (res == null) return null;
|
||||||
|
return try toInterface(res.?);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_nextElementSibling(self: *parser.Element) !?Union {
|
||||||
|
const res = try parser.nodeNextElementSibling(parser.elementToNode(self));
|
||||||
|
if (res == null) return null;
|
||||||
|
return try toInterface(res.?);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn getElementById(self: *parser.Element, id: []const u8) !?*parser.Node {
|
||||||
|
// walk over the node tree fo find the node by id.
|
||||||
|
const root = parser.elementToNode(self);
|
||||||
|
const walker = Walker{};
|
||||||
|
var next: ?*parser.Node = null;
|
||||||
|
while (true) {
|
||||||
|
next = try walker.get_next(root, next) orelse return null;
|
||||||
|
// ignore non-element nodes.
|
||||||
|
if (try parser.nodeType(next.?) != .element) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const e = parser.nodeToElement(next.?);
|
||||||
|
if (std.mem.eql(u8, id, try get_id(e))) return next;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _querySelector(self: *parser.Element, selector: []const u8, page: *Page) !?Union {
|
||||||
|
if (selector.len == 0) return null;
|
||||||
|
|
||||||
|
const n = try css.querySelector(page.call_arena, parser.elementToNode(self), selector);
|
||||||
|
|
||||||
|
if (n == null) return null;
|
||||||
|
|
||||||
|
return try toInterface(parser.nodeToElement(n.?));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _querySelectorAll(self: *parser.Element, selector: []const u8, page: *Page) !NodeList {
|
||||||
|
return css.querySelectorAll(page.arena, parser.elementToNode(self), selector);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _prepend(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.prepend(parser.elementToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _append(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.append(parser.elementToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _before(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
|
||||||
|
const ref_node = parser.elementToNode(self);
|
||||||
|
return Node.before(ref_node, nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _after(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
|
||||||
|
const ref_node = parser.elementToNode(self);
|
||||||
|
return Node.after(ref_node, nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _replaceChildren(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
|
||||||
|
return Node.replaceChildren(parser.elementToNode(self), nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
// A DOMRect object providing information about the size of an element and its position relative to the viewport.
|
||||||
|
// Returns a 0 DOMRect object if the element is eventually detached from the main window
|
||||||
|
pub fn _getBoundingClientRect(self: *parser.Element, page: *Page) !DOMRect {
|
||||||
|
// Since we are lazy rendering we need to do this check. We could store the renderer in a viewport such that it could cache these, but it would require tracking changes.
|
||||||
|
if (!try page.isNodeAttached(parser.elementToNode(self))) {
|
||||||
|
return DOMRect{
|
||||||
|
.x = 0,
|
||||||
|
.y = 0,
|
||||||
|
.width = 0,
|
||||||
|
.height = 0,
|
||||||
|
.bottom = 0,
|
||||||
|
.right = 0,
|
||||||
|
.top = 0,
|
||||||
|
.left = 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return page.renderer.getRect(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns a collection of DOMRect objects that indicate the bounding rectangles for each CSS border box in a client.
|
||||||
|
// We do not render so it only always return the element's bounding rect.
|
||||||
|
// Returns an empty array if the element is eventually detached from the main window
|
||||||
|
pub fn _getClientRects(self: *parser.Element, page: *Page) ![]DOMRect {
|
||||||
|
if (!try page.isNodeAttached(parser.elementToNode(self))) {
|
||||||
|
return &.{};
|
||||||
|
}
|
||||||
|
const heap_ptr = try page.call_arena.create(DOMRect);
|
||||||
|
heap_ptr.* = try page.renderer.getRect(self);
|
||||||
|
return heap_ptr[0..1];
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_clientWidth(_: *parser.Element, page: *Page) u32 {
|
||||||
|
return page.renderer.width();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_clientHeight(_: *parser.Element, page: *Page) u32 {
|
||||||
|
return page.renderer.height();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _matches(self: *parser.Element, selectors: []const u8, page: *Page) !bool {
|
||||||
|
const cssParse = @import("../css/css.zig").parse;
|
||||||
|
const CssNodeWrap = @import("../css/libdom.zig").Node;
|
||||||
|
const s = try cssParse(page.call_arena, selectors, .{});
|
||||||
|
return s.match(CssNodeWrap{ .node = parser.elementToNode(self) });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _scrollIntoViewIfNeeded(_: *parser.Element, center_if_needed: ?bool) void {
|
||||||
|
_ = center_if_needed;
|
||||||
|
}
|
||||||
|
|
||||||
|
const CheckVisibilityOpts = struct {
|
||||||
|
contentVisibilityAuto: bool,
|
||||||
|
opacityProperty: bool,
|
||||||
|
visibilityProperty: bool,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn _checkVisibility(self: *parser.Element, opts: ?CheckVisibilityOpts) bool {
|
||||||
|
_ = self;
|
||||||
|
_ = opts;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Options for Element.attachShadow(). Only `mode` is supported; it is
// required and must name a ShadowRoot.Mode value.
const AttachShadowOpts = struct {
    mode: []const u8, // must be specified
};
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Element/attachShadow
// Attaches a shadow root to this element, or resets an existing one.
// Returns error.InvalidArgument when `opts.mode` is not a valid
// ShadowRoot.Mode name, and error.NotSupportedError when re-attaching
// with a different mode (per the spec).
pub fn _attachShadow(self: *parser.Element, opts: AttachShadowOpts, page: *Page) !*ShadowRoot {
    const mode = std.meta.stringToEnum(ShadowRoot.Mode, opts.mode) orelse return error.InvalidArgument;
    const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
    if (state.shadow_root) |sr| {
        // Re-attach: the mode may not change, but the existing root is
        // emptied and reused.
        if (mode != sr.mode) {
            // this is the behavior per the spec
            return error.NotSupportedError;
        }

        try Node.removeChildren(@ptrCast(@alignCast(sr.proto)));
        return sr;
    }

    // Not sure what to do if there is no owner document
    const doc = try parser.nodeOwnerDocument(@ptrCast(self)) orelse return error.InvalidArgument;
    const fragment = try parser.documentCreateDocumentFragment(doc);
    const sr = try page.arena.create(ShadowRoot);
    sr.* = .{
        .host = self,
        .mode = mode,
        .proto = fragment,
    };
    state.shadow_root = sr;
    parser.documentFragmentSetHost(sr.proto, @ptrCast(@alignCast(self)));

    // Storing the ShadowRoot on the element makes sense, it's the ShadowRoot's
    // parent. When we render, we go top-down, so we'll have the element, get
    // its shadowroot, and go on. That's what the above code does.
    // But we sometimes need to go bottom-up, e.g. when we have a slot element
    // and want to find the containing parent. Unfortunately, we don't have
    // that link, so we need to create it. In the DOM, the ShadowRoot is
    // represented by this DocumentFragment (it's the ShadowRoot's base prototype)
    // So we can also store the ShadowRoot in the DocumentFragment's state.
    const fragment_state = try page.getOrCreateNodeState(@ptrCast(@alignCast(fragment)));
    fragment_state.shadow_root = sr;

    return sr;
}
|
||||||
|
|
||||||
|
// Returns the element's shadow root, or null when no root is attached or
// when the attached root is closed (closed roots are hidden from JS).
pub fn get_shadowRoot(self: *parser.Element, page: *Page) ?*ShadowRoot {
    const state = page.getNodeState(@ptrCast(@alignCast(self))) orelse return null;
    const shadow = state.shadow_root orelse return null;
    return if (shadow.mode == .closed) null else shadow;
}
|
||||||
|
|
||||||
|
// Minimal Element.animate() support: timing options are ignored and the
// effect object is handed to the Animation constructor with no timing.
pub fn _animate(self: *parser.Element, effect: JsObject, opts: JsObject) !Animation {
    _ = self;
    _ = opts;
    return Animation.constructor(effect, null);
}
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Element/remove
// Detaches the element from its parent; an already-parentless element is
// a no-op.
// TODO: This hasn't been tested to make sure all references to this
// node are properly updated. A lot of libdom is lazy and will look
// for related elements JIT by walking the tree, but there could be
// cases in libdom or the Zig WebAPI where this reference is kept.
pub fn _remove(self: *parser.Element) !void {
    const node: *parser.Node = @ptrCast(self);
    if (try parser.nodeParentNode(node)) |parent| {
        _ = try Node._removeChild(parent, node);
    }
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Tests
|
||||||
|
// -----
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
// End-to-end WebAPI checks driven by the HTML fixture.
test "Browser: DOM.Element" {
    try testing.htmlRunner("dom/element.html");
}
|
||||||
166
src/browser/dom/event_target.zig
Normal file
166
src/browser/dom/event_target.zig
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const EventHandler = @import("../events/event.zig").EventHandler;
|
||||||
|
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
const nod = @import("node.zig");
|
||||||
|
|
||||||
|
// Every concrete type an event target can resolve to. Most targets are
// DOM nodes, but several pure-zig WebAPI structs (XHR, MessagePort,
// Screen, Performance, ...) also act as event targets.
pub const Union = union(enum) {
    node: nod.Union,
    xhr: *@import("../xhr/xhr.zig").XMLHttpRequest,
    plain: *parser.EventTarget,
    message_port: *@import("MessageChannel.zig").MessagePort,
    screen: *@import("../html/screen.zig").Screen,
    screen_orientation: *@import("../html/screen.zig").ScreenOrientation,
    performance: *@import("performance.zig").Performance,
    media_query_list: *@import("../html/media_query_list.zig").MediaQueryList,
};
|
||||||
|
|
||||||
|
// EventTarget implementation
// Thin zig wrapper over libdom's event target. Pure-zig WebAPI objects
// embed a parser.EventTargetTBase so they can take part in event dispatch
// without being libdom nodes; `toInterface` maps an opaque target back to
// its concrete type.
pub const EventTarget = struct {
    pub const Self = parser.EventTarget;
    pub const Exception = DOMException;

    // Extend libdom event target for pure zig struct.
    base: parser.EventTargetTBase = parser.EventTargetTBase{ .internal_target_type = .plain },

    // Resolves an opaque *parser.EventTarget to the concrete zig object,
    // keyed on the internal_target_type tag stored in the TBase.
    pub fn toInterface(et: *parser.EventTarget, page: *Page) !Union {
        // libdom assumes that all event targets are libdom nodes. They are not.

        switch (try parser.eventTargetInternalType(et)) {
            .libdom_node => {
                return .{ .node = try nod.Node.toInterface(@as(*parser.Node, @ptrCast(et))) };
            },
            .plain => return .{ .plain = et },
            .abort_signal => {
                // AbortSignal is a special case, it has its own internal type.
                // We return it as a node, but we need to handle it differently.
                return .{ .node = .{ .AbortSignal = @fieldParentPtr("proto", @as(*parser.EventTargetTBase, @ptrCast(et))) } };
            },
            .window => {
                // The window is a common non-node target, but it's easy to handle as it's a singleton.
                std.debug.assert(@intFromPtr(et) == @intFromPtr(&page.window.base));
                return .{ .node = .{ .Window = &page.window } };
            },
            .xhr => {
                const XMLHttpRequestEventTarget = @import("../xhr/event_target.zig").XMLHttpRequestEventTarget;
                const base: *XMLHttpRequestEventTarget = @fieldParentPtr("base", @as(*parser.EventTargetTBase, @ptrCast(et)));
                return .{ .xhr = @fieldParentPtr("proto", base) };
            },
            .message_port => {
                return .{ .message_port = @fieldParentPtr("proto", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
            .screen => {
                return .{ .screen = @fieldParentPtr("proto", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
            .screen_orientation => {
                return .{ .screen_orientation = @fieldParentPtr("proto", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
            .performance => {
                return .{ .performance = @fieldParentPtr("base", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
            .media_query_list => {
                return .{ .media_query_list = @fieldParentPtr("base", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
        }
    }

    // JS funcs
    // --------
    // `new EventTarget()`: a plain, node-less target allocated in the
    // page arena.
    pub fn constructor(page: *Page) !*parser.EventTarget {
        const et = try page.arena.create(EventTarget);
        // arena.create returns uninitialized memory; without this the
        // embedded base (and its internal_target_type) stays undefined.
        et.* = .{};
        return @ptrCast(&et.base);
    }

    pub fn _addEventListener(
        self: *parser.EventTarget,
        typ: []const u8,
        listener: EventHandler.Listener,
        opts: ?EventHandler.Opts,
        page: *Page,
    ) !void {
        _ = try EventHandler.register(page.arena, self, typ, listener, opts);
    }

    // removeEventListener's third argument may be either a boolean
    // (capture) or an options object with an optional capture field.
    const RemoveEventListenerOpts = union(enum) {
        opts: Opts,
        capture: bool,

        const Opts = struct {
            capture: ?bool,
        };
    };

    pub fn _removeEventListener(
        self: *parser.EventTarget,
        typ: []const u8,
        listener: EventHandler.Listener,
        opts_: ?RemoveEventListenerOpts,
    ) !void {
        var capture = false;
        if (opts_) |opts| {
            capture = switch (opts) {
                .capture => |c| c,
                .opts => |o| o.capture orelse false,
            };
        }

        const cbk = (try listener.callback(self)) orelse return;

        // check if event target has already this listener
        const lst = try parser.eventTargetHasListener(
            self,
            typ,
            capture,
            cbk.id,
        );
        if (lst == null) {
            return;
        }

        // remove listener
        try parser.eventTargetRemoveEventListener(
            self,
            typ,
            lst.?,
            capture,
        );
    }

    // Dispatches `event` on this target, then — when the event bubbles
    // and was not stopped — forwards it to the window's document target.
    // NOTE(review): the forwarded path always returns true regardless of
    // the initial dispatch result; confirm that is intended.
    pub fn _dispatchEvent(self: *parser.EventTarget, event: *parser.Event, page: *Page) !bool {
        const res = try parser.eventTargetDispatchEvent(self, event);

        if (!parser.eventBubbles(event) or parser.eventIsStopped(event)) {
            return res;
        }

        try page.window.dispatchForDocumentTarget(event);
        return true;
    }
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
// End-to-end WebAPI checks driven by the HTML fixture.
test "Browser: DOM.EventTarget" {
    try testing.htmlRunner("dom/event_target.html");
}
|
||||||
224
src/browser/dom/exceptions.zig
Normal file
224
src/browser/dom/exceptions.zig
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const allocPrint = std.fmt.allocPrint;
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
// https://webidl.spec.whatwg.org/#idl-DOMException
// Wraps an optional parser.DOMError plus a display string. `str` is
// either just the error name, or "Name: message" — get_message relies on
// that exact format.
pub const DOMException = struct {
    // null means a generic exception whose name renders as "Error".
    err: ?parser.DOMError,
    str: []const u8,

    pub const ErrorSet = parser.DOMError;

    // static attributes
    // Legacy numeric codes from older DOM levels; mirrored by get_code.
    pub const _INDEX_SIZE_ERR = 1;
    pub const _DOMSTRING_SIZE_ERR = 2;
    pub const _HIERARCHY_REQUEST_ERR = 3;
    pub const _WRONG_DOCUMENT_ERR = 4;
    pub const _INVALID_CHARACTER_ERR = 5;
    pub const _NO_DATA_ALLOWED_ERR = 6;
    pub const _NO_MODIFICATION_ALLOWED_ERR = 7;
    pub const _NOT_FOUND_ERR = 8;
    pub const _NOT_SUPPORTED_ERR = 9;
    pub const _INUSE_ATTRIBUTE_ERR = 10;
    pub const _INVALID_STATE_ERR = 11;
    pub const _SYNTAX_ERR = 12;
    pub const _INVALID_MODIFICATION_ERR = 13;
    pub const _NAMESPACE_ERR = 14;
    pub const _INVALID_ACCESS_ERR = 15;
    pub const _VALIDATION_ERR = 16;
    pub const _TYPE_MISMATCH_ERR = 17;
    pub const _SECURITY_ERR = 18;
    pub const _NETWORK_ERR = 19;
    pub const _ABORT_ERR = 20;
    pub const _URL_MISMATCH_ERR = 21;
    pub const _QUOTA_EXCEEDED_ERR = 22;
    pub const _TIMEOUT_ERR = 23;
    pub const _INVALID_NODE_TYPE_ERR = 24;
    pub const _DATA_CLONE_ERR = 25;

    // JS `new DOMException(message, name)`. An unrecognized name yields
    // err == null (rendered as "Error"). The combined string is allocated
    // in the page arena.
    pub fn constructor(message_: ?[]const u8, name_: ?[]const u8, page: *const Page) !DOMException {
        const message = message_ orelse "";
        const err = if (name_) |n| error_from_str(n) else null;
        const fixed_name = name(err);

        if (message.len == 0) return .{ .err = err, .str = fixed_name };

        const str = try allocPrint(page.arena, "{s}: {s}", .{ fixed_name, message });
        return .{ .err = err, .str = str };
    }

    // TODO: deinit
    // Builds an exception for a zig error raised while executing
    // `caller_name`. `err` must be a member of parser.DOMError (checked
    // by the @errorCast). Caller owns the allocated string.
    pub fn init(alloc: std.mem.Allocator, err: anyerror, caller_name: []const u8) !DOMException {
        const dom_error = @as(parser.DOMError, @errorCast(err));
        const error_name = DOMException.name(dom_error);
        const str = switch (dom_error) {
            error.HierarchyRequest => try allocPrint(
                alloc,
                "{s}: Failed to execute '{s}' on 'Node': The new child element contains the parent.",
                .{ error_name, caller_name },
            ),
            // todo add more custom error messages
            else => try allocPrint(
                alloc,
                "{s}: Failed to execute '{s}' : {s}",
                .{ error_name, caller_name, error_name },
            ),
            error.NoError => unreachable,
        };
        return .{ .err = dom_error, .str = str };
    }

    // Maps a JS-side exception name back to its DOMError; null for
    // unrecognized names.
    fn error_from_str(name_: []const u8) ?parser.DOMError {
        // @speed: Consider length first, left as is for maintainability, awaiting switch on string support
        if (std.mem.eql(u8, name_, "IndexSizeError")) return error.IndexSize;
        if (std.mem.eql(u8, name_, "StringSizeError")) return error.StringSize;
        if (std.mem.eql(u8, name_, "HierarchyRequestError")) return error.HierarchyRequest;
        if (std.mem.eql(u8, name_, "WrongDocumentError")) return error.WrongDocument;
        if (std.mem.eql(u8, name_, "InvalidCharacterError")) return error.InvalidCharacter;
        if (std.mem.eql(u8, name_, "NoDataAllowedError")) return error.NoDataAllowed;
        if (std.mem.eql(u8, name_, "NoModificationAllowedError")) return error.NoModificationAllowed;
        if (std.mem.eql(u8, name_, "NotFoundError")) return error.NotFound;
        if (std.mem.eql(u8, name_, "NotSupportedError")) return error.NotSupported;
        if (std.mem.eql(u8, name_, "InuseAttributeError")) return error.InuseAttribute;
        if (std.mem.eql(u8, name_, "InvalidStateError")) return error.InvalidState;
        if (std.mem.eql(u8, name_, "SyntaxError")) return error.Syntax;
        if (std.mem.eql(u8, name_, "InvalidModificationError")) return error.InvalidModification;
        if (std.mem.eql(u8, name_, "NamespaceError")) return error.Namespace;
        if (std.mem.eql(u8, name_, "InvalidAccessError")) return error.InvalidAccess;
        if (std.mem.eql(u8, name_, "ValidationError")) return error.Validation;
        if (std.mem.eql(u8, name_, "TypeMismatchError")) return error.TypeMismatch;
        if (std.mem.eql(u8, name_, "SecurityError")) return error.Security;
        if (std.mem.eql(u8, name_, "NetworkError")) return error.Network;
        if (std.mem.eql(u8, name_, "AbortError")) return error.Abort;
        // NOTE(review): the WebIDL spec spells this "URLMismatchError";
        // kept as-is for internal consistency with name() below — confirm.
        if (std.mem.eql(u8, name_, "URLismatchError")) return error.URLismatch;
        if (std.mem.eql(u8, name_, "QuotaExceededError")) return error.QuotaExceeded;
        if (std.mem.eql(u8, name_, "TimeoutError")) return error.Timeout;
        if (std.mem.eql(u8, name_, "InvalidNodeTypeError")) return error.InvalidNodeType;
        if (std.mem.eql(u8, name_, "DataCloneError")) return error.DataClone;

        // custom netsurf error
        if (std.mem.eql(u8, name_, "UnspecifiedEventTypeError")) return error.UnspecifiedEventType;
        if (std.mem.eql(u8, name_, "DispatchRequestError")) return error.DispatchRequest;
        if (std.mem.eql(u8, name_, "NoMemoryError")) return error.NoMemory;
        if (std.mem.eql(u8, name_, "AttributeWrongTypeError")) return error.AttributeWrongType;
        return null;
    }

    // Canonical exception name for a DOMError; "Error" when err_ is null.
    fn name(err_: ?parser.DOMError) []const u8 {
        const err = err_ orelse return "Error";

        return switch (err) {
            error.IndexSize => "IndexSizeError",
            error.StringSize => "StringSizeError", // Legacy: DOMSTRING_SIZE_ERR
            error.HierarchyRequest => "HierarchyRequestError",
            error.WrongDocument => "WrongDocumentError",
            error.InvalidCharacter => "InvalidCharacterError",
            error.NoDataAllowed => "NoDataAllowedError", // Legacy: NO_DATA_ALLOWED_ERR
            error.NoModificationAllowed => "NoModificationAllowedError",
            error.NotFound => "NotFoundError",
            error.NotSupported => "NotSupportedError",
            error.InuseAttribute => "InuseAttributeError",
            error.InvalidState => "InvalidStateError",
            error.Syntax => "SyntaxError",
            error.InvalidModification => "InvalidModificationError",
            error.Namespace => "NamespaceError",
            error.InvalidAccess => "InvalidAccessError",
            error.Validation => "ValidationError", // Legacy: VALIDATION_ERR
            error.TypeMismatch => "TypeMismatchError",
            error.Security => "SecurityError",
            error.Network => "NetworkError",
            error.Abort => "AbortError",
            error.URLismatch => "URLismatchError",
            error.QuotaExceeded => "QuotaExceededError",
            error.Timeout => "TimeoutError",
            error.InvalidNodeType => "InvalidNodeTypeError",
            error.DataClone => "DataCloneError",
            error.NoError => unreachable,

            // custom netsurf error
            error.UnspecifiedEventType => "UnspecifiedEventTypeError",
            error.DispatchRequest => "DispatchRequestError",
            error.NoMemory => "NoMemoryError",
            error.AttributeWrongType => "AttributeWrongTypeError",
        };
    }

    // JS properties and methods

    // Legacy numeric code; 0 for unnamed exceptions, 128+ for the custom
    // netsurf errors that have no spec code.
    pub fn get_code(self: *const DOMException) u8 {
        const err = self.err orelse return 0;
        return switch (err) {
            error.IndexSize => 1,
            error.StringSize => 2,
            error.HierarchyRequest => 3,
            error.WrongDocument => 4,
            error.InvalidCharacter => 5,
            error.NoDataAllowed => 6,
            error.NoModificationAllowed => 7,
            error.NotFound => 8,
            error.NotSupported => 9,
            error.InuseAttribute => 10,
            error.InvalidState => 11,
            error.Syntax => 12,
            error.InvalidModification => 13,
            error.Namespace => 14,
            error.InvalidAccess => 15,
            error.Validation => 16,
            error.TypeMismatch => 17,
            error.Security => 18,
            error.Network => 19,
            error.Abort => 20,
            error.URLismatch => 21,
            error.QuotaExceeded => 22,
            error.Timeout => 23,
            error.InvalidNodeType => 24,
            error.DataClone => 25,
            error.NoError => unreachable,

            // custom netsurf error
            error.UnspecifiedEventType => 128,
            error.DispatchRequest => 129,
            error.NoMemory => 130,
            error.AttributeWrongType => 131,
        };
    }

    pub fn get_name(self: *const DOMException) []const u8 {
        return DOMException.name(self.err);
    }

    // Message part of the display string; "" when str holds only the name.
    pub fn get_message(self: *const DOMException) []const u8 {
        const errName = DOMException.name(self.err);
        if (self.str.len <= errName.len + 2) return "";
        return self.str[errName.len + 2 ..]; // ! Requires str is formatted as "{name}: {message}"
    }

    pub fn _toString(self: *const DOMException) []const u8 {
        return self.str;
    }
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
// End-to-end WebAPI checks driven by the HTML fixture.
test "Browser: DOM.Exceptions" {
    try testing.htmlRunner("dom/exceptions.html");
}
|
||||||
467
src/browser/dom/html_collection.zig
Normal file
467
src/browser/dom/html_collection.zig
Normal file
@@ -0,0 +1,467 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const Element = @import("element.zig").Element;
|
||||||
|
const Union = @import("element.zig").Union;
|
||||||
|
const JsThis = @import("../env.zig").JsThis;
|
||||||
|
const Walker = @import("walker.zig").Walker;
|
||||||
|
|
||||||
|
// Strategy tag for HTMLCollection filtering. The stateless variants
// (links/anchors, constant true/false) are special-cased; the stateful
// ones dispatch through the inline-else payload.
const Matcher = union(enum) {
    matchByName: MatchByName,
    matchByTagName: MatchByTagName,
    matchByClassName: MatchByClassName,
    matchByLinks: MatchByLinks,
    matchByAnchors: MatchByAnchors,
    matchTrue: struct {},
    matchFalse: struct {},

    // Reports whether `node` belongs to the collection.
    pub fn match(self: Matcher, node: *parser.Node) !bool {
        switch (self) {
            .matchTrue => return true,
            .matchFalse => return false,
            .matchByLinks => return MatchByLinks.match(node),
            .matchByAnchors => return MatchByAnchors.match(node),
            inline else => |m| return m.match(node),
        }
    }
};
|
||||||
|
|
||||||
|
// Selects nodes by tag name, case-insensitively; "*" matches every node.
pub const MatchByTagName = struct {
    tag: []const u8,
    is_wildcard: bool,

    // The tag is copied into the arena except for the wildcard, which
    // needs no storage.
    fn init(arena: Allocator, tag_name: []const u8) !MatchByTagName {
        const wildcard = std.mem.eql(u8, tag_name, "*");
        return if (wildcard) .{
            .tag = "*",
            .is_wildcard = true,
        } else .{
            .tag = try arena.dupe(u8, tag_name),
            .is_wildcard = false,
        };
    }

    pub fn match(self: MatchByTagName, node: *parser.Node) !bool {
        if (self.is_wildcard) return true;
        return std.ascii.eqlIgnoreCase(self.tag, try parser.nodeName(node));
    }
};
|
||||||
|
|
||||||
|
// Live collection of root's descendants with the given tag name
// ("*" selects everything).
pub fn HTMLCollectionByTagName(
    arena: Allocator,
    root: ?*parser.Node,
    tag_name: []const u8,
    opts: Opts,
) !HTMLCollection {
    const matcher = try MatchByTagName.init(arena, tag_name);
    return .{
        .root = root,
        .matcher = .{ .matchByTagName = matcher },
        .walker = .{ .walkerDepthFirst = .{} },
        .include_root = opts.include_root,
        .mutable = opts.mutable,
    };
}
|
||||||
|
|
||||||
|
// Selects elements that carry every class in a whitespace-separated list.
pub const MatchByClassName = struct {
    // Raw class list as passed by the caller; copied into the arena.
    class_names: []const u8,

    fn init(arena: Allocator, class_names: []const u8) !MatchByClassName {
        return .{
            .class_names = try arena.dupe(u8, class_names),
        };
    }

    // The list is split on ASCII whitespace with tokenize, so leading,
    // trailing or repeated separators no longer produce empty tokens
    // (previously "a  b" could never match). An empty list matches
    // nothing, per https://dom.spec.whatwg.org/#concept-getelementsbyclassname
    pub fn match(self: MatchByClassName, node: *parser.Node) !bool {
        const e = parser.nodeToElement(node);

        var has_token = false;
        var it = std.mem.tokenizeAny(u8, self.class_names, " \t\n\r\x0c");
        while (it.next()) |c| {
            if (!try parser.elementHasClass(e, c)) {
                return false;
            }
            has_token = true;
        }

        return has_token;
    }
};
|
||||||
|
|
||||||
|
// Live collection of root's descendants carrying every class in the
// space-separated classNames list.
pub fn HTMLCollectionByClassName(
    arena: Allocator,
    root: ?*parser.Node,
    classNames: []const u8,
    opts: Opts,
) !HTMLCollection {
    const matcher = try MatchByClassName.init(arena, classNames);
    return .{
        .root = root,
        .matcher = .{ .matchByClassName = matcher },
        .walker = .{ .walkerDepthFirst = .{} },
        .include_root = opts.include_root,
        .mutable = opts.mutable,
    };
}
|
||||||
|
|
||||||
|
// Selects elements whose "name" attribute equals the stored name
// (case-sensitive). Elements without the attribute never match.
pub const MatchByName = struct {
    name: []const u8,

    fn init(arena: Allocator, name: []const u8) !MatchByName {
        return .{ .name = try arena.dupe(u8, name) };
    }

    pub fn match(self: MatchByName, node: *parser.Node) !bool {
        const element = parser.nodeToElement(node);
        const value = try parser.elementGetAttribute(element, "name") orelse return false;
        return std.mem.eql(u8, self.name, value);
    }
};
|
||||||
|
|
||||||
|
// Live collection of root's descendants whose "name" attribute equals
// `name`.
pub fn HTMLCollectionByName(
    arena: Allocator,
    root: ?*parser.Node,
    name: []const u8,
    opts: Opts,
) !HTMLCollection {
    const matcher = try MatchByName.init(arena, name);
    return .{
        .root = root,
        .matcher = .{ .matchByName = matcher },
        .walker = .{ .walkerDepthFirst = .{} },
        .include_root = opts.include_root,
        .mutable = opts.mutable,
    };
}
|
||||||
|
|
||||||
|
// HTMLAllCollection is a special type: instances of it are falsy. It's the only
// object in the WebAPI that behaves like this - in fact, it's even a special
// case in the JavaScript spec.
// This is important, because a lot of browser detection relies on this behavior
// to determine what browser is running.

// It's also possible to use an instance like a function:
// document.all(3)
// document.all('some_id')
pub const HTMLAllCollection = struct {
    pub const prototype = *HTMLCollection;

    proto: HTMLCollection,

    // Flags the JS wrapper as "undetectable" — the document.all quirk
    // described above.
    pub const mark_as_undetectable = true;

    // Depth-first walk over every node, root included, matching all.
    pub fn init(root: ?*parser.Node) HTMLAllCollection {
        return .{ .proto = .{
            .root = root,
            .walker = .{ .walkerDepthFirst = .{} },
            .matcher = .{ .matchTrue = .{} },
            .include_root = true,
        } };
    }

    // Argument when the collection is called as a function: either a
    // numeric index or an id/name string.
    const CAllAsFunctionArg = union(enum) {
        index: u32,
        id: []const u8,
    };

    pub fn jsCallAsFunction(self: *HTMLAllCollection, arg: CAllAsFunctionArg) !?Union {
        return switch (arg) {
            .index => |i| self.proto._item(i),
            .id => |id| self.proto._namedItem(id),
        };
    }
};
|
||||||
|
|
||||||
|
// Collection over the immediate children of `root` (the children walker
// does not descend); every visited node matches.
pub fn HTMLCollectionChildren(
    root: ?*parser.Node,
    opts: Opts,
) HTMLCollection {
    return .{
        .root = root,
        .matcher = .{ .matchTrue = .{} },
        .walker = .{ .walkerChildren = .{} },
        .include_root = opts.include_root,
        .mutable = opts.mutable,
    };
}
|
||||||
|
|
||||||
|
// An always-empty collection: no root, a walker that yields nothing and
// a matcher that rejects everything.
pub fn HTMLCollectionEmpty() !HTMLCollection {
    return .{
        .root = null,
        .matcher = .{ .matchFalse = .{} },
        .walker = .{ .walkerNone = .{} },
        .include_root = false,
    };
}
|
||||||
|
|
||||||
|
// MatchByLinks matches the a and area elements in the Document that have
// href attributes.
// https://html.spec.whatwg.org/#dom-document-links
pub const MatchByLinks = struct {
    pub fn match(node: *parser.Node) !bool {
        const tag = try parser.nodeName(node);
        const is_link_tag = std.ascii.eqlIgnoreCase(tag, "a") or
            std.ascii.eqlIgnoreCase(tag, "area");
        if (!is_link_tag) return false;

        const elem: *parser.Element = @ptrCast(node);
        return parser.elementHasAttribute(elem, "href");
    }
};
|
||||||
|
|
||||||
|
// document.links: every <a>/<area> descendant carrying an href attribute.
pub fn HTMLCollectionByLinks(
    root: ?*parser.Node,
    opts: Opts,
) !HTMLCollection {
    return .{
        .root = root,
        .matcher = .{ .matchByLinks = .{} },
        .walker = .{ .walkerDepthFirst = .{} },
        .include_root = opts.include_root,
        .mutable = opts.mutable,
    };
}
|
||||||
|
|
||||||
|
// MatchByAnchors matches the a elements in the Document that have name
// attributes.
// https://html.spec.whatwg.org/#dom-document-anchors
pub const MatchByAnchors = struct {
    pub fn match(node: *parser.Node) !bool {
        if (!std.ascii.eqlIgnoreCase(try parser.nodeName(node), "a")) {
            return false;
        }

        const elem: *parser.Element = @ptrCast(node);
        return parser.elementHasAttribute(elem, "name");
    }
};
|
||||||
|
|
||||||
|
// document.anchors: every <a> descendant carrying a name attribute.
pub fn HTMLCollectionByAnchors(
    root: ?*parser.Node,
    opts: Opts,
) !HTMLCollection {
    return .{
        .root = root,
        .matcher = .{ .matchByAnchors = .{} },
        .walker = .{ .walkerDepthFirst = .{} },
        .include_root = opts.include_root,
        .mutable = opts.mutable,
    };
}
|
||||||
|
|
||||||
|
// JS iterator over an HTMLCollection: yields successive items until
// _item returns null, then reports done.
pub const HTMLCollectionIterator = struct {
    coll: *HTMLCollection,
    index: u32 = 0,

    pub const Return = struct {
        value: ?Union,
        done: bool,
    };

    pub fn _next(self: *HTMLCollectionIterator) !Return {
        const element = try self.coll._item(self.index) orelse return .{
            .value = null,
            .done = true,
        };

        self.index += 1;
        return .{
            .value = element,
            .done = false,
        };
    }
};
|
||||||
|
|
||||||
|
// Construction options shared by the HTMLCollection factory functions.
const Opts = struct {
    // Whether the walk must visit the root node itself in addition to its
    // descendants.
    include_root: bool,
    // When true, the collection bypasses its cur_idx/cur_node cache because
    // the underlying tree may change between calls.
    mutable: bool = false,
};
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#htmlcollection
// HTMLCollection is re-implemented in zig here because libdom's
// dom_html_collection expects a comparison function callback as argument,
// but we want a dynamic comparison here, according to the matcher.
pub const HTMLCollection = struct {
    // Decides whether a visited element belongs to the collection.
    matcher: Matcher,
    // Traversal strategy used to enumerate candidate nodes under `root`.
    walker: Walker,

    root: ?*parser.Node,

    // By default the HTMLCollection walks on the root's descendants only.
    // But in some cases, like for dom document, we want to walk over the root
    // itself.
    include_root: bool = false,

    // When true, the cur_idx/cur_node cache below is never reused because
    // the underlying tree may change between calls.
    mutable: bool = false,

    // save a state for the collection to improve the _item speed.
    cur_idx: ?u32 = null,
    cur_node: ?*parser.Node = null,

    // start returns the first node to walk on.
    fn start(self: *const HTMLCollection) !?*parser.Node {
        if (self.root == null) return null;

        if (self.include_root) {
            return self.root.?;
        }

        return try self.walker.get_next(self.root.?, null);
    }

    // Supports `for (... of collection)` iteration from JS.
    pub fn _symbol_iterator(self: *HTMLCollection) HTMLCollectionIterator {
        return HTMLCollectionIterator{
            .coll = self,
        };
    }

    /// get_length computes the collection's length dynamically according to
    /// the current root structure. Runs a full walk on every call.
    // TODO: nodes retrieved must be de-referenced.
    pub fn get_length(self: *HTMLCollection) !u32 {
        if (self.root == null) return 0;

        var len: u32 = 0;
        var node = try self.start() orelse return 0;

        while (true) {
            // Only element nodes can match (skips text, comments, ...).
            if (try parser.nodeType(node) == .element) {
                if (try self.matcher.match(node)) {
                    len += 1;
                }
            }

            node = try self.walker.get_next(self.root.?, node) orelse break;
        }

        return len;
    }

    // item returns the index-th matching node, or null when out of range.
    // For immutable collections, resumes the walk from the last returned
    // position when `index` is past it, avoiding a re-walk from the root.
    pub fn item(self: *HTMLCollection, index: u32) !?*parser.Node {
        if (self.root == null) return null;

        var i: u32 = 0;
        var node: *parser.Node = undefined;

        // Use the current state to improve speed if possible.
        if (self.mutable == false and self.cur_idx != null and index >= self.cur_idx.?) {
            i = self.cur_idx.?;
            node = self.cur_node.?;
        } else {
            node = try self.start() orelse return null;
        }

        while (true) {
            if (try parser.nodeType(node) == .element) {
                if (try self.matcher.match(node)) {
                    // check if we found the searched element.
                    if (i == index) {
                        // save the current state
                        self.cur_node = node;
                        self.cur_idx = i;

                        return node;
                    }

                    i += 1;
                }
            }

            node = try self.walker.get_next(self.root.?, node) orelse break;
        }

        return null;
    }

    // JS-facing item(): wraps the matched node into its concrete element
    // interface union.
    pub fn _item(self: *HTMLCollection, index: u32) !?Union {
        const node = try self.item(index) orelse return null;
        const e = @as(*parser.Element, @ptrCast(node));
        return try Element.toInterface(e);
    }

    // namedItem(): returns the first matching element whose `id` or `name`
    // attribute equals `name`, or null. An empty name always yields null,
    // per the spec.
    pub fn _namedItem(self: *const HTMLCollection, name: []const u8) !?Union {
        if (self.root == null) return null;
        if (name.len == 0) return null;

        var node = try self.start() orelse return null;

        while (true) {
            if (try parser.nodeType(node) == .element) {
                if (try self.matcher.match(node)) {
                    const elem = @as(*parser.Element, @ptrCast(node));

                    var attr = try parser.elementGetAttribute(elem, "id");
                    // check if the node id corresponds to the name argument.
                    if (attr != null and std.mem.eql(u8, name, attr.?)) {
                        return try Element.toInterface(elem);
                    }

                    attr = try parser.elementGetAttribute(elem, "name");
                    // check if the node name attribute corresponds to the name argument.
                    if (attr != null and std.mem.eql(u8, name, attr.?)) {
                        return try Element.toInterface(elem);
                    }
                }
            }

            node = try self.walker.get_next(self.root.?, node) orelse break;
        }

        return null;
    }

    // Returns the element's `id` attribute if present, otherwise its `name`
    // attribute, otherwise null. Used to expose named properties on the
    // collection's JS object.
    fn item_name(elt: *parser.Element) !?[]const u8 {
        if (try parser.elementGetAttribute(elt, "id")) |v| {
            return v;
        }
        if (try parser.elementGetAttribute(elt, "name")) |v| {
            return v;
        }

        return null;
    }

    // Called after the JS wrapper is created: exposes indexed properties
    // (collection[0], ...) and named properties (collection["someId"]).
    pub fn postAttach(self: *HTMLCollection, js_this: JsThis) !void {
        const len = try self.get_length();
        for (0..len) |i| {
            // item() cannot miss here: i < len and the tree is unchanged
            // within this call.
            const node = try self.item(@intCast(i)) orelse unreachable;
            const e = @as(*parser.Element, @ptrCast(node));
            const as_interface = try Element.toInterface(e);
            try js_this.setIndex(@intCast(i), as_interface, .{});

            if (try item_name(e)) |name| {
                // Even though an entry might have an empty id, the spec says
                // that namedItem("") should always return null
                if (name.len > 0) {
                    // Named fields should not be enumerable (it is defined with
                    // the LegacyUnenumerableNamedProperties flag.)
                    try js_this.set(name, as_interface, .{ .DONT_ENUM = true });
                }
            }
        }
    }
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
// End-to-end behavior is covered by the shared HTML test runner fixture.
test "Browser: DOM.HTMLCollection" {
    try testing.htmlRunner("dom/html_collection.html");
}
|
||||||
56
src/browser/dom/implementation.zig
Normal file
56
src/browser/dom/implementation.zig
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#domimplementation
//
// Thin wrappers over the libdom DOMImplementation entry points.
pub const DOMImplementation = struct {
    pub const Exception = DOMException;

    // Creates a standalone DocumentType node.
    pub fn _createDocumentType(
        _: *DOMImplementation,
        qname: [:0]const u8,
        publicId: [:0]const u8,
        systemId: [:0]const u8,
    ) !*parser.DocumentType {
        return parser.domImplementationCreateDocumentType(qname, publicId, systemId);
    }

    // Creates an XML document with the given namespace, qualified name and
    // optional doctype.
    pub fn _createDocument(
        _: *DOMImplementation,
        namespace: ?[:0]const u8,
        qname: ?[:0]const u8,
        doctype: ?*parser.DocumentType,
    ) !*parser.Document {
        return parser.domImplementationCreateDocument(namespace, qname, doctype);
    }

    // Creates an HTML document, optionally titled.
    pub fn _createHTMLDocument(_: *DOMImplementation, title: ?[]const u8) !*parser.DocumentHTML {
        return parser.domImplementationCreateHTMLDocument(title);
    }

    // Deprecated in the spec; always reports support.
    pub fn _hasFeature(_: *DOMImplementation) bool {
        return true;
    }
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
// End-to-end behavior is covered by the shared HTML test runner fixture.
test "Browser: DOM.Implementation" {
    try testing.htmlRunner("dom/implementation.html");
}
|
||||||
186
src/browser/dom/intersection_observer.zig
Normal file
186
src/browser/dom/intersection_observer.zig
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Element = @import("element.zig").Element;
|
||||||
|
|
||||||
|
// Interfaces exposed to the JS runtime by this module.
pub const Interfaces = .{
    IntersectionObserver,
    IntersectionObserverEntry,
};
|
||||||
|
|
||||||
|
// This is supposed to listen to changes between the root and observation targets.
// However, our renderer stores everything as 1 pixel sized boxes in a long row that never changes.
// As such, there are no changes to intersections between the root and any target.
// Instead we keep a list of all entries that are being observed.
// The callback is called with all entries everytime a new entry is added (observed).
// Potentially we should also call the callback at a regular interval.
// The returned Entries are phony, they always indicate full intersection.
// https://developer.mozilla.org/en-US/docs/Web/API/IntersectionObserver
pub const IntersectionObserver = struct {
    page: *Page,
    // User-supplied JS callback invoked with the full entry list.
    callback: Env.Function,
    options: IntersectionObserverOptions,

    // All currently observed targets. Allocated on the page arena, so no
    // explicit freeing is needed.
    observed_entries: std.ArrayListUnmanaged(IntersectionObserverEntry),

    // new IntersectionObserver(callback)
    // new IntersectionObserver(callback, options) [not supported yet]
    pub fn constructor(callback: Env.Function, options_: ?IntersectionObserverOptions, page: *Page) !IntersectionObserver {
        // Defaults mirror the spec: root = document, zero margin, zero threshold.
        var options = IntersectionObserverOptions{
            .root = parser.documentToNode(parser.documentHTMLToDocument(page.window.document)),
            .rootMargin = "0px 0px 0px 0px",
            .threshold = .{ .single = 0.0 },
        };
        if (options_) |*o| {
            if (o.root) |root| {
                options.root = root;
            } // Other properties are not used due to the way we render
        }

        return .{
            .page = page,
            .callback = callback,
            .options = options,
            .observed_entries = .{},
        };
    }

    pub fn _disconnect(self: *IntersectionObserver) !void {
        self.observed_entries = .{}; // We don't free as it is on an arena
    }

    // Registers a target (idempotent) and immediately fires the callback
    // with every observed entry, since our renderer never produces real
    // intersection changes.
    pub fn _observe(self: *IntersectionObserver, target_element: *parser.Element) !void {
        for (self.observed_entries.items) |*observer| {
            if (observer.target == target_element) {
                return; // Already observed
            }
        }

        try self.observed_entries.append(self.page.arena, .{
            .page = self.page,
            .target = target_element,
            .options = &self.options,
        });

        // Callback failures are logged, never propagated to the caller.
        var result: Env.Function.Result = undefined;
        self.callback.tryCall(void, .{self.observed_entries.items}, &result) catch {
            log.debug(.user_script, "callback error", .{
                .err = result.exception,
                .stack = result.stack,
                .source = "intersection observer",
            });
        };
    }

    // Removes a target; no-op if the target was never observed.
    pub fn _unobserve(self: *IntersectionObserver, target: *parser.Element) !void {
        for (self.observed_entries.items, 0..) |*observer, index| {
            if (observer.target == target) {
                _ = self.observed_entries.swapRemove(index);
                break;
            }
        }
    }

    // NOTE(review): the spec empties the record queue on takeRecords(), but
    // here observed_entries doubles as the observed-target registry, so it
    // is returned without clearing — confirm this simplification is intended.
    pub fn _takeRecords(self: *IntersectionObserver) []IntersectionObserverEntry {
        return self.observed_entries.items;
    }
};
|
||||||
|
|
||||||
|
// Options dictionary accepted by the IntersectionObserver constructor.
// Only `root` is honored; rootMargin and threshold are stored but unused
// because the renderer reports full intersection unconditionally.
const IntersectionObserverOptions = struct {
    root: ?*parser.Node, // Element or Document
    rootMargin: ?[]const u8,
    threshold: ?Threshold,

    // The spec allows either a single number or a list of numbers.
    const Threshold = union(enum) {
        single: f32,
        list: []const f32,
    };
};
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/IntersectionObserverEntry
// https://w3c.github.io/IntersectionObserver/#intersection-observer-entry
//
// Phony entry: always reports full intersection (see the module comment on
// IntersectionObserver for why).
pub const IntersectionObserverEntry = struct {
    page: *Page,
    target: *parser.Element,
    // Points into the owning IntersectionObserver's options.
    options: *IntersectionObserverOptions,

    // Returns the bounds rectangle of the target element as a DOMRectReadOnly. The bounds are computed as described in the documentation for Element.getBoundingClientRect().
    pub fn get_boundingClientRect(self: *const IntersectionObserverEntry) !Element.DOMRect {
        return Element._getBoundingClientRect(self.target, self.page);
    }

    // Returns the ratio of the intersectionRect to the boundingClientRect.
    // Always 1.0 here: targets are reported as fully intersecting.
    pub fn get_intersectionRatio(_: *const IntersectionObserverEntry) f32 {
        return 1.0;
    }

    // Returns a DOMRectReadOnly representing the target's visible area.
    // Identical to boundingClientRect since intersection is always full.
    pub fn get_intersectionRect(self: *const IntersectionObserverEntry) !Element.DOMRect {
        return Element._getBoundingClientRect(self.target, self.page);
    }

    // A Boolean value which is true if the target element intersects with the
    // intersection observer's root. If this is true, then, the
    // IntersectionObserverEntry describes a transition into a state of
    // intersection; if it's false, then you know the transition is from
    // intersecting to not-intersecting. Always true here.
    pub fn get_isIntersecting(_: *const IntersectionObserverEntry) bool {
        return true;
    }

    // Returns a DOMRectReadOnly for the intersection observer's root.
    pub fn get_rootBounds(self: *const IntersectionObserverEntry) !Element.DOMRect {
        // root is always set by the constructor's defaults, hence `.?`.
        const root = self.options.root.?;
        // Pointer comparison: the document root uses the renderer's bounds.
        if (@intFromPtr(root) == @intFromPtr(self.page.window.document)) {
            return self.page.renderer.boundingRect();
        }

        const root_type = try parser.nodeType(root);

        var element: *parser.Element = undefined;
        switch (root_type) {
            .element => element = parser.nodeToElement(root),
            .document => {
                const doc = parser.nodeToDocument(root);
                // A document always has a documentElement once parsed.
                element = (try parser.documentGetDocumentElement(doc)).?;
            },
            // root must be an Element or a Document per the spec.
            else => return error.InvalidState,
        }

        return Element._getBoundingClientRect(element, self.page);
    }

    // The Element whose intersection with the root changed.
    pub fn get_target(self: *const IntersectionObserverEntry) *parser.Element {
        return self.target;
    }

    // TODO: pub fn get_time(self: *const IntersectionObserverEntry)
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
// End-to-end behavior is covered by the shared HTML test runner fixture.
test "Browser: DOM.IntersectionObserver" {
    try testing.htmlRunner("dom/intersection_observer.html");
}
|
||||||
358
src/browser/dom/mutation_observer.zig
Normal file
358
src/browser/dom/mutation_observer.zig
Normal file
@@ -0,0 +1,358 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const NodeList = @import("nodelist.zig").NodeList;
|
||||||
|
|
||||||
|
// Interfaces exposed to the JS runtime by this module.
pub const Interfaces = .{
    MutationObserver,
    MutationRecord,
};
|
||||||
|
|
||||||
|
const Walker = @import("../dom/walker.zig").WalkerChildren;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#interface-mutationobserver
//
// Implemented on top of libdom's legacy mutation events (DOMNodeInserted,
// DOMAttrModified, ...). Records are batched and delivered asynchronously
// via the page scheduler.
pub const MutationObserver = struct {
    page: *Page,
    // User-supplied JS callback receiving the batched records.
    cbk: Env.Function,
    // Set to false by disconnect(); suppresses callback delivery.
    connected: bool,
    // True while a scheduler callback is pending, to avoid double-scheduling.
    scheduled: bool,

    // List of records which were observed. When the call scope ends, we need to
    // execute our callback with it.
    observed: std.ArrayListUnmanaged(MutationRecord),

    pub fn constructor(cbk: Env.Function, page: *Page) !MutationObserver {
        return .{
            .cbk = cbk,
            .page = page,
            .observed = .{},
            .connected = true,
            .scheduled = false,
        };
    }

    // Registers the mutation-event listeners on `node` that correspond to
    // the requested options. All allocations live on the page arena.
    pub fn _observe(self: *MutationObserver, node: *parser.Node, options_: ?Options) !void {
        const arena = self.page.arena;
        var options = options_ orelse Options{};
        // Copy the filter: the caller-provided slice may not outlive this call.
        if (options.attributeFilter.len > 0) {
            options.attributeFilter = try arena.dupe([]const u8, options.attributeFilter);
        }

        // The Observer outlives this call (it is referenced by the event
        // listeners), hence the arena allocation.
        const observer = try arena.create(Observer);
        observer.* = .{
            .node = node,
            .options = options,
            .mutation_observer = self,
            .event_node = .{ .id = self.cbk.id, .func = Observer.handle },
        };

        // register node's events
        if (options.childList or options.subtree) {
            _ = try parser.eventTargetAddEventListener(
                parser.toEventTarget(parser.Node, node),
                "DOMNodeInserted",
                &observer.event_node,
                false,
            );
            _ = try parser.eventTargetAddEventListener(
                parser.toEventTarget(parser.Node, node),
                "DOMNodeRemoved",
                &observer.event_node,
                false,
            );
        }
        if (options.attr()) {
            _ = try parser.eventTargetAddEventListener(
                parser.toEventTarget(parser.Node, node),
                "DOMAttrModified",
                &observer.event_node,
                false,
            );
        }
        if (options.cdata()) {
            _ = try parser.eventTargetAddEventListener(
                parser.toEventTarget(parser.Node, node),
                "DOMCharacterDataModified",
                &observer.event_node,
                false,
            );
        }
        if (options.subtree) {
            _ = try parser.eventTargetAddEventListener(
                parser.toEventTarget(parser.Node, node),
                "DOMSubtreeModified",
                &observer.event_node,
                false,
            );
        }
    }

    // Scheduler callback: delivers all batched records to the JS callback.
    // Returns null to indicate the scheduler should not repeat it.
    fn callback(ctx: *anyopaque) ?u32 {
        const self: *MutationObserver = @ptrCast(@alignCast(ctx));
        if (self.connected == false) {
            // NOTE(review): leaving `scheduled = true` while disconnected
            // prevents any further scheduling even after records accumulate
            // — confirm this is the intended disconnect semantics.
            self.scheduled = true;
            return null;
        }
        self.scheduled = false;

        const records = self.observed.items;
        if (records.len == 0) {
            return null;
        }

        // Records are consumed whether or not the callback succeeds.
        defer self.observed.clearRetainingCapacity();

        var result: Env.Function.Result = undefined;
        self.cbk.tryCall(void, .{records}, &result) catch {
            log.debug(.user_script, "callback error", .{
                .err = result.exception,
                .stack = result.stack,
                .source = "mutation observer",
            });
        };
        return null;
    }

    // TODO: should also unregister the event listeners added by _observe.
    pub fn _disconnect(self: *MutationObserver) !void {
        self.connected = false;
    }

    // TODO: should return (and clear) the pending MutationRecord list.
    pub fn _takeRecords(_: *const MutationObserver) ?[]const u8 {
        return &[_]u8{};
    }
};
|
||||||
|
|
||||||
|
/// A single observed mutation, mirroring the DOM MutationRecord interface
/// (https://dom.spec.whatwg.org/#interface-mutationrecord). Fields are
/// populated by Observer._handle; the get_* accessors expose them to JS.
pub const MutationRecord = struct {
    type: []const u8,
    target: *parser.Node,
    added_nodes: NodeList = .{},
    removed_nodes: NodeList = .{},
    previous_sibling: ?*parser.Node = null,
    next_sibling: ?*parser.Node = null,
    attribute_name: ?[]const u8 = null,
    attribute_namespace: ?[]const u8 = null,
    old_value: ?[]const u8 = null,

    /// "attributes", "characterData" or "childList".
    pub fn get_type(self: *const MutationRecord) []const u8 {
        return self.type;
    }

    /// The node the mutation record was produced for.
    pub fn get_target(self: *const MutationRecord) *parser.Node {
        return self.target;
    }

    /// Nodes added by a childList mutation (empty otherwise).
    pub fn get_addedNodes(self: *MutationRecord) *NodeList {
        return &self.added_nodes;
    }

    /// Nodes removed by a childList mutation (empty otherwise).
    pub fn get_removedNodes(self: *MutationRecord) *NodeList {
        return &self.removed_nodes;
    }

    /// Local name of the changed attribute, for "attributes" records.
    pub fn get_attributeName(self: *const MutationRecord) ?[]const u8 {
        return self.attribute_name;
    }

    /// Namespace of the changed attribute, for "attributes" records.
    pub fn get_attributeNamespace(self: *const MutationRecord) ?[]const u8 {
        return self.attribute_namespace;
    }

    /// Previous sibling of added/removed nodes, when tracked.
    pub fn get_previousSibling(self: *const MutationRecord) ?*parser.Node {
        return self.previous_sibling;
    }

    /// Next sibling of added/removed nodes, when tracked.
    pub fn get_nextSibling(self: *const MutationRecord) ?*parser.Node {
        return self.next_sibling;
    }

    /// Pre-mutation value, when old-value tracking was requested.
    pub fn get_oldValue(self: *const MutationRecord) ?[]const u8 {
        return self.old_value;
    }
};
|
||||||
|
|
||||||
|
// Options accepted by MutationObserver.observe(), mirroring
// https://dom.spec.whatwg.org/#dictdef-mutationobserverinit.
const Options = struct {
    childList: bool = false,
    attributes: bool = false,
    characterData: bool = false,
    subtree: bool = false,
    attributeOldValue: bool = false,
    characterDataOldValue: bool = false,
    attributeFilter: [][]const u8 = &.{},

    // True when any attribute-related option requires listening to
    // DOMAttrModified.
    fn attr(self: Options) bool {
        if (self.attributes) return true;
        if (self.attributeOldValue) return true;
        return self.attributeFilter.len > 0;
    }

    // True when character-data mutations must be listened to.
    fn cdata(self: Options) bool {
        if (self.characterData) return true;
        return self.characterDataOldValue;
    }
};
|
||||||
|
|
||||||
|
// Per-observe() registration: one Observer is created for each observed
// node and attached as a libdom mutation-event listener.
const Observer = struct {
    node: *parser.Node,
    options: Options,

    // reference back to the MutationObserver so that we can access the arena
    // and batch the mutation records.
    mutation_observer: *MutationObserver,

    event_node: parser.EventNode,

    // appliesTo returns true when `event` fired on `target` should produce
    // a MutationRecord for this registration, according to `self.options`.
    fn appliesTo(
        self: *const Observer,
        target: *parser.Node,
        event_type: MutationEventType,
        event: *parser.MutationEvent,
    ) !bool {
        // When an attributeFilter is set, only the listed attribute names
        // are observed.
        // BUGFIX: the previous version used a labeled block as the for-loop
        // body; its `break :blk` only skipped to the next iteration and the
        // function then unconditionally returned false, so filtered
        // attribute mutations were never reported.
        if (event_type == .DOMAttrModified and self.options.attributeFilter.len > 0) {
            const attribute_name = try parser.mutationEventAttributeName(event);
            const filtered_in = for (self.options.attributeFilter) |needle| {
                if (std.mem.eql(u8, attribute_name, needle)) break true;
            } else false;
            if (!filtered_in) {
                return false;
            }
        }

        // mutation on any target is always ok.
        if (self.options.subtree) {
            return true;
        }

        // if target equals node, always ok.
        if (target == self.node) {
            return true;
        }

        // no subtree, no same target and no childList: nothing else applies.
        if (!self.options.childList) {
            return false;
        }

        // target must be a child of self.node
        const walker = Walker{};
        var next: ?*parser.Node = null;
        while (true) {
            next = walker.get_next(self.node, next) catch break orelse break;
            if (next.? == target) {
                return true;
            }
        }

        return false;
    }

    // Event-listener entry point; errors are logged, never propagated into
    // libdom's dispatch.
    fn handle(en: *parser.EventNode, event: *parser.Event) void {
        const self: *Observer = @fieldParentPtr("event_node", en);
        self._handle(event) catch |err| {
            log.err(.web_api, "handle error", .{ .err = err, .source = "mutation observer" });
        };
    }

    // Translates a libdom mutation event into a MutationRecord and schedules
    // the observer callback (once per batch).
    fn _handle(self: *Observer, event: *parser.Event) !void {
        var mutation_observer = self.mutation_observer;

        const node = blk: {
            const event_target = parser.eventTarget(event) orelse return;
            break :blk parser.eventTargetToNode(event_target);
        };

        const mutation_event = parser.eventToMutationEvent(event);
        const event_type = blk: {
            const t = try parser.eventType(event);
            // Unknown event types (e.g. DOMSubtreeModified) produce no record.
            break :blk std.meta.stringToEnum(MutationEventType, t) orelse return;
        };

        if (try self.appliesTo(node, event_type, mutation_event) == false) {
            return;
        }

        // NOTE(review): target is the observed root, not the mutated node;
        // the DOM spec sets MutationRecord.target to the mutated node —
        // confirm this simplification is intentional.
        var record = MutationRecord{
            .target = self.node,
            .type = event_type.recordType(),
        };

        const arena = mutation_observer.page.arena;
        switch (event_type) {
            .DOMAttrModified => {
                record.attribute_name = parser.mutationEventAttributeName(mutation_event) catch null;
                if (self.options.attributeOldValue) {
                    record.old_value = parser.mutationEventPrevValue(mutation_event) catch null;
                }
            },
            .DOMCharacterDataModified => {
                if (self.options.characterDataOldValue) {
                    record.old_value = parser.mutationEventPrevValue(mutation_event) catch null;
                }
            },
            .DOMNodeInserted => {
                if (parser.mutationEventRelatedNode(mutation_event) catch null) |related_node| {
                    try record.added_nodes.append(arena, related_node);
                }
            },
            .DOMNodeRemoved => {
                if (parser.mutationEventRelatedNode(mutation_event) catch null) |related_node| {
                    try record.removed_nodes.append(arena, related_node);
                }
            },
        }

        try mutation_observer.observed.append(arena, record);

        // Schedule one delivery per batch: further records before the
        // scheduler fires are appended to the same batch.
        if (mutation_observer.scheduled == false) {
            mutation_observer.scheduled = true;
            try mutation_observer.page.scheduler.add(
                mutation_observer,
                MutationObserver.callback,
                0,
                .{ .name = "mutation_observer" },
            );
        }
    }
};
|
||||||
|
|
||||||
|
// The legacy libdom mutation-event names this module listens to.
const MutationEventType = enum {
    DOMAttrModified,
    DOMCharacterDataModified,
    DOMNodeInserted,
    DOMNodeRemoved,

    // Maps the legacy event name to the MutationRecord.type string defined
    // by https://dom.spec.whatwg.org/#interface-mutationrecord.
    fn recordType(self: MutationEventType) []const u8 {
        return switch (self) {
            .DOMAttrModified => "attributes",
            .DOMCharacterDataModified => "characterData",
            // Insertions and removals are both childList mutations.
            .DOMNodeInserted, .DOMNodeRemoved => "childList",
        };
    }
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
// End-to-end behavior is covered by the shared HTML test runner fixture.
test "Browser: DOM.MutationObserver" {
    try testing.htmlRunner("dom/mutation_observer.html");
}
|
||||||
121
src/browser/dom/namednodemap.zig
Normal file
121
src/browser/dom/namednodemap.zig
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#namednodemap
|
||||||
|
pub const NamedNodeMap = struct {
    pub const Self = parser.NamedNodeMap;

    pub const Exception = DOMException;
    pub const Iterator = NamedNodeMapIterator;

    // TODO implement LegacyUnenumerableNamedProperties.
    // https://webidl.spec.whatwg.org/#LegacyUnenumerableNamedProperties

    /// Number of attributes currently held by the map.
    pub fn get_length(self: *parser.NamedNodeMap) !u32 {
        return parser.namedNodeMapGetLength(self);
    }

    /// Attribute at position `index`, or null when out of range.
    pub fn _item(self: *parser.NamedNodeMap, index: u32) !?*parser.Attribute {
        return parser.namedNodeMapItem(self, index);
    }

    /// Looks up an attribute by its qualified name.
    pub fn _getNamedItem(self: *parser.NamedNodeMap, qname: []const u8) !?*parser.Attribute {
        return parser.namedNodeMapGetNamedItem(self, qname);
    }

    /// Namespace-aware lookup by (namespace, local name).
    pub fn _getNamedItemNS(
        self: *parser.NamedNodeMap,
        namespace: []const u8,
        localname: []const u8,
    ) !?*parser.Attribute {
        return parser.namedNodeMapGetNamedItemNS(self, namespace, localname);
    }

    /// Inserts or replaces an attribute; returns the replaced one, if any.
    pub fn _setNamedItem(self: *parser.NamedNodeMap, attr: *parser.Attribute) !?*parser.Attribute {
        return parser.namedNodeMapSetNamedItem(self, attr);
    }

    /// Namespace-aware insert/replace; returns the replaced attribute, if any.
    pub fn _setNamedItemNS(self: *parser.NamedNodeMap, attr: *parser.Attribute) !?*parser.Attribute {
        return parser.namedNodeMapSetNamedItemNS(self, attr);
    }

    /// Removes the attribute matching `qname` and returns it.
    pub fn _removeNamedItem(self: *parser.NamedNodeMap, qname: []const u8) !*parser.Attribute {
        return parser.namedNodeMapRemoveNamedItem(self, qname);
    }

    /// Namespace-aware removal; returns the removed attribute.
    pub fn _removeNamedItemNS(
        self: *parser.NamedNodeMap,
        namespace: []const u8,
        localname: []const u8,
    ) !*parser.Attribute {
        return parser.namedNodeMapRemoveNamedItemNS(self, namespace, localname);
    }

    /// Indexed accessor backing JS subscript access (map[0], map[1], ...).
    /// Signals a miss through `has_value` rather than an optional return.
    pub fn indexed_get(self: *parser.NamedNodeMap, index: u32, has_value: *bool) !*parser.Attribute {
        if (try _item(self, index)) |attr| {
            return attr;
        }
        has_value.* = false;
        return undefined;
    }

    /// Named accessor backing JS property access (map["id"], ...).
    /// Signals a miss through `has_value` rather than an optional return.
    pub fn named_get(self: *parser.NamedNodeMap, name: []const u8, has_value: *bool) !*parser.Attribute {
        if (try _getNamedItem(self, name)) |attr| {
            return attr;
        }
        has_value.* = false;
        return undefined;
    }

    /// Supports for..of iteration over the map's attributes.
    pub fn _symbol_iterator(self: *parser.NamedNodeMap) NamedNodeMapIterator {
        return .{ .map = self };
    }
};
|
||||||
|
|
||||||
|
pub const NamedNodeMapIterator = struct {
    // Position of the next attribute to yield.
    index: u32 = 0,
    map: *parser.NamedNodeMap,

    // Shape of a JS iterator-protocol result object.
    pub const Return = struct {
        done: bool,
        value: ?*parser.Attribute,
    };

    /// Yields the next attribute, or a `done` result once exhausted.
    pub fn _next(self: *NamedNodeMapIterator) !Return {
        const attr = (try NamedNodeMap._item(self.map, self.index)) orelse {
            return .{
                .value = null,
                .done = true,
            };
        };

        self.index += 1;
        return .{
            .value = attr,
            .done = false,
        };
    }
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");

// End-to-end coverage: runs the NamedNodeMap HTML fixture through the shared
// browser test harness.
test "Browser: DOM.NamedNodeMap" {
    try testing.htmlRunner("dom/named_node_map.html");
}
|
||||||
637
src/browser/dom/node.zig
Normal file
637
src/browser/dom/node.zig
Normal file
@@ -0,0 +1,637 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const generate = @import("../../runtime/generate.zig");
|
||||||
|
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const EventTarget = @import("event_target.zig").EventTarget;
|
||||||
|
|
||||||
|
// DOM
|
||||||
|
const Attr = @import("attribute.zig").Attr;
|
||||||
|
const CData = @import("character_data.zig");
|
||||||
|
const Element = @import("element.zig").Element;
|
||||||
|
const ElementUnion = @import("element.zig").Union;
|
||||||
|
const NodeList = @import("nodelist.zig").NodeList;
|
||||||
|
const Document = @import("document.zig").Document;
|
||||||
|
const DocumentType = @import("document_type.zig").DocumentType;
|
||||||
|
const DocumentFragment = @import("document_fragment.zig").DocumentFragment;
|
||||||
|
const HTMLCollection = @import("html_collection.zig").HTMLCollection;
|
||||||
|
const HTMLAllCollection = @import("html_collection.zig").HTMLAllCollection;
|
||||||
|
const HTMLCollectionIterator = @import("html_collection.zig").HTMLCollectionIterator;
|
||||||
|
const ShadowRoot = @import("shadow_root.zig").ShadowRoot;
|
||||||
|
const Walker = @import("walker.zig").WalkerDepthFirst;
|
||||||
|
|
||||||
|
// HTML
|
||||||
|
const HTML = @import("../html/html.zig");
|
||||||
|
|
||||||
|
// Node interfaces
|
||||||
|
// All WebIDL interfaces rooted at Node that get registered with the JS
// runtime; nested tuples (CData.Interfaces, HTML.Interfaces) are flattened
// by the generator.
pub const Interfaces = .{
    Attr,
    CData.CharacterData,
    CData.Interfaces,
    Element,
    Document,
    DocumentType,
    DocumentFragment,
    HTMLCollection,
    HTMLAllCollection,
    HTMLCollectionIterator,
    HTML.Interfaces,
};

// Comptime-generated tagged union covering every concrete Node subtype above.
pub const Union = generate.Union(Interfaces);
|
||||||
|
|
||||||
|
// Node implementation
|
||||||
|
pub const Node = struct {
    pub const Self = parser.Node;
    pub const prototype = *EventTarget;
    pub const subtype = .node;

    /// Wraps a generic libdom node into the JS-visible tagged union for its
    /// concrete type (Element, Text, Comment, Document, ...).
    pub fn toInterface(node: *parser.Node) !Union {
        return switch (try parser.nodeType(node)) {
            .element => try Element.toInterfaceT(
                Union,
                @as(*parser.Element, @ptrCast(node)),
            ),
            .comment => .{ .Comment = @as(*parser.Comment, @ptrCast(node)) },
            .text => .{ .Text = @as(*parser.Text, @ptrCast(node)) },
            .cdata_section => .{ .CDATASection = @as(*parser.CDATASection, @ptrCast(node)) },
            .processing_instruction => .{ .ProcessingInstruction = @as(*parser.ProcessingInstruction, @ptrCast(node)) },
            .document => blk: {
                // An HTML document gets the more specific HTMLDocument wrapper.
                const doc: *parser.Document = @ptrCast(node);
                if (doc.is_html) {
                    break :blk .{ .HTMLDocument = @as(*parser.DocumentHTML, @ptrCast(node)) };
                }

                break :blk .{ .Document = doc };
            },
            .document_type => .{ .DocumentType = @as(*parser.DocumentType, @ptrCast(node)) },
            .attribute => .{ .Attr = @as(*parser.Attribute, @ptrCast(node)) },
            .document_fragment => .{ .DocumentFragment = @as(*parser.DocumentFragment, @ptrCast(node)) },
            else => @panic("node type not handled"), // TODO
        };
    }

    // class attributes
    // Numeric nodeType constants exposed on the Node constructor, mirroring
    // the parser's NodeType enum values.

    pub const _ELEMENT_NODE = @intFromEnum(parser.NodeType.element);
    pub const _ATTRIBUTE_NODE = @intFromEnum(parser.NodeType.attribute);
    pub const _TEXT_NODE = @intFromEnum(parser.NodeType.text);
    pub const _CDATA_SECTION_NODE = @intFromEnum(parser.NodeType.cdata_section);
    pub const _PROCESSING_INSTRUCTION_NODE = @intFromEnum(parser.NodeType.processing_instruction);
    pub const _COMMENT_NODE = @intFromEnum(parser.NodeType.comment);
    pub const _DOCUMENT_NODE = @intFromEnum(parser.NodeType.document);
    pub const _DOCUMENT_TYPE_NODE = @intFromEnum(parser.NodeType.document_type);
    pub const _DOCUMENT_FRAGMENT_NODE = @intFromEnum(parser.NodeType.document_fragment);

    // These 3 are deprecated, but both Chrome and Firefox still expose them
    pub const _ENTITY_REFERENCE_NODE = @intFromEnum(parser.NodeType.entity_reference);
    pub const _ENTITY_NODE = @intFromEnum(parser.NodeType.entity);
    pub const _NOTATION_NODE = @intFromEnum(parser.NodeType.notation);

    // Bitmask constants returned by compareDocumentPosition().
    pub const _DOCUMENT_POSITION_DISCONNECTED = @intFromEnum(parser.DocumentPosition.disconnected);
    pub const _DOCUMENT_POSITION_PRECEDING = @intFromEnum(parser.DocumentPosition.preceding);
    pub const _DOCUMENT_POSITION_FOLLOWING = @intFromEnum(parser.DocumentPosition.following);
    pub const _DOCUMENT_POSITION_CONTAINS = @intFromEnum(parser.DocumentPosition.contains);
    pub const _DOCUMENT_POSITION_CONTAINED_BY = @intFromEnum(parser.DocumentPosition.contained_by);
    pub const _DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC = @intFromEnum(parser.DocumentPosition.implementation_specific);

    // JS funcs
    // --------

    // Read-only attributes

    // The page's raw URL serves as the base URI for every node.
    pub fn get_baseURI(_: *parser.Node, page: *Page) ![]const u8 {
        return page.url.raw;
    }

    pub fn get_firstChild(self: *parser.Node) !?Union {
        const res = try parser.nodeFirstChild(self);
        if (res == null) {
            return null;
        }
        return try Node.toInterface(res.?);
    }

    pub fn get_lastChild(self: *parser.Node) !?Union {
        const res = try parser.nodeLastChild(self);
        if (res == null) {
            return null;
        }
        return try Node.toInterface(res.?);
    }

    pub fn get_nextSibling(self: *parser.Node) !?Union {
        const res = try parser.nodeNextSibling(self);
        if (res == null) {
            return null;
        }
        return try Node.toInterface(res.?);
    }

    pub fn get_previousSibling(self: *parser.Node) !?Union {
        const res = try parser.nodePreviousSibling(self);
        if (res == null) {
            return null;
        }
        return try Node.toInterface(res.?);
    }

    pub fn get_parentNode(self: *parser.Node) !?Union {
        const res = try parser.nodeParentNode(self);
        if (res == null) {
            return null;
        }
        return try Node.toInterface(res.?);
    }

    // Like parentNode, but null unless the parent is an Element.
    pub fn get_parentElement(self: *parser.Node) !?ElementUnion {
        const res = try parser.nodeParentElement(self);
        if (res == null) {
            return null;
        }
        return try Element.toInterface(res.?);
    }

    pub fn get_nodeName(self: *parser.Node) ![]const u8 {
        return try parser.nodeName(self);
    }

    pub fn get_nodeType(self: *parser.Node) !u8 {
        return @intFromEnum(try parser.nodeType(self));
    }

    pub fn get_ownerDocument(self: *parser.Node) !?*parser.DocumentHTML {
        const res = try parser.nodeOwnerDocument(self);
        if (res == null) {
            return null;
        }
        return @as(*parser.DocumentHTML, @ptrCast(res.?));
    }

    // True when walking up parents (crossing shadow-root hosts) reaches a
    // document node.
    pub fn get_isConnected(self: *parser.Node) !bool {
        var node = self;
        while (true) {
            const node_type = try parser.nodeType(node);
            if (node_type == .document) {
                return true;
            }

            if (try parser.nodeParentNode(node)) |parent| {
                // didn't find a document, but node has a parent, let's see
                // if it's connected;
                node = parent;
                continue;
            }

            if (node_type != .document_fragment) {
                // doesn't have a parent and isn't a document_fragment
                // can't be connected
                return false;
            }

            if (parser.documentFragmentGetHost(@ptrCast(node))) |host| {
                // node doesn't have a parent, but it's a document fragment
                // with a host. The host is like the parent, but we only want to
                // traverse up (or down) to it in specific cases, like isConnected.
                node = host;
                continue;
            }
            return false;
        }
    }

    // Read/Write attributes

    pub fn get_nodeValue(self: *parser.Node) !?[]const u8 {
        return try parser.nodeValue(self);
    }

    pub fn set_nodeValue(self: *parser.Node, data: []u8) !void {
        try parser.nodeSetValue(self, data);
    }

    pub fn get_textContent(self: *parser.Node) !?[]const u8 {
        return try parser.nodeTextContent(self);
    }

    pub fn set_textContent(self: *parser.Node, data: []u8) !void {
        return try parser.nodeSetTextContent(self, data);
    }

    // Methods

    /// Appends `child` to `self`, adopting `child` (and its subtree) into
    /// self's ownerDocument when they differ.
    pub fn _appendChild(self: *parser.Node, child: *parser.Node) !Union {
        const self_owner = try parser.nodeOwnerDocument(self);
        const child_owner = try parser.nodeOwnerDocument(child);

        // If the node to be inserted has a different ownerDocument than the parent node,
        // modern browsers automatically adopt the node and its descendants into
        // the parent's ownerDocument.
        // This process is known as adoption.
        // (7.1) https://dom.spec.whatwg.org/#concept-node-insert
        if (child_owner == null or (self_owner != null and child_owner.? != self_owner.?)) {
            const w = Walker{};
            var current = child;
            while (true) {
                current.owner = self_owner;
                current = try w.get_next(child, current) orelse break;
            }
        }

        // TODO: DocumentFragment special case
        const res = try parser.nodeAppendChild(self, child);
        return try Node.toInterface(res);
    }

    // `deep` defaults to a shallow clone, matching the JS default.
    pub fn _cloneNode(self: *parser.Node, deep: ?bool) !Union {
        const clone = try parser.nodeCloneNode(self, deep orelse false);
        return try Node.toInterface(clone);
    }

    /// Returns the DocumentPosition bitmask describing how `other` relates
    /// to `self` (disconnected / contains / contained_by / preceding / following).
    pub fn _compareDocumentPosition(self: *parser.Node, other: *parser.Node) !u32 {
        if (self == other) {
            return 0;
        }

        // Resolve each node's document; a document node is its own document.
        const docself = try parser.nodeOwnerDocument(self) orelse blk: {
            if (try parser.nodeType(self) == .document) {
                break :blk @as(*parser.Document, @ptrCast(self));
            }
            break :blk null;
        };
        const docother = try parser.nodeOwnerDocument(other) orelse blk: {
            if (try parser.nodeType(other) == .document) {
                break :blk @as(*parser.Document, @ptrCast(other));
            }
            break :blk null;
        };

        // Both are in different document.
        if (docself == null or docother == null or docself.? != docother.?) {
            return @intFromEnum(parser.DocumentPosition.disconnected) +
                @intFromEnum(parser.DocumentPosition.implementation_specific) +
                @intFromEnum(parser.DocumentPosition.preceding);
        }

        if (@intFromPtr(self) == @intFromPtr(docself.?)) {
            // if self is the document, and we already know other is in the
            // document, then other is contained by and following self.
            return @intFromEnum(parser.DocumentPosition.following) +
                @intFromEnum(parser.DocumentPosition.contained_by);
        }

        const rootself = try parser.nodeGetRootNode(self);
        const rootother = try parser.nodeGetRootNode(other);
        if (rootself != rootother) {
            return @intFromEnum(parser.DocumentPosition.disconnected) +
                @intFromEnum(parser.DocumentPosition.implementation_specific) +
                @intFromEnum(parser.DocumentPosition.preceding);
        }

        // TODO Both are in a different trees in the same document.

        const w = Walker{};
        var next: ?*parser.Node = null;

        // Is other a descendant of self?
        while (true) {
            next = try w.get_next(self, next) orelse break;
            if (other == next) {
                return @intFromEnum(parser.DocumentPosition.following) +
                    @intFromEnum(parser.DocumentPosition.contained_by);
            }
        }

        // Is self a descendant of other?
        next = null;
        while (true) {
            next = try w.get_next(other, next) orelse break;
            if (self == next) {
                return @intFromEnum(parser.DocumentPosition.contains) +
                    @intFromEnum(parser.DocumentPosition.preceding);
            }
        }

        // Neither contains the other: walk the whole document in tree order
        // and report whichever node is encountered first.
        next = null;
        while (true) {
            next = try w.get_next(parser.documentToNode(docself.?), next) orelse break;
            if (other == next) {
                // other precedes self.
                return @intFromEnum(parser.DocumentPosition.preceding);
            }
            if (self == next) {
                // other follows self.
                return @intFromEnum(parser.DocumentPosition.following);
            }
        }

        return 0;
    }

    pub fn _contains(self: *parser.Node, other: *parser.Node) !bool {
        return try parser.nodeContains(self, other);
    }

    // Returns itself or ancestor object inheriting from Node.
    // - An Element inside a standard web page will return an HTMLDocument object representing the entire page (or <iframe>).
    // - An Element inside a shadow DOM will return the associated ShadowRoot.
    // - An Element that is not attached to a document or a shadow tree will return the root of the DOM tree it belongs to
    const GetRootNodeResult = union(enum) {
        shadow_root: *ShadowRoot,
        node: Union,
    };
    pub fn _getRootNode(self: *parser.Node, options: ?struct { composed: bool = false }, page: *Page) !GetRootNodeResult {
        if (options) |options_| if (options_.composed) {
            log.warn(.web_api, "not implemented", .{ .feature = "getRootNode composed" });
        };

        const root = try parser.nodeGetRootNode(self);
        // If the root carries shadow-root state, surface the ShadowRoot
        // wrapper instead of the plain node.
        if (page.getNodeState(root)) |state| {
            if (state.shadow_root) |sr| {
                return .{ .shadow_root = sr };
            }
        }

        return .{ .node = try Node.toInterface(root) };
    }

    pub fn _hasChildNodes(self: *parser.Node) !bool {
        return try parser.nodeHasChildNodes(self);
    }

    // Snapshots the current children into a NodeList allocated from the
    // page arena.
    pub fn get_childNodes(self: *parser.Node, page: *Page) !NodeList {
        const allocator = page.arena;
        var list: NodeList = .{};

        var n = try parser.nodeFirstChild(self) orelse return list;
        while (true) {
            try list.append(allocator, n);
            n = try parser.nodeNextSibling(n) orelse return list;
        }
    }

    /// Inserts `new_node` before `ref_node_`; a null reference appends,
    /// adopting `new_node`'s subtree into self's ownerDocument when needed.
    pub fn _insertBefore(self: *parser.Node, new_node: *parser.Node, ref_node_: ?*parser.Node) !Union {
        if (ref_node_ == null) {
            return _appendChild(self, new_node);
        }

        const self_owner = try parser.nodeOwnerDocument(self);
        const new_node_owner = try parser.nodeOwnerDocument(new_node);

        // If the node to be inserted has a different ownerDocument than the parent node,
        // modern browsers automatically adopt the node and its descendants into
        // the parent's ownerDocument.
        // This process is known as adoption.
        // (7.1) https://dom.spec.whatwg.org/#concept-node-insert
        if (new_node_owner == null or (self_owner != null and new_node_owner.? != self_owner.?)) {
            const w = Walker{};
            var current = new_node;
            while (true) {
                current.owner = self_owner;
                current = try w.get_next(new_node, current) orelse break;
            }
        }

        return Node.toInterface(try parser.nodeInsertBefore(self, new_node, ref_node_.?));
    }

    pub fn _isDefaultNamespace(self: *parser.Node, namespace: ?[]const u8) !bool {
        return try parser.nodeIsDefaultNamespace(self, namespace);
    }

    pub fn _isEqualNode(self: *parser.Node, other: *parser.Node) !bool {
        // TODO: other is not an optional parameter, but can be null.
        return try parser.nodeIsEqualNode(self, other);
    }

    pub fn _isSameNode(self: *parser.Node, other: *parser.Node) !bool {
        // TODO: other is not an optional parameter, but can be null.
        // NOTE: there is no need to use isSameNode(); instead use the === strict equality operator
        return try parser.nodeIsSameNode(self, other);
    }

    pub fn _lookupPrefix(self: *parser.Node, namespace: ?[]const u8) !?[]const u8 {
        // TODO: other is not an optional parameter, but can be null.
        // A null or empty namespace always resolves to no prefix.
        if (namespace == null) {
            return null;
        }
        if (std.mem.eql(u8, namespace.?, "")) {
            return null;
        }
        return try parser.nodeLookupPrefix(self, namespace.?);
    }

    pub fn _lookupNamespaceURI(self: *parser.Node, prefix: ?[]const u8) !?[]const u8 {
        // TODO: other is not an optional parameter, but can be null.
        return try parser.nodeLookupNamespaceURI(self, prefix);
    }

    pub fn _normalize(self: *parser.Node) !void {
        return try parser.nodeNormalize(self);
    }

    pub fn _removeChild(self: *parser.Node, child: *parser.Node) !Union {
        const res = try parser.nodeRemoveChild(self, child);
        return try Node.toInterface(res);
    }

    pub fn _replaceChild(self: *parser.Node, new_child: *parser.Node, old_child: *parser.Node) !Union {
        const res = try parser.nodeReplaceChild(self, new_child, old_child);
        return try Node.toInterface(res);
    }

    // Check if the hierarchy node tree constraints are respected.
    // For now, it checks only if new nodes are not self.
    // TODO implements the others contraints.
    // see https://dom.spec.whatwg.org/#concept-node-tree
    pub fn hierarchy(self: *parser.Node, nodes: []const NodeOrText) bool {
        for (nodes) |n| {
            if (n.is(self)) {
                return false;
            }
        }
        return true;
    }

    /// Inserts `nodes` (nodes or to-be-created text nodes) before self's
    /// first child; appends when self has no children.
    pub fn prepend(self: *parser.Node, nodes: []const NodeOrText) !void {
        if (nodes.len == 0) {
            return;
        }

        // check hierarchy
        if (!hierarchy(self, nodes)) {
            return parser.DOMError.HierarchyRequest;
        }

        const doc = (try parser.nodeOwnerDocument(self)) orelse return;

        if (try parser.nodeFirstChild(self)) |first| {
            for (nodes) |node| {
                _ = try parser.nodeInsertBefore(self, try node.toNode(doc), first);
            }
            return;
        }

        for (nodes) |node| {
            _ = try parser.nodeAppendChild(self, try node.toNode(doc));
        }
    }

    /// Appends `nodes` (nodes or to-be-created text nodes) to self.
    pub fn append(self: *parser.Node, nodes: []const NodeOrText) !void {
        if (nodes.len == 0) {
            return;
        }

        // check hierarchy
        if (!hierarchy(self, nodes)) {
            return parser.DOMError.HierarchyRequest;
        }

        const doc = (try parser.nodeOwnerDocument(self)) orelse return;
        for (nodes) |node| {
            _ = try parser.nodeAppendChild(self, try node.toNode(doc));
        }
    }

    /// Replaces all of self's children with `nodes`.
    pub fn replaceChildren(self: *parser.Node, nodes: []const NodeOrText) !void {
        if (nodes.len == 0) {
            return;
        }

        // check hierarchy
        if (!hierarchy(self, nodes)) {
            return parser.DOMError.HierarchyRequest;
        }

        // remove existing children
        try removeChildren(self);

        const doc = (try parser.nodeOwnerDocument(self)) orelse return;
        // add new children
        for (nodes) |node| {
            _ = try parser.nodeAppendChild(self, try node.toNode(doc));
        }
    }

    /// Detaches every child of self.
    pub fn removeChildren(self: *parser.Node) !void {
        if (!try parser.nodeHasChildNodes(self)) return;

        const children = try parser.nodeGetChildNodes(self);
        const ln = try parser.nodeListLength(children);
        var i: u32 = 0;
        while (i < ln) {
            defer i += 1;
            // we always retrieve the 0 index child on purpose: libdom nodelist
            // are dynamic. So the next child to remove is always as pos 0.
            const child = try parser.nodeListItem(children, 0) orelse continue;
            _ = try parser.nodeRemoveChild(self, child);
        }
    }

    /// Inserts `nodes` into self's parent, immediately before self.
    pub fn before(self: *parser.Node, nodes: []const NodeOrText) !void {
        const parent = try parser.nodeParentNode(self) orelse return;
        const doc = (try parser.nodeOwnerDocument(parent)) orelse return;

        var sibling: ?*parser.Node = self;
        // have to find the first sibling that isn't in nodes
        CHECK: while (sibling) |s| {
            for (nodes) |n| {
                if (n.is(s)) {
                    sibling = try parser.nodePreviousSibling(s);
                    continue :CHECK;
                }
            }
            break;
        }

        if (sibling == null) {
            sibling = try parser.nodeFirstChild(parent);
        }

        if (sibling) |ref_node| {
            for (nodes) |node| {
                _ = try parser.nodeInsertBefore(parent, try node.toNode(doc), ref_node);
            }
            return;
        }

        return Node.prepend(self, nodes);
    }

    /// Inserts `nodes` into self's parent, immediately after self.
    pub fn after(self: *parser.Node, nodes: []const NodeOrText) !void {
        const parent = try parser.nodeParentNode(self) orelse return;
        const doc = (try parser.nodeOwnerDocument(parent)) orelse return;

        // have to find the first sibling that isn't in nodes
        var sibling = try parser.nodeNextSibling(self);
        CHECK: while (sibling) |s| {
            for (nodes) |n| {
                if (n.is(s)) {
                    sibling = try parser.nodeNextSibling(s);
                    continue :CHECK;
                }
            }
            break;
        }

        if (sibling) |ref_node| {
            for (nodes) |node| {
                _ = try parser.nodeInsertBefore(parent, try node.toNode(doc), ref_node);
            }
            return;
        }

        for (nodes) |node| {
            _ = try parser.nodeAppendChild(parent, try node.toNode(doc));
        }
    }

    // A lot of functions take either a node or text input.
    // The text input is to be converted into a Text node.
    pub const NodeOrText = union(enum) {
        text: []const u8,
        node: *parser.Node,

        // Resolves to a real node, creating a Text node in `doc` for the
        // text variant.
        fn toNode(self: NodeOrText, doc: *parser.Document) !*parser.Node {
            return switch (self) {
                .node => |n| n,
                .text => |txt| @ptrCast(@alignCast(try parser.documentCreateTextNode(doc, txt))),
            };
        }

        // Whether the node represented by the NodeOrText is the same as the
        // given Node. Always false for text values as these represent as-of-yet
        // created Text nodes.
        fn is(self: NodeOrText, other: *parser.Node) bool {
            return switch (self) {
                .text => false,
                .node => |n| n == other,
            };
        }
    };
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");

// End-to-end coverage: runs the Node and node-ownership HTML fixtures
// through the shared browser test harness.
test "Browser: DOM.Node" {
    try testing.htmlRunner("dom/node.html");
    try testing.htmlRunner("dom/node_owner.html");
}
|
||||||
80
src/browser/dom/node_filter.zig
Normal file
80
src/browser/dom/node_filter.zig
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
|
||||||
|
pub const NodeFilter = struct {
    // Return codes an acceptNode() callback may produce.
    pub const _FILTER_ACCEPT: u16 = 1;
    pub const _FILTER_REJECT: u16 = 2;
    pub const _FILTER_SKIP: u16 = 3;

    // whatToShow bitmask flags: one bit per node type, in nodeType order.
    pub const _SHOW_ALL: u32 = std.math.maxInt(u32);
    pub const _SHOW_ELEMENT: u32 = 1 << 0;
    pub const _SHOW_ATTRIBUTE: u32 = 1 << 1;
    pub const _SHOW_TEXT: u32 = 1 << 2;
    pub const _SHOW_CDATA_SECTION: u32 = 1 << 3;
    pub const _SHOW_ENTITY_REFERENCE: u32 = 1 << 4;
    pub const _SHOW_ENTITY: u32 = 1 << 5;
    pub const _SHOW_PROCESSING_INSTRUCTION: u32 = 1 << 6;
    pub const _SHOW_COMMENT: u32 = 1 << 7;
    pub const _SHOW_DOCUMENT: u32 = 1 << 8;
    pub const _SHOW_DOCUMENT_TYPE: u32 = 1 << 9;
    pub const _SHOW_DOCUMENT_FRAGMENT: u32 = 1 << 10;
    pub const _SHOW_NOTATION: u32 = 1 << 11;
};
|
||||||
|
|
||||||
|
const VerifyResult = enum { accept, skip, reject };

/// Decides whether `node` should be surfaced by a NodeIterator/TreeWalker:
/// the whatToShow bitmask is consulted first, then the optional JS filter.
pub fn verify(what_to_show: u32, filter: ?Env.Function, node: *parser.Node) !VerifyResult {
    // Map the node's type onto its whatToShow bit.
    const show_bit: u32 = switch (try parser.nodeType(node)) {
        .attribute => NodeFilter._SHOW_ATTRIBUTE,
        .cdata_section => NodeFilter._SHOW_CDATA_SECTION,
        .comment => NodeFilter._SHOW_COMMENT,
        .document => NodeFilter._SHOW_DOCUMENT,
        .document_fragment => NodeFilter._SHOW_DOCUMENT_FRAGMENT,
        .document_type => NodeFilter._SHOW_DOCUMENT_TYPE,
        .element => NodeFilter._SHOW_ELEMENT,
        .entity => NodeFilter._SHOW_ENTITY,
        .entity_reference => NodeFilter._SHOW_ENTITY_REFERENCE,
        .notation => NodeFilter._SHOW_NOTATION,
        .processing_instruction => NodeFilter._SHOW_PROCESSING_INSTRUCTION,
        .text => NodeFilter._SHOW_TEXT,
    };
    if (what_to_show & show_bit == 0) {
        return .reject;
    }

    // No custom filter: everything that passed the mask is accepted.
    const f = filter orelse return .accept;

    // Any return value other than ACCEPT or SKIP is treated as a rejection.
    return switch (try f.call(u16, .{try Node.toInterface(node)})) {
        NodeFilter._FILTER_ACCEPT => .accept,
        NodeFilter._FILTER_SKIP => .skip,
        else => .reject,
    };
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
// End-to-end coverage: runs the NodeFilter html test page through the runner.
test "Browser: DOM.NodeFilter" {
    try testing.htmlRunner("dom/node_filter.html");
}
||||||
273
src/browser/dom/node_iterator.zig
Normal file
273
src/browser/dom/node_iterator.zig
Normal file
@@ -0,0 +1,273 @@
|
|||||||
|
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const NodeFilter = @import("node_filter.zig");
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
const NodeUnion = @import("node.zig").Union;
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/NodeIterator
// While this is similar to TreeWalker it has its own implementation as there are several subtle differences
// For example:
// - nextNode returns the reference node, whereas TreeWalker returns the next node
// - Skip and reject are equivalent for NodeIterator, for TreeWalker they are different
pub const NodeIterator = struct {
    pub const Exception = DOMException;

    root: *parser.Node,
    reference_node: *parser.Node,
    what_to_show: u32,
    filter: ?NodeIteratorOpts,
    filter_func: ?Env.Function,
    // True while the logical pointer sits before reference_node (initial state).
    pointer_before_current: bool = true,
    // used to track / block recursive filters
    is_in_callback: bool = false,

    // One of the few cases where null and undefined resolve to different default.
    // We need the raw JsObject so that we can probe the tri state:
    // null, undefined or i32.
    pub const WhatToShow = Env.JsObject;

    // The filter argument may be a bare function or an object exposing acceptNode.
    pub const NodeIteratorOpts = union(enum) {
        function: Env.Function,
        object: struct { acceptNode: Env.Function },
    };

    pub fn init(node: *parser.Node, what_to_show_: ?WhatToShow, filter: ?NodeIteratorOpts) !NodeIterator {
        // Normalize the filter union into the callable we invoke later.
        var filter_func: ?Env.Function = null;
        if (filter) |f| {
            filter_func = switch (f) {
                .function => |func| func,
                .object => |o| o.acceptNode,
            };
        }

        // null -> show nothing, undefined/missing -> show everything.
        var what_to_show: u32 = undefined;
        if (what_to_show_) |wts| {
            switch (try wts.triState(NodeIterator, "what_to_show", u32)) {
                .null => what_to_show = 0,
                .undefined => what_to_show = NodeFilter.NodeFilter._SHOW_ALL,
                .value => |v| what_to_show = v,
            }
        } else {
            what_to_show = NodeFilter.NodeFilter._SHOW_ALL;
        }

        return .{
            .root = node,
            .reference_node = node,
            .what_to_show = what_to_show,
            .filter = filter,
            .filter_func = filter_func,
        };
    }

    pub fn get_filter(self: *const NodeIterator) ?NodeIteratorOpts {
        return self.filter;
    }

    pub fn get_pointerBeforeReferenceNode(self: *const NodeIterator) bool {
        return self.pointer_before_current;
    }

    pub fn get_referenceNode(self: *const NodeIterator) !NodeUnion {
        return try Node.toInterface(self.reference_node);
    }

    pub fn get_root(self: *const NodeIterator) !NodeUnion {
        return try Node.toInterface(self.root);
    }

    pub fn get_whatToShow(self: *const NodeIterator) u32 {
        return self.what_to_show;
    }

    // Advances to (and returns) the next accepted node in document order,
    // or null when traversal past root is exhausted.
    pub fn _nextNode(self: *NodeIterator) !?NodeUnion {
        try self.callbackStart();
        defer self.callbackEnd();

        if (self.pointer_before_current) {
            // Unlike TreeWalker, NodeIterator starts at the first node
            if (.accept == try NodeFilter.verify(self.what_to_show, self.filter_func, self.reference_node)) {
                self.pointer_before_current = false;
                return try Node.toInterface(self.reference_node);
            }
        }

        if (try self.firstChild(self.reference_node)) |child| {
            self.reference_node = child;
            return try Node.toInterface(child);
        }

        // No accepted descendant: climb toward root looking for a sibling.
        var current = self.reference_node;
        while (current != self.root) {
            if (try self.nextSibling(current)) |sibling| {
                self.reference_node = sibling;
                return try Node.toInterface(sibling);
            }

            current = (try parser.nodeParentNode(current)) orelse break;
        }

        return null;
    }

    // Moves to (and returns) the previous accepted node, or null at the start.
    pub fn _previousNode(self: *NodeIterator) !?NodeUnion {
        try self.callbackStart();
        defer self.callbackEnd();

        if (!self.pointer_before_current) {
            if (.accept == try NodeFilter.verify(self.what_to_show, self.filter_func, self.reference_node)) {
                self.pointer_before_current = true;
                // Still need to verify as last may be first as well
                return try Node.toInterface(self.reference_node);
            }
        }
        if (self.reference_node == self.root) {
            return null;
        }

        var current = self.reference_node;
        while (try parser.nodePreviousSibling(current)) |previous| {
            current = previous;

            switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
                .accept => {
                    // Get last child if it has one.
                    if (try self.lastChild(current)) |child| {
                        self.reference_node = child;
                        return try Node.toInterface(child);
                    }

                    // Otherwise, this node is our previous one.
                    self.reference_node = current;
                    return try Node.toInterface(current);
                },
                .reject, .skip => {
                    // Get last child if it has one.
                    if (try self.lastChild(current)) |child| {
                        self.reference_node = child;
                        return try Node.toInterface(child);
                    }
                },
            }
        }

        // No previous sibling matched: fall back to the nearest accepted ancestor.
        if (current != self.root) {
            if (try self.parentNode(current)) |parent| {
                self.reference_node = parent;
                return try Node.toInterface(parent);
            }
        }

        return null;
    }

    pub fn _detach(self: *const NodeIterator) void {
        // no-op as per spec
        _ = self;
    }

    // Depth-first search for the first accepted descendant of `node`.
    fn firstChild(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
        const children = try parser.nodeGetChildNodes(node);
        const child_count = try parser.nodeListLength(children);

        for (0..child_count) |i| {
            const index: u32 = @intCast(i);
            const child = (try parser.nodeListItem(children, index)) orelse return null;

            switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
                .accept => return child, // NOTE: Skip and reject are equivalent for NodeIterator, this is different from TreeWalker
                .reject, .skip => if (try self.firstChild(child)) |gchild| return gchild,
            }
        }

        return null;
    }

    // Depth-first search (from the end) for the last accepted descendant of `node`.
    fn lastChild(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
        const children = try parser.nodeGetChildNodes(node);
        const child_count = try parser.nodeListLength(children);

        var index: u32 = child_count;
        while (index > 0) {
            index -= 1;
            const child = (try parser.nodeListItem(children, index)) orelse return null;

            switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
                .accept => return child, // NOTE: Skip and reject are equivalent for NodeIterator, this is different from TreeWalker
                .reject, .skip => if (try self.lastChild(child)) |gchild| return gchild,
            }
        }

        return null;
    }

    // This implementation is actually the same as :TreeWalker
    // Walks up the tree for the nearest accepted ancestor, stopping at root.
    fn parentNode(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
        if (self.root == node) return null;

        var current = node;
        while (true) {
            if (current == self.root) return null;
            current = (try parser.nodeParentNode(current)) orelse return null;

            switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
                .accept => return current,
                .reject, .skip => continue,
            }
        }
    }

    // This implementation is actually the same as :TreeWalker
    // Finds the next accepted following sibling of `node`, if any.
    fn nextSibling(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
        var current = node;

        while (true) {
            current = (try parser.nodeNextSibling(current)) orelse return null;

            switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
                .accept => return current,
                .skip, .reject => continue,
            }
        }
    }

    // Guards against a filter callback re-entering the iterator.
    fn callbackStart(self: *NodeIterator) !void {
        if (self.is_in_callback) {
            // this is the correct DOMException
            return error.InvalidState;
        }
        self.is_in_callback = true;
    }

    fn callbackEnd(self: *NodeIterator) void {
        self.is_in_callback = false;
    }
};
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
// End-to-end coverage: runs the NodeIterator html test page through the runner.
test "Browser: DOM.NodeIterator" {
    try testing.htmlRunner("dom/node_iterator.html");
}
||||||
182
src/browser/dom/nodelist.zig
Normal file
182
src/browser/dom/nodelist.zig
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const JsThis = @import("../env.zig").JsThis;
|
||||||
|
const Function = @import("../env.zig").Function;
|
||||||
|
|
||||||
|
const NodeUnion = @import("node.zig").Union;
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
|
||||||
|
const U32Iterator = @import("../iterator/iterator.zig").U32Iterator;
|
||||||
|
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
|
||||||
|
// Types registered with the JS environment by this module.
pub const Interfaces = .{
    NodeListIterator,
    NodeList,
};
||||||
|
|
||||||
|
// JS iterator over a NodeList's values; conforms to the iterator protocol
// by returning { value, done } pairs from _next().
pub const NodeListIterator = struct {
    coll: *NodeList,
    index: u32 = 0,

    pub const Return = struct {
        value: ?NodeUnion,
        done: bool,
    };

    // Returns the next node, or { value = null, done = true } once exhausted.
    pub fn _next(self: *NodeListIterator) !Return {
        const e = try self.coll._item(self.index);
        if (e == null) {
            return Return{
                .value = null,
                .done = true,
            };
        }

        self.index += 1;
        return Return{
            .value = e,
            .done = false,
        };
    }
};
||||||
|
|
||||||
|
// NOTE(review): identical to NodeListIterator and not registered in
// Interfaces — presumably a placeholder for entries() support (see the
// TODO on NodeList). Verify before relying on it.
pub const NodeListEntriesIterator = struct {
    coll: *NodeList,
    index: u32 = 0,

    pub const Return = struct {
        value: ?NodeUnion,
        done: bool,
    };

    // Returns the next node, or { value = null, done = true } once exhausted.
    pub fn _next(self: *NodeListEntriesIterator) !Return {
        const e = try self.coll._item(self.index);
        if (e == null) {
            return Return{
                .value = null,
                .done = true,
            };
        }

        self.index += 1;
        return Return{
            .value = e,
            .done = false,
        };
    }
};
||||||
|
|
||||||
|
// Nodelist is implemented in pure Zig b/c libdom's NodeList doesn't allow to
// append nodes.
// WEB IDL https://dom.spec.whatwg.org/#nodelist
//
// TODO: a Nodelist can be either static or live. But the current
// implementation allows only static nodelist.
// see https://dom.spec.whatwg.org/#old-style-collections
pub const NodeList = struct {
    pub const Exception = DOMException;
    const NodesArrayList = std.ArrayListUnmanaged(*parser.Node);

    nodes: NodesArrayList = .{},

    pub fn deinit(self: *NodeList, alloc: std.mem.Allocator) void {
        // TODO unref all nodes
        self.nodes.deinit(alloc);
    }

    pub fn append(self: *NodeList, alloc: std.mem.Allocator, node: *parser.Node) !void {
        try self.nodes.append(alloc, node);
    }

    pub fn get_length(self: *const NodeList) u32 {
        return @intCast(self.nodes.items.len);
    }

    // Returns the node at `index` wrapped in its JS interface, or null when
    // the index is out of range.
    pub fn _item(self: *const NodeList, index: u32) !?NodeUnion {
        if (index >= self.nodes.items.len) {
            return null;
        }

        const n = self.nodes.items[index];
        return try Node.toInterface(n);
    }

    // This code works, but it's _MUCH_ slower than using postAttach. The benefit
    // of this version, is that it's "live"..but we're talking many orders of
    // magnitude slower.
    //
    // You can test it by commenting out `postAttach`, uncommenting this and
    // running:
    // zig build wpt -- tests/wpt/dom/nodes/NodeList-static-length-getter-tampered-indexOf-1.html
    //
    // I think this _is_ the right way to do it, but I must be doing something
    // wrong to make it so slow.
    // pub fn indexed_get(self: *const NodeList, index: u32, has_value: *bool) !?NodeUnion {
    //     return (try self._item(index)) orelse {
    //         has_value.* = false;
    //         return null;
    //     };
    // }

    // Invokes `cbk` for each node; callback errors are logged and swallowed
    // so one failing callback doesn't stop the iteration.
    pub fn _forEach(self: *NodeList, cbk: Function) !void { // TODO handle thisArg
        for (self.nodes.items, 0..) |n, i| {
            const ii: u32 = @intCast(i);
            var result: Function.Result = undefined;
            cbk.tryCall(void, .{ n, ii, self }, &result) catch {
                log.debug(.user_script, "forEach callback", .{ .err = result.exception, .stack = result.stack });
            };
        }
    }

    pub fn _keys(self: *NodeList) U32Iterator {
        return .{
            .length = self.get_length(),
        };
    }

    pub fn _values(self: *NodeList) NodeListIterator {
        return .{
            .coll = self,
        };
    }

    pub fn _symbol_iterator(self: *NodeList) NodeListIterator {
        return self._values();
    }

    // TODO entries() https://developer.mozilla.org/en-US/docs/Web/API/NodeList/entries

    // Exposes nodes as indexed properties (list[0], list[1], ...) on the
    // wrapping JS object after it is attached.
    pub fn postAttach(self: *NodeList, js_this: JsThis) !void {
        const len = self.get_length();
        for (0..len) |i| {
            const node = try self._item(@intCast(i)) orelse unreachable;
            try js_this.setIndex(@intCast(i), node, .{});
        }
    }
};
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
// End-to-end coverage: runs the NodeList html test page through the runner.
test "Browser: DOM.NodeList" {
    try testing.htmlRunner("dom/node_list.html");
}
||||||
208
src/browser/dom/performance.zig
Normal file
208
src/browser/dom/performance.zig
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const milliTimestamp = @import("../../datetime.zig").milliTimestamp;
|
||||||
|
|
||||||
|
// Types registered with the JS environment by this module.
pub const Interfaces = .{
    Performance,
    PerformanceEntry,
    PerformanceMark,
};
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Performance
pub const Performance = struct {
    pub const prototype = *EventTarget;

    // Extend libdom event target for pure zig struct.
    base: parser.EventTargetTBase = parser.EventTargetTBase{ .internal_target_type = .performance },

    // Wall-clock origin (ms) that _now() measures against.
    time_origin: u64,
    // if (Window.crossOriginIsolated) -> Resolution in isolated contexts: 5 microseconds
    // else -> Resolution in non-isolated contexts: 100 microseconds
    const ms_resolution = 100;

    pub fn init() Performance {
        return .{
            .time_origin = milliTimestamp(),
        };
    }

    pub fn get_timeOrigin(self: *const Performance) u64 {
        return self.time_origin;
    }

    // Re-anchors the origin to the current time (e.g. on navigation).
    pub fn reset(self: *Performance) void {
        self.time_origin = milliTimestamp();
    }

    // Milliseconds elapsed since time_origin.
    pub fn _now(self: *const Performance) u64 {
        return milliTimestamp() - self.time_origin;
    }

    pub fn _mark(_: *Performance, name: []const u8, _options: ?PerformanceMark.Options, page: *Page) !PerformanceMark {
        const mark: PerformanceMark = try .constructor(name, _options, page);
        // TODO: Should store this in an entries list
        return mark;
    }

    // TODO: fn _mark should record the marks in a lookup
    pub fn _clearMarks(_: *Performance, name: ?[]const u8) void {
        _ = name;
    }

    // TODO: fn _measures should record the marks in a lookup
    pub fn _clearMeasures(_: *Performance, name: ?[]const u8) void {
        _ = name;
    }

    // TODO: fn _measures should record the marks in a lookup
    pub fn _getEntriesByName(_: *Performance, name: []const u8, typ: ?[]const u8) []PerformanceEntry {
        _ = name;
        _ = typ;
        return &.{};
    }

    // TODO: fn _measures should record the marks in a lookup
    pub fn _getEntriesByType(_: *Performance, typ: []const u8) []PerformanceEntry {
        _ = typ;
        return &.{};
    }
};
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry
pub const PerformanceEntry = struct {
    const PerformanceEntryType = enum {
        element,
        event,
        first_input,
        largest_contentful_paint,
        layout_shift,
        long_animation_frame,
        longtask,
        mark,
        measure,
        navigation,
        paint,
        resource,
        taskattribution,
        visibility_state,

        // Maps the enum tag to the spec's hyphenated entryType string.
        pub fn toString(self: PerformanceEntryType) []const u8 {
            return switch (self) {
                .first_input => "first-input",
                .largest_contentful_paint => "largest-contentful-paint",
                .layout_shift => "layout-shift",
                .long_animation_frame => "long-animation-frame",
                .visibility_state => "visibility-state",
                else => @tagName(self),
            };
        }
    };

    duration: f64 = 0.0,
    entry_type: PerformanceEntryType,
    name: []const u8,
    start_time: f64 = 0.0,

    pub fn get_duration(self: *const PerformanceEntry) f64 {
        return self.duration;
    }

    pub fn get_entryType(self: *const PerformanceEntry) PerformanceEntryType {
        return self.entry_type;
    }

    pub fn get_name(self: *const PerformanceEntry) []const u8 {
        return self.name;
    }

    pub fn get_startTime(self: *const PerformanceEntry) f64 {
        return self.start_time;
    }
};
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/PerformanceMark
pub const PerformanceMark = struct {
    pub const prototype = *PerformanceEntry;

    proto: PerformanceEntry,
    detail: ?Env.JsObject,

    const Options = struct {
        detail: ?Env.JsObject = null,
        startTime: ?f64 = null,
    };

    // Creates a mark named `name`; startTime defaults to performance.now()
    // and must not be negative (TypeError per spec).
    pub fn constructor(name: []const u8, _options: ?Options, page: *Page) !PerformanceMark {
        const perf = &page.window.performance;

        const options = _options orelse Options{};
        const start_time = options.startTime orelse @as(f64, @floatFromInt(perf._now()));

        if (start_time < 0.0) {
            return error.TypeError;
        }

        // Persist the detail object so it outlives the current JS scope.
        const detail = if (options.detail) |d| try d.persist() else null;

        // Dupe the name into the page arena: the caller's slice may not outlive us.
        const duped_name = try page.arena.dupe(u8, name);
        const proto = PerformanceEntry{ .name = duped_name, .entry_type = .mark, .start_time = start_time };

        return .{ .proto = proto, .detail = detail };
    }

    pub fn get_detail(self: *const PerformanceMark) ?Env.JsObject {
        return self.detail;
    }
};
||||||
|
|
||||||
|
const testing = @import("./../../testing.zig");

test "Performance: get_timeOrigin" {
    var perf = Performance.init();
    const time_origin = perf.get_timeOrigin();
    try testing.expect(time_origin >= 0);
}

test "Performance: now" {
    var perf = Performance.init();

    // Monotonically increasing
    var now = perf._now();
    while (now <= 0) { // Loop for now to not be 0
        try testing.expectEqual(now, 0);
        now = perf._now();
    }

    var after = perf._now();
    while (after <= now) { // Loop until after > now
        try testing.expectEqual(after, now);
        after = perf._now();
    }
}

test "Browser: Performance.Mark" {
    try testing.htmlRunner("dom/performance.html");
}
||||||
58
src/browser/dom/performance_observer.zig
Normal file
58
src/browser/dom/performance_observer.zig
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
|
||||||
|
const PerformanceEntry = @import("performance.zig").PerformanceEntry;
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/PerformanceObserver
// Stub implementation: the callback is accepted but never invoked, and no
// entry types are reported as supported.
pub const PerformanceObserver = struct {
    pub const _supportedEntryTypes = [0][]const u8{};

    pub fn constructor(cbk: Env.Function) PerformanceObserver {
        _ = cbk;
        return .{};
    }

    pub fn _observe(self: *const PerformanceObserver, options_: ?Options) void {
        _ = self;
        _ = options_;
        return;
    }

    pub fn _disconnect(self: *PerformanceObserver) void {
        _ = self;
    }

    pub fn _takeRecords(_: *const PerformanceObserver) []PerformanceEntry {
        return &[_]PerformanceEntry{};
    }
};
||||||
|
|
||||||
|
// Options accepted by PerformanceObserver.observe(); all fields optional.
const Options = struct {
    buffered: ?bool = null,
    durationThreshold: ?f64 = null,
    entryTypes: ?[]const []const u8 = null,
    type: ?[]const u8 = null,
};
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
// End-to-end coverage: runs the PerformanceObserver html test page through the runner.
test "Browser: DOM.PerformanceObserver" {
    try testing.htmlRunner("dom/performance_observer.html");
}
|
||||||
92
src/browser/dom/processing_instruction.zig
Normal file
92
src/browser/dom/processing_instruction.zig
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
// https://dom.spec.whatwg.org/#processinginstruction
pub const ProcessingInstruction = struct {
    pub const Self = parser.ProcessingInstruction;

    // TODO for libdom processing instruction inherit from node.
    // But the spec says it must inherit from CDATA.
    pub const prototype = *Node;
    pub const subtype = .node;

    pub fn get_target(self: *parser.ProcessingInstruction) ![]const u8 {
        // libdom stores the ProcessingInstruction target in the node's name.
        return try parser.nodeName(parser.processingInstructionToNode(self));
    }

    // There's something wrong when we try to clone a ProcessInstruction normally.
    // The resulting object can't be cast back into a node (it crashes). This is
    // a simple workaround.
    pub fn _cloneNode(self: *parser.ProcessingInstruction, _: ?bool, page: *Page) !*parser.ProcessingInstruction {
        return try parser.documentCreateProcessingInstruction(
            @ptrCast(page.window.document),
            try get_target(self),
            (try get_data(self)) orelse "",
        );
    }

    pub fn get_data(self: *parser.ProcessingInstruction) !?[]const u8 {
        return try parser.nodeValue(parser.processingInstructionToNode(self));
    }

    pub fn set_data(self: *parser.ProcessingInstruction, data: []u8) !void {
        try parser.nodeSetValue(parser.processingInstructionToNode(self), data);
    }

    // netsurf's ProcessInstruction doesn't implement the dom_node_get_attributes
    // and thus will crash if we try to call nodeIsEqualNode.
    pub fn _isEqualNode(self: *parser.ProcessingInstruction, other_node: *parser.Node) !bool {
        if (try parser.nodeType(other_node) != .processing_instruction) {
            return false;
        }

        const other: *parser.ProcessingInstruction = @ptrCast(other_node);

        if (std.mem.eql(u8, try get_target(self), try get_target(other)) == false) {
            return false;
        }

        {
            const self_data = try get_data(self);
            const other_data = try get_data(other);
            // Exactly one side null -> not equal.
            if (self_data == null and other_data != null) {
                return false;
            }
            if (self_data != null and other_data == null) {
                return false;
            }
            // FIX: the original unwrapped both values unconditionally, which
            // panics when both are null. Both-null counts as equal data, so
            // only compare bytes when both sides are present.
            if (self_data != null and other_data != null) {
                if (std.mem.eql(u8, self_data.?, other_data.?) == false) {
                    return false;
                }
            }
        }

        return true;
    }
};
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.ProcessingInstruction" {
|
||||||
|
try testing.htmlRunner("dom/processing_instruction.html");
|
||||||
|
}
|
||||||
390
src/browser/dom/range.zig
Normal file
390
src/browser/dom/range.zig
Normal file
@@ -0,0 +1,390 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
const NodeUnion = @import("node.zig").Union;
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
|
||||||
|
pub const Interfaces = .{
|
||||||
|
AbstractRange,
|
||||||
|
Range,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const AbstractRange = struct {
|
||||||
|
collapsed: bool,
|
||||||
|
end_node: *parser.Node,
|
||||||
|
end_offset: u32,
|
||||||
|
start_node: *parser.Node,
|
||||||
|
start_offset: u32,
|
||||||
|
|
||||||
|
pub fn updateCollapsed(self: *AbstractRange) void {
|
||||||
|
// TODO: Eventually, compare properly.
|
||||||
|
self.collapsed = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_collapsed(self: *const AbstractRange) bool {
|
||||||
|
return self.collapsed;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_endContainer(self: *const AbstractRange) !NodeUnion {
|
||||||
|
return Node.toInterface(self.end_node);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_endOffset(self: *const AbstractRange) u32 {
|
||||||
|
return self.end_offset;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_startContainer(self: *const AbstractRange) !NodeUnion {
|
||||||
|
return Node.toInterface(self.start_node);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_startOffset(self: *const AbstractRange) u32 {
|
||||||
|
return self.start_offset;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const Range = struct {
|
||||||
|
pub const Exception = DOMException;
|
||||||
|
pub const prototype = *AbstractRange;
|
||||||
|
|
||||||
|
proto: AbstractRange,
|
||||||
|
|
||||||
|
pub const _START_TO_START = 0;
|
||||||
|
pub const _START_TO_END = 1;
|
||||||
|
pub const _END_TO_END = 2;
|
||||||
|
pub const _END_TO_START = 3;
|
||||||
|
|
||||||
|
// The Range() constructor returns a newly created Range object whose start
|
||||||
|
// and end is the global Document object.
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Range/Range
|
||||||
|
pub fn constructor(page: *Page) Range {
|
||||||
|
const proto: AbstractRange = .{
|
||||||
|
.collapsed = true,
|
||||||
|
.end_node = parser.documentHTMLToNode(page.window.document),
|
||||||
|
.end_offset = 0,
|
||||||
|
.start_node = parser.documentHTMLToNode(page.window.document),
|
||||||
|
.start_offset = 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
return .{ .proto = proto };
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setStart(self: *Range, node: *parser.Node, offset_: i32) !void {
|
||||||
|
try ensureValidOffset(node, offset_);
|
||||||
|
const offset: u32 = @intCast(offset_);
|
||||||
|
const position = compare(node, offset, self.proto.start_node, self.proto.start_offset) catch |err| switch (err) {
|
||||||
|
error.WrongDocument => blk: {
|
||||||
|
// allow a node with a different root than the current, or
|
||||||
|
// a disconnected one. Treat it as if it's "after", so that
|
||||||
|
// we also update the end_offset and end_node.
|
||||||
|
break :blk 1;
|
||||||
|
},
|
||||||
|
else => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (position == 1) {
|
||||||
|
// if we're setting the node after the current start, the end must
|
||||||
|
// be set too.
|
||||||
|
self.proto.end_offset = offset;
|
||||||
|
self.proto.end_node = node;
|
||||||
|
}
|
||||||
|
self.proto.start_node = node;
|
||||||
|
self.proto.start_offset = offset;
|
||||||
|
self.proto.updateCollapsed();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setStartBefore(self: *Range, node: *parser.Node) !void {
|
||||||
|
const parent, const index = try getParentAndIndex(node);
|
||||||
|
self.proto.start_node = parent;
|
||||||
|
self.proto.start_offset = index;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setStartAfter(self: *Range, node: *parser.Node) !void {
|
||||||
|
const parent, const index = try getParentAndIndex(node);
|
||||||
|
self.proto.start_node = parent;
|
||||||
|
self.proto.start_offset = index + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setEnd(self: *Range, node: *parser.Node, offset_: i32) !void {
|
||||||
|
try ensureValidOffset(node, offset_);
|
||||||
|
const offset: u32 = @intCast(offset_);
|
||||||
|
|
||||||
|
const position = compare(node, offset, self.proto.start_node, self.proto.start_offset) catch |err| switch (err) {
|
||||||
|
error.WrongDocument => blk: {
|
||||||
|
// allow a node with a different root than the current, or
|
||||||
|
// a disconnected one. Treat it as if it's "before", so that
|
||||||
|
// we also update the end_offset and end_node.
|
||||||
|
break :blk -1;
|
||||||
|
},
|
||||||
|
else => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (position == -1) {
|
||||||
|
// if we're setting the node before the current start, the start
|
||||||
|
// must be set too.
|
||||||
|
self.proto.start_offset = offset;
|
||||||
|
self.proto.start_node = node;
|
||||||
|
}
|
||||||
|
|
||||||
|
self.proto.end_node = node;
|
||||||
|
self.proto.end_offset = offset;
|
||||||
|
self.proto.updateCollapsed();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setEndBefore(self: *Range, node: *parser.Node) !void {
|
||||||
|
const parent, const index = try getParentAndIndex(node);
|
||||||
|
self.proto.end_node = parent;
|
||||||
|
self.proto.end_offset = index;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _setEndAfter(self: *Range, node: *parser.Node) !void {
|
||||||
|
const parent, const index = try getParentAndIndex(node);
|
||||||
|
self.proto.end_node = parent;
|
||||||
|
self.proto.end_offset = index + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _createContextualFragment(_: *Range, fragment: []const u8, page: *Page) !*parser.DocumentFragment {
|
||||||
|
const document_html = page.window.document;
|
||||||
|
const document = parser.documentHTMLToDocument(document_html);
|
||||||
|
const doc_frag = try parser.documentParseFragmentFromStr(document, fragment);
|
||||||
|
return doc_frag;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _selectNodeContents(self: *Range, node: *parser.Node) !void {
|
||||||
|
self.proto.start_node = node;
|
||||||
|
self.proto.start_offset = 0;
|
||||||
|
self.proto.end_node = node;
|
||||||
|
|
||||||
|
// Set end_offset
|
||||||
|
switch (try parser.nodeType(node)) {
|
||||||
|
.text, .cdata_section, .comment, .processing_instruction => {
|
||||||
|
// For text-like nodes, end_offset should be the length of the text data
|
||||||
|
if (try parser.nodeValue(node)) |text_data| {
|
||||||
|
self.proto.end_offset = @intCast(text_data.len);
|
||||||
|
} else {
|
||||||
|
self.proto.end_offset = 0;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
else => {
|
||||||
|
// For element and other nodes, end_offset is the number of children
|
||||||
|
const child_nodes = try parser.nodeGetChildNodes(node);
|
||||||
|
const child_count = try parser.nodeListLength(child_nodes);
|
||||||
|
self.proto.end_offset = @intCast(child_count);
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
self.proto.updateCollapsed();
|
||||||
|
}
|
||||||
|
|
||||||
|
// creates a copy
|
||||||
|
pub fn _cloneRange(self: *const Range) Range {
|
||||||
|
return .{
|
||||||
|
.proto = .{
|
||||||
|
.collapsed = self.proto.collapsed,
|
||||||
|
.end_node = self.proto.end_node,
|
||||||
|
.end_offset = self.proto.end_offset,
|
||||||
|
.start_node = self.proto.start_node,
|
||||||
|
.start_offset = self.proto.start_offset,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _comparePoint(self: *const Range, node: *parser.Node, offset_: i32) !i32 {
|
||||||
|
const start = self.proto.start_node;
|
||||||
|
if (try parser.nodeGetRootNode(start) != try parser.nodeGetRootNode(node)) {
|
||||||
|
// WPT really wants this error to be first. Later, when we check
|
||||||
|
// if the relative position is 'disconnected', it'll also catch this
|
||||||
|
// case, but WPT will complain because it sometimes also sends
|
||||||
|
// invalid offsets, and it wants WrongDocument to be raised.
|
||||||
|
return error.WrongDocument;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (try parser.nodeType(node) == .document_type) {
|
||||||
|
return error.InvalidNodeType;
|
||||||
|
}
|
||||||
|
|
||||||
|
try ensureValidOffset(node, offset_);
|
||||||
|
|
||||||
|
const offset: u32 = @intCast(offset_);
|
||||||
|
if (try compare(node, offset, start, self.proto.start_offset) == -1) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (try compare(node, offset, self.proto.end_node, self.proto.end_offset) == 1) {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _isPointInRange(self: *const Range, node: *parser.Node, offset_: i32) !bool {
|
||||||
|
return self._comparePoint(node, offset_) catch |err| switch (err) {
|
||||||
|
error.WrongDocument => return false,
|
||||||
|
else => return err,
|
||||||
|
} == 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _intersectsNode(self: *const Range, node: *parser.Node) !bool {
|
||||||
|
const start_root = try parser.nodeGetRootNode(self.proto.start_node);
|
||||||
|
const node_root = try parser.nodeGetRootNode(node);
|
||||||
|
if (start_root != node_root) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parent, const index = getParentAndIndex(node) catch |err| switch (err) {
|
||||||
|
error.InvalidNodeType => return true, // if node has no parent, we return true.
|
||||||
|
else => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (try compare(parent, index + 1, self.proto.start_node, self.proto.start_offset) != 1) {
|
||||||
|
// node isn't after start, can't intersect
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (try compare(parent, index, self.proto.end_node, self.proto.end_offset) != -1) {
|
||||||
|
// node isn't before end, can't intersect
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _compareBoundaryPoints(self: *const Range, how: i32, other: *const Range) !i32 {
|
||||||
|
return switch (how) {
|
||||||
|
_START_TO_START => compare(self.proto.start_node, self.proto.start_offset, other.proto.start_node, other.proto.start_offset),
|
||||||
|
_START_TO_END => compare(self.proto.start_node, self.proto.start_offset, other.proto.end_node, other.proto.end_offset),
|
||||||
|
_END_TO_END => compare(self.proto.end_node, self.proto.end_offset, other.proto.end_node, other.proto.end_offset),
|
||||||
|
_END_TO_START => compare(self.proto.end_node, self.proto.end_offset, other.proto.start_node, other.proto.start_offset),
|
||||||
|
else => error.NotSupported, // this is the correct DOM Exception to return
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// The Range.detach() method does nothing. It used to disable the Range
|
||||||
|
// object and enable the browser to release associated resources. The
|
||||||
|
// method has been kept for compatibility.
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Range/detach
|
||||||
|
pub fn _detach(_: *Range) void {}
|
||||||
|
};
|
||||||
|
|
||||||
|
fn ensureValidOffset(node: *parser.Node, offset: i32) !void {
|
||||||
|
if (offset < 0) {
|
||||||
|
return error.IndexSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
// not >= because 0 seems to represent the node itself.
|
||||||
|
if (offset > try nodeLength(node)) {
|
||||||
|
return error.IndexSize;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nodeLength(node: *parser.Node) !usize {
|
||||||
|
switch (try isTextual(node)) {
|
||||||
|
true => return ((try parser.nodeTextContent(node)) orelse "").len,
|
||||||
|
false => {
|
||||||
|
const children = try parser.nodeGetChildNodes(node);
|
||||||
|
return @intCast(try parser.nodeListLength(children));
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn isTextual(node: *parser.Node) !bool {
|
||||||
|
return switch (try parser.nodeType(node)) {
|
||||||
|
.text, .comment, .cdata_section => true,
|
||||||
|
else => false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn getParentAndIndex(child: *parser.Node) !struct { *parser.Node, u32 } {
|
||||||
|
const parent = (try parser.nodeParentNode(child)) orelse return error.InvalidNodeType;
|
||||||
|
const children = try parser.nodeGetChildNodes(parent);
|
||||||
|
const ln = try parser.nodeListLength(children);
|
||||||
|
var i: u32 = 0;
|
||||||
|
while (i < ln) {
|
||||||
|
defer i += 1;
|
||||||
|
const c = try parser.nodeListItem(children, i) orelse continue;
|
||||||
|
if (c == child) {
|
||||||
|
return .{ parent, i };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// should not be possible to reach this point
|
||||||
|
return error.InvalidNodeType;
|
||||||
|
}
|
||||||
|
|
||||||
|
// implementation is largely copied from the WPT helper called getPosition in
|
||||||
|
// the common.js of the dom folder.
|
||||||
|
fn compare(node_a: *parser.Node, offset_a: u32, node_b: *parser.Node, offset_b: u32) !i32 {
|
||||||
|
if (node_a == node_b) {
|
||||||
|
// This is a simple and common case, where the two nodes are the same
|
||||||
|
// We just need to compare their offsets
|
||||||
|
if (offset_a == offset_b) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
return if (offset_a < offset_b) -1 else 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// We're probably comparing two different nodes. "Probably", because the
|
||||||
|
// above case on considered the offset if the two nodes were the same
|
||||||
|
// as-is. They could still be the same here, if we first consider the
|
||||||
|
// offset.
|
||||||
|
const position = try Node._compareDocumentPosition(node_b, node_a);
|
||||||
|
if (position & @intFromEnum(parser.DocumentPosition.disconnected) == @intFromEnum(parser.DocumentPosition.disconnected)) {
|
||||||
|
return error.WrongDocument;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (position & @intFromEnum(parser.DocumentPosition.following) == @intFromEnum(parser.DocumentPosition.following)) {
|
||||||
|
return switch (try compare(node_b, offset_b, node_a, offset_a)) {
|
||||||
|
-1 => 1,
|
||||||
|
1 => -1,
|
||||||
|
else => unreachable,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (position & @intFromEnum(parser.DocumentPosition.contains) == @intFromEnum(parser.DocumentPosition.contains)) {
|
||||||
|
// node_a contains node_b
|
||||||
|
var child = node_b;
|
||||||
|
while (try parser.nodeParentNode(child)) |parent| {
|
||||||
|
if (parent == node_a) {
|
||||||
|
// child.parentNode == node_a
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
child = parent;
|
||||||
|
} else {
|
||||||
|
// this should not happen, because Node._compareDocumentPosition
|
||||||
|
// has told us that node_a contains node_b, so one of node_b's
|
||||||
|
// parent's MUST be node_a. But somehow we do end up here sometimes.
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const child_parent, const child_index = try getParentAndIndex(child);
|
||||||
|
std.debug.assert(node_a == child_parent);
|
||||||
|
return if (child_index < offset_a) -1 else 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: Range" {
|
||||||
|
try testing.htmlRunner("dom/range.html");
|
||||||
|
}
|
||||||
@@ -16,38 +16,39 @@
|
|||||||
// You should have received a copy of the GNU Affero General Public License
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
const std = @import("std");
|
const Env = @import("../env.zig").Env;
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
const js = @import("../../js/js.zig");
|
pub const Interfaces = .{
|
||||||
|
ResizeObserver,
|
||||||
const NavigationHistoryEntry = @import("NavigationHistoryEntry.zig");
|
|
||||||
|
|
||||||
pub const NavigationType = enum {
|
|
||||||
push,
|
|
||||||
replace,
|
|
||||||
traverse,
|
|
||||||
reload,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const NavigationKind = union(NavigationType) {
|
// WEB IDL https://drafts.csswg.org/resize-observer/#resize-observer-interface
|
||||||
push: ?[]const u8,
|
pub const ResizeObserver = struct {
|
||||||
replace: ?[]const u8,
|
pub fn constructor(cbk: Env.Function) ResizeObserver {
|
||||||
traverse: usize,
|
_ = cbk;
|
||||||
reload,
|
return .{};
|
||||||
|
}
|
||||||
|
|
||||||
pub fn toNavigationType(self: NavigationKind) NavigationType {
|
pub fn _observe(self: *const ResizeObserver, element: *parser.Element, options_: ?Options) void {
|
||||||
return std.meta.activeTag(self);
|
_ = self;
|
||||||
|
_ = element;
|
||||||
|
_ = options_;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _unobserve(self: *const ResizeObserver, element: *parser.Element) void {
|
||||||
|
_ = self;
|
||||||
|
_ = element;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO
|
||||||
|
pub fn _disconnect(self: *ResizeObserver) void {
|
||||||
|
_ = self;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const NavigationState = struct {
|
const Options = struct {
|
||||||
source: enum { history, navigation },
|
box: []const u8,
|
||||||
value: ?[]const u8,
|
|
||||||
};
|
|
||||||
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/API/NavigationTransition
|
|
||||||
pub const NavigationTransition = struct {
|
|
||||||
finished: js.Promise,
|
|
||||||
from: NavigationHistoryEntry,
|
|
||||||
navigation_type: NavigationType,
|
|
||||||
};
|
};
|
||||||
101
src/browser/dom/shadow_root.zig
Normal file
101
src/browser/dom/shadow_root.zig
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const dump = @import("../dump.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
const Element = @import("element.zig").Element;
|
||||||
|
const ElementUnion = @import("element.zig").Union;
|
||||||
|
|
||||||
|
// WEB IDL https://dom.spec.whatwg.org/#interface-shadowroot
|
||||||
|
pub const ShadowRoot = struct {
|
||||||
|
pub const prototype = *parser.DocumentFragment;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
mode: Mode,
|
||||||
|
host: *parser.Element,
|
||||||
|
proto: *parser.DocumentFragment,
|
||||||
|
adopted_style_sheets: ?Env.JsObject = null,
|
||||||
|
|
||||||
|
pub const Mode = enum {
|
||||||
|
open,
|
||||||
|
closed,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn get_host(self: *const ShadowRoot) !ElementUnion {
|
||||||
|
return Element.toInterface(self.host);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_adoptedStyleSheets(self: *ShadowRoot, page: *Page) !Env.JsObject {
|
||||||
|
if (self.adopted_style_sheets) |obj| {
|
||||||
|
return obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
const obj = try page.main_context.newArray(0).persist();
|
||||||
|
self.adopted_style_sheets = obj;
|
||||||
|
return obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_adoptedStyleSheets(self: *ShadowRoot, sheets: Env.JsObject) !void {
|
||||||
|
self.adopted_style_sheets = try sheets.persist();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_innerHTML(self: *ShadowRoot, page: *Page) ![]const u8 {
|
||||||
|
var aw = std.Io.Writer.Allocating.init(page.call_arena);
|
||||||
|
try dump.writeChildren(parser.documentFragmentToNode(self.proto), .{}, &aw.writer);
|
||||||
|
return aw.written();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_innerHTML(self: *ShadowRoot, str_: ?[]const u8) !void {
|
||||||
|
const sr_doc = parser.documentFragmentToNode(self.proto);
|
||||||
|
const doc = try parser.nodeOwnerDocument(sr_doc) orelse return parser.DOMError.WrongDocument;
|
||||||
|
try Node.removeChildren(sr_doc);
|
||||||
|
const str = str_ orelse return;
|
||||||
|
|
||||||
|
const fragment = try parser.documentParseFragmentFromStr(doc, str);
|
||||||
|
const fragment_node = parser.documentFragmentToNode(fragment);
|
||||||
|
|
||||||
|
// Element.set_innerHTML also has some weirdness here. It isn't clear
|
||||||
|
// what should and shouldn't be set. Whatever string you pass to libdom,
|
||||||
|
// it always creates a full HTML document, with an html, head and body
|
||||||
|
// element.
|
||||||
|
// For ShadowRoot, it appears the only the children within the body should
|
||||||
|
// be set.
|
||||||
|
const html = try parser.nodeFirstChild(fragment_node) orelse return;
|
||||||
|
const head = try parser.nodeFirstChild(html) orelse return;
|
||||||
|
const body = try parser.nodeNextSibling(head) orelse return;
|
||||||
|
|
||||||
|
const children = try parser.nodeGetChildNodes(body);
|
||||||
|
const ln = try parser.nodeListLength(children);
|
||||||
|
for (0..ln) |_| {
|
||||||
|
// always index 0, because nodeAppendChild moves the node out of
|
||||||
|
// the nodeList and into the new tree
|
||||||
|
const child = try parser.nodeListItem(children, 0) orelse continue;
|
||||||
|
_ = try parser.nodeAppendChild(sr_doc, child);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.ShadowRoot" {
|
||||||
|
try testing.htmlRunner("dom/shadow_root.html");
|
||||||
|
}
|
||||||
62
src/browser/dom/text.zig
Normal file
62
src/browser/dom/text.zig
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const CharacterData = @import("character_data.zig").CharacterData;
|
||||||
|
const CDATASection = @import("cdata_section.zig").CDATASection;
|
||||||
|
|
||||||
|
// Text interfaces
|
||||||
|
pub const Interfaces = .{
|
||||||
|
CDATASection,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const Text = struct {
|
||||||
|
pub const Self = parser.Text;
|
||||||
|
pub const prototype = *CharacterData;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
pub fn constructor(data: ?[]const u8, page: *const Page) !*parser.Text {
|
||||||
|
return parser.documentCreateTextNode(
|
||||||
|
parser.documentHTMLToDocument(page.window.document),
|
||||||
|
data orelse "",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// JS funcs
|
||||||
|
// --------
|
||||||
|
|
||||||
|
// Read attributes
|
||||||
|
|
||||||
|
pub fn get_wholeText(self: *parser.Text) ![]const u8 {
|
||||||
|
return try parser.textWholdeText(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// JS methods
|
||||||
|
// ----------
|
||||||
|
|
||||||
|
pub fn _splitText(self: *parser.Text, offset: u32) !*parser.Text {
|
||||||
|
return try parser.textSplitText(self, offset);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.Text" {
|
||||||
|
try testing.htmlRunner("dom/text.html");
|
||||||
|
}
|
||||||
175
src/browser/dom/token_list.zig
Normal file
175
src/browser/dom/token_list.zig
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const iterator = @import("../iterator/iterator.zig");
|
||||||
|
|
||||||
|
const Function = @import("../env.zig").Function;
|
||||||
|
const JsObject = @import("../env.zig").JsObject;
|
||||||
|
const DOMException = @import("exceptions.zig").DOMException;
|
||||||
|
|
||||||
|
pub const Interfaces = .{
|
||||||
|
DOMTokenList,
|
||||||
|
DOMTokenListIterable,
|
||||||
|
TokenListEntriesIterator,
|
||||||
|
TokenListEntriesIterator.Iterable,
|
||||||
|
};
|
||||||
|
|
||||||
|
// https://dom.spec.whatwg.org/#domtokenlist
|
||||||
|
pub const DOMTokenList = struct {
|
||||||
|
pub const Self = parser.TokenList;
|
||||||
|
pub const Exception = DOMException;
|
||||||
|
|
||||||
|
pub fn get_length(self: *parser.TokenList) !u32 {
|
||||||
|
return parser.tokenListGetLength(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _item(self: *parser.TokenList, index: u32) !?[]const u8 {
|
||||||
|
return parser.tokenListItem(self, index);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _contains(self: *parser.TokenList, token: []const u8) !bool {
|
||||||
|
return parser.tokenListContains(self, token);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _add(self: *parser.TokenList, tokens: []const []const u8) !void {
|
||||||
|
for (tokens) |token| {
|
||||||
|
try parser.tokenListAdd(self, token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _remove(self: *parser.TokenList, tokens: []const []const u8) !void {
|
||||||
|
for (tokens) |token| {
|
||||||
|
try parser.tokenListRemove(self, token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If token is the empty string, then throw a "SyntaxError" DOMException.
|
||||||
|
/// If token contains any ASCII whitespace, then throw an
|
||||||
|
/// "InvalidCharacterError" DOMException.
|
||||||
|
fn validateToken(token: []const u8) !void {
|
||||||
|
if (token.len == 0) {
|
||||||
|
return parser.DOMError.Syntax;
|
||||||
|
}
|
||||||
|
for (token) |c| {
|
||||||
|
if (std.ascii.isWhitespace(c)) return parser.DOMError.InvalidCharacter;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _toggle(self: *parser.TokenList, token: []const u8, force: ?bool) !bool {
|
||||||
|
try validateToken(token);
|
||||||
|
const exists = try parser.tokenListContains(self, token);
|
||||||
|
if (exists) {
|
||||||
|
if (force == null or force.? == false) {
|
||||||
|
try parser.tokenListRemove(self, token);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (force == null or force.? == true) {
|
||||||
|
try parser.tokenListAdd(self, token);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _replace(self: *parser.TokenList, token: []const u8, new: []const u8) !bool {
|
||||||
|
try validateToken(token);
|
||||||
|
try validateToken(new);
|
||||||
|
const exists = try parser.tokenListContains(self, token);
|
||||||
|
if (!exists) return false;
|
||||||
|
try parser.tokenListRemove(self, token);
|
||||||
|
try parser.tokenListAdd(self, new);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO to implement.
|
||||||
|
pub fn _supports(_: *parser.TokenList, token: []const u8) !bool {
|
||||||
|
try validateToken(token);
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_value(self: *parser.TokenList) !?[]const u8 {
|
||||||
|
return (try parser.tokenListGetValue(self)) orelse "";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_value(self: *parser.TokenList, value: []const u8) !void {
|
||||||
|
return parser.tokenListSetValue(self, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _toString(self: *parser.TokenList) ![]const u8 {
|
||||||
|
return (try get_value(self)) orelse "";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _keys(self: *parser.TokenList) !iterator.U32Iterator {
|
||||||
|
return .{ .length = try get_length(self) };
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _values(self: *parser.TokenList) DOMTokenListIterable {
|
||||||
|
return DOMTokenListIterable.init(.{ .token_list = self });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _entries(self: *parser.TokenList) TokenListEntriesIterator {
|
||||||
|
return TokenListEntriesIterator.init(.{ .token_list = self });
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _symbol_iterator(self: *parser.TokenList) DOMTokenListIterable {
|
||||||
|
return _values(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO handle thisArg
|
||||||
|
pub fn _forEach(self: *parser.TokenList, cbk: Function, this_arg: JsObject) !void {
|
||||||
|
var entries = _entries(self);
|
||||||
|
while (try entries._next()) |entry| {
|
||||||
|
var result: Function.Result = undefined;
|
||||||
|
cbk.tryCallWithThis(void, this_arg, .{ entry.@"1", entry.@"0", self }, &result) catch {
|
||||||
|
log.debug(.user_script, "callback error", .{
|
||||||
|
.err = result.exception,
|
||||||
|
.stack = result.stack,
|
||||||
|
.soure = "tokenList foreach",
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const DOMTokenListIterable = iterator.Iterable(Iterator, "DOMTokenListIterable");
|
||||||
|
const TokenListEntriesIterator = iterator.NumericEntries(Iterator, "TokenListEntriesIterator");
|
||||||
|
|
||||||
|
pub const Iterator = struct {
|
||||||
|
index: u32 = 0,
|
||||||
|
token_list: *parser.TokenList,
|
||||||
|
|
||||||
|
// used when wrapped in an iterator.NumericEntries
|
||||||
|
pub const Error = parser.DOMError;
|
||||||
|
|
||||||
|
pub fn _next(self: *Iterator) !?[]const u8 {
|
||||||
|
const index = self.index;
|
||||||
|
self.index = index + 1;
|
||||||
|
return DOMTokenList._item(self.token_list, index);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: DOM.TokenList" {
|
||||||
|
try testing.htmlRunner("dom/token_list.html");
|
||||||
|
}
|
||||||
281
src/browser/dom/tree_walker.zig
Normal file
281
src/browser/dom/tree_walker.zig
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
const NodeFilter = @import("node_filter.zig");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const Node = @import("node.zig").Node;
|
||||||
|
const NodeUnion = @import("node.zig").Union;
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/TreeWalker
|
||||||
|
pub const TreeWalker = struct {
|
||||||
|
root: *parser.Node,
|
||||||
|
current_node: *parser.Node,
|
||||||
|
what_to_show: u32,
|
||||||
|
filter: ?TreeWalkerOpts,
|
||||||
|
filter_func: ?Env.Function,
|
||||||
|
|
||||||
|
// One of the few cases where null and undefined resolve to different default.
|
||||||
|
// We need the raw JsObject so that we can probe the tri state:
|
||||||
|
// null, undefined or i32.
|
||||||
|
pub const WhatToShow = Env.JsObject;
|
||||||
|
|
||||||
|
pub const TreeWalkerOpts = union(enum) {
|
||||||
|
function: Env.Function,
|
||||||
|
object: struct { acceptNode: Env.Function },
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn init(node: *parser.Node, what_to_show_: ?WhatToShow, filter: ?TreeWalkerOpts) !TreeWalker {
|
||||||
|
var filter_func: ?Env.Function = null;
|
||||||
|
|
||||||
|
if (filter) |f| {
|
||||||
|
filter_func = switch (f) {
|
||||||
|
.function => |func| func,
|
||||||
|
.object => |o| o.acceptNode,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
var what_to_show: u32 = undefined;
|
||||||
|
if (what_to_show_) |wts| {
|
||||||
|
switch (try wts.triState(TreeWalker, "what_to_show", u32)) {
|
||||||
|
.null => what_to_show = 0,
|
||||||
|
.undefined => what_to_show = NodeFilter.NodeFilter._SHOW_ALL,
|
||||||
|
.value => |v| what_to_show = v,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
what_to_show = NodeFilter.NodeFilter._SHOW_ALL;
|
||||||
|
}
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.root = node,
|
||||||
|
.current_node = node,
|
||||||
|
.what_to_show = what_to_show,
|
||||||
|
.filter = filter,
|
||||||
|
.filter_func = filter_func,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_root(self: *TreeWalker) !NodeUnion {
|
||||||
|
return try Node.toInterface(self.root);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_currentNode(self: *TreeWalker) !NodeUnion {
|
||||||
|
return try Node.toInterface(self.current_node);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_whatToShow(self: *TreeWalker) u32 {
|
||||||
|
return self.what_to_show;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_filter(self: *TreeWalker) ?TreeWalkerOpts {
|
||||||
|
return self.filter;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_currentNode(self: *TreeWalker, node: *parser.Node) !void {
|
||||||
|
self.current_node = node;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn firstChild(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||||
|
const children = try parser.nodeGetChildNodes(node);
|
||||||
|
const child_count = try parser.nodeListLength(children);
|
||||||
|
|
||||||
|
for (0..child_count) |i| {
|
||||||
|
const index: u32 = @intCast(i);
|
||||||
|
const child = (try parser.nodeListItem(children, index)) orelse return null;
|
||||||
|
|
||||||
|
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
|
||||||
|
.accept => return child,
|
||||||
|
.reject => continue,
|
||||||
|
.skip => if (try self.firstChild(child)) |gchild| return gchild,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn lastChild(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||||
|
const children = try parser.nodeGetChildNodes(node);
|
||||||
|
const child_count = try parser.nodeListLength(children);
|
||||||
|
|
||||||
|
var index: u32 = child_count;
|
||||||
|
while (index > 0) {
|
||||||
|
index -= 1;
|
||||||
|
const child = (try parser.nodeListItem(children, index)) orelse return null;
|
||||||
|
|
||||||
|
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
|
||||||
|
.accept => return child,
|
||||||
|
.reject => continue,
|
||||||
|
.skip => if (try self.lastChild(child)) |gchild| return gchild,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nextSibling(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||||
|
var current = node;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
current = (try parser.nodeNextSibling(current)) orelse return null;
|
||||||
|
|
||||||
|
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||||
|
.accept => return current,
|
||||||
|
.skip, .reject => continue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn previousSibling(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||||
|
var current = node;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
current = (try parser.nodePreviousSibling(current)) orelse return null;
|
||||||
|
|
||||||
|
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||||
|
.accept => return current,
|
||||||
|
.skip, .reject => continue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parentNode(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||||
|
if (self.root == node) return null;
|
||||||
|
|
||||||
|
var current = node;
|
||||||
|
while (true) {
|
||||||
|
if (current == self.root) return null;
|
||||||
|
current = (try parser.nodeParentNode(current)) orelse return null;
|
||||||
|
|
||||||
|
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||||
|
.accept => return current,
|
||||||
|
.reject, .skip => continue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _firstChild(self: *TreeWalker) !?NodeUnion {
|
||||||
|
if (try self.firstChild(self.current_node)) |child| {
|
||||||
|
self.current_node = child;
|
||||||
|
return try Node.toInterface(child);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _lastChild(self: *TreeWalker) !?NodeUnion {
|
||||||
|
if (try self.lastChild(self.current_node)) |child| {
|
||||||
|
self.current_node = child;
|
||||||
|
return try Node.toInterface(child);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _nextNode(self: *TreeWalker) !?NodeUnion {
|
||||||
|
if (try self.firstChild(self.current_node)) |child| {
|
||||||
|
self.current_node = child;
|
||||||
|
return try Node.toInterface(child);
|
||||||
|
}
|
||||||
|
|
||||||
|
var current = self.current_node;
|
||||||
|
while (current != self.root) {
|
||||||
|
if (try self.nextSibling(current)) |sibling| {
|
||||||
|
self.current_node = sibling;
|
||||||
|
return try Node.toInterface(sibling);
|
||||||
|
}
|
||||||
|
|
||||||
|
current = (try parser.nodeParentNode(current)) orelse break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _nextSibling(self: *TreeWalker) !?NodeUnion {
|
||||||
|
if (try self.nextSibling(self.current_node)) |sibling| {
|
||||||
|
self.current_node = sibling;
|
||||||
|
return try Node.toInterface(sibling);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _parentNode(self: *TreeWalker) !?NodeUnion {
|
||||||
|
if (try self.parentNode(self.current_node)) |parent| {
|
||||||
|
self.current_node = parent;
|
||||||
|
return try Node.toInterface(parent);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _previousNode(self: *TreeWalker) !?NodeUnion {
|
||||||
|
if (self.current_node == self.root) return null;
|
||||||
|
|
||||||
|
var current = self.current_node;
|
||||||
|
while (try parser.nodePreviousSibling(current)) |previous| {
|
||||||
|
current = previous;
|
||||||
|
|
||||||
|
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||||
|
.accept => {
|
||||||
|
// Get last child if it has one.
|
||||||
|
if (try self.lastChild(current)) |child| {
|
||||||
|
self.current_node = child;
|
||||||
|
return try Node.toInterface(child);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, this node is our previous one.
|
||||||
|
self.current_node = current;
|
||||||
|
return try Node.toInterface(current);
|
||||||
|
},
|
||||||
|
.reject => continue,
|
||||||
|
.skip => {
|
||||||
|
// Get last child if it has one.
|
||||||
|
if (try self.lastChild(current)) |child| {
|
||||||
|
self.current_node = child;
|
||||||
|
return try Node.toInterface(child);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current != self.root) {
|
||||||
|
if (try self.parentNode(current)) |parent| {
|
||||||
|
self.current_node = parent;
|
||||||
|
return try Node.toInterface(parent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _previousSibling(self: *TreeWalker) !?NodeUnion {
|
||||||
|
if (try self.previousSibling(self.current_node)) |sibling| {
|
||||||
|
self.current_node = sibling;
|
||||||
|
return try Node.toInterface(sibling);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
};
|
||||||
102
src/browser/dom/walker.zig
Normal file
102
src/browser/dom/walker.zig
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
pub const Walker = union(enum) {
|
||||||
|
walkerDepthFirst: WalkerDepthFirst,
|
||||||
|
walkerChildren: WalkerChildren,
|
||||||
|
walkerNone: WalkerNone,
|
||||||
|
|
||||||
|
pub fn get_next(self: Walker, root: *parser.Node, cur: ?*parser.Node) !?*parser.Node {
|
||||||
|
switch (self) {
|
||||||
|
inline else => |case| return case.get_next(root, cur),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// WalkerDepthFirst iterates over the DOM tree to return the next following
|
||||||
|
// node or null at the end.
|
||||||
|
//
|
||||||
|
// This implementation is a zig version of Netsurf code.
|
||||||
|
// http://source.netsurf-browser.org/libdom.git/tree/src/html/html_collection.c#n177
|
||||||
|
//
|
||||||
|
// The iteration is a depth first as required by the specification.
|
||||||
|
// https://dom.spec.whatwg.org/#htmlcollection
|
||||||
|
// https://dom.spec.whatwg.org/#concept-tree-order
|
||||||
|
pub const WalkerDepthFirst = struct {
|
||||||
|
pub fn get_next(_: WalkerDepthFirst, root: *parser.Node, cur: ?*parser.Node) !?*parser.Node {
|
||||||
|
var n = cur orelse root;
|
||||||
|
|
||||||
|
// TODO deinit next
|
||||||
|
if (try parser.nodeFirstChild(n)) |next| {
|
||||||
|
return next;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO deinit next
|
||||||
|
if (try parser.nodeNextSibling(n)) |next| {
|
||||||
|
return next;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO deinit parent
|
||||||
|
// Back to the parent of cur.
|
||||||
|
// If cur has no parent, then the iteration is over.
|
||||||
|
var parent = try parser.nodeParentNode(n) orelse return null;
|
||||||
|
|
||||||
|
// TODO deinit lastchild
|
||||||
|
var lastchild = try parser.nodeLastChild(parent);
|
||||||
|
while (n != root and n == lastchild) {
|
||||||
|
n = parent;
|
||||||
|
|
||||||
|
// TODO deinit parent
|
||||||
|
// Back to the prev's parent.
|
||||||
|
// If prev has no parent, then the loop must stop.
|
||||||
|
parent = try parser.nodeParentNode(n) orelse break;
|
||||||
|
|
||||||
|
// TODO deinit lastchild
|
||||||
|
lastchild = try parser.nodeLastChild(parent);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (n == root) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return try parser.nodeNextSibling(n);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// WalkerChildren iterates over the root's children only.
|
||||||
|
pub const WalkerChildren = struct {
|
||||||
|
pub fn get_next(_: WalkerChildren, root: *parser.Node, cur: ?*parser.Node) !?*parser.Node {
|
||||||
|
// On walk start, we return the first root's child.
|
||||||
|
if (cur == null) return try parser.nodeFirstChild(root);
|
||||||
|
|
||||||
|
// If cur is root, then return null.
|
||||||
|
// This is a special case, if the root is included in the walk, we
|
||||||
|
// don't want to go further to find children.
|
||||||
|
if (root == cur.?) return null;
|
||||||
|
|
||||||
|
return try parser.nodeNextSibling(cur.?);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const WalkerNone = struct {
|
||||||
|
pub fn get_next(_: WalkerNone, _: *parser.Node, _: ?*parser.Node) !?*parser.Node {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
};
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
//
|
//
|
||||||
// Francis Bouvier <francis@lightpanda.io>
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
// Pierre Tachoire <pierre@lightpanda.io>
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
@@ -17,307 +17,267 @@
|
|||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
const Page = @import("Page.zig");
|
|
||||||
const Node = @import("webapi/Node.zig");
|
|
||||||
const Slot = @import("webapi/element/html/Slot.zig");
|
|
||||||
|
|
||||||
pub const RootOpts = struct {
|
const parser = @import("netsurf.zig");
|
||||||
with_base: bool = false,
|
const Page = @import("page.zig").Page;
|
||||||
strip: Opts.Strip = .{},
|
const Walker = @import("dom/walker.zig").WalkerChildren;
|
||||||
shadow: Opts.Shadow = .rendered,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const Opts = struct {
|
pub const Opts = struct {
|
||||||
strip: Strip = .{},
|
// set to include element shadowroots in the dump
|
||||||
shadow: Shadow = .rendered,
|
page: ?*const Page = null,
|
||||||
|
|
||||||
pub const Strip = struct {
|
exclude_scripts: bool = false,
|
||||||
js: bool = false,
|
|
||||||
ui: bool = false,
|
|
||||||
css: bool = false,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const Shadow = enum {
|
|
||||||
// Skip shadow DOM entirely (innerHTML/outerHTML)
|
|
||||||
skip,
|
|
||||||
|
|
||||||
// Dump everyhting (like "view source")
|
|
||||||
complete,
|
|
||||||
|
|
||||||
// Resolve slot elements (like what actually gets rendered)
|
|
||||||
rendered,
|
|
||||||
};
|
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn root(doc: *Node.Document, opts: RootOpts, writer: *std.Io.Writer, page: *Page) !void {
|
// writer must be a std.io.Writer
|
||||||
if (doc.is(Node.Document.HTMLDocument)) |html_doc| {
|
pub fn writeHTML(doc: *parser.Document, opts: Opts, writer: *std.Io.Writer) !void {
|
||||||
try writer.writeAll("<!DOCTYPE html>");
|
try writer.writeAll("<!DOCTYPE html>\n");
|
||||||
if (opts.with_base) {
|
try writeChildren(parser.documentToNode(doc), opts, writer);
|
||||||
const parent = if (html_doc.getHead()) |head| head.asNode() else doc.asNode();
|
try writer.writeAll("\n");
|
||||||
const base = try doc.createElement("base", null, page);
|
|
||||||
try base.setAttributeSafe("base", page.base(), page);
|
|
||||||
_ = try parent.insertBefore(base.asNode(), parent.firstChild(), page);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return deep(doc.asNode(), .{ .strip = opts.strip, .shadow = opts.shadow }, writer, page);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn deep(node: *Node, opts: Opts, writer: *std.Io.Writer, page: *Page) error{WriteFailed}!void {
|
// Spec: https://www.w3.org/TR/xml/#sec-prolog-dtd
|
||||||
return _deep(node, opts, false, writer, page);
|
pub fn writeDocType(doc_type: *parser.DocumentType, writer: *std.Io.Writer) !void {
|
||||||
}
|
|
||||||
|
|
||||||
fn _deep(node: *Node, opts: Opts, comptime force_slot: bool, writer: *std.Io.Writer, page: *Page) error{WriteFailed}!void {
|
|
||||||
switch (node._type) {
|
|
||||||
.cdata => |cd| {
|
|
||||||
if (node.is(Node.CData.Comment)) |_| {
|
|
||||||
try writer.writeAll("<!--");
|
|
||||||
try writer.writeAll(cd.getData());
|
|
||||||
try writer.writeAll("-->");
|
|
||||||
} else if (node.is(Node.CData.ProcessingInstruction)) |pi| {
|
|
||||||
try writer.writeAll("<?");
|
|
||||||
try writer.writeAll(pi._target);
|
|
||||||
try writer.writeAll(" ");
|
|
||||||
try writer.writeAll(cd.getData());
|
|
||||||
try writer.writeAll("?>");
|
|
||||||
} else {
|
|
||||||
if (shouldEscapeText(node._parent)) {
|
|
||||||
try writeEscapedText(cd.getData(), writer);
|
|
||||||
} else {
|
|
||||||
try writer.writeAll(cd.getData());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
.element => |el| {
|
|
||||||
if (shouldStripElement(el, opts)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// When opts.shadow == .rendered, we normally skip any element with
|
|
||||||
// a slot attribute. Only the "active" element will get rendered into
|
|
||||||
// the <slot name="X">. However, the `deep` function is itself used
|
|
||||||
// to render that "active" content, so when we're trying to render
|
|
||||||
// it, we don't want to skip it.
|
|
||||||
if ((comptime force_slot == false) and opts.shadow == .rendered) {
|
|
||||||
if (el.getAttributeSafe("slot")) |_| {
|
|
||||||
// Skip - will be rendered by the Slot if it's the active container
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try el.format(writer);
|
|
||||||
|
|
||||||
if (opts.shadow == .rendered) {
|
|
||||||
if (el.is(Slot)) |slot| {
|
|
||||||
try dumpSlotContent(slot, opts, writer, page);
|
|
||||||
return writer.writeAll("</slot>");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (opts.shadow != .skip) {
|
|
||||||
if (page._element_shadow_roots.get(el)) |shadow| {
|
|
||||||
try children(shadow.asNode(), opts, writer, page);
|
|
||||||
// In rendered mode, light DOM is only shown through slots, not directly
|
|
||||||
if (opts.shadow == .rendered) {
|
|
||||||
// Skip rendering light DOM children
|
|
||||||
if (!isVoidElement(el)) {
|
|
||||||
try writer.writeAll("</");
|
|
||||||
try writer.writeAll(el.getTagNameDump());
|
|
||||||
try writer.writeByte('>');
|
|
||||||
}
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try children(node, opts, writer, page);
|
|
||||||
if (!isVoidElement(el)) {
|
|
||||||
try writer.writeAll("</");
|
|
||||||
try writer.writeAll(el.getTagNameDump());
|
|
||||||
try writer.writeByte('>');
|
|
||||||
}
|
|
||||||
},
|
|
||||||
.document => try children(node, opts, writer, page),
|
|
||||||
.document_type => |dt| {
|
|
||||||
try writer.writeAll("<!DOCTYPE ");
|
try writer.writeAll("<!DOCTYPE ");
|
||||||
try writer.writeAll(dt.getName());
|
try writer.writeAll(try parser.documentTypeGetName(doc_type));
|
||||||
|
|
||||||
const public_id = dt.getPublicId();
|
const public_id = try parser.documentTypeGetPublicId(doc_type);
|
||||||
const system_id = dt.getSystemId();
|
const system_id = try parser.documentTypeGetSystemId(doc_type);
|
||||||
if (public_id.len != 0 and system_id.len != 0) {
|
if (public_id.len != 0 and system_id.len != 0) {
|
||||||
try writer.writeAll(" PUBLIC \"");
|
try writer.writeAll(" PUBLIC \"");
|
||||||
try writeEscapedText(public_id, writer);
|
try writeEscapedAttributeValue(writer, public_id);
|
||||||
try writer.writeAll("\" \"");
|
try writer.writeAll("\" \"");
|
||||||
try writeEscapedText(system_id, writer);
|
try writeEscapedAttributeValue(writer, system_id);
|
||||||
try writer.writeByte('"');
|
try writer.writeAll("\"");
|
||||||
} else if (public_id.len != 0) {
|
} else if (public_id.len != 0) {
|
||||||
try writer.writeAll(" PUBLIC \"");
|
try writer.writeAll(" PUBLIC \"");
|
||||||
try writeEscapedText(public_id, writer);
|
try writeEscapedAttributeValue(writer, public_id);
|
||||||
try writer.writeByte('"');
|
try writer.writeAll("\"");
|
||||||
} else if (system_id.len != 0) {
|
} else if (system_id.len != 0) {
|
||||||
try writer.writeAll(" SYSTEM \"");
|
try writer.writeAll(" SYSTEM \"");
|
||||||
try writeEscapedText(system_id, writer);
|
try writeEscapedAttributeValue(writer, system_id);
|
||||||
try writer.writeByte('"');
|
try writer.writeAll("\"");
|
||||||
}
|
|
||||||
try writer.writeAll(">\n");
|
|
||||||
},
|
|
||||||
.document_fragment => try children(node, opts, writer, page),
|
|
||||||
.attribute => unreachable,
|
|
||||||
}
|
}
|
||||||
|
// Internal subset is not implemented
|
||||||
|
try writer.writeAll(">");
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn children(parent: *Node, opts: Opts, writer: *std.Io.Writer, page: *Page) !void {
|
pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerror!void {
|
||||||
var it = parent.childrenIterator();
|
switch (try parser.nodeType(node)) {
|
||||||
while (it.next()) |child| {
|
.element => {
|
||||||
try deep(child, opts, writer, page);
|
// open the tag
|
||||||
}
|
const tag_type = try parser.nodeHTMLGetTagType(node) orelse .undef;
|
||||||
}
|
if (opts.exclude_scripts and try isScriptOrRelated(tag_type, node)) {
|
||||||
|
return;
|
||||||
pub fn toJSON(node: *Node, writer: *std.json.Stringify) !void {
|
|
||||||
try writer.beginObject();
|
|
||||||
|
|
||||||
try writer.objectField("type");
|
|
||||||
switch (node.type) {
|
|
||||||
.cdata => {
|
|
||||||
try writer.write("cdata");
|
|
||||||
},
|
|
||||||
.document => {
|
|
||||||
try writer.write("document");
|
|
||||||
},
|
|
||||||
.document_type => {
|
|
||||||
try writer.write("document_type");
|
|
||||||
},
|
|
||||||
.element => |*el| {
|
|
||||||
try writer.write("element");
|
|
||||||
try writer.objectField("tag");
|
|
||||||
try writer.write(el.tagName());
|
|
||||||
|
|
||||||
try writer.objectField("attributes");
|
|
||||||
try writer.beginObject();
|
|
||||||
var it = el.attributeIterator();
|
|
||||||
while (it.next()) |attr| {
|
|
||||||
try writer.objectField(attr.name);
|
|
||||||
try writer.write(attr.value);
|
|
||||||
}
|
|
||||||
try writer.endObject();
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
try writer.objectField("children");
|
const tag = try parser.nodeLocalName(node);
|
||||||
try writer.beginArray();
|
try writer.writeAll("<");
|
||||||
var it = node.childrenIterator();
|
try writer.writeAll(tag);
|
||||||
while (it.next()) |child| {
|
|
||||||
try toJSON(child, writer);
|
|
||||||
}
|
|
||||||
try writer.endArray();
|
|
||||||
try writer.endObject();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn dumpSlotContent(slot: *Slot, opts: Opts, writer: *std.Io.Writer, page: *Page) !void {
|
// write the attributes
|
||||||
const assigned = slot.assignedNodes(null, page) catch return;
|
const _map = try parser.nodeGetAttributes(node);
|
||||||
|
if (_map) |map| {
|
||||||
if (assigned.len > 0) {
|
const ln = try parser.namedNodeMapGetLength(map);
|
||||||
for (assigned) |assigned_node| {
|
for (0..ln) |i| {
|
||||||
try _deep(assigned_node, opts, true, writer, page);
|
const attr = try parser.namedNodeMapItem(map, @intCast(i)) orelse break;
|
||||||
|
try writer.writeAll(" ");
|
||||||
|
try writer.writeAll(try parser.attributeGetName(attr));
|
||||||
|
try writer.writeAll("=\"");
|
||||||
|
const attribute_value = try parser.attributeGetValue(attr) orelse "";
|
||||||
|
try writeEscapedAttributeValue(writer, attribute_value);
|
||||||
|
try writer.writeAll("\"");
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try writer.writeAll(">");
|
||||||
|
|
||||||
|
if (opts.page) |page| {
|
||||||
|
if (page.getNodeState(node)) |state| {
|
||||||
|
if (state.shadow_root) |sr| {
|
||||||
|
try writeChildren(@ptrCast(@alignCast(sr.proto)), opts, writer);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// void elements can't have any content.
|
||||||
|
if (try isVoid(parser.nodeToElement(node))) return;
|
||||||
|
|
||||||
|
if (tag_type == .script) {
|
||||||
|
try writer.writeAll(try parser.nodeTextContent(node) orelse "");
|
||||||
} else {
|
} else {
|
||||||
try children(slot.asNode(), opts, writer, page);
|
// write the children
|
||||||
|
// TODO avoid recursion
|
||||||
|
try writeChildren(node, opts, writer);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
fn isVoidElement(el: *const Node.Element) bool {
|
// close the tag
|
||||||
return switch (el._type) {
|
try writer.writeAll("</");
|
||||||
.html => |html| switch (html._type) {
|
try writer.writeAll(tag);
|
||||||
.br, .hr, .img, .input, .link, .meta => true,
|
try writer.writeAll(">");
|
||||||
else => false,
|
|
||||||
},
|
},
|
||||||
.svg => false,
|
.text => {
|
||||||
};
|
const v = try parser.nodeValue(node) orelse return;
|
||||||
|
try writeEscapedTextNode(writer, v);
|
||||||
|
},
|
||||||
|
.cdata_section => {
|
||||||
|
const v = try parser.nodeValue(node) orelse return;
|
||||||
|
try writer.writeAll("<![CDATA[");
|
||||||
|
try writer.writeAll(v);
|
||||||
|
try writer.writeAll("]]>");
|
||||||
|
},
|
||||||
|
.comment => {
|
||||||
|
const v = try parser.nodeValue(node) orelse return;
|
||||||
|
try writer.writeAll("<!--");
|
||||||
|
try writer.writeAll(v);
|
||||||
|
try writer.writeAll("-->");
|
||||||
|
},
|
||||||
|
// TODO handle processing instruction dump
|
||||||
|
.processing_instruction => return,
|
||||||
|
// document fragment is outside of the main document DOM, so we
|
||||||
|
// don't output it.
|
||||||
|
.document_fragment => return,
|
||||||
|
// document will never be called, but required for completeness.
|
||||||
|
.document => return,
|
||||||
|
// done globally instead, but required for completeness. Only the outer DOCTYPE should be written
|
||||||
|
.document_type => return,
|
||||||
|
// deprecated
|
||||||
|
.attribute => return,
|
||||||
|
.entity_reference => return,
|
||||||
|
.entity => return,
|
||||||
|
.notation => return,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn shouldStripElement(el: *const Node.Element, opts: Opts) bool {
|
// writer must be a std.io.Writer
|
||||||
const tag_name = el.getTagNameDump();
|
pub fn writeChildren(root: *parser.Node, opts: Opts, writer: *std.Io.Writer) !void {
|
||||||
|
const walker = Walker{};
|
||||||
if (opts.strip.js) {
|
var next: ?*parser.Node = null;
|
||||||
if (std.mem.eql(u8, tag_name, "script")) return true;
|
while (true) {
|
||||||
if (std.mem.eql(u8, tag_name, "noscript")) return true;
|
next = try walker.get_next(root, next) orelse break;
|
||||||
|
try writeNode(next.?, opts, writer);
|
||||||
if (std.mem.eql(u8, tag_name, "link")) {
|
|
||||||
if (el.getAttributeSafe("as")) |as| {
|
|
||||||
if (std.mem.eql(u8, as, "script")) return true;
|
|
||||||
}
|
}
|
||||||
if (el.getAttributeSafe("rel")) |rel| {
|
|
||||||
if (std.mem.eql(u8, rel, "modulepreload") or std.mem.eql(u8, rel, "preload")) {
|
|
||||||
if (el.getAttributeSafe("as")) |as| {
|
|
||||||
if (std.mem.eql(u8, as, "script")) return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (opts.strip.css or opts.strip.ui) {
|
|
||||||
if (std.mem.eql(u8, tag_name, "style")) return true;
|
|
||||||
|
|
||||||
if (std.mem.eql(u8, tag_name, "link")) {
|
|
||||||
if (el.getAttributeSafe("rel")) |rel| {
|
|
||||||
if (std.mem.eql(u8, rel, "stylesheet")) return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (opts.strip.ui) {
|
|
||||||
if (std.mem.eql(u8, tag_name, "img")) return true;
|
|
||||||
if (std.mem.eql(u8, tag_name, "picture")) return true;
|
|
||||||
if (std.mem.eql(u8, tag_name, "video")) return true;
|
|
||||||
if (std.mem.eql(u8, tag_name, "audio")) return true;
|
|
||||||
if (std.mem.eql(u8, tag_name, "svg")) return true;
|
|
||||||
if (std.mem.eql(u8, tag_name, "canvas")) return true;
|
|
||||||
if (std.mem.eql(u8, tag_name, "iframe")) return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn shouldEscapeText(node_: ?*Node) bool {
|
// When `exclude_scripts` is passed to dump, we don't include <script> tags.
|
||||||
const node = node_ orelse return true;
|
// We also want to omit <link rel=preload as=ascript>
|
||||||
if (node.is(Node.Element.Html.Script) != null) {
|
fn isScriptOrRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
|
||||||
return false;
|
if (tag_type == .script) {
|
||||||
}
|
|
||||||
return true;
|
return true;
|
||||||
|
}
|
||||||
|
if (tag_type == .link) {
|
||||||
|
const el = parser.nodeToElement(node);
|
||||||
|
const as = try parser.elementGetAttribute(el, "as") orelse return false;
|
||||||
|
if (!std.ascii.eqlIgnoreCase(as, "script")) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const rel = try parser.elementGetAttribute(el, "rel") orelse return false;
|
||||||
|
return std.ascii.eqlIgnoreCase(rel, "preload");
|
||||||
|
}
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
fn writeEscapedText(text: []const u8, writer: *std.Io.Writer) !void {
|
|
||||||
// Fast path: if no special characters, write directly
|
// area, base, br, col, embed, hr, img, input, link, meta, source, track, wbr
|
||||||
const first_special = std.mem.indexOfAnyPos(u8, text, 0, &.{ '&', '<', '>', 194 }) orelse {
|
// https://html.spec.whatwg.org/#void-elements
|
||||||
return writer.writeAll(text);
|
fn isVoid(elem: *parser.Element) !bool {
|
||||||
|
const tag = try parser.elementTag(elem);
|
||||||
|
return switch (tag) {
|
||||||
|
.area, .base, .br, .col, .embed, .hr, .img, .input, .link => true,
|
||||||
|
.meta, .source, .track, .wbr => true,
|
||||||
|
else => false,
|
||||||
};
|
};
|
||||||
|
|
||||||
try writer.writeAll(text[0..first_special]);
|
|
||||||
var remaining = try writeEscapedByte(text, first_special, writer);
|
|
||||||
|
|
||||||
while (std.mem.indexOfAnyPos(u8, remaining, 0, &.{ '&', '<', '>', 194 })) |offset| {
|
|
||||||
try writer.writeAll(remaining[0..offset]);
|
|
||||||
remaining = try writeEscapedByte(remaining, offset, writer);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (remaining.len > 0) {
|
|
||||||
try writer.writeAll(remaining);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeEscapedByte(input: []const u8, index: usize, writer: *std.Io.Writer) ![]const u8 {
|
fn writeEscapedTextNode(writer: anytype, value: []const u8) !void {
|
||||||
switch (input[index]) {
|
var v = value;
|
||||||
|
while (v.len > 0) {
|
||||||
|
const index = std.mem.indexOfAnyPos(u8, v, 0, &.{ '&', '<', '>' }) orelse {
|
||||||
|
return writer.writeAll(v);
|
||||||
|
};
|
||||||
|
try writer.writeAll(v[0..index]);
|
||||||
|
switch (v[index]) {
|
||||||
'&' => try writer.writeAll("&"),
|
'&' => try writer.writeAll("&"),
|
||||||
'<' => try writer.writeAll("<"),
|
'<' => try writer.writeAll("<"),
|
||||||
'>' => try writer.writeAll(">"),
|
'>' => try writer.writeAll(">"),
|
||||||
194 => {
|
|
||||||
// non breaking space
|
|
||||||
if (input.len > index + 1 and input[index + 1] == 160) {
|
|
||||||
try writer.writeAll(" ");
|
|
||||||
return input[index + 2 ..];
|
|
||||||
}
|
|
||||||
try writer.writeByte(194);
|
|
||||||
},
|
|
||||||
else => unreachable,
|
else => unreachable,
|
||||||
}
|
}
|
||||||
return input[index + 1 ..];
|
v = v[index + 1 ..];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn writeEscapedAttributeValue(writer: anytype, value: []const u8) !void {
|
||||||
|
var v = value;
|
||||||
|
while (v.len > 0) {
|
||||||
|
const index = std.mem.indexOfAnyPos(u8, v, 0, &.{ '&', '<', '>', '"' }) orelse {
|
||||||
|
return writer.writeAll(v);
|
||||||
|
};
|
||||||
|
try writer.writeAll(v[0..index]);
|
||||||
|
switch (v[index]) {
|
||||||
|
'&' => try writer.writeAll("&"),
|
||||||
|
'<' => try writer.writeAll("<"),
|
||||||
|
'>' => try writer.writeAll(">"),
|
||||||
|
'"' => try writer.writeAll("""),
|
||||||
|
else => unreachable,
|
||||||
|
}
|
||||||
|
v = v[index + 1 ..];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = std.testing;
|
||||||
|
test "dump.writeHTML" {
|
||||||
|
try parser.init();
|
||||||
|
defer parser.deinit();
|
||||||
|
|
||||||
|
try testWriteHTML(
|
||||||
|
"<div id=\"content\">Over 9000!</div>",
|
||||||
|
"<div id=\"content\">Over 9000!</div>",
|
||||||
|
);
|
||||||
|
|
||||||
|
try testWriteHTML(
|
||||||
|
"<root><!-- a comment --></root>",
|
||||||
|
"<root><!-- a comment --></root>",
|
||||||
|
);
|
||||||
|
|
||||||
|
try testWriteHTML(
|
||||||
|
"<p>< > &</p>",
|
||||||
|
"<p>< > &</p>",
|
||||||
|
);
|
||||||
|
|
||||||
|
try testWriteHTML(
|
||||||
|
"<p id=\""><&"''\">wat?</p>",
|
||||||
|
"<p id='\"><&"'''>wat?</p>",
|
||||||
|
);
|
||||||
|
|
||||||
|
try testWriteFullHTML(
|
||||||
|
\\<!DOCTYPE html>
|
||||||
|
\\<html><head><title>It's over what?</title><meta name="a" value="b">
|
||||||
|
\\</head><body>9000</body></html>
|
||||||
|
\\
|
||||||
|
, "<html><title>It's over what?</title><meta name=a value=\"b\">\n<body>9000");
|
||||||
|
|
||||||
|
try testWriteHTML(
|
||||||
|
"<p>hi</p><script>alert(power > 9000)</script>",
|
||||||
|
"<p>hi</p><script>alert(power > 9000)</script>",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testWriteHTML(comptime expected_body: []const u8, src: []const u8) !void {
|
||||||
|
const expected =
|
||||||
|
"<!DOCTYPE html>\n<html><head></head><body>" ++
|
||||||
|
expected_body ++
|
||||||
|
"</body></html>\n";
|
||||||
|
return testWriteFullHTML(expected, src);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testWriteFullHTML(comptime expected: []const u8, src: []const u8) !void {
|
||||||
|
var aw = std.Io.Writer.Allocating.init(testing.allocator);
|
||||||
|
defer aw.deinit();
|
||||||
|
|
||||||
|
const doc_html = try parser.documentHTMLParseFromStr(src);
|
||||||
|
defer parser.documentHTMLClose(doc_html) catch {};
|
||||||
|
|
||||||
|
const doc = parser.documentHTMLToDocument(doc_html);
|
||||||
|
try writeHTML(doc, .{}, &aw.writer);
|
||||||
|
try testing.expectEqualStrings(expected, aw.written());
|
||||||
}
|
}
|
||||||
|
|||||||
103
src/browser/encoding/TextDecoder.zig
Normal file
103
src/browser/encoding/TextDecoder.zig
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
// https://encoding.spec.whatwg.org/#interface-textdecoder
|
||||||
|
const TextDecoder = @This();
|
||||||
|
|
||||||
|
const SupportedLabels = enum {
|
||||||
|
utf8,
|
||||||
|
@"utf-8",
|
||||||
|
@"unicode-1-1-utf-8",
|
||||||
|
};
|
||||||
|
|
||||||
|
const Options = struct {
|
||||||
|
fatal: bool = false,
|
||||||
|
ignoreBOM: bool = false,
|
||||||
|
};
|
||||||
|
|
||||||
|
fatal: bool,
|
||||||
|
ignore_bom: bool,
|
||||||
|
stream: std.ArrayList(u8),
|
||||||
|
|
||||||
|
pub fn constructor(label_: ?[]const u8, opts_: ?Options) !TextDecoder {
|
||||||
|
if (label_) |l| {
|
||||||
|
_ = std.meta.stringToEnum(SupportedLabels, l) orelse {
|
||||||
|
log.warn(.web_api, "not implemented", .{ .feature = "TextDecoder label", .label = l });
|
||||||
|
return error.NotImplemented;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const opts = opts_ orelse Options{};
|
||||||
|
return .{
|
||||||
|
.stream = .empty,
|
||||||
|
.fatal = opts.fatal,
|
||||||
|
.ignore_bom = opts.ignoreBOM,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_encoding(_: *const TextDecoder) []const u8 {
|
||||||
|
return "utf-8";
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_ignoreBOM(self: *const TextDecoder) bool {
|
||||||
|
return self.ignore_bom;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_fatal(self: *const TextDecoder) bool {
|
||||||
|
return self.fatal;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DecodeOptions = struct {
|
||||||
|
stream: bool = false,
|
||||||
|
};
|
||||||
|
pub fn _decode(self: *TextDecoder, input_: ?[]const u8, opts_: ?DecodeOptions, page: *Page) ![]const u8 {
|
||||||
|
var str = input_ orelse return "";
|
||||||
|
const opts: DecodeOptions = opts_ orelse .{};
|
||||||
|
|
||||||
|
if (self.stream.items.len > 0) {
|
||||||
|
try self.stream.appendSlice(page.arena, str);
|
||||||
|
str = self.stream.items;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (self.fatal and !std.unicode.utf8ValidateSlice(str)) {
|
||||||
|
if (opts.stream) {
|
||||||
|
if (self.stream.items.len == 0) {
|
||||||
|
try self.stream.appendSlice(page.arena, str);
|
||||||
|
}
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
return error.InvalidUtf8;
|
||||||
|
}
|
||||||
|
|
||||||
|
self.stream.clearRetainingCapacity();
|
||||||
|
if (self.ignore_bom == false and std.mem.startsWith(u8, str, &.{ 0xEF, 0xBB, 0xBF })) {
|
||||||
|
return str[3..];
|
||||||
|
}
|
||||||
|
|
||||||
|
return str;
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: Encoding.TextDecoder" {
|
||||||
|
try testing.htmlRunner("encoding/decoder.html");
|
||||||
|
}
|
||||||
@@ -18,33 +18,31 @@
|
|||||||
|
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
|
|
||||||
const Page = @import("../Page.zig");
|
const Env = @import("../env.zig").Env;
|
||||||
const Blob = @import("Blob.zig");
|
|
||||||
const js = @import("../js/js.zig");
|
|
||||||
|
|
||||||
const File = @This();
|
// https://encoding.spec.whatwg.org/#interface-textencoder
|
||||||
|
const TextEncoder = @This();
|
||||||
|
|
||||||
/// `File` inherits `Blob`.
|
pub fn constructor() !TextEncoder {
|
||||||
_proto: *Blob,
|
return .{};
|
||||||
|
|
||||||
// TODO: Implement File API.
|
|
||||||
pub fn init(page: *Page) !*File {
|
|
||||||
return page._factory.blob(File{ ._proto = undefined });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub const JsApi = struct {
|
pub fn get_encoding(_: *const TextEncoder) []const u8 {
|
||||||
pub const bridge = js.Bridge(File);
|
return "utf-8";
|
||||||
|
}
|
||||||
|
|
||||||
pub const Meta = struct {
|
pub fn _encode(_: *const TextEncoder, v: []const u8) !Env.TypedArray(u8) {
|
||||||
pub const name = "File";
|
// Ensure the input is a valid utf-8
|
||||||
pub const prototype_chain = bridge.prototypeChain();
|
// It seems chrome accepts invalid utf-8 sequence.
|
||||||
pub var class_id: bridge.ClassId = undefined;
|
//
|
||||||
};
|
if (!std.unicode.utf8ValidateSlice(v)) {
|
||||||
|
return error.InvalidUtf8;
|
||||||
|
}
|
||||||
|
|
||||||
pub const constructor = bridge.constructor(File.init, .{});
|
return .{ .values = v };
|
||||||
};
|
}
|
||||||
|
|
||||||
const testing = @import("../../testing.zig");
|
const testing = @import("../../testing.zig");
|
||||||
test "WebApi: File" {
|
test "Browser: Encoding.TextEncoder" {
|
||||||
try testing.htmlRunner("file.html", .{});
|
try testing.htmlRunner("encoding/encoder.html");
|
||||||
}
|
}
|
||||||
@@ -16,20 +16,7 @@
|
|||||||
// You should have received a copy of the GNU Affero General Public License
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
const js = @import("../../js/js.zig");
|
pub const Interfaces = .{
|
||||||
|
@import("TextDecoder.zig"),
|
||||||
const Text = @import("Text.zig");
|
@import("TextEncoder.zig"),
|
||||||
|
|
||||||
const CDATASection = @This();
|
|
||||||
|
|
||||||
_proto: *Text,
|
|
||||||
|
|
||||||
pub const JsApi = struct {
|
|
||||||
pub const bridge = js.Bridge(CDATASection);
|
|
||||||
|
|
||||||
pub const Meta = struct {
|
|
||||||
pub const name = "CDATASection";
|
|
||||||
pub const prototype_chain = bridge.prototypeChain();
|
|
||||||
pub var class_id: bridge.ClassId = undefined;
|
|
||||||
};
|
|
||||||
};
|
};
|
||||||
51
src/browser/env.zig
Normal file
51
src/browser/env.zig
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const Page = @import("page.zig").Page;
|
||||||
|
const js = @import("../runtime/js.zig");
|
||||||
|
const generate = @import("../runtime/generate.zig");
|
||||||
|
|
||||||
|
const WebApis = struct {
|
||||||
|
// Wrapped like this for debug ergonomics.
|
||||||
|
// When we create our Env, a few lines down, we define it as:
|
||||||
|
// pub const Env = js.Env(*Page, WebApis);
|
||||||
|
//
|
||||||
|
// If there's a compile time error witht he Env, it's type will be readable,
|
||||||
|
// i.e.: runtime.js.Env(*browser.env.Page, browser.env.WebApis)
|
||||||
|
//
|
||||||
|
// But if we didn't wrap it in the struct, like we once didn't, and defined
|
||||||
|
// env as:
|
||||||
|
// pub const Env = js.Env(*Page, Interfaces);
|
||||||
|
//
|
||||||
|
// Because Interfaces is an anynoumous type, it doesn't have a friendly name
|
||||||
|
// and errors would be something like:
|
||||||
|
// runtime.js.Env(*browser.Page, .{...A HUNDRED TYPES...})
|
||||||
|
pub const Interfaces = generate.Tuple(.{
|
||||||
|
@import("crypto/crypto.zig").Crypto,
|
||||||
|
@import("console/console.zig").Console,
|
||||||
|
@import("css/css.zig").Interfaces,
|
||||||
|
@import("cssom/cssom.zig").Interfaces,
|
||||||
|
@import("dom/dom.zig").Interfaces,
|
||||||
|
@import("dom/shadow_root.zig").ShadowRoot,
|
||||||
|
@import("encoding/encoding.zig").Interfaces,
|
||||||
|
@import("events/event.zig").Interfaces,
|
||||||
|
@import("html/html.zig").Interfaces,
|
||||||
|
@import("iterator/iterator.zig").Interfaces,
|
||||||
|
@import("storage/storage.zig").Interfaces,
|
||||||
|
@import("url/url.zig").Interfaces,
|
||||||
|
@import("xhr/xhr.zig").Interfaces,
|
||||||
|
@import("xhr/form_data.zig").Interfaces,
|
||||||
|
@import("xhr/File.zig"),
|
||||||
|
@import("xmlserializer/xmlserializer.zig").Interfaces,
|
||||||
|
@import("fetch/fetch.zig").Interfaces,
|
||||||
|
@import("streams/streams.zig").Interfaces,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const JsThis = Env.JsThis;
|
||||||
|
pub const JsObject = Env.JsObject;
|
||||||
|
pub const Function = Env.Function;
|
||||||
|
pub const Promise = Env.Promise;
|
||||||
|
pub const PromiseResolver = Env.PromiseResolver;
|
||||||
|
|
||||||
|
pub const Env = js.Env(*Page, WebApis);
|
||||||
|
pub const Global = @import("html/window.zig").Window;
|
||||||
85
src/browser/events/custom_event.zig
Normal file
85
src/browser/events/custom_event.zig
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Event = @import("event.zig").Event;
|
||||||
|
const JsObject = @import("../env.zig").JsObject;
|
||||||
|
const netsurf = @import("../netsurf.zig");
|
||||||
|
|
||||||
|
// https://dom.spec.whatwg.org/#interface-customevent
|
||||||
|
pub const CustomEvent = struct {
|
||||||
|
pub const prototype = *Event;
|
||||||
|
pub const union_make_copy = true;
|
||||||
|
|
||||||
|
proto: parser.Event,
|
||||||
|
detail: ?JsObject,
|
||||||
|
|
||||||
|
const CustomEventInit = struct {
|
||||||
|
bubbles: bool = false,
|
||||||
|
cancelable: bool = false,
|
||||||
|
composed: bool = false,
|
||||||
|
detail: ?JsObject = null,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn constructor(event_type: []const u8, opts_: ?CustomEventInit) !CustomEvent {
|
||||||
|
const opts = opts_ orelse CustomEventInit{};
|
||||||
|
|
||||||
|
const event = try parser.eventCreate();
|
||||||
|
defer parser.eventDestroy(event);
|
||||||
|
try parser.eventInit(event, event_type, .{
|
||||||
|
.bubbles = opts.bubbles,
|
||||||
|
.cancelable = opts.cancelable,
|
||||||
|
.composed = opts.composed,
|
||||||
|
});
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.proto = event.*,
|
||||||
|
.detail = if (opts.detail) |d| try d.persist() else null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_detail(self: *CustomEvent) ?JsObject {
|
||||||
|
return self.detail;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initializes an already created `CustomEvent`.
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent/initCustomEvent
|
||||||
|
pub fn _initCustomEvent(
|
||||||
|
self: *CustomEvent,
|
||||||
|
event_type: []const u8,
|
||||||
|
can_bubble: bool,
|
||||||
|
cancelable: bool,
|
||||||
|
maybe_detail: ?JsObject,
|
||||||
|
) !void {
|
||||||
|
// This function can only be called after the constructor has called.
|
||||||
|
// So we assume proto is initialized already by constructor.
|
||||||
|
self.proto.type = try netsurf.strFromData(event_type);
|
||||||
|
self.proto.bubble = can_bubble;
|
||||||
|
self.proto.cancelable = cancelable;
|
||||||
|
self.proto.is_initialised = true;
|
||||||
|
// Detail is stored separately.
|
||||||
|
if (maybe_detail) |detail| {
|
||||||
|
self.detail = try detail.persist();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: Events.Custom" {
|
||||||
|
try testing.htmlRunner("events/custom.html");
|
||||||
|
}
|
||||||
400
src/browser/events/event.zig
Normal file
400
src/browser/events/event.zig
Normal file
@@ -0,0 +1,400 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const generate = @import("../../runtime/generate.zig");
|
||||||
|
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const Node = @import("../dom/node.zig").Node;
|
||||||
|
const DOMException = @import("../dom/exceptions.zig").DOMException;
|
||||||
|
const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||||
|
const EventTargetUnion = @import("../dom/event_target.zig").Union;
|
||||||
|
const AbortSignal = @import("../html/AbortController.zig").AbortSignal;
|
||||||
|
|
||||||
|
const CustomEvent = @import("custom_event.zig").CustomEvent;
|
||||||
|
const ProgressEvent = @import("../xhr/progress_event.zig").ProgressEvent;
|
||||||
|
const MouseEvent = @import("mouse_event.zig").MouseEvent;
|
||||||
|
const KeyboardEvent = @import("keyboard_event.zig").KeyboardEvent;
|
||||||
|
const ErrorEvent = @import("../html/error_event.zig").ErrorEvent;
|
||||||
|
const MessageEvent = @import("../dom/MessageChannel.zig").MessageEvent;
|
||||||
|
|
||||||
|
// Event interfaces
|
||||||
|
pub const Interfaces = .{
|
||||||
|
Event,
|
||||||
|
CustomEvent,
|
||||||
|
ProgressEvent,
|
||||||
|
MouseEvent,
|
||||||
|
KeyboardEvent,
|
||||||
|
ErrorEvent,
|
||||||
|
MessageEvent,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const Union = generate.Union(Interfaces);
|
||||||
|
|
||||||
|
// https://dom.spec.whatwg.org/#event
|
||||||
|
pub const Event = struct {
|
||||||
|
pub const Self = parser.Event;
|
||||||
|
pub const Exception = DOMException;
|
||||||
|
|
||||||
|
pub const EventInit = parser.EventInit;
|
||||||
|
|
||||||
|
// JS
|
||||||
|
// --
|
||||||
|
|
||||||
|
pub const _CAPTURING_PHASE = 1;
|
||||||
|
pub const _AT_TARGET = 2;
|
||||||
|
pub const _BUBBLING_PHASE = 3;
|
||||||
|
|
||||||
|
pub fn toInterface(evt: *parser.Event) Union {
|
||||||
|
return switch (parser.eventGetInternalType(evt)) {
|
||||||
|
.event, .abort_signal, .xhr_event => .{ .Event = evt },
|
||||||
|
.custom_event => .{ .CustomEvent = @as(*CustomEvent, @ptrCast(evt)).* },
|
||||||
|
.progress_event => .{ .ProgressEvent = @as(*ProgressEvent, @ptrCast(evt)).* },
|
||||||
|
.mouse_event => .{ .MouseEvent = @as(*parser.MouseEvent, @ptrCast(evt)) },
|
||||||
|
.error_event => .{ .ErrorEvent = @as(*ErrorEvent, @ptrCast(evt)).* },
|
||||||
|
.message_event => .{ .MessageEvent = @as(*MessageEvent, @ptrCast(evt)).* },
|
||||||
|
.keyboard_event => .{ .KeyboardEvent = @as(*parser.KeyboardEvent, @ptrCast(evt)) },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn constructor(event_type: []const u8, opts: ?EventInit) !*parser.Event {
|
||||||
|
const event = try parser.eventCreate();
|
||||||
|
try parser.eventInit(event, event_type, opts orelse EventInit{});
|
||||||
|
return event;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Getters
|
||||||
|
|
||||||
|
pub fn get_type(self: *parser.Event) ![]const u8 {
|
||||||
|
return try parser.eventType(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_target(self: *parser.Event, page: *Page) !?EventTargetUnion {
|
||||||
|
const et = parser.eventTarget(self);
|
||||||
|
if (et == null) return null;
|
||||||
|
return try EventTarget.toInterface(et.?, page);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_currentTarget(self: *parser.Event, page: *Page) !?EventTargetUnion {
|
||||||
|
const et = parser.eventCurrentTarget(self);
|
||||||
|
if (et == null) return null;
|
||||||
|
return try EventTarget.toInterface(et.?, page);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_eventPhase(self: *parser.Event) u8 {
|
||||||
|
return parser.eventPhase(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_bubbles(self: *parser.Event) bool {
|
||||||
|
return parser.eventBubbles(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_cancelable(self: *parser.Event) bool {
|
||||||
|
return parser.eventCancelable(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_defaultPrevented(self: *parser.Event) bool {
|
||||||
|
return parser.eventDefaultPrevented(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_isTrusted(self: *parser.Event) bool {
|
||||||
|
return parser.eventIsTrusted(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Even though this is supposed to to provide microsecond resolution, browser
|
||||||
|
// return coarser values to protect against fingerprinting. libdom returns
|
||||||
|
// seconds, which is good enough.
|
||||||
|
pub fn get_timeStamp(self: *parser.Event) u64 {
|
||||||
|
return parser.eventTimestamp(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Methods
|
||||||
|
|
||||||
|
pub fn _initEvent(
|
||||||
|
self: *parser.Event,
|
||||||
|
eventType: []const u8,
|
||||||
|
bubbles: ?bool,
|
||||||
|
cancelable: ?bool,
|
||||||
|
) !void {
|
||||||
|
const opts = EventInit{
|
||||||
|
.bubbles = bubbles orelse false,
|
||||||
|
.cancelable = cancelable orelse false,
|
||||||
|
};
|
||||||
|
return try parser.eventInit(self, eventType, opts);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _stopPropagation(self: *parser.Event) !void {
|
||||||
|
return parser.eventStopPropagation(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _stopImmediatePropagation(self: *parser.Event) !void {
|
||||||
|
return parser.eventStopImmediatePropagation(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _preventDefault(self: *parser.Event) !void {
|
||||||
|
return parser.eventPreventDefault(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _composedPath(self: *parser.Event, page: *Page) ![]const EventTargetUnion {
|
||||||
|
const et_ = parser.eventTarget(self);
|
||||||
|
const et = et_ orelse return &.{};
|
||||||
|
|
||||||
|
var node: ?*parser.Node = switch (try parser.eventTargetInternalType(et)) {
|
||||||
|
.libdom_node => @as(*parser.Node, @ptrCast(et)),
|
||||||
|
.plain => parser.eventTargetToNode(et),
|
||||||
|
else => {
|
||||||
|
// Window, XHR, MessagePort, etc...no path beyond the event itself
|
||||||
|
return &.{try EventTarget.toInterface(et, page)};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const arena = page.call_arena;
|
||||||
|
var path: std.ArrayListUnmanaged(EventTargetUnion) = .empty;
|
||||||
|
while (node) |n| {
|
||||||
|
try path.append(arena, .{
|
||||||
|
.node = try Node.toInterface(n),
|
||||||
|
});
|
||||||
|
|
||||||
|
node = try parser.nodeParentNode(n);
|
||||||
|
if (node == null and try parser.nodeType(n) == .document_fragment) {
|
||||||
|
// we have a non-continuous hook from a shadowroot to its host (
|
||||||
|
// it's parent element). libdom doesn't really support ShdowRoots
|
||||||
|
// and, for the most part, that works out well since it naturally
|
||||||
|
// provides isolation. But events don't follow the same
|
||||||
|
// shadowroot isolation as most other things, so, if this is
|
||||||
|
// a parent-less document fragment, we need to check if it has
|
||||||
|
// a host.
|
||||||
|
if (parser.documentFragmentGetHost(@ptrCast(n))) |host| {
|
||||||
|
node = host;
|
||||||
|
|
||||||
|
// If a document fragment has a host, then that host
|
||||||
|
// _has_ to have a state and that state _has_ to have
|
||||||
|
// a shadow_root field. All of this is set in Element._attachShadow
|
||||||
|
if (page.getNodeState(host).?.shadow_root.?.mode == .closed) {
|
||||||
|
// if the shadow root is closed, then the composedPath
|
||||||
|
// starts at the host element.
|
||||||
|
path.clearRetainingCapacity();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Our document fragement has no parent and no host, we
|
||||||
|
// can break out of the loop.
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (path.getLastOrNull()) |last| {
|
||||||
|
// the Window isn't part of the DOM hierarchy, but for events, it
|
||||||
|
// is, so we need to glue it on.
|
||||||
|
if (last.node == .HTMLDocument and last.node.HTMLDocument == page.window.document) {
|
||||||
|
try path.append(arena, .{ .node = .{ .Window = &page.window } });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return path.items;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const EventHandler = struct {
|
||||||
|
once: bool,
|
||||||
|
capture: bool,
|
||||||
|
callback: Function,
|
||||||
|
node: parser.EventNode,
|
||||||
|
listener: *parser.EventListener,
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Function = Env.Function;
|
||||||
|
|
||||||
|
pub const Listener = union(enum) {
|
||||||
|
function: Function,
|
||||||
|
object: Env.JsObject,
|
||||||
|
|
||||||
|
pub fn callback(self: Listener, target: *parser.EventTarget) !?Function {
|
||||||
|
return switch (self) {
|
||||||
|
.function => |func| try func.withThis(target),
|
||||||
|
.object => |obj| blk: {
|
||||||
|
const func = (try obj.getFunction("handleEvent")) orelse return null;
|
||||||
|
break :blk try func.withThis(try obj.persist());
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const Opts = union(enum) {
|
||||||
|
flags: Flags,
|
||||||
|
capture: bool,
|
||||||
|
|
||||||
|
const Flags = struct {
|
||||||
|
once: ?bool,
|
||||||
|
capture: ?bool,
|
||||||
|
// We ignore this property. It seems to be largely used to help the
|
||||||
|
// browser make certain performance tweaks (i.e. the browser knows
|
||||||
|
// that the listener won't call preventDefault() and thus can safely
|
||||||
|
// run the default as needed).
|
||||||
|
passive: ?bool,
|
||||||
|
signal: ?*AbortSignal, // currently does nothing
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn register(
|
||||||
|
allocator: Allocator,
|
||||||
|
target: *parser.EventTarget,
|
||||||
|
typ: []const u8,
|
||||||
|
listener: Listener,
|
||||||
|
opts_: ?Opts,
|
||||||
|
) !?*EventHandler {
|
||||||
|
var once = false;
|
||||||
|
var capture = false;
|
||||||
|
var signal: ?*AbortSignal = null;
|
||||||
|
|
||||||
|
if (opts_) |opts| {
|
||||||
|
switch (opts) {
|
||||||
|
.capture => |c| capture = c,
|
||||||
|
.flags => |f| {
|
||||||
|
once = f.once orelse false;
|
||||||
|
signal = f.signal orelse null;
|
||||||
|
capture = f.capture orelse false;
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const callback = (try listener.callback(target)) orelse return null;
|
||||||
|
|
||||||
|
if (signal) |s| {
|
||||||
|
const signal_target = parser.toEventTarget(AbortSignal, s);
|
||||||
|
|
||||||
|
const scb = try allocator.create(SignalCallback);
|
||||||
|
scb.* = .{
|
||||||
|
.target = target,
|
||||||
|
.capture = capture,
|
||||||
|
.callback_id = callback.id,
|
||||||
|
.typ = try allocator.dupe(u8, typ),
|
||||||
|
.signal_target = signal_target,
|
||||||
|
.signal_listener = undefined,
|
||||||
|
.node = .{ .func = SignalCallback.handle },
|
||||||
|
};
|
||||||
|
|
||||||
|
scb.signal_listener = try parser.eventTargetAddEventListener(
|
||||||
|
signal_target,
|
||||||
|
"abort",
|
||||||
|
&scb.node,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if event target has already this listener
|
||||||
|
if (try parser.eventTargetHasListener(target, typ, capture, callback.id) != null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const eh = try allocator.create(EventHandler);
|
||||||
|
eh.* = .{
|
||||||
|
.once = once,
|
||||||
|
.capture = capture,
|
||||||
|
.callback = callback,
|
||||||
|
.node = .{
|
||||||
|
.id = callback.id,
|
||||||
|
.func = handle,
|
||||||
|
},
|
||||||
|
.listener = undefined,
|
||||||
|
};
|
||||||
|
|
||||||
|
eh.listener = try parser.eventTargetAddEventListener(
|
||||||
|
target,
|
||||||
|
typ,
|
||||||
|
&eh.node,
|
||||||
|
capture,
|
||||||
|
);
|
||||||
|
return eh;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle(node: *parser.EventNode, event: *parser.Event) void {
|
||||||
|
const ievent = Event.toInterface(event);
|
||||||
|
const self: *EventHandler = @fieldParentPtr("node", node);
|
||||||
|
var result: Function.Result = undefined;
|
||||||
|
self.callback.tryCall(void, .{ievent}, &result) catch {
|
||||||
|
log.debug(.user_script, "callback error", .{
|
||||||
|
.err = result.exception,
|
||||||
|
.stack = result.stack,
|
||||||
|
.source = "event handler",
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
if (self.once) {
|
||||||
|
const target = parser.eventTarget(event).?;
|
||||||
|
const typ = parser.eventType(event) catch return;
|
||||||
|
parser.eventTargetRemoveEventListener(
|
||||||
|
target,
|
||||||
|
typ,
|
||||||
|
self.listener,
|
||||||
|
self.capture,
|
||||||
|
) catch {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const SignalCallback = struct {
|
||||||
|
typ: []const u8,
|
||||||
|
capture: bool,
|
||||||
|
callback_id: usize,
|
||||||
|
node: parser.EventNode,
|
||||||
|
target: *parser.EventTarget,
|
||||||
|
signal_target: *parser.EventTarget,
|
||||||
|
signal_listener: *parser.EventListener,
|
||||||
|
|
||||||
|
fn handle(node: *parser.EventNode, _: *parser.Event) void {
|
||||||
|
const self: *SignalCallback = @fieldParentPtr("node", node);
|
||||||
|
self._handle() catch |err| {
|
||||||
|
log.err(.app, "event signal handler", .{ .err = err });
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn _handle(self: *SignalCallback) !void {
|
||||||
|
const lst = try parser.eventTargetHasListener(
|
||||||
|
self.target,
|
||||||
|
self.typ,
|
||||||
|
self.capture,
|
||||||
|
self.callback_id,
|
||||||
|
);
|
||||||
|
if (lst == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try parser.eventTargetRemoveEventListener(
|
||||||
|
self.target,
|
||||||
|
self.typ,
|
||||||
|
lst.?,
|
||||||
|
self.capture,
|
||||||
|
);
|
||||||
|
|
||||||
|
// remove the abort signal listener itself
|
||||||
|
try parser.eventTargetRemoveEventListener(
|
||||||
|
self.signal_target,
|
||||||
|
"abort",
|
||||||
|
self.signal_listener,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: Event" {
|
||||||
|
try testing.htmlRunner("events/event.html");
|
||||||
|
}
|
||||||
160
src/browser/events/keyboard_event.zig
Normal file
160
src/browser/events/keyboard_event.zig
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const builtin = @import("builtin");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Event = @import("event.zig").Event;
|
||||||
|
const JsObject = @import("../env.zig").JsObject;
|
||||||
|
|
||||||
|
// TODO: We currently don't have a UIEvent interface so we skip it in the prototype chain.
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/UIEvent
|
||||||
|
const UIEvent = Event;
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent
|
||||||
|
pub const KeyboardEvent = struct {
|
||||||
|
pub const Self = parser.KeyboardEvent;
|
||||||
|
pub const prototype = *UIEvent;
|
||||||
|
|
||||||
|
pub const ConstructorOptions = struct {
|
||||||
|
key: []const u8 = "",
|
||||||
|
code: []const u8 = "",
|
||||||
|
location: parser.KeyboardEventOpts.LocationCode = .standard,
|
||||||
|
repeat: bool = false,
|
||||||
|
isComposing: bool = false,
|
||||||
|
// Currently not supported but we take as argument.
|
||||||
|
charCode: u32 = 0,
|
||||||
|
// Currently not supported but we take as argument.
|
||||||
|
keyCode: u32 = 0,
|
||||||
|
// Currently not supported but we take as argument.
|
||||||
|
which: u32 = 0,
|
||||||
|
ctrlKey: bool = false,
|
||||||
|
shiftKey: bool = false,
|
||||||
|
altKey: bool = false,
|
||||||
|
metaKey: bool = false,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn constructor(event_type: []const u8, maybe_options: ?ConstructorOptions) !*parser.KeyboardEvent {
|
||||||
|
const options: ConstructorOptions = maybe_options orelse .{};
|
||||||
|
|
||||||
|
var event = try parser.keyboardEventCreate();
|
||||||
|
parser.eventSetInternalType(@ptrCast(&event), .keyboard_event);
|
||||||
|
|
||||||
|
try parser.keyboardEventInit(
|
||||||
|
event,
|
||||||
|
event_type,
|
||||||
|
.{
|
||||||
|
.key = options.key,
|
||||||
|
.code = options.code,
|
||||||
|
.location = options.location,
|
||||||
|
.repeat = options.repeat,
|
||||||
|
.is_composing = options.isComposing,
|
||||||
|
.ctrl_key = options.ctrlKey,
|
||||||
|
.shift_key = options.shiftKey,
|
||||||
|
.alt_key = options.altKey,
|
||||||
|
.meta_key = options.metaKey,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
return event;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns the modifier state for given modifier key.
|
||||||
|
pub fn _getModifierState(self: *Self, key: []const u8) bool {
|
||||||
|
// Chrome and Firefox do case-sensitive match, here we prefer the same.
|
||||||
|
if (std.mem.eql(u8, key, "Alt")) {
|
||||||
|
return get_altKey(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (std.mem.eql(u8, key, "AltGraph")) {
|
||||||
|
return (get_altKey(self) and get_ctrlKey(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (std.mem.eql(u8, key, "Control")) {
|
||||||
|
return get_ctrlKey(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (std.mem.eql(u8, key, "Shift")) {
|
||||||
|
return get_shiftKey(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (std.mem.eql(u8, key, "Meta") or std.mem.eql(u8, key, "OS")) {
|
||||||
|
return get_metaKey(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Special case for IE.
|
||||||
|
if (comptime builtin.os.tag == .windows) {
|
||||||
|
if (std.mem.eql(u8, key, "Win")) {
|
||||||
|
return get_metaKey(self);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getModifierState() also accepts a deprecated virtual modifier named "Accel".
|
||||||
|
// event.getModifierState("Accel") returns true when at least one of
|
||||||
|
// KeyboardEvent.ctrlKey or KeyboardEvent.metaKey is true.
|
||||||
|
//
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/getModifierState#accel_virtual_modifier
|
||||||
|
if (std.mem.eql(u8, key, "Accel")) {
|
||||||
|
return (get_ctrlKey(self) or get_metaKey(self));
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Add support for "CapsLock", "ScrollLock".
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Getters.
|
||||||
|
|
||||||
|
pub fn get_altKey(self: *Self) bool {
|
||||||
|
return parser.keyboardEventKeyIsSet(self, .alt);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_ctrlKey(self: *Self) bool {
|
||||||
|
return parser.keyboardEventKeyIsSet(self, .ctrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_metaKey(self: *Self) bool {
|
||||||
|
return parser.keyboardEventKeyIsSet(self, .meta);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_shiftKey(self: *Self) bool {
|
||||||
|
return parser.keyboardEventKeyIsSet(self, .shift);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_isComposing(self: *Self) bool {
|
||||||
|
return self.is_composing;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_location(self: *Self) u32 {
|
||||||
|
return self.location;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_key(self: *Self) ![]const u8 {
|
||||||
|
return parser.keyboardEventGetKey(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_repeat(self: *Self) bool {
|
||||||
|
return self.repeat;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: Events.Keyboard" {
|
||||||
|
try testing.htmlRunner("events/keyboard.html");
|
||||||
|
}
|
||||||
112
src/browser/events/mouse_event.zig
Normal file
112
src/browser/events/mouse_event.zig
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Event = @import("event.zig").Event;
|
||||||
|
const JsObject = @import("../env.zig").JsObject;
|
||||||
|
|
||||||
|
// TODO: We currently don't have a UIEvent interface so we skip it in the prototype chain.
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/UIEvent
|
||||||
|
const UIEvent = Event;
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent
|
||||||
|
pub const MouseEvent = struct {
|
||||||
|
pub const Self = parser.MouseEvent;
|
||||||
|
pub const prototype = *UIEvent;
|
||||||
|
|
||||||
|
const MouseButton = enum(u16) {
|
||||||
|
main_button = 0,
|
||||||
|
auxillary_button = 1,
|
||||||
|
secondary_button = 2,
|
||||||
|
fourth_button = 3,
|
||||||
|
fifth_button = 4,
|
||||||
|
};
|
||||||
|
|
||||||
|
const MouseEventInit = struct {
|
||||||
|
screenX: i32 = 0,
|
||||||
|
screenY: i32 = 0,
|
||||||
|
clientX: i32 = 0,
|
||||||
|
clientY: i32 = 0,
|
||||||
|
ctrlKey: bool = false,
|
||||||
|
shiftKey: bool = false,
|
||||||
|
altKey: bool = false,
|
||||||
|
metaKey: bool = false,
|
||||||
|
button: MouseButton = .main_button,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn constructor(event_type: []const u8, opts_: ?MouseEventInit) !*parser.MouseEvent {
|
||||||
|
const opts = opts_ orelse MouseEventInit{};
|
||||||
|
|
||||||
|
var mouse_event = try parser.mouseEventCreate();
|
||||||
|
parser.eventSetInternalType(@ptrCast(&mouse_event), .mouse_event);
|
||||||
|
|
||||||
|
try parser.mouseEventInit(mouse_event, event_type, .{
|
||||||
|
.x = opts.clientX,
|
||||||
|
.y = opts.clientY,
|
||||||
|
.ctrl = opts.ctrlKey,
|
||||||
|
.shift = opts.shiftKey,
|
||||||
|
.alt = opts.altKey,
|
||||||
|
.meta = opts.metaKey,
|
||||||
|
.button = @intFromEnum(opts.button),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!std.mem.eql(u8, event_type, "click")) {
|
||||||
|
log.warn(.mouse_event, "unsupported mouse event", .{ .event = event_type });
|
||||||
|
}
|
||||||
|
|
||||||
|
return mouse_event;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_button(self: *parser.MouseEvent) u16 {
|
||||||
|
return self.button;
|
||||||
|
}
|
||||||
|
|
||||||
|
// These is just an alias for clientX.
|
||||||
|
pub fn get_x(self: *parser.MouseEvent) i32 {
|
||||||
|
return self.cx;
|
||||||
|
}
|
||||||
|
|
||||||
|
// These is just an alias for clientY.
|
||||||
|
pub fn get_y(self: *parser.MouseEvent) i32 {
|
||||||
|
return self.cy;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_clientX(self: *parser.MouseEvent) i32 {
|
||||||
|
return self.cx;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_clientY(self: *parser.MouseEvent) i32 {
|
||||||
|
return self.cy;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_screenX(self: *parser.MouseEvent) i32 {
|
||||||
|
return self.sx;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_screenY(self: *parser.MouseEvent) i32 {
|
||||||
|
return self.sy;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: Events.Mouse" {
|
||||||
|
try testing.htmlRunner("events/mouse.html");
|
||||||
|
}
|
||||||
227
src/browser/fetch/Headers.zig
Normal file
227
src/browser/fetch/Headers.zig
Normal file
@@ -0,0 +1,227 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const URL = @import("../../url.zig").URL;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const iterator = @import("../iterator/iterator.zig");
|
||||||
|
|
||||||
|
const v8 = @import("v8");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Headers
|
||||||
|
const Headers = @This();
|
||||||
|
|
||||||
|
// Case-Insensitive String HashMap.
|
||||||
|
// This allows us to avoid having to allocate lowercase keys all the time.
|
||||||
|
const HeaderHashMap = std.HashMapUnmanaged([]const u8, []const u8, struct {
|
||||||
|
pub fn hash(_: @This(), s: []const u8) u64 {
|
||||||
|
var buf: [64]u8 = undefined;
|
||||||
|
var hasher = std.hash.Wyhash.init(s.len);
|
||||||
|
|
||||||
|
var key = s;
|
||||||
|
while (key.len >= 64) {
|
||||||
|
const lower = std.ascii.lowerString(buf[0..], key[0..64]);
|
||||||
|
hasher.update(lower);
|
||||||
|
key = key[64..];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (key.len > 0) {
|
||||||
|
const lower = std.ascii.lowerString(buf[0..key.len], key);
|
||||||
|
hasher.update(lower);
|
||||||
|
}
|
||||||
|
|
||||||
|
return hasher.final();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn eql(_: @This(), a: []const u8, b: []const u8) bool {
|
||||||
|
return std.ascii.eqlIgnoreCase(a, b);
|
||||||
|
}
|
||||||
|
}, 80);
|
||||||
|
|
||||||
|
headers: HeaderHashMap = .empty,
|
||||||
|
|
||||||
|
// They can either be:
|
||||||
|
//
|
||||||
|
// 1. An array of string pairs.
|
||||||
|
// 2. An object with string keys to string values.
|
||||||
|
// 3. Another Headers object.
|
||||||
|
pub const HeadersInit = union(enum) {
|
||||||
|
// List of Pairs of []const u8
|
||||||
|
strings: []const [2][]const u8,
|
||||||
|
// Headers
|
||||||
|
headers: *Headers,
|
||||||
|
// Mappings
|
||||||
|
object: Env.JsObject,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn constructor(_init: ?HeadersInit, page: *Page) !Headers {
|
||||||
|
const arena = page.arena;
|
||||||
|
var headers: HeaderHashMap = .empty;
|
||||||
|
|
||||||
|
if (_init) |init| {
|
||||||
|
switch (init) {
|
||||||
|
.strings => |kvs| {
|
||||||
|
for (kvs) |pair| {
|
||||||
|
const key = try arena.dupe(u8, pair[0]);
|
||||||
|
const value = try arena.dupe(u8, pair[1]);
|
||||||
|
|
||||||
|
try headers.put(arena, key, value);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.headers => |hdrs| {
|
||||||
|
var iter = hdrs.headers.iterator();
|
||||||
|
while (iter.next()) |entry| {
|
||||||
|
try headers.put(arena, entry.key_ptr.*, entry.value_ptr.*);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
.object => |obj| {
|
||||||
|
var iter = obj.nameIterator();
|
||||||
|
while (try iter.next()) |name_value| {
|
||||||
|
const name = try name_value.toString(arena);
|
||||||
|
const value = try obj.get(name);
|
||||||
|
const value_string = try value.toString(arena);
|
||||||
|
|
||||||
|
try headers.put(arena, name, value_string);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.headers = headers,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn append(self: *Headers, name: []const u8, value: []const u8, allocator: std.mem.Allocator) !void {
|
||||||
|
const key = try allocator.dupe(u8, name);
|
||||||
|
const gop = try self.headers.getOrPut(allocator, key);
|
||||||
|
|
||||||
|
if (gop.found_existing) {
|
||||||
|
// If we found it, append the value.
|
||||||
|
const new_value = try std.fmt.allocPrint(allocator, "{s}, {s}", .{ gop.value_ptr.*, value });
|
||||||
|
gop.value_ptr.* = new_value;
|
||||||
|
} else {
|
||||||
|
// Otherwise, we should just put it in.
|
||||||
|
gop.value_ptr.* = try allocator.dupe(u8, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _append(self: *Headers, name: []const u8, value: []const u8, page: *Page) !void {
|
||||||
|
const arena = page.arena;
|
||||||
|
try self.append(name, value, arena);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _delete(self: *Headers, name: []const u8) void {
|
||||||
|
_ = self.headers.remove(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const HeadersEntryIterator = struct {
|
||||||
|
slot: [2][]const u8,
|
||||||
|
iter: HeaderHashMap.Iterator,
|
||||||
|
|
||||||
|
// TODO: these SHOULD be in lexigraphical order but I'm not sure how actually
|
||||||
|
// important that is.
|
||||||
|
pub fn _next(self: *HeadersEntryIterator) ?[2][]const u8 {
|
||||||
|
if (self.iter.next()) |entry| {
|
||||||
|
self.slot[0] = entry.key_ptr.*;
|
||||||
|
self.slot[1] = entry.value_ptr.*;
|
||||||
|
return self.slot;
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn _entries(self: *const Headers) HeadersEntryIterable {
|
||||||
|
return .{
|
||||||
|
.inner = .{
|
||||||
|
.slot = undefined,
|
||||||
|
.iter = self.headers.iterator(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _forEach(self: *Headers, callback_fn: Env.Function, this_arg: ?Env.JsObject) !void {
|
||||||
|
var iter = self.headers.iterator();
|
||||||
|
|
||||||
|
const cb = if (this_arg) |this| try callback_fn.withThis(this) else callback_fn;
|
||||||
|
|
||||||
|
while (iter.next()) |entry| {
|
||||||
|
try cb.call(void, .{ entry.key_ptr.*, entry.value_ptr.*, self });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _get(self: *const Headers, name: []const u8) ?[]const u8 {
|
||||||
|
return self.headers.get(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _has(self: *const Headers, name: []const u8) bool {
|
||||||
|
return self.headers.contains(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const HeadersKeyIterator = struct {
|
||||||
|
iter: HeaderHashMap.KeyIterator,
|
||||||
|
|
||||||
|
pub fn _next(self: *HeadersKeyIterator) ?[]const u8 {
|
||||||
|
if (self.iter.next()) |key| {
|
||||||
|
return key.*;
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn _keys(self: *const Headers) HeadersKeyIterable {
|
||||||
|
return .{ .inner = .{ .iter = self.headers.keyIterator() } };
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _set(self: *Headers, name: []const u8, value: []const u8, page: *Page) !void {
|
||||||
|
const arena = page.arena;
|
||||||
|
|
||||||
|
const key = try arena.dupe(u8, name);
|
||||||
|
const gop = try self.headers.getOrPut(arena, key);
|
||||||
|
gop.value_ptr.* = try arena.dupe(u8, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const HeadersValueIterator = struct {
|
||||||
|
iter: HeaderHashMap.ValueIterator,
|
||||||
|
|
||||||
|
pub fn _next(self: *HeadersValueIterator) ?[]const u8 {
|
||||||
|
if (self.iter.next()) |value| {
|
||||||
|
return value.*;
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn _values(self: *const Headers) HeadersValueIterable {
|
||||||
|
return .{ .inner = .{ .iter = self.headers.valueIterator() } };
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const HeadersKeyIterable = iterator.Iterable(HeadersKeyIterator, "HeadersKeyIterator");
|
||||||
|
pub const HeadersValueIterable = iterator.Iterable(HeadersValueIterator, "HeadersValueIterator");
|
||||||
|
pub const HeadersEntryIterable = iterator.Iterable(HeadersEntryIterator, "HeadersEntryIterator");
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "fetch: Headers" {
|
||||||
|
try testing.htmlRunner("fetch/headers.html");
|
||||||
|
}
|
||||||
266
src/browser/fetch/Request.zig
Normal file
266
src/browser/fetch/Request.zig
Normal file
@@ -0,0 +1,266 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
|
||||||
|
const URL = @import("../../url.zig").URL;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const Response = @import("./Response.zig");
|
||||||
|
const Http = @import("../../http/Http.zig");
|
||||||
|
const ReadableStream = @import("../streams/ReadableStream.zig");
|
||||||
|
|
||||||
|
const v8 = @import("v8");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
|
||||||
|
const Headers = @import("Headers.zig");
|
||||||
|
const HeadersInit = @import("Headers.zig").HeadersInit;
|
||||||
|
|
||||||
|
pub const RequestInput = union(enum) {
|
||||||
|
string: []const u8,
|
||||||
|
request: *Request,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const RequestCache = enum {
|
||||||
|
default,
|
||||||
|
@"no-store",
|
||||||
|
reload,
|
||||||
|
@"no-cache",
|
||||||
|
@"force-cache",
|
||||||
|
@"only-if-cached",
|
||||||
|
|
||||||
|
pub fn fromString(str: []const u8) ?RequestCache {
|
||||||
|
for (std.enums.values(RequestCache)) |cache| {
|
||||||
|
if (std.ascii.eqlIgnoreCase(str, @tagName(cache))) {
|
||||||
|
return cache;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn toString(self: RequestCache) []const u8 {
|
||||||
|
return @tagName(self);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const RequestCredentials = enum {
|
||||||
|
omit,
|
||||||
|
@"same-origin",
|
||||||
|
include,
|
||||||
|
|
||||||
|
pub fn fromString(str: []const u8) ?RequestCredentials {
|
||||||
|
for (std.enums.values(RequestCredentials)) |cache| {
|
||||||
|
if (std.ascii.eqlIgnoreCase(str, @tagName(cache))) {
|
||||||
|
return cache;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn toString(self: RequestCredentials) []const u8 {
|
||||||
|
return @tagName(self);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/RequestInit
|
||||||
|
pub const RequestInit = struct {
|
||||||
|
body: ?[]const u8 = null,
|
||||||
|
cache: ?[]const u8 = null,
|
||||||
|
credentials: ?[]const u8 = null,
|
||||||
|
headers: ?HeadersInit = null,
|
||||||
|
integrity: ?[]const u8 = null,
|
||||||
|
method: ?[]const u8 = null,
|
||||||
|
};
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Request/Request
|
||||||
|
const Request = @This();
|
||||||
|
|
||||||
|
method: Http.Method,
|
||||||
|
url: [:0]const u8,
|
||||||
|
cache: RequestCache,
|
||||||
|
credentials: RequestCredentials,
|
||||||
|
headers: Headers,
|
||||||
|
body: ?[]const u8,
|
||||||
|
body_used: bool = false,
|
||||||
|
integrity: []const u8,
|
||||||
|
|
||||||
|
pub fn constructor(input: RequestInput, _options: ?RequestInit, page: *Page) !Request {
|
||||||
|
const arena = page.arena;
|
||||||
|
const options: RequestInit = _options orelse .{};
|
||||||
|
|
||||||
|
const url: [:0]const u8 = blk: switch (input) {
|
||||||
|
.string => |str| {
|
||||||
|
break :blk try URL.stitch(arena, str, page.url.raw, .{ .null_terminated = true });
|
||||||
|
},
|
||||||
|
.request => |req| {
|
||||||
|
break :blk try arena.dupeZ(u8, req.url);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const body = if (options.body) |body| try arena.dupe(u8, body) else null;
|
||||||
|
const cache = (if (options.cache) |cache| RequestCache.fromString(cache) else null) orelse RequestCache.default;
|
||||||
|
const credentials = (if (options.credentials) |creds| RequestCredentials.fromString(creds) else null) orelse RequestCredentials.@"same-origin";
|
||||||
|
const integrity = if (options.integrity) |integ| try arena.dupe(u8, integ) else "";
|
||||||
|
const headers: Headers = if (options.headers) |hdrs| try Headers.constructor(hdrs, page) else .{};
|
||||||
|
|
||||||
|
const method: Http.Method = blk: {
|
||||||
|
if (options.method) |given_method| {
|
||||||
|
for (std.enums.values(Http.Method)) |method| {
|
||||||
|
if (std.ascii.eqlIgnoreCase(given_method, @tagName(method))) {
|
||||||
|
break :blk method;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
break :blk Http.Method.GET;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.method = method,
|
||||||
|
.url = url,
|
||||||
|
.cache = cache,
|
||||||
|
.credentials = credentials,
|
||||||
|
.headers = headers,
|
||||||
|
.body = body,
|
||||||
|
.integrity = integrity,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_body(self: *const Request, page: *Page) !?*ReadableStream {
|
||||||
|
if (self.body) |body| {
|
||||||
|
const stream = try ReadableStream.constructor(null, null, page);
|
||||||
|
try stream.queue.append(page.arena, body);
|
||||||
|
return stream;
|
||||||
|
} else return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_bodyUsed(self: *const Request) bool {
|
||||||
|
return self.body_used;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_cache(self: *const Request) RequestCache {
|
||||||
|
return self.cache;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_credentials(self: *const Request) RequestCredentials {
|
||||||
|
return self.credentials;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_headers(self: *Request) *Headers {
|
||||||
|
return &self.headers;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_integrity(self: *const Request) []const u8 {
|
||||||
|
return self.integrity;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: If we ever support the Navigation API, we need isHistoryNavigation
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Request/isHistoryNavigation
|
||||||
|
|
||||||
|
pub fn get_method(self: *const Request) []const u8 {
|
||||||
|
return @tagName(self.method);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_url(self: *const Request) []const u8 {
|
||||||
|
return self.url;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _clone(self: *Request) !Request {
|
||||||
|
// Not allowed to clone if the body was used.
|
||||||
|
if (self.body_used) {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK to just return the same fields BECAUSE
|
||||||
|
// all of these fields are read-only and can't be modified.
|
||||||
|
return Request{
|
||||||
|
.body = self.body,
|
||||||
|
.body_used = self.body_used,
|
||||||
|
.cache = self.cache,
|
||||||
|
.credentials = self.credentials,
|
||||||
|
.headers = self.headers,
|
||||||
|
.method = self.method,
|
||||||
|
.integrity = self.integrity,
|
||||||
|
.url = self.url,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _bytes(self: *Response, page: *Page) !Env.Promise {
|
||||||
|
if (self.body_used) {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolver = Env.PromiseResolver{
|
||||||
|
.js_context = page.main_context,
|
||||||
|
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
|
||||||
|
};
|
||||||
|
|
||||||
|
try resolver.resolve(self.body);
|
||||||
|
self.body_used = true;
|
||||||
|
return resolver.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _json(self: *Response, page: *Page) !Env.Promise {
|
||||||
|
if (self.body_used) {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolver = Env.PromiseResolver{
|
||||||
|
.js_context = page.main_context,
|
||||||
|
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
|
||||||
|
};
|
||||||
|
|
||||||
|
const p = std.json.parseFromSliceLeaky(
|
||||||
|
std.json.Value,
|
||||||
|
page.call_arena,
|
||||||
|
self.body,
|
||||||
|
.{},
|
||||||
|
) catch |e| {
|
||||||
|
log.info(.browser, "invalid json", .{ .err = e, .source = "Request" });
|
||||||
|
return error.SyntaxError;
|
||||||
|
};
|
||||||
|
|
||||||
|
try resolver.resolve(p);
|
||||||
|
self.body_used = true;
|
||||||
|
return resolver.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _text(self: *Response, page: *Page) !Env.Promise {
|
||||||
|
if (self.body_used) {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolver = Env.PromiseResolver{
|
||||||
|
.js_context = page.main_context,
|
||||||
|
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
|
||||||
|
};
|
||||||
|
|
||||||
|
try resolver.resolve(self.body);
|
||||||
|
self.body_used = true;
|
||||||
|
return resolver.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "fetch: Request" {
|
||||||
|
try testing.htmlRunner("fetch/request.html");
|
||||||
|
}
|
||||||
196
src/browser/fetch/Response.zig
Normal file
196
src/browser/fetch/Response.zig
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
|
||||||
|
const v8 = @import("v8");
|
||||||
|
|
||||||
|
const HttpClient = @import("../../http/Client.zig");
|
||||||
|
const Http = @import("../../http/Http.zig");
|
||||||
|
const URL = @import("../../url.zig").URL;
|
||||||
|
|
||||||
|
const ReadableStream = @import("../streams/ReadableStream.zig");
|
||||||
|
const Headers = @import("Headers.zig");
|
||||||
|
const HeadersInit = @import("Headers.zig").HeadersInit;
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Mime = @import("../mime.zig").Mime;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Response
|
||||||
|
const Response = @This();
|
||||||
|
|
||||||
|
status: u16 = 200,
|
||||||
|
status_text: []const u8 = "",
|
||||||
|
headers: Headers,
|
||||||
|
mime: ?Mime = null,
|
||||||
|
url: []const u8 = "",
|
||||||
|
body: []const u8 = "",
|
||||||
|
body_used: bool = false,
|
||||||
|
redirected: bool = false,
|
||||||
|
|
||||||
|
const ResponseBody = union(enum) {
|
||||||
|
string: []const u8,
|
||||||
|
};
|
||||||
|
|
||||||
|
const ResponseOptions = struct {
|
||||||
|
status: u16 = 200,
|
||||||
|
statusText: ?[]const u8 = null,
|
||||||
|
headers: ?HeadersInit = null,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn constructor(_input: ?ResponseBody, _options: ?ResponseOptions, page: *Page) !Response {
|
||||||
|
const arena = page.arena;
|
||||||
|
|
||||||
|
const options: ResponseOptions = _options orelse .{};
|
||||||
|
|
||||||
|
const body = blk: {
|
||||||
|
if (_input) |input| {
|
||||||
|
switch (input) {
|
||||||
|
.string => |str| {
|
||||||
|
break :blk try arena.dupe(u8, str);
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
break :blk "";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const headers: Headers = if (options.headers) |hdrs| try Headers.constructor(hdrs, page) else .{};
|
||||||
|
const status_text = if (options.statusText) |st| try arena.dupe(u8, st) else "";
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.body = body,
|
||||||
|
.headers = headers,
|
||||||
|
.status = options.status,
|
||||||
|
.status_text = status_text,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_body(self: *const Response, page: *Page) !*ReadableStream {
|
||||||
|
const stream = try ReadableStream.constructor(null, null, page);
|
||||||
|
try stream.queue.append(page.arena, self.body);
|
||||||
|
return stream;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_bodyUsed(self: *const Response) bool {
|
||||||
|
return self.body_used;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_headers(self: *Response) *Headers {
|
||||||
|
return &self.headers;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_ok(self: *const Response) bool {
|
||||||
|
return self.status >= 200 and self.status <= 299;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_redirected(self: *const Response) bool {
|
||||||
|
return self.redirected;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_status(self: *const Response) u16 {
|
||||||
|
return self.status;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_statusText(self: *const Response) []const u8 {
|
||||||
|
return self.status_text;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_url(self: *const Response) []const u8 {
|
||||||
|
return self.url;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _clone(self: *const Response) !Response {
|
||||||
|
if (self.body_used) {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK to just return the same fields BECAUSE
|
||||||
|
// all of these fields are read-only and can't be modified.
|
||||||
|
return Response{
|
||||||
|
.body = self.body,
|
||||||
|
.body_used = self.body_used,
|
||||||
|
.mime = self.mime,
|
||||||
|
.headers = self.headers,
|
||||||
|
.redirected = self.redirected,
|
||||||
|
.status = self.status,
|
||||||
|
.url = self.url,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _bytes(self: *Response, page: *Page) !Env.Promise {
|
||||||
|
if (self.body_used) {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolver = Env.PromiseResolver{
|
||||||
|
.js_context = page.main_context,
|
||||||
|
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
|
||||||
|
};
|
||||||
|
|
||||||
|
try resolver.resolve(self.body);
|
||||||
|
self.body_used = true;
|
||||||
|
return resolver.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _json(self: *Response, page: *Page) !Env.Promise {
|
||||||
|
if (self.body_used) {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolver = Env.PromiseResolver{
|
||||||
|
.js_context = page.main_context,
|
||||||
|
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
|
||||||
|
};
|
||||||
|
|
||||||
|
const p = std.json.parseFromSliceLeaky(
|
||||||
|
std.json.Value,
|
||||||
|
page.call_arena,
|
||||||
|
self.body,
|
||||||
|
.{},
|
||||||
|
) catch |e| {
|
||||||
|
log.info(.browser, "invalid json", .{ .err = e, .source = "Response" });
|
||||||
|
return error.SyntaxError;
|
||||||
|
};
|
||||||
|
|
||||||
|
try resolver.resolve(p);
|
||||||
|
self.body_used = true;
|
||||||
|
return resolver.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _text(self: *Response, page: *Page) !Env.Promise {
|
||||||
|
if (self.body_used) {
|
||||||
|
return error.TypeError;
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolver = Env.PromiseResolver{
|
||||||
|
.js_context = page.main_context,
|
||||||
|
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
|
||||||
|
};
|
||||||
|
|
||||||
|
try resolver.resolve(self.body);
|
||||||
|
self.body_used = true;
|
||||||
|
return resolver.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "fetch: Response" {
|
||||||
|
try testing.htmlRunner("fetch/response.html");
|
||||||
|
}
|
||||||
212
src/browser/fetch/fetch.zig
Normal file
212
src/browser/fetch/fetch.zig
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const Http = @import("../../http/Http.zig");
|
||||||
|
const HttpClient = @import("../../http/Client.zig");
|
||||||
|
const Mime = @import("../mime.zig").Mime;
|
||||||
|
|
||||||
|
const Headers = @import("Headers.zig");
|
||||||
|
|
||||||
|
const RequestInput = @import("Request.zig").RequestInput;
|
||||||
|
const RequestInit = @import("Request.zig").RequestInit;
|
||||||
|
const Request = @import("Request.zig");
|
||||||
|
const Response = @import("Response.zig");
|
||||||
|
|
||||||
|
pub const Interfaces = .{
|
||||||
|
@import("Headers.zig"),
|
||||||
|
@import("Headers.zig").HeadersEntryIterable,
|
||||||
|
@import("Headers.zig").HeadersKeyIterable,
|
||||||
|
@import("Headers.zig").HeadersValueIterable,
|
||||||
|
@import("Request.zig"),
|
||||||
|
@import("Response.zig"),
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const FetchContext = struct {
|
||||||
|
arena: std.mem.Allocator,
|
||||||
|
js_ctx: *Env.JsContext,
|
||||||
|
promise_resolver: Env.PersistentPromiseResolver,
|
||||||
|
|
||||||
|
method: Http.Method,
|
||||||
|
url: []const u8,
|
||||||
|
body: std.ArrayListUnmanaged(u8) = .empty,
|
||||||
|
headers: std.ArrayListUnmanaged([]const u8) = .empty,
|
||||||
|
status: u16 = 0,
|
||||||
|
mime: ?Mime = null,
|
||||||
|
transfer: ?*HttpClient.Transfer = null,
|
||||||
|
|
||||||
|
/// This effectively takes ownership of the FetchContext.
|
||||||
|
///
|
||||||
|
/// We just return the underlying slices used for `headers`
|
||||||
|
/// and for `body` here to avoid an allocation.
|
||||||
|
pub fn toResponse(self: *const FetchContext) !Response {
|
||||||
|
var headers: Headers = .{};
|
||||||
|
|
||||||
|
// convert into Headers
|
||||||
|
for (self.headers.items) |hdr| {
|
||||||
|
var iter = std.mem.splitScalar(u8, hdr, ':');
|
||||||
|
const name = iter.next() orelse "";
|
||||||
|
const value = iter.next() orelse "";
|
||||||
|
try headers.append(name, value, self.arena);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response{
|
||||||
|
.status = self.status,
|
||||||
|
.headers = headers,
|
||||||
|
.mime = self.mime,
|
||||||
|
.body = self.body.items,
|
||||||
|
.url = self.url,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Window/fetch
|
||||||
|
pub fn fetch(input: RequestInput, options: ?RequestInit, page: *Page) !Env.Promise {
|
||||||
|
const arena = page.arena;
|
||||||
|
|
||||||
|
const req = try Request.constructor(input, options, page);
|
||||||
|
var headers = try Http.Headers.init();
|
||||||
|
|
||||||
|
// Copy our headers into the HTTP headers.
|
||||||
|
var header_iter = req.headers.headers.iterator();
|
||||||
|
while (header_iter.next()) |entry| {
|
||||||
|
const combined = try std.fmt.allocPrintSentinel(
|
||||||
|
page.arena,
|
||||||
|
"{s}: {s}",
|
||||||
|
.{ entry.key_ptr.*, entry.value_ptr.* },
|
||||||
|
0,
|
||||||
|
);
|
||||||
|
try headers.add(combined.ptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
try page.requestCookie(.{}).headersForRequest(arena, req.url, &headers);
|
||||||
|
|
||||||
|
const resolver = try page.main_context.createPersistentPromiseResolver();
|
||||||
|
|
||||||
|
const fetch_ctx = try arena.create(FetchContext);
|
||||||
|
fetch_ctx.* = .{
|
||||||
|
.arena = arena,
|
||||||
|
.js_ctx = page.main_context,
|
||||||
|
.promise_resolver = resolver,
|
||||||
|
.method = req.method,
|
||||||
|
.url = req.url,
|
||||||
|
};
|
||||||
|
|
||||||
|
try page.http_client.request(.{
|
||||||
|
.ctx = @ptrCast(fetch_ctx),
|
||||||
|
.url = req.url,
|
||||||
|
.method = req.method,
|
||||||
|
.headers = headers,
|
||||||
|
.body = req.body,
|
||||||
|
.cookie_jar = page.cookie_jar,
|
||||||
|
.resource_type = .fetch,
|
||||||
|
|
||||||
|
.start_callback = struct {
|
||||||
|
fn startCallback(transfer: *HttpClient.Transfer) !void {
|
||||||
|
const self: *FetchContext = @ptrCast(@alignCast(transfer.ctx));
|
||||||
|
log.debug(.fetch, "request start", .{ .method = self.method, .url = self.url, .source = "fetch" });
|
||||||
|
|
||||||
|
self.transfer = transfer;
|
||||||
|
}
|
||||||
|
}.startCallback,
|
||||||
|
.header_callback = struct {
|
||||||
|
fn headerCallback(transfer: *HttpClient.Transfer) !void {
|
||||||
|
const self: *FetchContext = @ptrCast(@alignCast(transfer.ctx));
|
||||||
|
|
||||||
|
const header = &transfer.response_header.?;
|
||||||
|
|
||||||
|
log.debug(.fetch, "request header", .{
|
||||||
|
.source = "fetch",
|
||||||
|
.method = self.method,
|
||||||
|
.url = self.url,
|
||||||
|
.status = header.status,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (header.contentType()) |ct| {
|
||||||
|
self.mime = Mime.parse(ct) catch {
|
||||||
|
return error.MimeParsing;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (transfer.getContentLength()) |cl| {
|
||||||
|
try self.body.ensureTotalCapacity(self.arena, cl);
|
||||||
|
}
|
||||||
|
|
||||||
|
var it = transfer.responseHeaderIterator();
|
||||||
|
while (it.next()) |hdr| {
|
||||||
|
const joined = try std.fmt.allocPrint(self.arena, "{s}: {s}", .{ hdr.name, hdr.value });
|
||||||
|
try self.headers.append(self.arena, joined);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.status = header.status;
|
||||||
|
}
|
||||||
|
}.headerCallback,
|
||||||
|
.data_callback = struct {
|
||||||
|
fn dataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||||
|
const self: *FetchContext = @ptrCast(@alignCast(transfer.ctx));
|
||||||
|
try self.body.appendSlice(self.arena, data);
|
||||||
|
}
|
||||||
|
}.dataCallback,
|
||||||
|
.done_callback = struct {
|
||||||
|
fn doneCallback(ctx: *anyopaque) !void {
|
||||||
|
const self: *FetchContext = @ptrCast(@alignCast(ctx));
|
||||||
|
self.transfer = null;
|
||||||
|
|
||||||
|
log.info(.fetch, "request complete", .{
|
||||||
|
.source = "fetch",
|
||||||
|
.method = self.method,
|
||||||
|
.url = self.url,
|
||||||
|
.status = self.status,
|
||||||
|
});
|
||||||
|
|
||||||
|
const response = try self.toResponse();
|
||||||
|
try self.promise_resolver.resolve(response);
|
||||||
|
}
|
||||||
|
}.doneCallback,
|
||||||
|
.error_callback = struct {
|
||||||
|
fn errorCallback(ctx: *anyopaque, err: anyerror) void {
|
||||||
|
const self: *FetchContext = @ptrCast(@alignCast(ctx));
|
||||||
|
self.transfer = null;
|
||||||
|
|
||||||
|
log.err(.fetch, "error", .{
|
||||||
|
.url = self.url,
|
||||||
|
.err = err,
|
||||||
|
.source = "fetch error",
|
||||||
|
});
|
||||||
|
|
||||||
|
// We throw an Abort error when the page is getting closed so,
|
||||||
|
// in this case, we don't need to reject the promise.
|
||||||
|
if (err != error.Abort) {
|
||||||
|
self.promise_resolver.reject(@errorName(err)) catch unreachable;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}.errorCallback,
|
||||||
|
});
|
||||||
|
|
||||||
|
return resolver.promise();
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "fetch: fetch" {
|
||||||
|
try testing.htmlRunner("fetch/fetch.html");
|
||||||
|
}
|
||||||
143
src/browser/html/AbortController.zig
Normal file
143
src/browser/html/AbortController.zig
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||||
|
|
||||||
|
pub const Interfaces = .{
|
||||||
|
AbortController,
|
||||||
|
AbortSignal,
|
||||||
|
};
|
||||||
|
|
||||||
|
const AbortController = @This();
|
||||||
|
|
||||||
|
signal: *AbortSignal,
|
||||||
|
|
||||||
|
pub fn constructor(page: *Page) !AbortController {
|
||||||
|
// Why do we allocate this rather than storing directly in the struct?
|
||||||
|
// https://github.com/lightpanda-io/project/discussions/165
|
||||||
|
const signal = try page.arena.create(AbortSignal);
|
||||||
|
signal.* = .init;
|
||||||
|
|
||||||
|
return .{
|
||||||
|
.signal = signal,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_signal(self: *AbortController) *AbortSignal {
|
||||||
|
return self.signal;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _abort(self: *AbortController, reason_: ?[]const u8) !void {
|
||||||
|
return self.signal.abort(reason_);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const AbortSignal = struct {
|
||||||
|
const DEFAULT_REASON = "AbortError";
|
||||||
|
|
||||||
|
pub const prototype = *EventTarget;
|
||||||
|
proto: parser.EventTargetTBase = .{ .internal_target_type = .abort_signal },
|
||||||
|
|
||||||
|
aborted: bool,
|
||||||
|
reason: ?[]const u8,
|
||||||
|
|
||||||
|
pub const init: AbortSignal = .{
|
||||||
|
.reason = null,
|
||||||
|
.aborted = false,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn static_abort(reason_: ?[]const u8) AbortSignal {
|
||||||
|
return .{
|
||||||
|
.aborted = true,
|
||||||
|
.reason = reason_ orelse DEFAULT_REASON,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn static_timeout(delay: u32, page: *Page) !*AbortSignal {
|
||||||
|
const callback = try page.arena.create(TimeoutCallback);
|
||||||
|
callback.* = .{
|
||||||
|
.signal = .init,
|
||||||
|
};
|
||||||
|
|
||||||
|
try page.scheduler.add(callback, TimeoutCallback.run, delay, .{ .name = "abort_signal" });
|
||||||
|
return &callback.signal;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_aborted(self: *const AbortSignal) bool {
|
||||||
|
return self.aborted;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn abort(self: *AbortSignal, reason_: ?[]const u8) !void {
|
||||||
|
self.aborted = true;
|
||||||
|
self.reason = reason_ orelse DEFAULT_REASON;
|
||||||
|
|
||||||
|
const abort_event = try parser.eventCreate();
|
||||||
|
parser.eventSetInternalType(abort_event, .abort_signal);
|
||||||
|
|
||||||
|
defer parser.eventDestroy(abort_event);
|
||||||
|
try parser.eventInit(abort_event, "abort", .{});
|
||||||
|
_ = try parser.eventTargetDispatchEvent(
|
||||||
|
parser.toEventTarget(AbortSignal, self),
|
||||||
|
abort_event,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const Reason = union(enum) {
|
||||||
|
reason: []const u8,
|
||||||
|
undefined: void,
|
||||||
|
};
|
||||||
|
pub fn get_reason(self: *const AbortSignal) Reason {
|
||||||
|
if (self.reason) |r| {
|
||||||
|
return .{ .reason = r };
|
||||||
|
}
|
||||||
|
return .{ .undefined = {} };
|
||||||
|
}
|
||||||
|
|
||||||
|
const ThrowIfAborted = union(enum) {
|
||||||
|
exception: Env.Exception,
|
||||||
|
undefined: void,
|
||||||
|
};
|
||||||
|
pub fn _throwIfAborted(self: *const AbortSignal, page: *Page) ThrowIfAborted {
|
||||||
|
if (self.aborted) {
|
||||||
|
const ex = page.main_context.throw(self.reason orelse DEFAULT_REASON);
|
||||||
|
return .{ .exception = ex };
|
||||||
|
}
|
||||||
|
return .{ .undefined = {} };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const TimeoutCallback = struct {
|
||||||
|
signal: AbortSignal,
|
||||||
|
|
||||||
|
fn run(ctx: *anyopaque) ?u32 {
|
||||||
|
const self: *TimeoutCallback = @ptrCast(@alignCast(ctx));
|
||||||
|
self.signal.abort("TimeoutError") catch |err| {
|
||||||
|
log.warn(.app, "abort signal timeout", .{ .err = err });
|
||||||
|
};
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: HTML.AbortController" {
|
||||||
|
try testing.htmlRunner("html/abort_controller.html");
|
||||||
|
}
|
||||||
81
src/browser/html/DataSet.zig
Normal file
81
src/browser/html/DataSet.zig
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
const std = @import("std");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Env = @import("../env.zig").Env;
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
|
||||||
|
const DataSet = @This();
|
||||||
|
|
||||||
|
element: *parser.Element,
|
||||||
|
|
||||||
|
pub fn named_get(self: *const DataSet, name: []const u8, _: *bool, page: *Page) !Env.UndefinedOr([]const u8) {
|
||||||
|
const normalized_name = try normalize(page.call_arena, name);
|
||||||
|
if (try parser.elementGetAttribute(self.element, normalized_name)) |value| {
|
||||||
|
return .{ .value = value };
|
||||||
|
}
|
||||||
|
return .undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn named_set(self: *DataSet, name: []const u8, value: []const u8, _: *bool, page: *Page) !void {
|
||||||
|
const normalized_name = try normalize(page.call_arena, name);
|
||||||
|
try parser.elementSetAttribute(self.element, normalized_name, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn named_delete(self: *DataSet, name: []const u8, _: *bool, page: *Page) !void {
|
||||||
|
const normalized_name = try normalize(page.call_arena, name);
|
||||||
|
try parser.elementRemoveAttribute(self.element, normalized_name);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn normalize(allocator: Allocator, name: []const u8) ![]const u8 {
|
||||||
|
var upper_count: usize = 0;
|
||||||
|
for (name) |c| {
|
||||||
|
if (std.ascii.isUpper(c)) {
|
||||||
|
upper_count += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// for every upper-case letter, we'll probably need a dash before it
|
||||||
|
// and we need the 'data-' prefix
|
||||||
|
var normalized = try allocator.alloc(u8, name.len + upper_count + 5);
|
||||||
|
|
||||||
|
@memcpy(normalized[0..5], "data-");
|
||||||
|
if (upper_count == 0) {
|
||||||
|
@memcpy(normalized[5..], name);
|
||||||
|
return normalized;
|
||||||
|
}
|
||||||
|
|
||||||
|
var pos: usize = 5;
|
||||||
|
for (name) |c| {
|
||||||
|
if (std.ascii.isUpper(c)) {
|
||||||
|
normalized[pos] = '-';
|
||||||
|
pos += 1;
|
||||||
|
normalized[pos] = c + 32;
|
||||||
|
} else {
|
||||||
|
normalized[pos] = c;
|
||||||
|
}
|
||||||
|
pos += 1;
|
||||||
|
}
|
||||||
|
return normalized;
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");
|
||||||
|
test "Browser: HTML.DataSet" {
|
||||||
|
try testing.htmlRunner("html/dataset.html");
|
||||||
|
}
|
||||||
320
src/browser/html/document.zig
Normal file
320
src/browser/html/document.zig
Normal file
@@ -0,0 +1,320 @@
|
|||||||
|
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||||
|
//
|
||||||
|
// Francis Bouvier <francis@lightpanda.io>
|
||||||
|
// Pierre Tachoire <pierre@lightpanda.io>
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Affero General Public License as
|
||||||
|
// published by the Free Software Foundation, either version 3 of the
|
||||||
|
// License, or (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Affero General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Affero General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
const log = @import("../../log.zig");
|
||||||
|
const parser = @import("../netsurf.zig");
|
||||||
|
const Page = @import("../page.zig").Page;
|
||||||
|
|
||||||
|
const Window = @import("window.zig").Window;
|
||||||
|
const Element = @import("../dom/element.zig").Element;
|
||||||
|
const ElementUnion = @import("../dom/element.zig").Union;
|
||||||
|
const Document = @import("../dom/document.zig").Document;
|
||||||
|
const NodeList = @import("../dom/nodelist.zig").NodeList;
|
||||||
|
const Location = @import("location.zig").Location;
|
||||||
|
|
||||||
|
const collection = @import("../dom/html_collection.zig");
|
||||||
|
const Walker = @import("../dom/walker.zig").WalkerDepthFirst;
|
||||||
|
const Cookie = @import("../storage/cookie.zig").Cookie;
|
||||||
|
|
||||||
|
// WEB IDL https://html.spec.whatwg.org/#the-document-object
|
||||||
|
pub const HTMLDocument = struct {
|
||||||
|
pub const Self = parser.DocumentHTML;
|
||||||
|
pub const prototype = *Document;
|
||||||
|
pub const subtype = .node;
|
||||||
|
|
||||||
|
// JS funcs
|
||||||
|
// --------
|
||||||
|
|
||||||
|
pub fn get_domain(self: *parser.DocumentHTML, page: *Page) ![]const u8 {
|
||||||
|
// libdom's document_html get_domain always returns null, this is
|
||||||
|
// the way MDN recommends getting the domain anyways, since document.domain
|
||||||
|
// is deprecated.
|
||||||
|
const location = try parser.documentHTMLGetLocation(Location, self) orelse return "";
|
||||||
|
return location.get_host(page);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_domain(_: *parser.DocumentHTML, _: []const u8) ![]const u8 {
|
||||||
|
return error.NotImplemented;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_referrer(self: *parser.DocumentHTML) ![]const u8 {
|
||||||
|
return try parser.documentHTMLGetReferrer(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_referrer(_: *parser.DocumentHTML, _: []const u8) ![]const u8 {
|
||||||
|
return error.NotImplemented;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_body(self: *parser.DocumentHTML) !?*parser.Body {
|
||||||
|
return try parser.documentHTMLBody(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_body(self: *parser.DocumentHTML, elt: ?*parser.ElementHTML) !?*parser.Body {
|
||||||
|
try parser.documentHTMLSetBody(self, elt);
|
||||||
|
return try get_body(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_head(self: *parser.DocumentHTML) !?*parser.Head {
|
||||||
|
const root = parser.documentHTMLToNode(self);
|
||||||
|
const walker = Walker{};
|
||||||
|
var next: ?*parser.Node = null;
|
||||||
|
while (true) {
|
||||||
|
next = try walker.get_next(root, next) orelse return null;
|
||||||
|
if (std.ascii.eqlIgnoreCase("head", try parser.nodeName(next.?))) {
|
||||||
|
return @as(*parser.Head, @ptrCast(next.?));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_cookie(_: *parser.DocumentHTML, page: *Page) ![]const u8 {
|
||||||
|
var buf: std.ArrayListUnmanaged(u8) = .{};
|
||||||
|
try page.cookie_jar.forRequest(&page.url.uri, buf.writer(page.arena), .{
|
||||||
|
.is_http = false,
|
||||||
|
.is_navigation = true,
|
||||||
|
});
|
||||||
|
return buf.items;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_cookie(_: *parser.DocumentHTML, cookie_str: []const u8, page: *Page) ![]const u8 {
|
||||||
|
// we use the cookie jar's allocator to parse the cookie because it
|
||||||
|
// outlives the page's arena.
|
||||||
|
const c = try Cookie.parse(page.cookie_jar.allocator, &page.url.uri, cookie_str);
|
||||||
|
errdefer c.deinit();
|
||||||
|
if (c.http_only) {
|
||||||
|
c.deinit();
|
||||||
|
return ""; // HttpOnly cookies cannot be set from JS
|
||||||
|
}
|
||||||
|
try page.cookie_jar.add(c, std.time.timestamp());
|
||||||
|
return cookie_str;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_title(self: *parser.DocumentHTML) ![]const u8 {
|
||||||
|
return try parser.documentHTMLGetTitle(self);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_title(self: *parser.DocumentHTML, v: []const u8) ![]const u8 {
|
||||||
|
try parser.documentHTMLSetTitle(self, v);
|
||||||
|
return v;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Implements `document.getElementsByName(name)`: collects every node in
/// the document whose `name` attribute equals `name`, in document order.
/// An empty `name` yields an empty list. Results are allocated from the
/// page arena.
pub fn _getElementsByName(self: *parser.DocumentHTML, name: []const u8, page: *Page) !NodeList {
    var result: NodeList = .{};
    if (name.len == 0) {
        return result;
    }

    const arena = page.arena;
    const root = parser.documentHTMLToNode(self);
    var matches = try collection.HTMLCollectionByName(arena, root, name, .{
        .include_root = false,
    });

    const count = try matches.get_length();
    var idx: u32 = 0;
    while (idx < count) : (idx += 1) {
        // item() returning null means the collection ran out early; stop.
        const node = try matches.item(idx) orelse break;
        try result.append(arena, node);
    }

    return result;
}
|
||||||
|
|
||||||
|
/// Getter for `document.images`: a live collection of all <img> elements.
pub fn get_images(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
    const root = parser.documentHTMLToNode(self);
    return collection.HTMLCollectionByTagName(page.arena, root, "img", .{ .include_root = false });
}
|
||||||
|
|
||||||
|
/// Getter for `document.embeds`: a live collection of all <embed> elements.
pub fn get_embeds(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
    const root = parser.documentHTMLToNode(self);
    return collection.HTMLCollectionByTagName(page.arena, root, "embed", .{ .include_root = false });
}
|
||||||
|
|
||||||
|
/// Getter for `document.plugins`. Per the HTML spec this returns the same
/// collection as `document.embeds`, so it simply delegates.
pub fn get_plugins(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
    return try get_embeds(self, page);
}
|
||||||
|
|
||||||
|
/// Getter for `document.forms`: a live collection of all <form> elements.
pub fn get_forms(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
    const root = parser.documentHTMLToNode(self);
    return collection.HTMLCollectionByTagName(page.arena, root, "form", .{ .include_root = false });
}
|
||||||
|
|
||||||
|
/// Getter for `document.scripts`: a live collection of all <script> elements.
pub fn get_scripts(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
    const root = parser.documentHTMLToNode(self);
    return collection.HTMLCollectionByTagName(page.arena, root, "script", .{ .include_root = false });
}
|
||||||
|
|
||||||
|
/// Getter for `document.applets`. Applets are obsolete, so the spec
/// mandates an always-empty collection.
pub fn get_applets(_: *parser.DocumentHTML) !collection.HTMLCollection {
    return collection.HTMLCollectionEmpty();
}
|
||||||
|
|
||||||
|
/// Getter for `document.links`: a live collection of the document's links.
pub fn get_links(self: *parser.DocumentHTML) !collection.HTMLCollection {
    const root = parser.documentHTMLToNode(self);
    return collection.HTMLCollectionByLinks(root, .{ .include_root = false });
}
|
||||||
|
|
||||||
|
/// Getter for `document.anchors`: a live collection of the document's anchors.
pub fn get_anchors(self: *parser.DocumentHTML) !collection.HTMLCollection {
    const root = parser.documentHTMLToNode(self);
    return collection.HTMLCollectionByAnchors(root, .{ .include_root = false });
}
|
||||||
|
|
||||||
|
/// Getter for the legacy `document.all` collection, rooted at this document.
pub fn get_all(self: *parser.DocumentHTML) collection.HTMLAllCollection {
    const root = parser.documentHTMLToNode(self);
    return collection.HTMLAllCollection.init(root);
}
|
||||||
|
|
||||||
|
/// Getter for `document.currentScript`; null when no script is executing.
pub fn get_currentScript(self: *parser.DocumentHTML) !?*parser.Script {
    return parser.documentHTMLGetCurrentScript(self);
}
|
||||||
|
|
||||||
|
/// Getter for `document.location`, delegated to the parser document.
pub fn get_location(self: *parser.DocumentHTML) !?*Location {
    return parser.documentHTMLGetLocation(Location, self);
}
|
||||||
|
|
||||||
|
/// Setter for `document.location`: triggers a script-initiated navigation
/// to `url` via the page's web-API navigation path.
pub fn set_location(_: *const parser.DocumentHTML, url: []const u8, page: *Page) !void {
    try page.navigateFromWebAPI(url, .{ .reason = .script });
}
|
||||||
|
|
||||||
|
/// `document.designMode` is not supported: the getter always reports "off".
pub fn get_designMode(_: *parser.DocumentHTML) []const u8 {
    return "off";
}

/// Setting `document.designMode` is a no-op; the reported value stays "off".
pub fn set_designMode(_: *parser.DocumentHTML, _: []const u8) []const u8 {
    return "off";
}
|
||||||
|
|
||||||
|
/// Getter for `document.defaultView`: the Window associated with this page.
pub fn get_defaultView(_: *parser.DocumentHTML, page: *Page) *Window {
    return &page.window;
}
|
||||||
|
|
||||||
|
/// Getter for `document.readyState`: the tag name of the per-node state's
/// `ready_state` field (e.g. "interactive", "complete").
pub fn get_readyState(self: *parser.DocumentHTML, page: *Page) ![]const u8 {
    const node_state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
    return @tagName(node_state.ready_state);
}
|
||||||
|
|
||||||
|
// noop legacy functions
// https://html.spec.whatwg.org/#Document-partial
// These exist only so scripts calling them don't throw; they do nothing.
pub fn _clear(_: *parser.DocumentHTML) void {}
pub fn _captureEvents(_: *parser.DocumentHTML) void {}
pub fn _releaseEvents(_: *parser.DocumentHTML) void {}
|
||||||
|
|
||||||
|
// Obsolete document color properties (fgColor, linkColor, vlinkColor,
// alinkColor, bgColor). Getters always return an empty string and the
// setters ignore the assigned value, echoing "" back.
pub fn get_fgColor(_: *parser.DocumentHTML) []const u8 {
    return "";
}
pub fn set_fgColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
    return "";
}
pub fn get_linkColor(_: *parser.DocumentHTML) []const u8 {
    return "";
}
pub fn set_linkColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
    return "";
}
pub fn get_vlinkColor(_: *parser.DocumentHTML) []const u8 {
    return "";
}
pub fn set_vlinkColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
    return "";
}
pub fn get_alinkColor(_: *parser.DocumentHTML) []const u8 {
    return "";
}
pub fn set_alinkColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
    return "";
}
pub fn get_bgColor(_: *parser.DocumentHTML) []const u8 {
    return "";
}
pub fn set_bgColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
    return "";
}
|
||||||
|
|
||||||
|
// Returns the topmost Element at the specified coordinates (relative to the viewport).
// Since LightPanda requires the client to know what they are clicking on we do not return the underlying element at this moment.
// Null can currently only happen if the first pixel is clicked without having rendered any element; this will change when css properties are supported.
// This returns an ElementUnion instead of a *parser.Element in case the element somehow hasn't passed through the js runtime yet.
// While x and y should be f32, here we take i32 since that's what our
// "renderer" uses. By specifying i32 here, rather than f32 and doing the
// conversion ourself, we rely on v8's type conversion which is both more
// flexible (e.g. handles NaN) and will be more consistent with a browser.
pub fn _elementFromPoint(_: *parser.DocumentHTML, x: i32, y: i32, page: *Page) !?ElementUnion {
    // TODO if pointer-events set to none the underlying element should be returned (parser.documentGetDocumentElement(self.document);?)
    const hit = page.renderer.getElementAtPosition(x, y) orelse return null;
    return try Element.toInterface(hit);
}
|
||||||
|
|
||||||
|
// Returns an array of all elements at the specified coordinates (relative to the viewport). The elements are ordered from the topmost to the bottommost box of the viewport.
// While x and y should be f32, here we take i32 since that's what our
// "renderer" uses. By specifying i32 here, rather than f32 and doing the
// conversion ourself, we rely on v8's type conversion which is both more
// flexible (e.g. handles NaN) and will be more consistent with a browser.
pub fn _elementsFromPoint(_: *parser.DocumentHTML, x: i32, y: i32, page: *Page) ![]ElementUnion {
    // TODO if pointer-events set to none the underlying element should be returned (parser.documentGetDocumentElement(self.document);?)
    const hit = page.renderer.getElementAtPosition(x, y) orelse return &.{};

    // At most three entries: the hit element, <body>, and <html>.
    var results: std.ArrayListUnmanaged(ElementUnion) = .empty;
    try results.ensureTotalCapacity(page.call_arena, 3);
    results.appendAssumeCapacity(try Element.toInterface(hit));

    // Since we are using a flat renderer there is no hierarchy of elements. What we do know is that the element is part of the main document.
    // Thus we can add the HtmlHtmlElement and it's child HTMLBodyElement to the returned list.
    // TBD Should we instead return every parent that is an element? Note that a child does not physically need to be overlapping the parent.
    // Should we do a render pass on demand?
    const html_elem = try parser.documentGetDocumentElement(parser.documentHTMLToDocument(page.window.document)) orelse {
        return results.items;
    };
    if (try parser.documentHTMLBody(page.window.document)) |body| {
        results.appendAssumeCapacity(try Element.toInterface(parser.bodyToElement(body)));
    }
    results.appendAssumeCapacity(try Element.toInterface(html_elem));
    return results.items;
}
|
||||||
|
|
||||||
|
/// Called once the document has finished parsing: advances readyState to
/// `interactive` and fires DOMContentLoaded on both the document and the
/// window's document target.
pub fn documentIsLoaded(self: *parser.DocumentHTML, page: *Page) !void {
    const node_state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
    node_state.ready_state = .interactive;

    log.debug(.script_event, "dispatch event", .{
        .type = "DOMContentLoaded",
        .source = "document",
    });

    // The event object only needs to live for the two dispatches below.
    const evt = try parser.eventCreate();
    defer parser.eventDestroy(evt);

    try parser.eventInit(evt, "DOMContentLoaded", .{ .bubbles = true, .cancelable = true });
    _ = try parser.eventTargetDispatchEvent(parser.toEventTarget(parser.DocumentHTML, self), evt);
    try page.window.dispatchForDocumentTarget(evt);
}
|
||||||
|
|
||||||
|
/// Called when loading fully finishes: advances readyState to `complete`.
pub fn documentIsComplete(self: *parser.DocumentHTML, page: *Page) !void {
    const node_state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
    node_state.ready_state = .complete;
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const testing = @import("../../testing.zig");

// Runs the HTML-driven regression suite for this document implementation.
test "Browser: HTML.Document" {
    try testing.htmlRunner("html/document.html");
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user