mirror of https://github.com/lightpanda-io/browser.git (synced 2025-10-29 15:13:28 +00:00)

Compare commits: 1 commit, speedup_te...selenium-c

| Author | SHA1 | Date |
|---|---|---|
| | ea19f7e348 | |

38  .github/actions/install/action.yml (vendored)
@@ -5,7 +5,7 @@ inputs:
zig:
description: 'Zig version to install'
required: false
default: '0.15.1'
default: '0.13.0'
arch:
description: 'CPU arch used to select the v8 lib'
required: false
@@ -17,11 +17,11 @@ inputs:
zig-v8:
description: 'zig v8 version to install'
required: false
default: 'v0.1.30'
default: 'v0.1.11'
v8:
description: 'v8 version to install'
required: false
default: '14.0.365.4'
default: '11.1.134'
cache-dir:
description: 'cache dir to use'
required: false
@@ -34,11 +34,9 @@ runs:
- name: Install apt deps
if: ${{ inputs.os == 'linux' }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y wget xz-utils python3 ca-certificates git pkg-config libglib2.0-dev gperf libexpat1-dev cmake clang
run: sudo apt-get install -y wget xz-utils python3 ca-certificates git pkg-config libglib2.0-dev gperf libexpat1-dev cmake clang

- uses: mlugg/setup-zig@v2
- uses: mlugg/setup-zig@v1
with:
version: ${{ inputs.zig }}

@@ -61,29 +59,15 @@ runs:
- name: install v8
shell: bash
run: |
mkdir -p v8/out/${{ inputs.os }}/debug/obj/zig/
ln -s ${{ inputs.cache-dir }}/v8/libc_v8.a v8/out/${{ inputs.os }}/debug/obj/zig/libc_v8.a
mkdir -p vendor/zig-js-runtime/vendor/v8/${{inputs.arch}}-${{inputs.os}}/debug
ln -s ${{ inputs.cache-dir }}/v8/libc_v8.a vendor/zig-js-runtime/vendor/v8/${{inputs.arch}}-${{inputs.os}}/debug/libc_v8.a

mkdir -p v8/out/${{ inputs.os }}/release/obj/zig/
ln -s ${{ inputs.cache-dir }}/v8/libc_v8.a v8/out/${{ inputs.os }}/release/obj/zig/libc_v8.a
mkdir -p vendor/zig-js-runtime/vendor/v8/${{inputs.arch}}-${{inputs.os}}/release
ln -s ${{ inputs.cache-dir }}/v8/libc_v8.a vendor/zig-js-runtime/vendor/v8/${{inputs.arch}}-${{inputs.os}}/release/libc_v8.a

- name: Cache libiconv
id: cache-libiconv
uses: actions/cache@v4
env:
cache-name: cache-libiconv
with:
path: ${{ inputs.cache-dir }}/libiconv
key: vendor/libiconv/libiconv-1.17

- name: download libiconv
if: ${{ steps.cache-libiconv.outputs.cache-hit != 'true' }}
- name: libiconv
shell: bash
run: make download-libiconv

- name: build libiconv
shell: bash
run: make build-libiconv
run: make install-libiconv

- name: build mimalloc
shell: bash
110  .github/workflows/build.yml (vendored)
@@ -1,11 +1,5 @@
|
||||
name: nightly build
|
||||
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ vars.NIGHTLY_BUILD_AWS_ACCESS_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.NIGHTLY_BUILD_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.NIGHTLY_BUILD_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.NIGHTLY_BUILD_AWS_REGION }}
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "2 2 * * *"
|
||||
@@ -23,7 +17,6 @@ jobs:
|
||||
OS: linux
|
||||
|
||||
runs-on: ubuntu-22.04
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -33,59 +26,13 @@ jobs:
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
os: ${{env.OS}}
|
||||
arch: ${{env.ARCH}}
|
||||
|
||||
- name: zig build
|
||||
run: zig build --release=safe -Doptimize=ReleaseSafe -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build --release=safe -Doptimize=ReleaseSafe -Dengine=v8 -Dcpu=x86_64
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: upload on s3
|
||||
run: |
|
||||
export DIR=`git show --no-patch --no-notes --pretty='%cs_%h'`
|
||||
aws s3 cp --storage-class=GLACIER_IR lightpanda-${{ env.ARCH }}-${{ env.OS }} s3://lpd-nightly-build/${DIR}/lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: Upload the build
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
allowUpdates: true
|
||||
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
tag: nightly
|
||||
|
||||
build-linux-aarch64:
|
||||
env:
|
||||
ARCH: aarch64
|
||||
OS: linux
|
||||
|
||||
runs-on: ubuntu-22.04-arm
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
os: ${{env.OS}}
|
||||
arch: ${{env.ARCH}}
|
||||
|
||||
- name: zig build
|
||||
run: zig build --release=safe -Doptimize=ReleaseSafe -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: upload on s3
|
||||
run: |
|
||||
export DIR=`git show --no-patch --no-notes --pretty='%cs_%h'`
|
||||
aws s3 cp --storage-class=GLACIER_IR lightpanda-${{ env.ARCH }}-${{ env.OS }} s3://lpd-nightly-build/${DIR}/lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: Upload the build
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
@@ -98,10 +45,7 @@ jobs:
|
||||
ARCH: aarch64
|
||||
OS: macos
|
||||
|
||||
# macos-14 runs on arm CPU. see
|
||||
# https://github.com/actions/runner-images?tab=readme-ov-file
|
||||
runs-on: macos-14
|
||||
timeout-minutes: 15
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -116,59 +60,11 @@ jobs:
|
||||
arch: ${{env.ARCH}}
|
||||
|
||||
- name: zig build
|
||||
run: zig build --release=safe -Doptimize=ReleaseSafe -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build --release=safe -Doptimize=ReleaseSafe -Dengine=v8
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: upload on s3
|
||||
run: |
|
||||
export DIR=`git show --no-patch --no-notes --pretty='%cs_%h'`
|
||||
aws s3 cp --storage-class=GLACIER_IR lightpanda-${{ env.ARCH }}-${{ env.OS }} s3://lpd-nightly-build/${DIR}/lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: Upload the build
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
allowUpdates: true
|
||||
artifacts: lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
tag: nightly
|
||||
|
||||
build-macos-x86_64:
|
||||
env:
|
||||
ARCH: x86_64
|
||||
OS: macos
|
||||
|
||||
# macos-13 runs on x86 CPU. see
|
||||
# https://github.com/actions/runner-images?tab=readme-ov-file
|
||||
# If we want to build for macos-14 or superior, we need to switch to
|
||||
# macos-14-large.
|
||||
# No need for now, but maybe we will need it in the short term.
|
||||
runs-on: macos-13
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
os: ${{env.OS}}
|
||||
arch: ${{env.ARCH}}
|
||||
|
||||
- name: zig build
|
||||
run: zig build --release=safe -Doptimize=ReleaseSafe -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: upload on s3
|
||||
run: |
|
||||
export DIR=`git show --no-patch --no-notes --pretty='%cs_%h'`
|
||||
aws s3 cp --storage-class=GLACIER_IR lightpanda-${{ env.ARCH }}-${{ env.OS }} s3://lpd-nightly-build/${DIR}/lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: Upload the build
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
|
||||
4  .github/workflows/cla.yml (vendored)
@@ -14,8 +14,6 @@ permissions:
jobs:
CLAAssistant:
runs-on: ubuntu-latest
timeout-minutes: 5

steps:
- name: "CLA Assistant"
if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
@@ -28,7 +26,7 @@ jobs:
path-to-document: 'https://github.com/lightpanda-io/browser/blob/main/CLA.md'
# branch should not be protected
branch: 'main'
allowlist: krichprollsch,francisbouvier,katie-lpd,sjorsdonkers,bornlex
allowlist: krichprollsch,francisbouvier,katie-lpd

remote-organization-name: lightpanda-io
remote-repository-name: cla
232  .github/workflows/e2e-test.yml (vendored)
@@ -1,232 +0,0 @@
|
||||
name: e2e-test
|
||||
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ vars.LPD_PERF_AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.LPD_PERF_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.LPD_PERF_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.LPD_PERF_AWS_REGION }}
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "vendor/zig-js-runtime"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
pull_request:
|
||||
|
||||
# By default GH trigger on types opened, synchronize and reopened.
|
||||
# see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
|
||||
# Since we skip the job when the PR is in draft state, we want to force CI
|
||||
# running when the PR is marked ready_for_review w/o other change.
|
||||
# see https://github.com/orgs/community/discussions/25722#discussioncomment-3248917
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
|
||||
paths:
|
||||
- ".github/**"
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "vendor/**"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
zig-build-release:
|
||||
name: zig build release
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
# Don't run the CI with draft PR.
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
- name: zig build release
|
||||
run: zig build -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
path: |
|
||||
zig-out/bin/lightpanda
|
||||
retention-days: 1
|
||||
|
||||
demo-scripts:
|
||||
name: demo-scripts
|
||||
needs: zig-build-release
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
- name: run end to end tests
|
||||
run: |
|
||||
./lightpanda serve & echo $! > LPD.pid
|
||||
go run runner/main.go
|
||||
kill `cat LPD.pid`
|
||||
|
||||
- name: build proxy
|
||||
run: |
|
||||
cd proxy
|
||||
go build
|
||||
|
||||
- name: run end to end tests through proxy
|
||||
run: |
|
||||
./proxy/proxy & echo $! > PROXY.id
|
||||
./lightpanda serve --http_proxy 'http://127.0.0.1:3000' & echo $! > LPD.pid
|
||||
go run runner/main.go
|
||||
kill `cat LPD.pid` `cat PROXY.id`
|
||||
|
||||
- name: run request interception through proxy
|
||||
run: |
|
||||
export PROXY_USERNAME=username PROXY_PASSWORD=password
|
||||
./proxy/proxy & echo $! > PROXY.id
|
||||
./lightpanda serve & echo $! > LPD.pid
|
||||
URL=https://demo-browser.lightpanda.io/campfire-commerce/ node puppeteer/proxy_auth.js
|
||||
BASE_URL=https://demo-browser.lightpanda.io/ node playwright/proxy_auth.js
|
||||
kill `cat LPD.pid` `cat PROXY.id`
|
||||
|
||||
cdp-and-hyperfine-bench:
|
||||
name: cdp-and-hyperfine-bench
|
||||
needs: zig-build-release
|
||||
|
||||
env:
|
||||
MAX_MEMORY: 27000
|
||||
MAX_AVG_DURATION: 23
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
# use a self host runner.
|
||||
runs-on: lpd-bench-hetzner
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
- name: start http
|
||||
run: |
|
||||
go run ws/main.go & echo $! > WS.pid
|
||||
sleep 2
|
||||
|
||||
- name: run puppeteer
|
||||
run: |
|
||||
./lightpanda serve & echo $! > LPD.pid
|
||||
sleep 2
|
||||
RUNS=100 npm run bench-puppeteer-cdp > puppeteer.out || exit 1
|
||||
cat /proc/`cat LPD.pid`/status |grep VmHWM|grep -oP '\d+' > LPD.VmHWM
|
||||
kill `cat LPD.pid`
|
||||
|
||||
- name: puppeteer result
|
||||
run: cat puppeteer.out
|
||||
|
||||
- name: memory regression
|
||||
run: |
|
||||
export LPD_VmHWM=`cat LPD.VmHWM`
|
||||
echo "Peak resident set size: $LPD_VmHWM"
|
||||
test "$LPD_VmHWM" -le "$MAX_MEMORY"
|
||||
|
||||
- name: duration regression
|
||||
run: |
|
||||
export PUPPETEER_AVG_DURATION=`cat puppeteer.out|grep 'avg run'|sed 's/avg run duration (ms) //'`
|
||||
echo "puppeteer avg duration: $PUPPETEER_AVG_DURATION"
|
||||
test "$PUPPETEER_AVG_DURATION" -le "$MAX_AVG_DURATION"
|
||||
|
||||
- name: json output
|
||||
run: |
|
||||
export AVG_DURATION=`cat puppeteer.out|grep 'avg run'|sed 's/avg run duration (ms) //'`
|
||||
export TOTAL_DURATION=`cat puppeteer.out|grep 'total duration'|sed 's/total duration (ms) //'`
|
||||
export LPD_VmHWM=`cat LPD.VmHWM`
|
||||
echo "{\"duration_total\":${TOTAL_DURATION},\"duration_avg\":${AVG_DURATION},\"mem_peak\":${LPD_VmHWM}}" > bench.json
|
||||
cat bench.json
|
||||
|
||||
- name: run hyperfine
|
||||
run: |
|
||||
hyperfine --export-json=hyperfine.json --warmup 3 --runs 20 --shell=none "./lightpanda --dump http://127.0.0.1:1234/campfire-commerce/"
|
||||
|
||||
- name: stop http
|
||||
run: kill `cat WS.pid`
|
||||
|
||||
- name: write commit
|
||||
run: |
|
||||
echo "${{github.sha}}" > commit.txt
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bench-results
|
||||
path: |
|
||||
bench.json
|
||||
hyperfine.json
|
||||
commit.txt
|
||||
retention-days: 10
|
||||
|
||||
|
||||
perf-fmt:
|
||||
name: perf-fmt
|
||||
needs: cdp-and-hyperfine-bench
|
||||
|
||||
# Don't execute on PR
|
||||
if: github.event_name != 'pull_request'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
container:
|
||||
image: ghcr.io/lightpanda-io/perf-fmt:latest
|
||||
credentials:
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bench-results
|
||||
|
||||
- name: format and send json result
|
||||
run: /perf-fmt cdp ${{ github.sha }} bench.json
|
||||
|
||||
- name: format and send json result
|
||||
run: /perf-fmt hyperfine ${{ github.sha }} hyperfine.json
|
||||
49  .github/workflows/wpt.yml (vendored)
@@ -5,21 +5,46 @@ env:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.LPD_PERF_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.LPD_PERF_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.LPD_PERF_AWS_REGION }}
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "23 2 * * *"
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "tests/wpt/**"
|
||||
- "vendor/**"
|
||||
- ".github/**"
|
||||
pull_request:
|
||||
|
||||
# By default GH trigger on types opened, synchronize and reopened.
|
||||
# see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
|
||||
# Since we skip the job when the PR is in draft state, we want to force CI
|
||||
# running when the PR is marked ready_for_review w/o other change.
|
||||
# see https://github.com/orgs/community/discussions/25722#discussioncomment-3248917
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
|
||||
paths:
|
||||
- ".github/**"
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "tests/wpt/**"
|
||||
- "vendor/**"
|
||||
- ".github/**"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
wpt:
|
||||
name: web platform tests json output
|
||||
name: web platform tests
|
||||
|
||||
# Don't run the CI with draft PR.
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 90
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -30,8 +55,15 @@ jobs:
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
- run: zig build wpt -Dengine=v8 -- --safe --summary
|
||||
|
||||
# For now WPT tests doesn't pass at all.
|
||||
# We accept then to continue the job on failure.
|
||||
# TODO remove the continue-on-error when tests will pass.
|
||||
continue-on-error: true
|
||||
|
||||
- name: json output
|
||||
run: zig build -Doptimize=ReleaseFast wpt -- --json > wpt.json
|
||||
run: zig build wpt -Dengine=v8 -- --safe --json > wpt.json
|
||||
|
||||
- name: write commit
|
||||
run: |
|
||||
@@ -50,9 +82,10 @@ jobs:
|
||||
name: perf-fmt
|
||||
needs: wpt
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
# Don't execute on PR
|
||||
if: github.event_name != 'pull_request'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ghcr.io/lightpanda-io/perf-fmt:latest
|
||||
credentials:
|
||||
|
||||
5  .github/workflows/zig-fmt.yml (vendored)
@@ -1,7 +1,7 @@
name: zig-fmt

env:
ZIG_VERSION: 0.15.1
ZIG_VERSION: 0.13.0

on:
pull_request:
@@ -29,10 +29,9 @@ jobs:
if: github.event.pull_request.draft == false

runs-on: ubuntu-latest
timeout-minutes: 15

steps:
- uses: mlugg/setup-zig@v2
- uses: mlugg/setup-zig@v1
with:
version: ${{ env.ZIG_VERSION }}
55  .github/workflows/zig-test.yml (vendored)
@@ -56,7 +56,7 @@ jobs:
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
- name: zig build debug
|
||||
run: zig build
|
||||
run: zig build -Dengine=v8
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -66,28 +66,28 @@ jobs:
|
||||
zig-out/bin/lightpanda
|
||||
retention-days: 1
|
||||
|
||||
browser-fetch:
|
||||
name: browser fetch
|
||||
needs: zig-build-dev
|
||||
zig-build-release:
|
||||
name: zig build release
|
||||
|
||||
# Don't run the CI with draft PR.
|
||||
if: github.event.pull_request.draft == false
|
||||
# Don't run the CI on PR
|
||||
if: github.event_name != 'pull_request'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
name: lightpanda-build-dev
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
- run: ./lightpanda fetch https://httpbin.io/xhr/get
|
||||
- name: zig build release
|
||||
run: zig build -Doptimize=ReleaseSafe -Dengine=v8
|
||||
|
||||
zig-test:
|
||||
name: zig test
|
||||
timeout-minutes: 15
|
||||
|
||||
# Don't run the CI with draft PR.
|
||||
if: github.event.pull_request.draft == false
|
||||
@@ -104,7 +104,7 @@ jobs:
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
- name: zig build test
|
||||
run: zig build test -- --json > bench.json
|
||||
run: zig build test -Dengine=v8 -- --json > bench.json
|
||||
|
||||
- name: write commit
|
||||
run: |
|
||||
@@ -127,8 +127,6 @@ jobs:
|
||||
if: github.event_name != 'pull_request'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
container:
|
||||
image: ghcr.io/lightpanda-io/perf-fmt:latest
|
||||
credentials:
|
||||
@@ -143,3 +141,30 @@ jobs:
|
||||
|
||||
- name: format and send json result
|
||||
run: /perf-fmt bench-browser ${{ github.sha }} bench.json
|
||||
|
||||
demo-puppeteer:
|
||||
name: demo-puppeteer
|
||||
needs: zig-build-dev
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: lightpanda-build-dev
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
- name: run puppeteer
|
||||
run: |
|
||||
python3 -m http.server 1234 -d ./public &
|
||||
./lightpanda &
|
||||
RUNS=2 npm run bench-puppeteer-cdp
|
||||
|
||||
2  .gitignore (vendored)
@@ -3,5 +3,3 @@ zig-cache
zig-out
/vendor/netsurf/out
/vendor/libiconv/
lightpanda.id
/v8/
23  .gitmodules (vendored)
@@ -1,3 +1,6 @@
[submodule "vendor/zig-js-runtime"]
path = vendor/zig-js-runtime
url = https://github.com/lightpanda-io/zig-js-runtime.git/
[submodule "vendor/netsurf/libwapcaplet"]
path = vendor/netsurf/libwapcaplet
url = https://github.com/lightpanda-io/libwapcaplet.git/
@@ -9,7 +12,7 @@
url = https://github.com/lightpanda-io/libdom.git/
[submodule "vendor/netsurf/share/netsurf-buildsystem"]
path = vendor/netsurf/share/netsurf-buildsystem
url = https://github.com/lightpanda-io/netsurf-buildsystem.git
url = https://source.netsurf-browser.org/buildsystem.git
[submodule "vendor/netsurf/libhubbub"]
path = vendor/netsurf/libhubbub
url = https://github.com/lightpanda-io/libhubbub.git/
@@ -19,15 +22,9 @@
[submodule "vendor/mimalloc"]
path = vendor/mimalloc
url = https://github.com/microsoft/mimalloc.git/
[submodule "vendor/nghttp2"]
path = vendor/nghttp2
url = https://github.com/nghttp2/nghttp2.git
[submodule "vendor/mbedtls"]
path = vendor/mbedtls
url = https://github.com/Mbed-TLS/mbedtls.git
[submodule "vendor/zlib"]
path = vendor/zlib
url = https://github.com/madler/zlib.git
[submodule "vendor/curl"]
path = vendor/curl
url = https://github.com/curl/curl.git
[submodule "vendor/tls.zig"]
path = vendor/tls.zig
url = https://github.com/ianic/tls.zig.git/
[submodule "vendor/zig-async-io"]
path = vendor/zig-async-io
url = https://github.com/lightpanda-io/zig-async-io.git/
67  Dockerfile
@@ -1,11 +1,11 @@
|
||||
FROM debian:stable
|
||||
FROM ubuntu:22.04
|
||||
|
||||
ARG MINISIG=0.12
|
||||
ARG ZIG=0.15.1
|
||||
ARG ZIG=0.13.0
|
||||
ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
|
||||
ARG V8=14.0.365.4
|
||||
ARG ZIG_V8=v0.1.30
|
||||
ARG TARGETPLATFORM
|
||||
ARG OS=linux
|
||||
ARG ARCH=x86_64
|
||||
ARG V8=11.1.134
|
||||
ARG ZIG_V8=v0.1.11
|
||||
|
||||
RUN apt-get update -yq && \
|
||||
apt-get install -yq xz-utils \
|
||||
@@ -16,23 +16,34 @@ RUN apt-get update -yq && \
|
||||
curl git
|
||||
|
||||
# install minisig
|
||||
RUN curl --fail -L -O https://github.com/jedisct1/minisign/releases/download/${MINISIG}/minisign-${MINISIG}-linux.tar.gz && \
|
||||
tar xvzf minisign-${MINISIG}-linux.tar.gz
|
||||
RUN curl -L -O https://github.com/jedisct1/minisign/releases/download/0.11/minisign-0.11-linux.tar.gz && \
|
||||
tar xvzf minisign-0.11-linux.tar.gz
|
||||
|
||||
# install zig
|
||||
RUN case $TARGETPLATFORM in \
|
||||
"linux/arm64") ARCH="aarch64" ;; \
|
||||
*) ARCH="x86_64" ;; \
|
||||
esac && \
|
||||
curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-${ARCH}-linux-${ZIG}.tar.xz && \
|
||||
curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-${ARCH}-linux-${ZIG}.tar.xz.minisig && \
|
||||
minisign-linux/${ARCH}/minisign -Vm zig-${ARCH}-linux-${ZIG}.tar.xz -P ${ZIG_MINISIG} && \
|
||||
tar xvf zig-${ARCH}-linux-${ZIG}.tar.xz && \
|
||||
mv zig-${ARCH}-linux-${ZIG} /usr/local/lib && \
|
||||
ln -s /usr/local/lib/zig-${ARCH}-linux-${ZIG}/zig /usr/local/bin/zig
|
||||
RUN curl -O https://ziglang.org/download/${ZIG}/zig-linux-x86_64-${ZIG}.tar.xz && \
|
||||
curl -O https://ziglang.org/download/${ZIG}/zig-linux-x86_64-${ZIG}.tar.xz.minisig
|
||||
|
||||
RUN minisign-linux/x86_64/minisign -Vm zig-linux-x86_64-${ZIG}.tar.xz -P ${ZIG_MINISIG}
|
||||
|
||||
# clean minisg
|
||||
RUN rm -fr minisign-0.11-linux.tar.gz minisign-linux
|
||||
|
||||
# install zig
|
||||
RUN tar xvf zig-linux-x86_64-${ZIG}.tar.xz && \
|
||||
mv zig-linux-x86_64-${ZIG} /usr/local/lib && \
|
||||
ln -s /usr/local/lib/zig-linux-x86_64-${ZIG}/zig /usr/local/bin/zig
|
||||
|
||||
# clean up zig install
|
||||
RUN rm -fr zig-linux-x86_64-${ZIG}.tar.xz zig-linux-x86_64-${ZIG}.tar.xz.minisig
|
||||
|
||||
# force use of http instead of ssh with github
|
||||
RUN cat <<EOF > /root/.gitconfig
|
||||
[url "https://github.com/"]
|
||||
insteadOf="git@github.com:"
|
||||
EOF
|
||||
|
||||
# clone lightpanda
|
||||
RUN git clone https://github.com/lightpanda-io/browser.git
|
||||
RUN git clone git@github.com:lightpanda-io/browser.git
|
||||
|
||||
WORKDIR /browser
|
||||
|
||||
@@ -40,23 +51,23 @@ WORKDIR /browser
|
||||
RUN git submodule init && \
|
||||
git submodule update --recursive
|
||||
|
||||
RUN cd vendor/zig-js-runtime && \
|
||||
git submodule init && \
|
||||
git submodule update --recursive
|
||||
|
||||
RUN make install-libiconv && \
|
||||
make install-netsurf && \
|
||||
make install-mimalloc
|
||||
|
||||
# download and install v8
|
||||
RUN case $TARGETPLATFORM in \
|
||||
"linux/arm64") ARCH="aarch64" ;; \
|
||||
*) ARCH="x86_64" ;; \
|
||||
esac && \
|
||||
curl --fail -L -o libc_v8.a https://github.com/lightpanda-io/zig-v8-fork/releases/download/${ZIG_V8}/libc_v8_${V8}_linux_${ARCH}.a && \
|
||||
mkdir -p v8/out/linux/release/obj/zig/ && \
|
||||
mv libc_v8.a v8/out/linux/release/obj/zig/libc_v8.a
|
||||
RUN curl -L -o libc_v8.a https://github.com/lightpanda-io/zig-v8-fork/releases/download/${ZIG_V8}/libc_v8_${V8}_${OS}_${ARCH}.a && \
|
||||
mkdir -p vendor/zig-js-runtime/vendor/v8/${ARCH}-${OS}/release && \
|
||||
mv libc_v8.a vendor/zig-js-runtime/vendor/v8/${ARCH}-${OS}/release/libc_v8.a
|
||||
|
||||
# build release
|
||||
RUN make build
|
||||
|
||||
FROM debian:stable-slim
|
||||
FROM ubuntu:22.04
|
||||
|
||||
# copy ca certificates
|
||||
COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt
|
||||
@@ -65,4 +76,4 @@ COPY --from=0 /browser/zig-out/bin/lightpanda /bin/lightpanda
|
||||
|
||||
EXPOSE 9222/tcp
|
||||
|
||||
CMD ["/bin/lightpanda", "serve", "--host", "0.0.0.0", "--port", "9222"]
|
||||
CMD ["/bin/lightpanda", "--host", "0.0.0.0", "--port", "9222"]
|
||||
|
||||
@@ -10,6 +10,7 @@ The default license for this project is [AGPL-3.0-only](LICENSE).
The following files are licensed under MIT:

```
src/http/Client.zig
src/polyfill/fetch.js
```
71  Makefile
@@ -3,7 +3,7 @@
|
||||
|
||||
ZIG := zig
|
||||
BC := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
|
||||
# option test filter make test F="server"
|
||||
# option test filter make unittest F="server"
|
||||
F=
|
||||
|
||||
# OS and ARCH
|
||||
@@ -11,9 +11,6 @@ kernel = $(shell uname -ms)
|
||||
ifeq ($(kernel), Darwin arm64)
|
||||
OS := macos
|
||||
ARCH := aarch64
|
||||
else ifeq ($(kernel), Darwin x86_64)
|
||||
OS := macos
|
||||
ARCH := x86_64
|
||||
else ifeq ($(kernel), Linux aarch64)
|
||||
OS := linux
|
||||
ARCH := aarch64
|
||||
@@ -47,8 +44,7 @@ help:
|
||||
|
||||
# $(ZIG) commands
|
||||
# ------------
|
||||
.PHONY: build build-dev run run-release shell test bench download-zig wpt data get-v8 build-v8 build-v8-dev
|
||||
.PHONY: end2end
|
||||
.PHONY: build build-dev run run-release shell test bench download-zig wpt unittest
|
||||
|
||||
zig_version = $(shell grep 'recommended_zig_version = "' "vendor/zig-js-runtime/build.zig" | cut -d'"' -f2)
|
||||
|
||||
@@ -63,74 +59,56 @@ download-zig:
|
||||
## Build in release-safe mode
|
||||
build:
|
||||
@printf "\e[36mBuilding (release safe)...\e[0m\n"
|
||||
$(ZIG) build -Doptimize=ReleaseSafe -Dgit_commit=$$(git rev-parse --short HEAD) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
@$(ZIG) build -Doptimize=ReleaseSafe -Dengine=v8 || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
@printf "\e[33mBuild OK\e[0m\n"
|
||||
|
||||
## Build in debug mode
|
||||
build-dev:
|
||||
@printf "\e[36mBuilding (debug)...\e[0m\n"
|
||||
@$(ZIG) build -Dgit_commit=$$(git rev-parse --short HEAD) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
@$(ZIG) build -Dengine=v8 || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
@printf "\e[33mBuild OK\e[0m\n"
|
||||
|
||||
## Run the server in release mode
|
||||
run: build
|
||||
@printf "\e[36mRunning...\e[0m\n"
|
||||
@./zig-out/bin/lightpanda || (printf "\e[33mRun ERROR\e[0m\n"; exit 1;)
|
||||
|
||||
## Run the server in debug mode
|
||||
run-debug: build-dev
|
||||
run: build
|
||||
@printf "\e[36mRunning...\e[0m\n"
|
||||
@./zig-out/bin/lightpanda || (printf "\e[33mRun ERROR\e[0m\n"; exit 1;)
|
||||
|
||||
## Run a JS shell in debug mode
|
||||
shell:
|
||||
@printf "\e[36mBuilding shell...\e[0m\n"
|
||||
@$(ZIG) build shell || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
@$(ZIG) build shell -Dengine=v8 || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
|
||||
## Run WPT tests
|
||||
wpt:
|
||||
@printf "\e[36mBuilding wpt...\e[0m\n"
|
||||
@$(ZIG) build wpt -- $(filter-out $@,$(MAKECMDGOALS)) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
@$(ZIG) build wpt -Dengine=v8 -- --safe $(filter-out $@,$(MAKECMDGOALS)) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
|
||||
wpt-summary:
|
||||
@printf "\e[36mBuilding wpt...\e[0m\n"
|
||||
@$(ZIG) build wpt -- --summary $(filter-out $@,$(MAKECMDGOALS)) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
@$(ZIG) build wpt -Dengine=v8 -- --safe --summary $(filter-out $@,$(MAKECMDGOALS)) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
|
||||
|
||||
## Test
|
||||
test:
|
||||
@TEST_FILTER='${F}' $(ZIG) build test -freference-trace --summary all
|
||||
@printf "\e[36mTesting...\e[0m\n"
|
||||
@$(ZIG) build test -Dengine=v8 || (printf "\e[33mTest ERROR\e[0m\n"; exit 1;)
|
||||
@printf "\e[33mTest OK\e[0m\n"
|
||||
|
||||
## Run demo/runner end to end tests
|
||||
end2end:
|
||||
@test -d ../demo
|
||||
cd ../demo && go run runner/main.go
|
||||
|
||||
## v8
|
||||
get-v8:
|
||||
@printf "\e[36mGetting v8 source...\e[0m\n"
|
||||
@$(ZIG) build get-v8
|
||||
|
||||
build-v8-dev:
|
||||
@printf "\e[36mBuilding v8 (dev)...\e[0m\n"
|
||||
@$(ZIG) build build-v8
|
||||
|
||||
build-v8:
|
||||
@printf "\e[36mBuilding v8...\e[0m\n"
|
||||
@$(ZIG) build -Doptimize=ReleaseSafe build-v8
|
||||
unittest:
|
||||
@TEST_FILTER='${F}' $(ZIG) build unittest -freference-trace --summary all
|
||||
|
||||
# Install and build required dependencies commands
|
||||
# ------------
|
||||
.PHONY: install-submodule
|
||||
.PHONY: install-libiconv
|
||||
.PHONY: install-zig-js-runtime install-zig-js-runtime-dev install-libiconv
|
||||
.PHONY: _install-netsurf install-netsurf clean-netsurf test-netsurf install-netsurf-dev
|
||||
.PHONY: install-mimalloc install-mimalloc-dev clean-mimalloc
|
||||
.PHONY: install-dev install
|
||||
|
||||
## Install and build dependencies for release
|
||||
install: install-submodule install-libiconv install-netsurf install-mimalloc
|
||||
install: install-submodule install-zig-js-runtime install-libiconv install-netsurf install-mimalloc
|
||||
|
||||
## Install and build dependencies for dev
|
||||
install-dev: install-submodule install-libiconv install-netsurf-dev install-mimalloc-dev
|
||||
install-dev: install-submodule install-zig-js-runtime-dev install-libiconv install-netsurf-dev install-mimalloc-dev
|
||||
|
||||
install-netsurf-dev: _install-netsurf
|
||||
install-netsurf-dev: OPTCFLAGS := -O0 -g -DNDEBUG
|
||||
@@ -166,7 +144,7 @@ _install-netsurf: clean-netsurf
|
||||
BUILDDIR=$(BC_NS)/build/libdom make install && \
|
||||
printf "\e[33mRunning libdom example...\e[0m\n" && \
|
||||
cd examples && \
|
||||
$(ZIG) cc \
|
||||
zig cc \
|
||||
-I$(ICONV)/include \
|
||||
-I$(BC_NS)/include \
|
||||
-L$(ICONV)/lib \
|
||||
@@ -199,24 +177,27 @@ download-libiconv:
|
||||
ifeq ("$(wildcard vendor/libiconv/libiconv-1.17)","")
|
||||
@mkdir -p vendor/libiconv
|
||||
@cd vendor/libiconv && \
|
||||
curl -L https://github.com/lightpanda-io/libiconv/releases/download/1.17/libiconv-1.17.tar.gz | tar -xvzf -
|
||||
curl https://ftp.gnu.org/pub/gnu/libiconv/libiconv-1.17.tar.gz | tar -xvzf -
|
||||
endif
|
||||
|
||||
build-libiconv: clean-libiconv
|
||||
install-libiconv: download-libiconv clean-libiconv
|
||||
@cd vendor/libiconv/libiconv-1.17 && \
|
||||
./configure --prefix=$(ICONV) --enable-static && \
|
||||
make && make install
|
||||
|
||||
install-libiconv: download-libiconv build-libiconv
|
||||
|
||||
clean-libiconv:
|
||||
ifneq ("$(wildcard vendor/libiconv/libiconv-1.17/Makefile)","")
|
||||
@cd vendor/libiconv/libiconv-1.17 && \
|
||||
make clean
|
||||
endif
|
||||
|
||||
data:
|
||||
cd src/data && go run public_suffix_list_gen.go > public_suffix_list.zig
|
||||
install-zig-js-runtime-dev:
|
||||
@cd vendor/zig-js-runtime && \
|
||||
make install-dev
|
||||
|
||||
install-zig-js-runtime:
|
||||
@cd vendor/zig-js-runtime && \
|
||||
make install
|
||||
|
||||
.PHONY: _build_mimalloc
|
||||
|
||||
|
||||
190  README.md
@@ -8,79 +8,65 @@
|
||||
|
||||
<div align="center">
|
||||
|
||||
[](https://github.com/lightpanda-io/browser/commits/main)
|
||||
[](https://github.com/lightpanda-io/browser/blob/main/LICENSE)
|
||||
[](https://twitter.com/lightpanda_io)
|
||||
[](https://github.com/lightpanda-io/browser)
|
||||
|
||||
</div>
|
||||
|
||||
<div align="center">
|
||||
|
||||
<a href="https://trendshift.io/repositories/12815" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12815" alt="lightpanda-io%2Fbrowser | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
|
||||
</div>
|
||||
|
||||
Lightpanda is the open-source browser made for headless usage:
|
||||
|
||||
- Javascript execution
|
||||
- Support of Web APIs (partial, WIP)
|
||||
- Compatible with Playwright[^1], Puppeteer, chromedp through CDP
|
||||
- Compatible with Playwright, Puppeteer through CDP (WIP)
|
||||
|
||||
Fast web automation for AI agents, LLM training, scraping and testing:
|
||||
Fast web automation for AI agents, LLM training, scraping and testing with minimal memory footprint:
|
||||
|
||||
- Ultra-low memory footprint (9x less than Chrome)
|
||||
- Exceptionally fast execution (11x faster than Chrome)
|
||||
- Instant startup
|
||||
- Exceptionally fast execution (11x faster than Chrome) & instant startup
|
||||
|
||||
[<img width="350px" src="https://cdn.lightpanda.io/assets/images/github/execution-time.svg">
|
||||
](https://github.com/lightpanda-io/demo)
|
||||
 
|
||||
[<img width="350px" src="https://cdn.lightpanda.io/assets/images/github/memory-frame.svg">
|
||||
](https://github.com/lightpanda-io/demo)
|
||||
</div>
|
||||
<img width=500px src="https://cdn.lightpanda.io/assets/images/benchmark_2024-12-04.png">
|
||||
|
||||
_Puppeteer requesting 100 pages from a local website on a AWS EC2 m5.large instance.
|
||||
See [benchmark details](https://github.com/lightpanda-io/demo)._
|
||||
|
||||
[^1]: **Playwright support disclaimer:**
|
||||
Due to the nature of Playwright, a script that works with the current version of the browser may not function correctly with a future version. Playwright uses an intermediate JavaScript layer that selects an execution strategy based on the browser's available features. If Lightpanda adds a new [Web API](https://developer.mozilla.org/en-US/docs/Web/API), Playwright may choose to execute different code for the same script. This new code path could attempt to use features that are not yet implemented. Lightpanda makes an effort to add compatibility tests, but we can't cover all scenarios. If you encounter an issue, please create a [GitHub issue](https://github.com/lightpanda-io/browser/issues) and include the last known working version of the script.
|
||||
See [benchmark details](https://github.com/lightpanda-io/demo).
|
||||
|
||||
## Quick start
|
||||
|
||||
### Install
|
||||
**Install from the nightly builds**
|
||||
### Install from the nightly builds
|
||||
|
||||
You can download the last binary from the [nightly
|
||||
builds](https://github.com/lightpanda-io/browser/releases/tag/nightly) for
|
||||
Linux x86_64 and MacOS aarch64.
|
||||
|
||||
*For Linux*
|
||||
```console
|
||||
curl -L -o lightpanda https://github.com/lightpanda-io/browser/releases/download/nightly/lightpanda-x86_64-linux && \
|
||||
chmod a+x ./lightpanda
|
||||
# Download the binary
|
||||
$ wget https://github.com/lightpanda-io/browser/releases/download/nightly/lightpanda-x86_64-linux
|
||||
$ chmod a+x ./lightpanda-x86_64-linux
|
||||
$ ./lightpanda-x86_64-linux -h
|
||||
usage: ./lightpanda-x86_64-linux [options] [URL]
|
||||
|
||||
start Lightpanda browser
|
||||
|
||||
* if an url is provided the browser will fetch the page and exit
|
||||
* otherwhise the browser starts a CDP server
|
||||
|
||||
-h, --help Print this help message and exit.
|
||||
--host Host of the CDP server (default "127.0.0.1")
|
||||
--port Port of the CDP server (default "9222")
|
||||
--timeout Timeout for incoming connections of the CDP server (in seconds, default "3")
|
||||
--dump Dump document in stdout (fetch mode only)
|
||||
```
|
||||
|
||||
*For MacOS*
```console
curl -L -o lightpanda https://github.com/lightpanda-io/browser/releases/download/nightly/lightpanda-aarch64-macos && \
chmod a+x ./lightpanda
```

*For Windows + WSL2*

The Lightpanda browser can run on Windows inside WSL. Follow the Linux installation instructions from a WSL terminal.
It is recommended to install clients such as Puppeteer on the Windows host.

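Since WSL2 forwards ports bound to localhost through to the Windows host by default, a minimal sketch of this split setup looks like this (the `serve` flags are the ones documented above; the WebSocket endpoint your client uses is an assumption based on that host and port):

```console
# inside the WSL terminal: start the CDP server
./lightpanda serve --host 127.0.0.1 --port 9222

# on the Windows host: point your client at the forwarded port,
# e.g. Puppeteer's puppeteer.connect({ browserWSEndpoint: 'ws://127.0.0.1:9222' })
```
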
**Install from Docker**

Lightpanda provides [official Docker images](https://hub.docker.com/r/lightpanda/browser) for both Linux amd64 and arm64 architectures.
The following command fetches the Docker image and starts a new container exposing Lightpanda's CDP server on port `9222`.
```console
docker run -d --name lightpanda -p 9222:9222 lightpanda/browser:nightly
```

### Dump a URL
|
||||
### Dump an URL
|
||||
|
||||
```console
|
||||
./lightpanda fetch --dump https://lightpanda.io
|
||||
```
|
||||
```console
|
||||
$ ./lightpanda-x86_64-linux --dump https://lightpanda.io
|
||||
info(browser): GET https://lightpanda.io/ http.Status.ok
|
||||
info(browser): fetch script https://api.website.lightpanda.io/js/script.js: http.Status.ok
|
||||
info(browser): eval remote https://api.website.lightpanda.io/js/script.js: TypeError: Cannot read properties of undefined (reading 'pushState')
|
||||
@@ -90,9 +76,7 @@ info(browser): eval remote https://api.website.lightpanda.io/js/script.js: TypeE
|
||||
### Start a CDP server
|
||||
|
||||
```console
|
||||
./lightpanda serve --host 127.0.0.1 --port 9222
|
||||
```
|
||||
```console
|
||||
$ ./lightpanda-x86_64-linux --host 127.0.0.1 --port 9222
|
||||
info(websocket): starting blocking worker to listen on 127.0.0.1:9222
|
||||
info(server): accepting new conn...
|
||||
```
|
||||
@@ -114,62 +98,21 @@ const browser = await puppeteer.connect({
|
||||
const context = await browser.createBrowserContext();
|
||||
const page = await context.newPage();
|
||||
|
||||
// Dump all the links from the page.
|
||||
await page.goto('https://wikipedia.com/');
|
||||
|
||||
const links = await page.evaluate(() => {
|
||||
return Array.from(document.querySelectorAll('a')).map(row => {
|
||||
return row.getAttribute('href');
|
||||
});
|
||||
});
|
||||
|
||||
console.log(links);
|
||||
|
||||
await page.close();
|
||||
await context.close();
|
||||
await browser.disconnect();
|
||||
```
|
||||
|
||||
### Telemetry

By default, Lightpanda collects and sends usage telemetry. This can be disabled by setting the environment variable `LIGHTPANDA_DISABLE_TELEMETRY=true`. You can read Lightpanda's privacy policy at [https://lightpanda.io/privacy-policy](https://lightpanda.io/privacy-policy).

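For example, the variable can be set inline when starting the CDP server (a minimal sketch; the `serve` command and flags are the ones shown in the usage above):

```console
LIGHTPANDA_DISABLE_TELEMETRY=true ./lightpanda serve --host 127.0.0.1 --port 9222
```
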
## Status
|
||||
|
||||
Lightpanda is in Beta and currently a work in progress. Stability and coverage are improving and many websites now work.
|
||||
You may still encounter errors or crashes. Please open an issue with specifics if so.
|
||||
|
||||
Here are the key features we have implemented:
|
||||
|
||||
- [x] HTTP loader (based on Libcurl)
|
||||
- [x] HTML parser and DOM tree (based on Netsurf libs)
|
||||
- [x] Javascript support (v8)
|
||||
- [x] DOM APIs
|
||||
- [x] Ajax
|
||||
- [x] XHR API
|
||||
- [x] Fetch API (polyfill)
|
||||
- [x] DOM dump
|
||||
- [x] CDP/websockets server
|
||||
- [x] Click
|
||||
- [x] Input form
|
||||
- [x] Cookies
|
||||
- [x] Custom HTTP headers
|
||||
- [x] Proxy support
|
||||
- [x] Network interception
|
||||
|
||||
NOTE: There are hundreds of Web APIs. Developing a browser (even just for headless mode) is a huge task. Coverage will increase over time.
|
||||
|
||||
You can also follow the progress of our Javascript support in our dedicated [zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime#development) project.
|
||||
|
||||
## Build from sources
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Lightpanda is written with [Zig](https://ziglang.org/) `0.15.1`. You have to
|
||||
Lightpanda is written with [Zig](https://ziglang.org/) `0.13.0`. You have to
|
||||
install it with the right version in order to build the project.
|
||||
|
||||
Lightpanda also depends on
|
||||
[zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime/) (with v8),
|
||||
[Libcurl](https://curl.se/libcurl/),
|
||||
[Netsurf libs](https://www.netsurf-browser.org/) and
|
||||
[Mimalloc](https://microsoft.github.io/mimalloc).
|
||||
|
||||
@@ -181,15 +124,10 @@ For Debian/Ubuntu based Linux:
|
||||
sudo apt install xz-utils \
|
||||
python3 ca-certificates git \
|
||||
pkg-config libglib2.0-dev \
|
||||
gperf libexpat1-dev unzip rsync \
|
||||
gperf libexpat1-dev \
|
||||
cmake clang
|
||||
```
|
||||
|
||||
For systems with [Nix](https://nixos.org/download/), you can use the devShell:
|
||||
```
|
||||
nix develop
|
||||
```
|
||||
|
||||
For MacOS, you only need cmake:
|
||||
|
||||
```
|
||||
@@ -214,14 +152,6 @@ To init or update the submodules in the `vendor/` directory:
|
||||
make install-submodule
|
||||
```
|
||||
|
||||
**iconv**
|
||||
|
||||
libiconv is an internationalization library used by Netsurf.
|
||||
|
||||
```
|
||||
make install-libiconv
|
||||
```
|
||||
|
||||
**Netsurf libs**
|
||||
|
||||
Netsurf libs are used for HTML parsing and DOM tree generation.
|
||||
@@ -246,21 +176,17 @@ Note: when Mimalloc is built in dev mode, you can dump memory stats with the
|
||||
env var `MIMALLOC_SHOW_STATS=1`. See
|
||||
[https://microsoft.github.io/mimalloc/environment.html](https://microsoft.github.io/mimalloc/environment.html).
|
||||
|
||||
**v8**
|
||||
**zig-js-runtime**
|
||||
|
||||
First, get the tools necessary for building V8, as well as the V8 source code:
|
||||
Our own Zig/Javascript runtime, which includes the v8 Javascript engine.
|
||||
|
||||
This build task is very long and cpu consuming, as you will build v8 from sources.
|
||||
|
||||
```
|
||||
make get-v8
|
||||
make install-zig-js-runtime
|
||||
```
|
||||
|
||||
Next, build v8. This build task is very long and cpu consuming, as you will build v8 from sources.
|
||||
|
||||
```
|
||||
make build-v8
|
||||
```
|
||||
|
||||
For dev env, use `make build-v8-dev`.
|
||||
For dev env, use `make install-zig-js-runtime-dev`.
|
||||
|
||||
## Test
|
||||
|
||||
@@ -268,20 +194,6 @@ For dev env, use `make build-v8-dev`.
|
||||
|
||||
You can test Lightpanda by running `make test`.
|
||||
|
||||
### End to end tests

To run the end to end tests, you need to clone the [demo repository](https://github.com/lightpanda-io/demo) into the `../demo` dir.

You have to install the [demo's node requirements](https://github.com/lightpanda-io/demo?tab=readme-ov-file#dependencies-1).

You also need to install [Go](https://go.dev) > v1.24.

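If the demo repository is not already checked out next to this one, a clone along these lines sets it up before running the target below (a sketch; the URL and the `../demo` path come from the paragraphs above):

```console
git clone https://github.com/lightpanda-io/demo.git ../demo
```
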
```
make end2end
```
|
||||
|
||||
### Web Platform Tests
|
||||
|
||||
Lightpanda is tested against the standardized [Web Platform
|
||||
@@ -348,3 +260,25 @@ If we want both Javascript and performance in a true headless browser, we need t
|
||||
- Not based on Chromium, Blink or WebKit
|
||||
- Low-level system programming language (Zig) with optimisations in mind
|
||||
- Opinionated: without graphical rendering
|
||||
|
||||
## Status
|
||||
|
||||
Lightpanda is still a work in progress and is currently at a Beta stage.
|
||||
|
||||
:warning: You should expect most websites to fail or crash.
|
||||
|
||||
Here are the key features we have implemented:
|
||||
|
||||
- [x] HTTP loader
|
||||
- [x] HTML parser and DOM tree (based on Netsurf libs)
|
||||
- [x] Javascript support (v8)
|
||||
- [x] Basic DOM APIs
|
||||
- [x] Ajax
|
||||
- [x] XHR API
|
||||
- [x] Fetch API
|
||||
- [x] DOM dump
|
||||
- [x] Basic CDP/websockets server
|
||||
|
||||
NOTE: There are hundreds of Web APIs. Developing a browser (even just for headless mode) is a huge task. Coverage will increase over time.
|
||||
|
||||
You can also follow the progress of our Javascript support in our dedicated [zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime#development) project.
|
||||
|
||||
890  build.zig
@@ -17,15 +17,18 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const Build = std.Build;
|
||||
const jsruntime_path = "vendor/zig-js-runtime/";
|
||||
const jsruntime = @import("vendor/zig-js-runtime/build.zig");
|
||||
const jsruntime_pkgs = jsruntime.packages(jsruntime_path);
|
||||
|
||||
/// Do not rename this constant. It is scanned by some scripts to determine
|
||||
/// which zig version to install.
|
||||
const recommended_zig_version = "0.15.1";
|
||||
const recommended_zig_version = jsruntime.recommended_zig_version;
|
||||
|
||||
pub fn build(b: *Build) !void {
|
||||
pub fn build(b: *std.Build) !void {
|
||||
switch (comptime builtin.zig_version.order(std.SemanticVersion.parse(recommended_zig_version) catch unreachable)) {
|
||||
.eq => {},
|
||||
.lt => {
|
||||
@@ -39,397 +42,185 @@ pub fn build(b: *Build) !void {
|
||||
},
|
||||
}
|
||||
|
||||
var opts = b.addOptions();
|
||||
opts.addOption(
|
||||
[]const u8,
|
||||
"git_commit",
|
||||
b.option([]const u8, "git_commit", "Current git commit") orelse "dev",
|
||||
);
|
||||
|
||||
const target = b.standardTargetOptions(.{});
|
||||
const optimize = b.standardOptimizeOption(.{});
|
||||
const mode = b.standardOptimizeOption(.{});
|
||||
|
||||
// We're still using llvm because the new x86 backend seems to crash
|
||||
// with v8. This can be reproduced in zig-v8-fork.
|
||||
const options = jsruntime.buildOptions(b);
|
||||
|
||||
const lightpanda_module = b.addModule("lightpanda", .{
|
||||
// browser
|
||||
// -------
|
||||
|
||||
// compile and install
|
||||
const exe = b.addExecutable(.{
|
||||
.name = "lightpanda",
|
||||
.root_source_file = b.path("src/main.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.link_libcpp = true,
|
||||
.optimize = mode,
|
||||
});
|
||||
try addDependencies(b, lightpanda_module, opts);
|
||||
try common(b, exe, options);
|
||||
b.installArtifact(exe);
|
||||
|
||||
{
|
||||
// browser
|
||||
// -------
|
||||
|
||||
// compile and install
|
||||
const exe = b.addExecutable(.{
|
||||
.name = "lightpanda",
|
||||
.use_llvm = true,
|
||||
.root_module = lightpanda_module,
|
||||
});
|
||||
b.installArtifact(exe);
|
||||
|
||||
// run
|
||||
const run_cmd = b.addRunArtifact(exe);
|
||||
if (b.args) |args| {
|
||||
run_cmd.addArgs(args);
|
||||
}
|
||||
|
||||
// step
|
||||
const run_step = b.step("run", "Run the app");
|
||||
run_step.dependOn(&run_cmd.step);
|
||||
// run
|
||||
const run_cmd = b.addRunArtifact(exe);
|
||||
if (b.args) |args| {
|
||||
run_cmd.addArgs(args);
|
||||
}
|
||||
|
||||
{
|
||||
// tests
|
||||
// ----
|
||||
// step
|
||||
const run_step = b.step("run", "Run the app");
|
||||
run_step.dependOn(&run_cmd.step);
|
||||
|
||||
// compile
|
||||
const tests = b.addTest(.{
|
||||
.root_module = lightpanda_module,
|
||||
.use_llvm = true,
|
||||
.test_runner = .{ .path = b.path("src/test_runner.zig"), .mode = .simple },
|
||||
});
|
||||
// shell
|
||||
// -----
|
||||
|
||||
const run_tests = b.addRunArtifact(tests);
|
||||
if (b.args) |args| {
|
||||
run_tests.addArgs(args);
|
||||
}
|
||||
|
||||
// step
|
||||
const tests_step = b.step("test", "Run unit tests");
|
||||
tests_step.dependOn(&run_tests.step);
|
||||
}
|
||||
|
||||
{
|
||||
// wpt
|
||||
// -----
|
||||
const wpt_module = b.createModule(.{
|
||||
.root_source_file = b.path("src/main_wpt.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
});
|
||||
try addDependencies(b, wpt_module, opts);
|
||||
|
||||
// compile and install
|
||||
const wpt = b.addExecutable(.{
|
||||
.name = "lightpanda-wpt",
|
||||
.use_llvm = true,
|
||||
.root_module = wpt_module,
|
||||
});
|
||||
|
||||
// run
|
||||
const wpt_cmd = b.addRunArtifact(wpt);
|
||||
if (b.args) |args| {
|
||||
wpt_cmd.addArgs(args);
|
||||
}
|
||||
// step
|
||||
const wpt_step = b.step("wpt", "WPT tests");
|
||||
wpt_step.dependOn(&wpt_cmd.step);
|
||||
}
|
||||
|
||||
{
|
||||
// get v8
|
||||
// -------
|
||||
const v8 = b.dependency("v8", .{ .target = target, .optimize = optimize });
|
||||
const get_v8 = b.addRunArtifact(v8.artifact("get-v8"));
|
||||
const get_step = b.step("get-v8", "Get v8");
|
||||
get_step.dependOn(&get_v8.step);
|
||||
}
|
||||
|
||||
{
|
||||
// build v8
|
||||
// -------
|
||||
const v8 = b.dependency("v8", .{ .target = target, .optimize = optimize });
|
||||
const build_v8 = b.addRunArtifact(v8.artifact("build-v8"));
|
||||
const build_step = b.step("build-v8", "Build v8");
|
||||
build_step.dependOn(&build_v8.step);
|
||||
}
|
||||
}
|
||||
|
||||
fn addDependencies(b: *Build, mod: *Build.Module, opts: *Build.Step.Options) !void {
|
||||
try moduleNetSurf(b, mod);
|
||||
mod.addImport("build_config", opts.createModule());
|
||||
|
||||
const target = mod.resolved_target.?;
|
||||
const dep_opts = .{
|
||||
// compile and install
|
||||
const shell = b.addExecutable(.{
|
||||
.name = "lightpanda-shell",
|
||||
.root_source_file = b.path("src/main_shell.zig"),
|
||||
.target = target,
|
||||
.optimize = mod.optimize.?,
|
||||
};
|
||||
.optimize = mode,
|
||||
});
|
||||
try common(b, shell, options);
|
||||
try jsruntime_pkgs.add_shell(shell);
|
||||
|
||||
mod.addIncludePath(b.path("vendor/lightpanda"));
|
||||
|
||||
{
|
||||
// v8
|
||||
const v8_opts = b.addOptions();
|
||||
v8_opts.addOption(bool, "inspector_subtype", false);
|
||||
|
||||
const v8_mod = b.dependency("v8", dep_opts).module("v8");
|
||||
v8_mod.addOptions("default_exports", v8_opts);
|
||||
mod.addImport("v8", v8_mod);
|
||||
|
||||
const release_dir = if (mod.optimize.? == .Debug) "debug" else "release";
|
||||
const os = switch (target.result.os.tag) {
|
||||
.linux => "linux",
|
||||
.macos => "macos",
|
||||
else => return error.UnsupportedPlatform,
|
||||
};
|
||||
var lib_path = try std.fmt.allocPrint(
|
||||
mod.owner.allocator,
|
||||
"v8/out/{s}/{s}/obj/zig/libc_v8.a",
|
||||
.{ os, release_dir },
|
||||
);
|
||||
std.fs.cwd().access(lib_path, .{}) catch {
|
||||
// legacy path
|
||||
lib_path = try std.fmt.allocPrint(
|
||||
mod.owner.allocator,
|
||||
"v8/out/{s}/obj/zig/libc_v8.a",
|
||||
.{release_dir},
|
||||
);
|
||||
};
|
||||
mod.addObjectFile(mod.owner.path(lib_path));
|
||||
|
||||
switch (target.result.os.tag) {
|
||||
.macos => {
|
||||
// v8 has a dependency, abseil-cpp, which, on Mac, uses CoreFoundation
|
||||
mod.addSystemFrameworkPath(.{ .cwd_relative = "/System/Library/Frameworks" });
|
||||
mod.linkFramework("CoreFoundation", .{});
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
// run
|
||||
const shell_cmd = b.addRunArtifact(shell);
|
||||
if (b.args) |args| {
|
||||
shell_cmd.addArgs(args);
|
||||
}
|
||||
|
||||
{
|
||||
//curl
|
||||
{
|
||||
const is_linux = target.result.os.tag == .linux;
|
||||
if (is_linux) {
|
||||
mod.addCMacro("HAVE_LINUX_TCP_H", "1");
|
||||
mod.addCMacro("HAVE_MSG_NOSIGNAL", "1");
|
||||
mod.addCMacro("HAVE_GETHOSTBYNAME_R", "1");
|
||||
}
|
||||
mod.addCMacro("_FILE_OFFSET_BITS", "64");
|
||||
mod.addCMacro("BUILDING_LIBCURL", "1");
|
||||
mod.addCMacro("CURL_DISABLE_AWS", "1");
|
||||
mod.addCMacro("CURL_DISABLE_DICT", "1");
|
||||
mod.addCMacro("CURL_DISABLE_DOH", "1");
|
||||
mod.addCMacro("CURL_DISABLE_FILE", "1");
|
||||
mod.addCMacro("CURL_DISABLE_FTP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_GOPHER", "1");
|
||||
mod.addCMacro("CURL_DISABLE_KERBEROS", "1");
|
||||
mod.addCMacro("CURL_DISABLE_IMAP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_IPFS", "1");
|
||||
mod.addCMacro("CURL_DISABLE_LDAP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_LDAPS", "1");
|
||||
mod.addCMacro("CURL_DISABLE_MQTT", "1");
|
||||
mod.addCMacro("CURL_DISABLE_NTLM", "1");
|
||||
mod.addCMacro("CURL_DISABLE_PROGRESS_METER", "1");
|
||||
mod.addCMacro("CURL_DISABLE_POP3", "1");
|
||||
mod.addCMacro("CURL_DISABLE_RTSP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_SMB", "1");
|
||||
mod.addCMacro("CURL_DISABLE_SMTP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_TELNET", "1");
|
||||
mod.addCMacro("CURL_DISABLE_TFTP", "1");
|
||||
mod.addCMacro("CURL_EXTERN_SYMBOL", "__attribute__ ((__visibility__ (\"default\"))");
|
||||
mod.addCMacro("CURL_OS", if (is_linux) "\"Linux\"" else "\"mac\"");
|
||||
mod.addCMacro("CURL_STATICLIB", "1");
|
||||
mod.addCMacro("ENABLE_IPV6", "1");
|
||||
mod.addCMacro("HAVE_ALARM", "1");
|
||||
mod.addCMacro("HAVE_ALLOCA_H", "1");
|
||||
mod.addCMacro("HAVE_ARPA_INET_H", "1");
|
||||
mod.addCMacro("HAVE_ARPA_TFTP_H", "1");
|
||||
mod.addCMacro("HAVE_ASSERT_H", "1");
|
||||
mod.addCMacro("HAVE_BASENAME", "1");
|
||||
mod.addCMacro("HAVE_BOOL_T", "1");
|
||||
mod.addCMacro("HAVE_BUILTIN_AVAILABLE", "1");
|
||||
mod.addCMacro("HAVE_CLOCK_GETTIME_MONOTONIC", "1");
|
||||
mod.addCMacro("HAVE_DLFCN_H", "1");
|
||||
mod.addCMacro("HAVE_ERRNO_H", "1");
|
||||
mod.addCMacro("HAVE_FCNTL", "1");
|
||||
mod.addCMacro("HAVE_FCNTL_H", "1");
|
||||
mod.addCMacro("HAVE_FCNTL_O_NONBLOCK", "1");
|
||||
mod.addCMacro("HAVE_FREEADDRINFO", "1");
|
||||
mod.addCMacro("HAVE_FSETXATTR", "1");
|
||||
mod.addCMacro("HAVE_FSETXATTR_5", "1");
|
||||
mod.addCMacro("HAVE_FTRUNCATE", "1");
|
||||
mod.addCMacro("HAVE_GETADDRINFO", "1");
|
||||
mod.addCMacro("HAVE_GETEUID", "1");
|
||||
mod.addCMacro("HAVE_GETHOSTBYNAME", "1");
|
||||
mod.addCMacro("HAVE_GETHOSTBYNAME_R_6", "1");
|
||||
mod.addCMacro("HAVE_GETHOSTNAME", "1");
|
||||
mod.addCMacro("HAVE_GETPEERNAME", "1");
|
||||
mod.addCMacro("HAVE_GETPPID", "1");
|
||||
mod.addCMacro("HAVE_GETPPID", "1");
|
||||
mod.addCMacro("HAVE_GETPROTOBYNAME", "1");
|
||||
mod.addCMacro("HAVE_GETPWUID", "1");
|
||||
mod.addCMacro("HAVE_GETPWUID_R", "1");
|
||||
mod.addCMacro("HAVE_GETRLIMIT", "1");
|
||||
mod.addCMacro("HAVE_GETSOCKNAME", "1");
|
||||
mod.addCMacro("HAVE_GETTIMEOFDAY", "1");
|
||||
mod.addCMacro("HAVE_GMTIME_R", "1");
|
||||
mod.addCMacro("HAVE_IDN2_H", "1");
|
||||
mod.addCMacro("HAVE_IF_NAMETOINDEX", "1");
|
||||
mod.addCMacro("HAVE_IFADDRS_H", "1");
|
||||
mod.addCMacro("HAVE_INET_ADDR", "1");
|
||||
mod.addCMacro("HAVE_INET_PTON", "1");
|
||||
mod.addCMacro("HAVE_INTTYPES_H", "1");
|
||||
mod.addCMacro("HAVE_IOCTL", "1");
|
||||
mod.addCMacro("HAVE_IOCTL_FIONBIO", "1");
|
||||
mod.addCMacro("HAVE_IOCTL_SIOCGIFADDR", "1");
|
||||
mod.addCMacro("HAVE_LDAP_URL_PARSE", "1");
|
||||
mod.addCMacro("HAVE_LIBGEN_H", "1");
|
||||
mod.addCMacro("HAVE_LIBZ", "1");
|
||||
mod.addCMacro("HAVE_LL", "1");
|
||||
mod.addCMacro("HAVE_LOCALE_H", "1");
|
||||
mod.addCMacro("HAVE_LOCALTIME_R", "1");
|
||||
mod.addCMacro("HAVE_LONGLONG", "1");
|
||||
mod.addCMacro("HAVE_MALLOC_H", "1");
|
||||
mod.addCMacro("HAVE_MEMORY_H", "1");
|
||||
mod.addCMacro("HAVE_NET_IF_H", "1");
|
||||
mod.addCMacro("HAVE_NETDB_H", "1");
|
||||
mod.addCMacro("HAVE_NETINET_IN_H", "1");
|
||||
mod.addCMacro("HAVE_NETINET_TCP_H", "1");
|
||||
mod.addCMacro("HAVE_PIPE", "1");
|
||||
mod.addCMacro("HAVE_POLL", "1");
|
||||
mod.addCMacro("HAVE_POLL_FINE", "1");
|
||||
mod.addCMacro("HAVE_POLL_H", "1");
|
||||
mod.addCMacro("HAVE_POSIX_STRERROR_R", "1");
|
||||
mod.addCMacro("HAVE_PTHREAD_H", "1");
|
||||
mod.addCMacro("HAVE_PWD_H", "1");
|
||||
mod.addCMacro("HAVE_RECV", "1");
|
||||
mod.addCMacro("HAVE_SA_FAMILY_T", "1");
|
||||
mod.addCMacro("HAVE_SELECT", "1");
|
||||
mod.addCMacro("HAVE_SEND", "1");
|
||||
mod.addCMacro("HAVE_SETJMP_H", "1");
|
||||
mod.addCMacro("HAVE_SETLOCALE", "1");
|
||||
mod.addCMacro("HAVE_SETRLIMIT", "1");
|
||||
mod.addCMacro("HAVE_SETSOCKOPT", "1");
|
||||
mod.addCMacro("HAVE_SIGACTION", "1");
|
||||
mod.addCMacro("HAVE_SIGINTERRUPT", "1");
|
||||
mod.addCMacro("HAVE_SIGNAL", "1");
|
||||
mod.addCMacro("HAVE_SIGNAL_H", "1");
|
||||
mod.addCMacro("HAVE_SIGSETJMP", "1");
|
||||
mod.addCMacro("HAVE_SOCKADDR_IN6_SIN6_SCOPE_ID", "1");
|
||||
mod.addCMacro("HAVE_SOCKET", "1");
|
||||
mod.addCMacro("HAVE_STDBOOL_H", "1");
|
||||
mod.addCMacro("HAVE_STDINT_H", "1");
|
||||
mod.addCMacro("HAVE_STDIO_H", "1");
|
||||
mod.addCMacro("HAVE_STDLIB_H", "1");
|
||||
mod.addCMacro("HAVE_STRCASECMP", "1");
|
||||
mod.addCMacro("HAVE_STRDUP", "1");
|
||||
mod.addCMacro("HAVE_STRERROR_R", "1");
|
||||
mod.addCMacro("HAVE_STRING_H", "1");
|
||||
mod.addCMacro("HAVE_STRINGS_H", "1");
|
||||
mod.addCMacro("HAVE_STRSTR", "1");
|
||||
mod.addCMacro("HAVE_STRTOK_R", "1");
|
||||
mod.addCMacro("HAVE_STRTOLL", "1");
|
||||
mod.addCMacro("HAVE_STRUCT_SOCKADDR_STORAGE", "1");
|
||||
mod.addCMacro("HAVE_STRUCT_TIMEVAL", "1");
|
||||
mod.addCMacro("HAVE_SYS_IOCTL_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_PARAM_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_POLL_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_RESOURCE_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_SELECT_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_SOCKET_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_STAT_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_TIME_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_TYPES_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_UIO_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_UN_H", "1");
|
||||
mod.addCMacro("HAVE_TERMIO_H", "1");
|
||||
mod.addCMacro("HAVE_TERMIOS_H", "1");
|
||||
mod.addCMacro("HAVE_TIME_H", "1");
|
||||
mod.addCMacro("HAVE_UNAME", "1");
|
||||
mod.addCMacro("HAVE_UNISTD_H", "1");
|
||||
mod.addCMacro("HAVE_UTIME", "1");
|
||||
mod.addCMacro("HAVE_UTIME_H", "1");
|
||||
mod.addCMacro("HAVE_UTIMES", "1");
|
||||
mod.addCMacro("HAVE_VARIADIC_MACROS_C99", "1");
|
||||
mod.addCMacro("HAVE_VARIADIC_MACROS_GCC", "1");
|
||||
mod.addCMacro("HAVE_ZLIB_H", "1");
|
||||
mod.addCMacro("RANDOM_FILE", "\"/dev/urandom\"");
|
||||
mod.addCMacro("RECV_TYPE_ARG1", "int");
|
||||
mod.addCMacro("RECV_TYPE_ARG2", "void *");
|
||||
mod.addCMacro("RECV_TYPE_ARG3", "size_t");
|
||||
mod.addCMacro("RECV_TYPE_ARG4", "int");
|
||||
mod.addCMacro("RECV_TYPE_RETV", "ssize_t");
|
||||
mod.addCMacro("SEND_QUAL_ARG2", "const");
|
||||
mod.addCMacro("SEND_TYPE_ARG1", "int");
|
||||
mod.addCMacro("SEND_TYPE_ARG2", "void *");
|
||||
mod.addCMacro("SEND_TYPE_ARG3", "size_t");
|
||||
mod.addCMacro("SEND_TYPE_ARG4", "int");
|
||||
mod.addCMacro("SEND_TYPE_RETV", "ssize_t");
|
||||
mod.addCMacro("SIZEOF_CURL_OFF_T", "8");
|
||||
mod.addCMacro("SIZEOF_INT", "4");
|
||||
mod.addCMacro("SIZEOF_LONG", "8");
|
||||
mod.addCMacro("SIZEOF_OFF_T", "8");
|
||||
mod.addCMacro("SIZEOF_SHORT", "2");
|
||||
mod.addCMacro("SIZEOF_SIZE_T", "8");
|
||||
mod.addCMacro("SIZEOF_TIME_T", "8");
|
||||
mod.addCMacro("STDC_HEADERS", "1");
|
||||
mod.addCMacro("TIME_WITH_SYS_TIME", "1");
|
||||
mod.addCMacro("USE_NGHTTP2", "1");
|
||||
mod.addCMacro("USE_MBEDTLS", "1");
|
||||
mod.addCMacro("USE_THREADS_POSIX", "1");
|
||||
mod.addCMacro("USE_UNIX_SOCKETS", "1");
|
||||
}
|
||||
// step
|
||||
const shell_step = b.step("shell", "Run JS shell");
|
||||
shell_step.dependOn(&shell_cmd.step);
|
||||
|
||||
try buildZlib(b, mod);
|
||||
try buildMbedtls(b, mod);
|
||||
try buildNghttp2(b, mod);
|
||||
try buildCurl(b, mod);
|
||||
// test
|
||||
// ----
|
||||
|
||||
switch (target.result.os.tag) {
|
||||
.macos => {
|
||||
// needed for proxying on mac
|
||||
mod.addSystemFrameworkPath(.{ .cwd_relative = "/System/Library/Frameworks" });
|
||||
mod.linkFramework("CoreFoundation", .{});
|
||||
mod.linkFramework("SystemConfiguration", .{});
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
// compile
|
||||
const tests = b.addTest(.{
|
||||
.root_source_file = b.path("src/main_tests.zig"),
|
||||
.test_runner = b.path("src/main_tests.zig"),
|
||||
.target = target,
|
||||
.optimize = mode,
|
||||
});
|
||||
try common(b, tests, options);
|
||||
|
||||
// add jsruntime pretty deps
|
||||
tests.root_module.addAnonymousImport("pretty", .{
|
||||
.root_source_file = b.path("vendor/zig-js-runtime/src/pretty.zig"),
|
||||
});
|
||||
|
||||
const run_tests = b.addRunArtifact(tests);
|
||||
if (b.args) |args| {
|
||||
run_tests.addArgs(args);
|
||||
}
|
||||
|
||||
// step
|
||||
const test_step = b.step("test", "Run unit tests");
|
||||
test_step.dependOn(&run_tests.step);
|
||||
|
||||
// unittest
|
||||
// ----
|
||||
|
||||
// compile
|
||||
const unit_tests = b.addTest(.{
|
||||
.root_source_file = b.path("src/unit_tests.zig"),
|
||||
.test_runner = b.path("src/unit_tests.zig"),
|
||||
.target = target,
|
||||
.optimize = mode,
|
||||
});
|
||||
try common(b, unit_tests, options);
|
||||
|
||||
const run_unit_tests = b.addRunArtifact(unit_tests);
|
||||
if (b.args) |args| {
|
||||
run_unit_tests.addArgs(args);
|
||||
}
|
||||
|
||||
// step
|
||||
const unit_test_step = b.step("unittest", "Run unit tests");
|
||||
unit_test_step.dependOn(&run_unit_tests.step);
|
||||
|
||||
// wpt
|
||||
// -----
|
||||
|
||||
// compile and install
|
||||
const wpt = b.addExecutable(.{
|
||||
.name = "lightpanda-wpt",
|
||||
.root_source_file = b.path("src/main_wpt.zig"),
|
||||
.target = target,
|
||||
.optimize = mode,
|
||||
});
|
||||
try common(b, wpt, options);
|
||||
|
||||
// run
|
||||
const wpt_cmd = b.addRunArtifact(wpt);
|
||||
if (b.args) |args| {
|
||||
wpt_cmd.addArgs(args);
|
||||
}
|
||||
// step
|
||||
const wpt_step = b.step("wpt", "WPT tests");
|
||||
wpt_step.dependOn(&wpt_cmd.step);
|
||||
}
|
||||
|
||||
fn common(
|
||||
b: *std.Build,
|
||||
step: *std.Build.Step.Compile,
|
||||
options: jsruntime.Options,
|
||||
) !void {
|
||||
const target = step.root_module.resolved_target.?;
|
||||
const jsruntimemod = try jsruntime_pkgs.module(
|
||||
b,
|
||||
options,
|
||||
step.root_module.optimize.?,
|
||||
target,
|
||||
);
|
||||
step.root_module.addImport("jsruntime", jsruntimemod);
|
||||
|
||||
const netsurf = try moduleNetSurf(b, target);
|
||||
netsurf.addImport("jsruntime", jsruntimemod);
|
||||
step.root_module.addImport("netsurf", netsurf);
|
||||
|
||||
const asyncio = b.addModule("asyncio", .{
|
||||
.root_source_file = b.path("vendor/zig-async-io/src/lib.zig"),
|
||||
});
|
||||
step.root_module.addImport("asyncio", asyncio);
|
||||
|
||||
const tlsmod = b.addModule("tls", .{
|
||||
.root_source_file = b.path("vendor/tls.zig/src/main.zig"),
|
||||
});
|
||||
step.root_module.addImport("tls", tlsmod);
|
||||
}
|
||||
|
||||
fn moduleNetSurf(b: *std.Build, target: std.Build.ResolvedTarget) !*std.Build.Module {
|
||||
const mod = b.addModule("netsurf", .{
|
||||
.root_source_file = b.path("src/netsurf/netsurf.zig"),
|
||||
.target = target,
|
||||
});
|
||||
|
||||
const os = target.result.os.tag;
|
||||
const arch = target.result.cpu.arch;
|
||||
|
||||
// iconv
|
||||
const libiconv_lib_path = try std.fmt.allocPrint(
b.allocator,
"vendor/libiconv/out/{s}-{s}/lib/libiconv.a",
.{ @tagName(os), @tagName(arch) },
);
const libiconv_include_path = try std.fmt.allocPrint(
b.allocator,
"vendor/libiconv/out/{s}-{s}/include",
.{ @tagName(os), @tagName(arch) },
);
mod.addObjectFile(b.path(libiconv_lib_path));
mod.addIncludePath(b.path(libiconv_include_path));
|
||||
|
||||
{
|
||||
// mimalloc
|
||||
const mimalloc = "vendor/mimalloc";
|
||||
const lib_path = try std.fmt.allocPrint(
|
||||
b.allocator,
|
||||
mimalloc ++ "/out/{s}-{s}/lib/libmimalloc.a",
|
||||
.{ @tagName(os), @tagName(arch) },
|
||||
);
|
||||
mod.addObjectFile(b.path(lib_path));
|
||||
mod.addIncludePath(b.path(mimalloc ++ "/include"));
|
||||
}
|
||||
// mimalloc
|
||||
mod.addImport("mimalloc", (try moduleMimalloc(b, target)));
|
||||
|
||||
// netsurf libs
|
||||
const ns = "vendor/netsurf";
|
||||
const ns_include_path = try std.fmt.allocPrint(
b.allocator,
ns ++ "/out/{s}-{s}/include",
.{ @tagName(os), @tagName(arch) },
);
|
||||
@@ -443,383 +234,34 @@ fn moduleNetSurf(b: *Build, mod: *Build.Module) !void {
|
||||
};
|
||||
inline for (libs) |lib| {
|
||||
const ns_lib_path = try std.fmt.allocPrint(
b.allocator,
ns ++ "/out/{s}-{s}/lib/" ++ lib ++ ".a",
.{ @tagName(os), @tagName(arch) },
);
|
||||
mod.addObjectFile(b.path(ns_lib_path));
|
||||
mod.addIncludePath(b.path(ns ++ "/" ++ lib ++ "/src"));
|
||||
}
|
||||
|
||||
return mod;
|
||||
}
|
||||
|
||||
fn buildZlib(b: *Build, m: *Build.Module) !void {
|
||||
const zlib = b.addLibrary(.{
|
||||
.name = "zlib",
|
||||
.root_module = m,
|
||||
fn moduleMimalloc(b: *std.Build, target: std.Build.ResolvedTarget) !*std.Build.Module {
|
||||
const mod = b.addModule("mimalloc", .{
|
||||
.root_source_file = b.path("src/mimalloc/mimalloc.zig"),
|
||||
.target = target,
|
||||
});
|
||||
|
||||
const root = "vendor/zlib/";
|
||||
zlib.installHeader(b.path(root ++ "zlib.h"), "zlib.h");
|
||||
zlib.installHeader(b.path(root ++ "zconf.h"), "zconf.h");
|
||||
zlib.addCSourceFiles(.{ .flags = &.{
|
||||
"-DHAVE_SYS_TYPES_H",
|
||||
"-DHAVE_STDINT_H",
|
||||
"-DHAVE_STDDEF_H",
|
||||
}, .files = &.{
|
||||
root ++ "adler32.c",
|
||||
root ++ "compress.c",
|
||||
root ++ "crc32.c",
|
||||
root ++ "deflate.c",
|
||||
root ++ "gzclose.c",
|
||||
root ++ "gzlib.c",
|
||||
root ++ "gzread.c",
|
||||
root ++ "gzwrite.c",
|
||||
root ++ "inflate.c",
|
||||
root ++ "infback.c",
|
||||
root ++ "inftrees.c",
|
||||
root ++ "inffast.c",
|
||||
root ++ "trees.c",
|
||||
root ++ "uncompr.c",
|
||||
root ++ "zutil.c",
|
||||
} });
|
||||
}
|
||||
|
||||
fn buildMbedtls(b: *Build, m: *Build.Module) !void {
|
||||
const mbedtls = b.addLibrary(.{
|
||||
.name = "mbedtls",
|
||||
.root_module = m,
|
||||
});
|
||||
|
||||
const root = "vendor/mbedtls/";
|
||||
mbedtls.addIncludePath(b.path(root ++ "include"));
|
||||
mbedtls.addIncludePath(b.path(root ++ "library"));
|
||||
|
||||
mbedtls.addCSourceFiles(.{ .flags = &.{}, .files = &.{
|
||||
root ++ "library/aes.c",
|
||||
root ++ "library/aesni.c",
|
||||
root ++ "library/aesce.c",
|
||||
root ++ "library/aria.c",
|
||||
root ++ "library/asn1parse.c",
|
||||
root ++ "library/asn1write.c",
|
||||
root ++ "library/base64.c",
|
||||
root ++ "library/bignum.c",
|
||||
root ++ "library/bignum_core.c",
|
||||
root ++ "library/bignum_mod.c",
|
||||
root ++ "library/bignum_mod_raw.c",
|
||||
root ++ "library/camellia.c",
|
||||
root ++ "library/ccm.c",
|
||||
root ++ "library/chacha20.c",
|
||||
root ++ "library/chachapoly.c",
|
||||
root ++ "library/cipher.c",
|
||||
root ++ "library/cipher_wrap.c",
|
||||
root ++ "library/constant_time.c",
|
||||
root ++ "library/cmac.c",
|
||||
root ++ "library/ctr_drbg.c",
|
||||
root ++ "library/des.c",
|
||||
root ++ "library/dhm.c",
|
||||
root ++ "library/ecdh.c",
|
||||
root ++ "library/ecdsa.c",
|
||||
root ++ "library/ecjpake.c",
|
||||
root ++ "library/ecp.c",
|
||||
root ++ "library/ecp_curves.c",
|
||||
root ++ "library/entropy.c",
|
||||
root ++ "library/entropy_poll.c",
|
||||
root ++ "library/error.c",
|
||||
root ++ "library/gcm.c",
|
||||
root ++ "library/hkdf.c",
|
||||
root ++ "library/hmac_drbg.c",
|
||||
root ++ "library/lmots.c",
|
||||
root ++ "library/lms.c",
|
||||
root ++ "library/md.c",
|
||||
root ++ "library/md5.c",
|
||||
root ++ "library/memory_buffer_alloc.c",
|
||||
root ++ "library/nist_kw.c",
|
||||
root ++ "library/oid.c",
|
||||
root ++ "library/padlock.c",
|
||||
root ++ "library/pem.c",
|
||||
root ++ "library/pk.c",
|
||||
root ++ "library/pk_ecc.c",
|
||||
root ++ "library/pk_wrap.c",
|
||||
root ++ "library/pkcs12.c",
|
||||
root ++ "library/pkcs5.c",
|
||||
root ++ "library/pkparse.c",
|
||||
root ++ "library/pkwrite.c",
|
||||
root ++ "library/platform.c",
|
||||
root ++ "library/platform_util.c",
|
||||
root ++ "library/poly1305.c",
|
||||
root ++ "library/psa_crypto.c",
|
||||
root ++ "library/psa_crypto_aead.c",
|
||||
root ++ "library/psa_crypto_cipher.c",
|
||||
root ++ "library/psa_crypto_client.c",
|
||||
root ++ "library/psa_crypto_ffdh.c",
|
||||
root ++ "library/psa_crypto_driver_wrappers_no_static.c",
|
||||
root ++ "library/psa_crypto_ecp.c",
|
||||
root ++ "library/psa_crypto_hash.c",
|
||||
root ++ "library/psa_crypto_mac.c",
|
||||
root ++ "library/psa_crypto_pake.c",
|
||||
root ++ "library/psa_crypto_rsa.c",
|
||||
root ++ "library/psa_crypto_se.c",
|
||||
root ++ "library/psa_crypto_slot_management.c",
|
||||
root ++ "library/psa_crypto_storage.c",
|
||||
root ++ "library/psa_its_file.c",
|
||||
root ++ "library/psa_util.c",
|
||||
root ++ "library/ripemd160.c",
|
||||
root ++ "library/rsa.c",
|
||||
root ++ "library/rsa_alt_helpers.c",
|
||||
root ++ "library/sha1.c",
|
||||
root ++ "library/sha3.c",
|
||||
root ++ "library/sha256.c",
|
||||
root ++ "library/sha512.c",
|
||||
root ++ "library/threading.c",
|
||||
root ++ "library/timing.c",
|
||||
root ++ "library/version.c",
|
||||
root ++ "library/version_features.c",
|
||||
root ++ "library/pkcs7.c",
|
||||
root ++ "library/x509.c",
|
||||
root ++ "library/x509_create.c",
|
||||
root ++ "library/x509_crl.c",
|
||||
root ++ "library/x509_crt.c",
|
||||
root ++ "library/x509_csr.c",
|
||||
root ++ "library/x509write.c",
|
||||
root ++ "library/x509write_crt.c",
|
||||
root ++ "library/x509write_csr.c",
|
||||
root ++ "library/debug.c",
|
||||
root ++ "library/mps_reader.c",
|
||||
root ++ "library/mps_trace.c",
|
||||
root ++ "library/net_sockets.c",
|
||||
root ++ "library/ssl_cache.c",
|
||||
root ++ "library/ssl_ciphersuites.c",
|
||||
root ++ "library/ssl_client.c",
|
||||
root ++ "library/ssl_cookie.c",
|
||||
root ++ "library/ssl_debug_helpers_generated.c",
|
||||
root ++ "library/ssl_msg.c",
|
||||
root ++ "library/ssl_ticket.c",
|
||||
root ++ "library/ssl_tls.c",
|
||||
root ++ "library/ssl_tls12_client.c",
|
||||
root ++ "library/ssl_tls12_server.c",
|
||||
root ++ "library/ssl_tls13_keys.c",
|
||||
root ++ "library/ssl_tls13_server.c",
|
||||
root ++ "library/ssl_tls13_client.c",
|
||||
root ++ "library/ssl_tls13_generic.c",
|
||||
} });
|
||||
}
|
||||
|
||||
fn buildNghttp2(b: *Build, m: *Build.Module) !void {
|
||||
const nghttp2 = b.addLibrary(.{
|
||||
.name = "nghttp2",
|
||||
.root_module = m,
|
||||
});
|
||||
|
||||
const root = "vendor/nghttp2/";
|
||||
nghttp2.addIncludePath(b.path(root ++ "lib"));
|
||||
nghttp2.addIncludePath(b.path(root ++ "lib/includes"));
|
||||
nghttp2.addCSourceFiles(.{ .flags = &.{
|
||||
"-DNGHTTP2_STATICLIB",
|
||||
"-DHAVE_NETINET_IN",
|
||||
"-DHAVE_TIME_H",
|
||||
}, .files = &.{
|
||||
root ++ "lib/sfparse.c",
|
||||
root ++ "lib/nghttp2_alpn.c",
|
||||
root ++ "lib/nghttp2_buf.c",
|
||||
root ++ "lib/nghttp2_callbacks.c",
|
||||
root ++ "lib/nghttp2_debug.c",
|
||||
root ++ "lib/nghttp2_extpri.c",
|
||||
root ++ "lib/nghttp2_frame.c",
|
||||
root ++ "lib/nghttp2_hd.c",
|
||||
root ++ "lib/nghttp2_hd_huffman.c",
|
||||
root ++ "lib/nghttp2_hd_huffman_data.c",
|
||||
root ++ "lib/nghttp2_helper.c",
|
||||
root ++ "lib/nghttp2_http.c",
|
||||
root ++ "lib/nghttp2_map.c",
|
||||
root ++ "lib/nghttp2_mem.c",
|
||||
root ++ "lib/nghttp2_option.c",
|
||||
root ++ "lib/nghttp2_outbound_item.c",
|
||||
root ++ "lib/nghttp2_pq.c",
|
||||
root ++ "lib/nghttp2_priority_spec.c",
|
||||
root ++ "lib/nghttp2_queue.c",
|
||||
root ++ "lib/nghttp2_rcbuf.c",
|
||||
root ++ "lib/nghttp2_session.c",
|
||||
root ++ "lib/nghttp2_stream.c",
|
||||
root ++ "lib/nghttp2_submit.c",
|
||||
root ++ "lib/nghttp2_version.c",
|
||||
root ++ "lib/nghttp2_ratelim.c",
|
||||
root ++ "lib/nghttp2_time.c",
|
||||
} });
|
||||
}
|
||||
|
||||
fn buildCurl(b: *Build, m: *Build.Module) !void {
|
||||
const curl = b.addLibrary(.{
|
||||
.name = "curl",
|
||||
.root_module = m,
|
||||
});
|
||||
|
||||
const root = "vendor/curl/";
|
||||
|
||||
curl.addIncludePath(b.path(root ++ "lib"));
|
||||
curl.addIncludePath(b.path(root ++ "include"));
|
||||
curl.addCSourceFiles(.{
|
||||
.flags = &.{},
|
||||
.files = &.{
|
||||
root ++ "lib/altsvc.c",
|
||||
root ++ "lib/amigaos.c",
|
||||
root ++ "lib/asyn-ares.c",
|
||||
root ++ "lib/asyn-base.c",
|
||||
root ++ "lib/asyn-thrdd.c",
|
||||
root ++ "lib/bufq.c",
|
||||
root ++ "lib/bufref.c",
|
||||
root ++ "lib/cf-h1-proxy.c",
|
||||
root ++ "lib/cf-h2-proxy.c",
|
||||
root ++ "lib/cf-haproxy.c",
|
||||
root ++ "lib/cf-https-connect.c",
|
||||
root ++ "lib/cf-socket.c",
|
||||
root ++ "lib/cfilters.c",
|
||||
root ++ "lib/conncache.c",
|
||||
root ++ "lib/connect.c",
|
||||
root ++ "lib/content_encoding.c",
|
||||
root ++ "lib/cookie.c",
|
||||
root ++ "lib/cshutdn.c",
|
||||
root ++ "lib/curl_addrinfo.c",
|
||||
root ++ "lib/curl_des.c",
|
||||
root ++ "lib/curl_endian.c",
|
||||
root ++ "lib/curl_fnmatch.c",
|
||||
root ++ "lib/curl_get_line.c",
|
||||
root ++ "lib/curl_gethostname.c",
|
||||
root ++ "lib/curl_gssapi.c",
|
||||
root ++ "lib/curl_memrchr.c",
|
||||
root ++ "lib/curl_ntlm_core.c",
|
||||
root ++ "lib/curl_range.c",
|
||||
root ++ "lib/curl_rtmp.c",
|
||||
root ++ "lib/curl_sasl.c",
|
||||
root ++ "lib/curl_sha512_256.c",
|
||||
root ++ "lib/curl_sspi.c",
|
||||
root ++ "lib/curl_threads.c",
|
||||
root ++ "lib/curl_trc.c",
|
||||
root ++ "lib/cw-out.c",
|
||||
root ++ "lib/cw-pause.c",
|
||||
root ++ "lib/dict.c",
|
||||
root ++ "lib/doh.c",
|
||||
root ++ "lib/dynhds.c",
|
||||
root ++ "lib/easy.c",
|
||||
root ++ "lib/easygetopt.c",
|
||||
root ++ "lib/easyoptions.c",
|
||||
root ++ "lib/escape.c",
|
||||
root ++ "lib/fake_addrinfo.c",
|
||||
root ++ "lib/file.c",
|
||||
root ++ "lib/fileinfo.c",
|
||||
root ++ "lib/fopen.c",
|
||||
root ++ "lib/formdata.c",
|
||||
root ++ "lib/ftp.c",
|
||||
root ++ "lib/ftplistparser.c",
|
||||
root ++ "lib/getenv.c",
|
||||
root ++ "lib/getinfo.c",
|
||||
root ++ "lib/gopher.c",
|
||||
root ++ "lib/hash.c",
|
||||
root ++ "lib/headers.c",
|
||||
root ++ "lib/hmac.c",
|
||||
root ++ "lib/hostip.c",
|
||||
root ++ "lib/hostip4.c",
|
||||
root ++ "lib/hostip6.c",
|
||||
root ++ "lib/hsts.c",
|
||||
root ++ "lib/http.c",
|
||||
root ++ "lib/http1.c",
|
||||
root ++ "lib/http2.c",
|
||||
root ++ "lib/http_aws_sigv4.c",
|
||||
root ++ "lib/http_chunks.c",
|
||||
root ++ "lib/http_digest.c",
|
||||
root ++ "lib/http_negotiate.c",
|
||||
root ++ "lib/http_ntlm.c",
|
||||
root ++ "lib/http_proxy.c",
|
||||
root ++ "lib/httpsrr.c",
|
||||
root ++ "lib/idn.c",
|
||||
root ++ "lib/if2ip.c",
|
||||
root ++ "lib/imap.c",
|
||||
root ++ "lib/krb5.c",
|
||||
root ++ "lib/ldap.c",
|
||||
root ++ "lib/llist.c",
|
||||
root ++ "lib/macos.c",
|
||||
root ++ "lib/md4.c",
|
||||
root ++ "lib/md5.c",
|
||||
root ++ "lib/memdebug.c",
|
||||
root ++ "lib/mime.c",
|
||||
root ++ "lib/mprintf.c",
|
||||
root ++ "lib/mqtt.c",
|
||||
root ++ "lib/multi.c",
|
||||
root ++ "lib/multi_ev.c",
|
||||
root ++ "lib/netrc.c",
|
||||
root ++ "lib/noproxy.c",
|
||||
root ++ "lib/openldap.c",
|
||||
root ++ "lib/parsedate.c",
|
||||
root ++ "lib/pingpong.c",
|
||||
root ++ "lib/pop3.c",
|
||||
root ++ "lib/progress.c",
|
||||
root ++ "lib/psl.c",
|
||||
root ++ "lib/rand.c",
|
||||
root ++ "lib/rename.c",
|
||||
root ++ "lib/request.c",
|
||||
root ++ "lib/rtsp.c",
|
||||
root ++ "lib/select.c",
|
||||
root ++ "lib/sendf.c",
|
||||
root ++ "lib/setopt.c",
|
||||
root ++ "lib/sha256.c",
|
||||
root ++ "lib/share.c",
|
||||
root ++ "lib/slist.c",
|
||||
root ++ "lib/smb.c",
|
||||
root ++ "lib/smtp.c",
|
||||
root ++ "lib/socketpair.c",
|
||||
root ++ "lib/socks.c",
|
||||
root ++ "lib/socks_gssapi.c",
|
||||
root ++ "lib/socks_sspi.c",
|
||||
root ++ "lib/speedcheck.c",
|
||||
root ++ "lib/splay.c",
|
||||
root ++ "lib/strcase.c",
|
||||
root ++ "lib/strdup.c",
|
||||
root ++ "lib/strequal.c",
|
||||
root ++ "lib/strerror.c",
|
||||
root ++ "lib/system_win32.c",
|
||||
root ++ "lib/telnet.c",
|
||||
root ++ "lib/tftp.c",
|
||||
root ++ "lib/transfer.c",
|
||||
root ++ "lib/uint-bset.c",
|
||||
root ++ "lib/uint-hash.c",
|
||||
root ++ "lib/uint-spbset.c",
|
||||
root ++ "lib/uint-table.c",
|
||||
root ++ "lib/url.c",
|
||||
root ++ "lib/urlapi.c",
|
||||
root ++ "lib/version.c",
|
||||
root ++ "lib/ws.c",
|
||||
root ++ "lib/curlx/base64.c",
|
||||
root ++ "lib/curlx/dynbuf.c",
|
||||
root ++ "lib/curlx/inet_ntop.c",
|
||||
root ++ "lib/curlx/nonblock.c",
|
||||
root ++ "lib/curlx/strparse.c",
|
||||
root ++ "lib/curlx/timediff.c",
|
||||
root ++ "lib/curlx/timeval.c",
|
||||
root ++ "lib/curlx/wait.c",
|
||||
root ++ "lib/curlx/warnless.c",
|
||||
root ++ "lib/vquic/curl_ngtcp2.c",
|
||||
root ++ "lib/vquic/curl_osslq.c",
|
||||
root ++ "lib/vquic/curl_quiche.c",
|
||||
root ++ "lib/vquic/vquic.c",
|
||||
root ++ "lib/vquic/vquic-tls.c",
|
||||
root ++ "lib/vauth/cleartext.c",
|
||||
root ++ "lib/vauth/cram.c",
|
||||
root ++ "lib/vauth/digest.c",
|
||||
root ++ "lib/vauth/digest_sspi.c",
|
||||
root ++ "lib/vauth/gsasl.c",
|
||||
root ++ "lib/vauth/krb5_gssapi.c",
|
||||
root ++ "lib/vauth/krb5_sspi.c",
|
||||
root ++ "lib/vauth/ntlm.c",
|
||||
root ++ "lib/vauth/ntlm_sspi.c",
|
||||
root ++ "lib/vauth/oauth2.c",
|
||||
root ++ "lib/vauth/spnego_gssapi.c",
|
||||
root ++ "lib/vauth/spnego_sspi.c",
|
||||
root ++ "lib/vauth/vauth.c",
|
||||
root ++ "lib/vtls/cipher_suite.c",
|
||||
root ++ "lib/vtls/mbedtls.c",
|
||||
root ++ "lib/vtls/mbedtls_threadlock.c",
|
||||
root ++ "lib/vtls/vtls.c",
|
||||
root ++ "lib/vtls/vtls_scache.c",
|
||||
root ++ "lib/vtls/x509asn1.c",
|
||||
},
|
||||
});
|
||||
const os = target.result.os.tag;
|
||||
const arch = target.result.cpu.arch;
|
||||
|
||||
const mimalloc = "vendor/mimalloc";
|
||||
const lib_path = try std.fmt.allocPrint(
|
||||
mod.owner.allocator,
|
||||
mimalloc ++ "/out/{s}-{s}/lib/libmimalloc.a",
|
||||
.{ @tagName(os), @tagName(arch) },
|
||||
);
|
||||
mod.addObjectFile(b.path(lib_path));
|
||||
mod.addIncludePath(b.path(mimalloc ++ "/include"));
|
||||
|
||||
return mod;
|
||||
}
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
.{
|
||||
.name = .browser,
|
||||
.paths = .{""},
|
||||
.version = "0.0.0",
|
||||
.fingerprint = 0xda130f3af836cea0,
|
||||
.dependencies = .{
|
||||
.v8 = .{
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/7177ee1ae267a44751a0e7e012e257177699a375.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH63TCAwC1D1hEiOtbEnLBbtz9ZPHrdiGWLcBcYQB7",
|
||||
},
|
||||
// .v8 = .{ .path = "../zig-v8-fork" }
|
||||
},
|
||||
}
|
||||
flake.lock (generated)
@@ -1,180 +0,0 @@
|
||||
{
|
||||
"nodes": {
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1696426674,
|
||||
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils_2": {
|
||||
"inputs": {
|
||||
"systems": "systems_2"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1705309234,
|
||||
"narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"gitignore": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"zlsPkg",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1709087332,
|
||||
"narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
|
||||
"owner": "hercules-ci",
|
||||
"repo": "gitignore.nix",
|
||||
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "hercules-ci",
|
||||
"repo": "gitignore.nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1756822655,
|
||||
"narHash": "sha256-xQAk8xLy7srAkR5NMZFsQFioL02iTHuuEIs3ohGpgdk=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "4bdac60bfe32c41103ae500ddf894c258291dd61",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nixos",
|
||||
"ref": "release-25.05",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"zigPkgs": "zigPkgs",
|
||||
"zlsPkg": "zlsPkg"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"systems_2": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"zigPkgs": {
|
||||
"inputs": {
|
||||
"flake-compat": "flake-compat",
|
||||
"flake-utils": "flake-utils_2",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1756555914,
|
||||
"narHash": "sha256-7yoSPIVEuL+3Wzf6e7NHuW3zmruHizRrYhGerjRHTLI=",
|
||||
"owner": "mitchellh",
|
||||
"repo": "zig-overlay",
|
||||
"rev": "d0df3a2fd0f11134409d6d5ea0e510e5e477f7d6",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "mitchellh",
|
||||
"repo": "zig-overlay",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"zlsPkg": {
|
||||
"inputs": {
|
||||
"gitignore": "gitignore",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
"zig-overlay": [
|
||||
"zigPkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1756048867,
|
||||
"narHash": "sha256-GFzSHUljcxy7sM1PaabbkQUdUnLwpherekPWJFxXtnk=",
|
||||
"owner": "zigtools",
|
||||
"repo": "zls",
|
||||
"rev": "ce6c8f02c78e622421cfc2405c67c5222819ec03",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "zigtools",
|
||||
"ref": "0.15.0",
|
||||
"repo": "zls",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
||||
flake.nix
@@ -1,77 +0,0 @@
|
||||
{
|
||||
description = "headless browser designed for AI and automation";
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:nixos/nixpkgs/release-25.05";
|
||||
|
||||
zigPkgs.url = "github:mitchellh/zig-overlay";
|
||||
zigPkgs.inputs.nixpkgs.follows = "nixpkgs";
|
||||
|
||||
zlsPkg.url = "github:zigtools/zls/0.15.0";
|
||||
zlsPkg.inputs.zig-overlay.follows = "zigPkgs";
|
||||
zlsPkg.inputs.nixpkgs.follows = "nixpkgs";
|
||||
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
};
|
||||
|
||||
outputs =
|
||||
{
|
||||
nixpkgs,
|
||||
zigPkgs,
|
||||
zlsPkg,
|
||||
flake-utils,
|
||||
...
|
||||
}:
|
||||
flake-utils.lib.eachDefaultSystem (
|
||||
system:
|
||||
let
|
||||
overlays = [
|
||||
(final: prev: {
|
||||
zigpkgs = zigPkgs.packages.${prev.system};
|
||||
zls = zlsPkg.packages.${prev.system}.default;
|
||||
})
|
||||
];
|
||||
|
||||
pkgs = import nixpkgs {
|
||||
inherit system overlays;
|
||||
};
|
||||
|
||||
# We need crtbeginS.o for building.
|
||||
crtFiles = pkgs.runCommand "crt-files" { } ''
|
||||
mkdir -p $out/lib
|
||||
cp -r ${pkgs.gcc.cc}/lib/gcc $out/lib/gcc
|
||||
'';
|
||||
|
||||
# This build pipeline is very unhappy without an FHS-compliant env.
|
||||
fhs = pkgs.buildFHSEnv {
|
||||
name = "fhs-shell";
|
||||
multiArch = true;
|
||||
targetPkgs =
|
||||
pkgs: with pkgs; [
|
||||
# Build Tools
|
||||
zigpkgs."0.15.1"
|
||||
zls
|
||||
python3
|
||||
pkg-config
|
||||
cmake
|
||||
gperf
|
||||
|
||||
# GCC
|
||||
gcc
|
||||
gcc.cc.lib
|
||||
crtFiles
|
||||
|
||||
# Libraries
|
||||
expat.dev
|
||||
glib.dev
|
||||
glibc.dev
|
||||
zlib
|
||||
zlib.dev
|
||||
];
|
||||
};
|
||||
in
|
||||
{
|
||||
devShells.default = fhs.env;
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -1,118 +0,0 @@
|
||||
const std = @import("std");
|
||||
|
||||
const TestHTTPServer = @This();
|
||||
|
||||
shutdown: bool,
|
||||
listener: ?std.net.Server,
|
||||
handler: Handler,
|
||||
|
||||
const Handler = *const fn (req: *std.http.Server.Request) anyerror!void;
|
||||
|
||||
pub fn init(handler: Handler) TestHTTPServer {
|
||||
return .{
|
||||
.shutdown = true,
|
||||
.listener = null,
|
||||
.handler = handler,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *TestHTTPServer) void {
|
||||
self.shutdown = true;
|
||||
if (self.listener) |*listener| {
|
||||
listener.deinit();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run(self: *TestHTTPServer, wg: *std.Thread.WaitGroup) !void {
|
||||
const address = try std.net.Address.parseIp("127.0.0.1", 9582);
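// The port is hard-coded; test clients are expected to reach this server at 127.0.0.1:9582.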
|
||||
|
||||
self.listener = try address.listen(.{ .reuse_address = true });
|
||||
var listener = &self.listener.?;
|
||||
|
||||
wg.finish();
|
||||
|
||||
while (true) {
|
||||
const conn = listener.accept() catch |err| {
|
||||
if (self.shutdown) {
|
||||
return;
|
||||
}
|
||||
return err;
|
||||
};
|
||||
const thrd = try std.Thread.spawn(.{}, handleConnection, .{ self, conn });
|
||||
thrd.detach();
|
||||
}
|
||||
}
|
||||
|
||||
fn handleConnection(self: *TestHTTPServer, conn: std.net.Server.Connection) !void {
|
||||
defer conn.stream.close();
|
||||
|
||||
var req_buf: [2048]u8 = undefined;
|
||||
var conn_reader = conn.stream.reader(&req_buf);
|
||||
var conn_writer = conn.stream.writer(&req_buf);
|
||||
|
||||
var http_server = std.http.Server.init(conn_reader.interface(), &conn_writer.interface);
|
||||
|
||||
while (true) {
|
||||
var req = http_server.receiveHead() catch |err| switch (err) {
|
||||
error.ReadFailed => continue,
|
||||
error.HttpConnectionClosing => continue,
|
||||
else => {
|
||||
std.debug.print("Test HTTP Server error: {}\n", .{err});
|
||||
return err;
|
||||
},
|
||||
};
|
||||
self.handler(&req) catch |err| {
|
||||
std.debug.print("test http error '{s}': {}\n", .{ req.head.target, err });
|
||||
try req.respond("server error", .{ .status = .internal_server_error });
|
||||
return;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub fn sendFile(req: *std.http.Server.Request, file_path: []const u8) !void {
|
||||
var file = std.fs.cwd().openFile(file_path, .{}) catch |err| switch (err) {
|
||||
error.FileNotFound => return req.respond("server error", .{ .status = .not_found }),
|
||||
else => return err,
|
||||
};
|
||||
defer file.close();
|
||||
|
||||
const stat = try file.stat();
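// Respond with an explicit content-length (taken from stat) so clients don't need chunked decoding.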
|
||||
var send_buffer: [4096]u8 = undefined;
|
||||
|
||||
var res = try req.respondStreaming(&send_buffer, .{
|
||||
.content_length = stat.size,
|
||||
.respond_options = .{
|
||||
.extra_headers = &.{
|
||||
.{ .name = "content-type", .value = getContentType(file_path) },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
var read_buffer: [4096]u8 = undefined;
|
||||
var reader = file.reader(&read_buffer);
|
||||
_ = try res.writer.sendFileAll(&reader, .unlimited);
|
||||
try res.writer.flush();
|
||||
try res.end();
|
||||
}
|
||||
|
||||
fn getContentType(file_path: []const u8) []const u8 {
|
||||
if (std.mem.endsWith(u8, file_path, ".js")) {
|
||||
return "application/json";
|
||||
}
|
||||
|
||||
if (std.mem.endsWith(u8, file_path, ".html")) {
|
||||
return "text/html";
|
||||
}
|
||||
|
||||
if (std.mem.endsWith(u8, file_path, ".htm")) {
|
||||
return "text/html";
|
||||
}
|
||||
|
||||
if (std.mem.endsWith(u8, file_path, ".xml")) {
|
||||
// some wpt tests do this
|
||||
return "text/xml";
|
||||
}
|
||||
|
||||
std.debug.print("TestHTTPServer asked to serve an unknown file type: {s}\n", .{file_path});
|
||||
return "text/html";
|
||||
}
|
||||
src/apiweb.zig (new file)
@@ -0,0 +1,47 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const generate = @import("generate.zig");
|
||||
|
||||
const Console = @import("jsruntime").Console;
|
||||
|
||||
const DOM = @import("dom/dom.zig");
|
||||
const HTML = @import("html/html.zig");
|
||||
const Events = @import("events/event.zig");
|
||||
const XHR = @import("xhr/xhr.zig");
|
||||
const Storage = @import("storage/storage.zig");
|
||||
const URL = @import("url/url.zig");
|
||||
const Iterators = @import("iterator/iterator.zig");
|
||||
const XMLSerializer = @import("xmlserializer/xmlserializer.zig");
|
||||
|
||||
pub const HTMLDocument = @import("html/document.zig").HTMLDocument;
|
||||
|
||||
// Interfaces
|
||||
pub const Interfaces = generate.Tuple(.{
|
||||
Console,
|
||||
DOM.Interfaces,
|
||||
Events.Interfaces,
|
||||
HTML.Interfaces,
|
||||
XHR.Interfaces,
|
||||
Storage.Interfaces,
|
||||
URL.Interfaces,
|
||||
Iterators.Interfaces,
|
||||
XMLSerializer.Interfaces,
|
||||
}){};
|
||||
|
||||
pub const UserContext = @import("user_context.zig").UserContext;
|
||||
src/app.zig
@@ -1,115 +0,0 @@
|
||||
const std = @import("std");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("log.zig");
|
||||
const Http = @import("http/Http.zig");
|
||||
const Platform = @import("runtime/js.zig").Platform;
|
||||
|
||||
const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
|
||||
const Notification = @import("notification.zig").Notification;
|
||||
|
||||
// Container for global state / objects that various parts of the system
|
||||
// might need.
|
||||
pub const App = struct {
|
||||
http: Http,
|
||||
config: Config,
|
||||
platform: Platform,
|
||||
allocator: Allocator,
|
||||
telemetry: Telemetry,
|
||||
app_dir_path: ?[]const u8,
|
||||
notification: *Notification,
|
||||
|
||||
pub const RunMode = enum {
|
||||
help,
|
||||
fetch,
|
||||
serve,
|
||||
version,
|
||||
};
|
||||
|
||||
pub const Config = struct {
|
||||
run_mode: RunMode,
|
||||
tls_verify_host: bool = true,
|
||||
http_proxy: ?[:0]const u8 = null,
|
||||
proxy_bearer_token: ?[:0]const u8 = null,
|
||||
http_timeout_ms: ?u31 = null,
|
||||
http_connect_timeout_ms: ?u31 = null,
|
||||
http_max_host_open: ?u8 = null,
|
||||
http_max_concurrent: ?u8 = null,
|
||||
user_agent: [:0]const u8,
|
||||
};
|
||||
|
||||
pub fn init(allocator: Allocator, config: Config) !*App {
|
||||
const app = try allocator.create(App);
|
||||
errdefer allocator.destroy(app);
|
||||
|
||||
const notification = try Notification.init(allocator, null);
|
||||
errdefer notification.deinit();
|
||||
|
||||
var http = try Http.init(allocator, .{
|
||||
.max_host_open = config.http_max_host_open orelse 4,
|
||||
.max_concurrent = config.http_max_concurrent orelse 10,
|
||||
.timeout_ms = config.http_timeout_ms orelse 5000,
|
||||
.connect_timeout_ms = config.http_connect_timeout_ms orelse 0,
|
||||
.http_proxy = config.http_proxy,
|
||||
.tls_verify_host = config.tls_verify_host,
|
||||
.proxy_bearer_token = config.proxy_bearer_token,
|
||||
.user_agent = config.user_agent,
|
||||
});
|
||||
errdefer http.deinit();
|
||||
|
||||
const platform = try Platform.init();
|
||||
errdefer platform.deinit();
|
||||
|
||||
const app_dir_path = getAndMakeAppDir(allocator);
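// May be null when the data directory can't be determined or created; it's treated as optional.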
|
||||
|
||||
app.* = .{
|
||||
.http = http,
|
||||
.allocator = allocator,
|
||||
.telemetry = undefined,
|
||||
.platform = platform,
|
||||
.app_dir_path = app_dir_path,
|
||||
.notification = notification,
|
||||
.config = config,
|
||||
};
|
||||
|
||||
app.telemetry = try Telemetry.init(app, config.run_mode);
|
||||
errdefer app.telemetry.deinit();
|
||||
|
||||
try app.telemetry.register(app.notification);
|
||||
|
||||
return app;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *App) void {
|
||||
const allocator = self.allocator;
|
||||
if (self.app_dir_path) |app_dir_path| {
|
||||
allocator.free(app_dir_path);
|
||||
}
|
||||
self.telemetry.deinit();
|
||||
self.notification.deinit();
|
||||
self.http.deinit();
|
||||
self.platform.deinit();
|
||||
allocator.destroy(self);
|
||||
}
|
||||
};
|
||||
|
||||
fn getAndMakeAppDir(allocator: Allocator) ?[]const u8 {
|
||||
if (@import("builtin").is_test) {
|
||||
return allocator.dupe(u8, "/tmp") catch unreachable;
|
||||
}
|
||||
const app_dir_path = std.fs.getAppDataDir(allocator, "lightpanda") catch |err| {
|
||||
log.warn(.app, "get data dir", .{ .err = err });
|
||||
return null;
|
||||
};
|
||||
|
||||
std.fs.cwd().makePath(app_dir_path) catch |err| switch (err) {
|
||||
error.PathAlreadyExists => return app_dir_path,
|
||||
else => {
|
||||
allocator.free(app_dir_path);
|
||||
log.warn(.app, "create data dir", .{ .err = err, .path = app_dir_path });
|
||||
return null;
|
||||
},
|
||||
};
|
||||
return app_dir_path;
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
// Parses data:[<media-type>][;base64],<data>
|
||||
pub fn parse(allocator: Allocator, src: []const u8) !?[]const u8 {
|
||||
if (!std.mem.startsWith(u8, src, "data:")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const uri = src[5..];
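// Everything before the first ',' is the metadata (media type and parameters).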
|
||||
const data_starts = std.mem.indexOfScalar(u8, uri, ',') orelse return null;
|
||||
|
||||
var data = uri[data_starts + 1 ..];
|
||||
|
||||
// Extract the encoding.
|
||||
const metadata = uri[0..data_starts];
|
||||
if (std.mem.endsWith(u8, metadata, ";base64")) {
|
||||
const decoder = std.base64.standard.Decoder;
|
||||
const decoded_size = try decoder.calcSizeForSlice(data);
|
||||
|
||||
const buffer = try allocator.alloc(u8, decoded_size);
|
||||
errdefer allocator.free(buffer);
|
||||
|
||||
try decoder.decode(buffer, data);
|
||||
data = buffer;
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
const testing = @import("../testing.zig");
|
||||
test "DataURI: parse valid" {
|
||||
try test_valid("data:text/javascript; charset=utf-8;base64,Zm9v", "foo");
|
||||
try test_valid("data:text/javascript; charset=utf-8;,foo", "foo");
|
||||
try test_valid("data:,foo", "foo");
|
||||
}
|
||||
|
||||
test "DataURI: parse invalid" {
|
||||
try test_cannot_parse("atad:,foo");
|
||||
try test_cannot_parse("data:foo");
|
||||
try test_cannot_parse("data:");
|
||||
}
|
||||
|
||||
fn test_valid(uri: []const u8, expected: []const u8) !void {
|
||||
defer testing.reset();
|
||||
const data_uri = try parse(testing.arena_allocator, uri) orelse return error.TestFailed;
|
||||
try testing.expectEqual(expected, data_uri);
|
||||
}
|
||||
|
||||
fn test_cannot_parse(uri: []const u8) !void {
|
||||
try testing.expectEqual(null, parse(undefined, uri));
|
||||
}
|
||||
@@ -1,161 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const Scheduler = @This();
|
||||
|
||||
high_priority: Queue,
|
||||
|
||||
// For repeating tasks. We only want to run these if there are other things to
|
||||
// do. We don't, for example, want a window.setInterval or the page.runMicrotasks
|
||||
// to block the page.wait.
|
||||
low_priority: Queue,
|
||||
|
||||
// we expect allocator to be the page arena, hence we never call high_priority.deinit
|
||||
pub fn init(allocator: Allocator) Scheduler {
|
||||
return .{
|
||||
.high_priority = Queue.init(allocator, {}),
|
||||
.low_priority = Queue.init(allocator, {}),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn reset(self: *Scheduler) void {
|
||||
self.high_priority.clearRetainingCapacity();
|
||||
self.low_priority.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
const AddOpts = struct {
|
||||
name: []const u8 = "",
|
||||
low_priority: bool = false,
|
||||
};
|
||||
pub fn add(self: *Scheduler, ctx: *anyopaque, func: Task.Func, ms: u32, opts: AddOpts) !void {
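// `ms` is a relative delay in milliseconds; the task runs once unless its
// func returns a new delay, in which case it is re-queued as low priority.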
|
||||
var low_priority = opts.low_priority;
|
||||
if (ms > 5_000) {
|
||||
// we don't want tasks in the far future to block page.wait from
|
||||
// completing. However, if page.wait is called multiple times (maybe
|
||||
// a CDP driver is waiting for something to happen), then we do want
|
||||
// to [eventually] run these when their time is up.
|
||||
low_priority = true;
|
||||
}
|
||||
|
||||
var q = if (low_priority) &self.low_priority else &self.high_priority;
|
||||
return q.add(.{
|
||||
.ms = std.time.milliTimestamp() + ms,
|
||||
.ctx = ctx,
|
||||
.func = func,
|
||||
.name = opts.name,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn run(self: *Scheduler) !?i32 {
|
||||
_ = try self.runQueue(&self.low_priority);
|
||||
return self.runQueue(&self.high_priority);
|
||||
}
|
||||
|
||||
fn runQueue(self: *Scheduler, queue: *Queue) !?i32 {
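// Runs every task in `queue` whose deadline has passed; returns the number of
// milliseconds until the next pending task, or null once the queue is drained.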
|
||||
// this is O(1)
|
||||
if (queue.count() == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const now = std.time.milliTimestamp();
|
||||
|
||||
var next = queue.peek();
|
||||
while (next) |task| {
|
||||
const time_to_next = task.ms - now;
|
||||
if (time_to_next > 0) {
|
||||
// @intCast is pretty safe since we limit tasks to just 5 seconds
|
||||
// in the future
|
||||
return @intCast(time_to_next);
|
||||
}
|
||||
|
||||
if (task.func(task.ctx)) |repeat_delay| {
|
||||
// if we do (now + 0) then our WHILE loop will run endlessly.
|
||||
// no task should ever return 0
|
||||
std.debug.assert(repeat_delay != 0);
|
||||
|
||||
var copy = task;
|
||||
copy.ms = now + repeat_delay;
|
||||
try self.low_priority.add(copy);
|
||||
}
|
||||
_ = queue.remove();
|
||||
next = queue.peek();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
const Task = struct {
|
||||
ms: i64,
|
||||
func: Func,
|
||||
ctx: *anyopaque,
|
||||
name: []const u8,
|
||||
|
||||
const Func = *const fn (ctx: *anyopaque) ?u32;
|
||||
};
|
||||
|
||||
const Queue = std.PriorityQueue(Task, void, struct {
|
||||
fn compare(_: void, a: Task, b: Task) std.math.Order {
|
||||
return std.math.order(a.ms, b.ms);
|
||||
}
|
||||
}.compare);
|
||||
|
||||
const testing = @import("../testing.zig");
|
||||
test "Scheduler" {
|
||||
defer testing.reset();
|
||||
|
||||
var task = TestTask{ .allocator = testing.arena_allocator };
|
||||
|
||||
var s = Scheduler.init(testing.arena_allocator);
|
||||
try testing.expectEqual(null, s.run());
|
||||
try testing.expectEqual(0, task.calls.items.len);
|
||||
|
||||
try s.add(&task, TestTask.run1, 3, .{});
|
||||
|
||||
try testing.expectDelta(3, try s.run(), 1);
|
||||
try testing.expectEqual(0, task.calls.items.len);
|
||||
|
||||
std.Thread.sleep(std.time.ns_per_ms * 5);
|
||||
try testing.expectEqual(null, s.run());
|
||||
try testing.expectEqualSlices(u32, &.{1}, task.calls.items);
|
||||
|
||||
try s.add(&task, TestTask.run2, 3, .{});
|
||||
try s.add(&task, TestTask.run1, 2, .{});
|
||||
|
||||
std.Thread.sleep(std.time.ns_per_ms * 5);
|
||||
try testing.expectDelta(null, try s.run(), 1);
|
||||
try testing.expectEqualSlices(u32, &.{ 1, 1, 2 }, task.calls.items);
|
||||
}
|
||||
|
||||
const TestTask = struct {
|
||||
allocator: Allocator,
|
||||
calls: std.ArrayListUnmanaged(u32) = .{},
|
||||
|
||||
fn run1(ctx: *anyopaque) ?u32 {
|
||||
var self: *TestTask = @ptrCast(@alignCast(ctx));
|
||||
self.calls.append(self.allocator, 1) catch unreachable;
|
||||
return null;
|
||||
}
|
||||
|
||||
fn run2(ctx: *anyopaque) ?u32 {
|
||||
var self: *TestTask = @ptrCast(@alignCast(ctx));
|
||||
self.calls.append(self.allocator, 2) catch unreachable;
|
||||
return 2;
|
||||
}
|
||||
};
|
||||
@@ -1,832 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const parser = @import("netsurf.zig");
|
||||
|
||||
const Env = @import("env.zig").Env;
|
||||
const Page = @import("page.zig").Page;
|
||||
const DataURI = @import("DataURI.zig");
|
||||
const Http = @import("../http/Http.zig");
|
||||
const Browser = @import("browser.zig").Browser;
|
||||
const URL = @import("../url.zig").URL;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArrayListUnmanaged = std.ArrayListUnmanaged;
|
||||
|
||||
const ScriptManager = @This();
|
||||
|
||||
page: *Page,
|
||||
|
||||
// used to prevent recursive evaluation
|
||||
is_evaluating: bool,
|
||||
|
||||
// used to prevent executing scripts while we're doing a blocking load
|
||||
is_blocking: bool = false,
|
||||
|
||||
// Only once this is true can deferred scripts be run
|
||||
static_scripts_done: bool,
|
||||
|
||||
// List of async scripts. We don't care about the execution order of these, but
|
||||
// on shutdown/abort, we need to cleanup any pending ones.
|
||||
asyncs: OrderList,
|
||||
|
||||
// When an async script is ready to be evaluated, it's moved from asyncs to
|
||||
// this list. You might think we can evaluate an async script as soon as it's
|
||||
// done, but we can only evaluate scripts when `is_blocking == false`. So this
|
||||
// becomes a list of scripts to execute on the next evaluate().
|
||||
asyncs_ready: OrderList,
|
||||
|
||||
// Normal scripts (non-deferred & non-async). These must be executed in order
|
||||
scripts: OrderList,
|
||||
|
||||
// List of deferred scripts. These must be executed in order, but only once
|
||||
// dom_loaded == true,
|
||||
deferreds: OrderList,
|
||||
|
||||
shutdown: bool = false,
|
||||
|
||||
client: *Http.Client,
|
||||
allocator: Allocator,
|
||||
buffer_pool: BufferPool,
|
||||
script_pool: std.heap.MemoryPool(PendingScript),
|
||||
|
||||
const OrderList = std.DoublyLinkedList;
|
||||
|
||||
pub fn init(browser: *Browser, page: *Page) ScriptManager {
|
||||
// page isn't fully initialized, we can setup our reference, but that's it.
|
||||
const allocator = browser.allocator;
|
||||
return .{
|
||||
.page = page,
|
||||
.asyncs = .{},
|
||||
.scripts = .{},
|
||||
.deferreds = .{},
|
||||
.asyncs_ready = .{},
|
||||
.is_evaluating = false,
|
||||
.allocator = allocator,
|
||||
.client = browser.http_client,
|
||||
.static_scripts_done = false,
|
||||
.buffer_pool = BufferPool.init(allocator, 5),
|
||||
.script_pool = std.heap.MemoryPool(PendingScript).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *ScriptManager) void {
|
||||
self.reset();
|
||||
self.buffer_pool.deinit();
|
||||
self.script_pool.deinit();
|
||||
}
|
||||
|
||||
pub fn reset(self: *ScriptManager) void {
|
||||
self.clearList(&self.asyncs);
|
||||
self.clearList(&self.scripts);
|
||||
self.clearList(&self.deferreds);
|
||||
self.clearList(&self.asyncs_ready);
|
||||
self.static_scripts_done = false;
|
||||
}
|
||||
|
||||
fn clearList(_: *const ScriptManager, list: *OrderList) void {
|
||||
while (list.first) |node| {
|
||||
const pending_script: *PendingScript = @fieldParentPtr("node", node);
|
||||
// this removes it from the list
|
||||
pending_script.deinit();
|
||||
}
|
||||
std.debug.assert(list.first == null);
|
||||
}
|
||||
|
||||
pub fn addFromElement(self: *ScriptManager, element: *parser.Element) !void {
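// Entry point for <script> elements found while parsing the document or
// added to it dynamically.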
|
||||
if (try parser.elementGetAttribute(element, "nomodule") != null) {
|
||||
// these scripts should only be loaded if we don't support modules
|
||||
// but since we do support modules, we can just skip them.
|
||||
return;
|
||||
}
|
||||
|
||||
// If a script tag gets dynamically created and added to the dom:
|
||||
// document.getElementsByTagName('head')[0].appendChild(script)
|
||||
// that script tag will immediately get executed by our scriptAddedCallback.
|
||||
// However, if the location where the script tag is inserted happens to be
|
||||
// below where processHTMLDoc currently is, then we'll re-run that same script
|
||||
// again in processHTMLDoc. This flag is used to let us know if a specific
|
||||
// <script> has already been processed.
|
||||
if (try parser.scriptGetProcessed(@ptrCast(element))) {
|
||||
return;
|
||||
}
|
||||
try parser.scriptSetProcessed(@ptrCast(element), true);
|
||||
|
||||
const kind: Script.Kind = blk: {
|
||||
const script_type = try parser.elementGetAttribute(element, "type") orelse break :blk .javascript;
|
||||
if (script_type.len == 0) {
|
||||
break :blk .javascript;
|
||||
}
|
||||
if (std.ascii.eqlIgnoreCase(script_type, "application/javascript")) {
|
||||
break :blk .javascript;
|
||||
}
|
||||
if (std.ascii.eqlIgnoreCase(script_type, "text/javascript")) {
|
||||
break :blk .javascript;
|
||||
}
|
||||
if (std.ascii.eqlIgnoreCase(script_type, "module")) {
|
||||
break :blk .module;
|
||||
}
|
||||
|
||||
// "type" could be anything, but only the above are ones we need to process.
|
||||
// Common other ones are application/json, application/ld+json, text/template
|
||||
|
||||
return;
|
||||
};
|
||||
|
||||
const page = self.page;
|
||||
var source: Script.Source = undefined;
|
||||
var remote_url: ?[:0]const u8 = null;
|
||||
if (try parser.elementGetAttribute(element, "src")) |src| {
|
||||
if (try DataURI.parse(page.arena, src)) |data_uri| {
|
||||
source = .{ .@"inline" = data_uri };
|
||||
} else {
remote_url = try URL.stitch(page.arena, src, page.url.raw, .{ .null_terminated = true });
source = .{ .remote = .{} };
}
|
||||
} else {
|
||||
const inline_source = parser.nodeTextContent(@ptrCast(element)) orelse return;
|
||||
source = .{ .@"inline" = inline_source };
|
||||
}
|
||||
|
||||
var script = Script{
|
||||
.kind = kind,
|
||||
.element = element,
|
||||
.source = source,
|
||||
.url = remote_url orelse page.url.raw,
|
||||
.is_defer = if (remote_url == null) false else try parser.elementGetAttribute(element, "defer") != null,
|
||||
.is_async = if (remote_url == null) false else try parser.elementGetAttribute(element, "async") != null,
|
||||
};
|
||||
|
||||
if (source == .@"inline" and self.scripts.first == null) {
|
||||
// inline script with no pending scripts, execute it immediately.
|
||||
// (if there is a pending script, then we cannot execute this immediately
|
||||
// as it needs to be executed in order)
|
||||
return script.eval(page);
|
||||
}
|
||||
|
||||
const pending_script = try self.script_pool.create();
|
||||
errdefer self.script_pool.destroy(pending_script);
|
||||
pending_script.* = .{
|
||||
.script = script,
|
||||
.complete = false,
|
||||
.manager = self,
|
||||
.node = .{},
|
||||
};
|
||||
|
||||
if (source == .@"inline") {
|
||||
// if we're here, it means that we have pending scripts (i.e. self.scripts
|
||||
// is not empty). Because the script is inline, it's complete/ready, but
|
||||
// we need to process them in order
|
||||
pending_script.complete = true;
|
||||
self.scripts.append(&pending_script.node);
|
||||
return;
|
||||
} else {
|
||||
log.debug(.http, "script queue", .{ .url = remote_url.? });
|
||||
}
|
||||
|
||||
pending_script.getList().append(&pending_script.node);
|
||||
|
||||
errdefer pending_script.deinit();
|
||||
|
||||
var headers = try self.client.newHeaders();
|
||||
try page.requestCookie(.{}).headersForRequest(page.arena, remote_url.?, &headers);
|
||||
|
||||
try self.client.request(.{
|
||||
.url = remote_url.?,
|
||||
.ctx = pending_script,
|
||||
.method = .GET,
|
||||
.headers = headers,
|
||||
.cookie_jar = page.cookie_jar,
|
||||
.resource_type = .script,
|
||||
.start_callback = if (log.enabled(.http, .debug)) startCallback else null,
|
||||
.header_callback = headerCallback,
|
||||
.data_callback = dataCallback,
|
||||
.done_callback = doneCallback,
|
||||
.error_callback = errorCallback,
|
||||
});
|
||||
}
|
||||
|
||||
// @TODO: Improving this would be the simplest way to get the biggest
// performance win for most sites.
//
// For JS imports (both static and dynamic), we currently block to get the
// result (the content of the file).
//
// For static imports, this is necessary, since v8 expects the compiled module
// as part of the function return. (We should try to pre-load the JavaScript
// source via module.GetModuleRequests(), but that's for a later time.)
//
// For dynamic imports, this is not strictly necessary since the v8
// call returns a Promise; we could make this a normal get call, associate it
// with the promise, and resolve the promise when done.
//
// In both cases, for now at least, we just issue a "blocking" request. We block
// by ticking the http client until the script is complete.
//
// This uses the client.blockingRequest call, which has a dedicated handle for
// these blocking requests. Because they are blocking, we're guaranteed to have
// only 1 at a time, thus the 1 reserved handle.
//
// We almost don't need the http client's blocking handle. In most cases, there
// should always be 1 free handle whenever we get here, because we always
// release the handle before executing the doneCallback. So, if a module does:
//    import * as x from 'blah'
// and we need to load 'blah', there should always be 1 free handle - the handle
// of the http GET we just completed before executing the module.
// The exception to this, and the reason we need a special blocking handle, is
// inline modules within the HTML page itself:
//    <script type=module>import ....</script>
// Unlike external modules, which can only ever be executed after releasing an
// http handle, these are executed without there necessarily being a free handle.
// Thus, Http/Client.zig maintains a dedicated handle for these calls.
|
||||
pub fn blockingGet(self: *ScriptManager, url: [:0]const u8) !BlockingResult {
|
||||
std.debug.assert(self.is_blocking == false);
|
||||
|
||||
self.is_blocking = true;
|
||||
defer {
|
||||
self.is_blocking = false;
|
||||
|
||||
// we blocked evaluation while loading this script, there could be
|
||||
// scripts ready to process.
|
||||
self.evaluate();
|
||||
}
|
||||
|
||||
var blocking = Blocking{
|
||||
.allocator = self.allocator,
|
||||
.buffer_pool = &self.buffer_pool,
|
||||
};
|
||||
|
||||
var headers = try self.client.newHeaders();
|
||||
try self.page.requestCookie(.{}).headersForRequest(self.page.arena, url, &headers);
|
||||
|
||||
var client = self.client;
|
||||
try client.blockingRequest(.{
|
||||
.url = url,
|
||||
.method = .GET,
|
||||
.headers = headers,
|
||||
.cookie_jar = self.page.cookie_jar,
|
||||
.ctx = &blocking,
|
||||
.resource_type = .script,
|
||||
.start_callback = if (log.enabled(.http, .debug)) Blocking.startCallback else null,
|
||||
.header_callback = Blocking.headerCallback,
|
||||
.data_callback = Blocking.dataCallback,
|
||||
.done_callback = Blocking.doneCallback,
|
||||
.error_callback = Blocking.errorCallback,
|
||||
});
|
||||
|
||||
// rely on http's timeout settings to avoid an endless/long loop.
|
||||
while (true) {
|
||||
_ = try client.tick(200);
|
||||
switch (blocking.state) {
|
||||
.running => {},
|
||||
.done => |result| return result,
|
||||
.err => |err| return err,
|
||||
}
|
||||
}
|
||||
}
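// Minimal usage sketch (illustrative only; `script_manager`, `js_context` and
// `resolved_url` are assumed names for what the module-loading caller holds):
//
//   var result = try script_manager.blockingGet(resolved_url);
//   defer result.deinit();
//   _ = try js_context.module(result.src(), resolved_url, true);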
|
||||
|
||||
pub fn staticScriptsDone(self: *ScriptManager) void {
|
||||
std.debug.assert(self.static_scripts_done == false);
|
||||
self.static_scripts_done = true;
|
||||
}
|
||||
|
||||
// try to evaluate completed scripts (in order). This is called whenever a script
|
||||
// is completed.
|
||||
fn evaluate(self: *ScriptManager) void {
|
||||
if (self.is_evaluating) {
|
||||
// It's possible for a script.eval to cause evaluate to be called again.
|
||||
// This is particularly true with blockingGet, but even without this,
|
||||
// it's theoretically possible (but unlikely). We could make this work
|
||||
// but there's little reason to support the complexity.
|
||||
return;
|
||||
}
|
||||
|
||||
if (self.is_blocking) {
|
||||
// Cannot evaluate scripts while a blocking-load is in progress. Not
|
||||
// only could that result in incorrect evaluation order, it could
|
||||
// trigger another blocking request, while we're doing a blocking request.
|
||||
return;
|
||||
}
|
||||
|
||||
const page = self.page;
|
||||
self.is_evaluating = true;
|
||||
defer self.is_evaluating = false;
|
||||
|
||||
// every script in asyncs_ready is ready to be evaluated.
|
||||
while (self.asyncs_ready.first) |n| {
|
||||
var pending_script: *PendingScript = @fieldParentPtr("node", n);
|
||||
defer pending_script.deinit();
|
||||
pending_script.script.eval(page);
|
||||
}
|
||||
|
||||
while (self.scripts.first) |n| {
|
||||
var pending_script: *PendingScript = @fieldParentPtr("node", n);
|
||||
if (pending_script.complete == false) {
|
||||
return;
|
||||
}
|
||||
defer pending_script.deinit();
|
||||
pending_script.script.eval(page);
|
||||
}
|
||||
|
||||
if (self.static_scripts_done == false) {
|
||||
// We can only execute deferred scripts if
|
||||
// 1 - all the normal scripts are done
|
||||
// 2 - we've finished parsing the HTML and at least queued all the scripts
|
||||
// The last one isn't obvious, but it's possible for self.scripts to
|
||||
// be empty not because we're done executing all the normal scripts
|
||||
// but because we're done executing some (or maybe none), but we're still
|
||||
// parsing the HTML.
|
||||
return;
|
||||
}
|
||||
|
||||
while (self.deferreds.first) |n| {
|
||||
var pending_script: *PendingScript = @fieldParentPtr("node", n);
|
||||
if (pending_script.complete == false) {
|
||||
return;
|
||||
}
|
||||
defer pending_script.deinit();
|
||||
pending_script.script.eval(page);
|
||||
}
|
||||
|
||||
// When all scripts (normal and deferred) are done loading, the document
|
||||
// state changes (this ultimately triggers the DOMContentLoaded event)
|
||||
page.documentIsLoaded();
|
||||
|
||||
if (self.asyncs.first == null) {
|
||||
// 1 - there are no async scripts pending
|
||||
// 2 - we checked static_scripts_done == true above
// 3 - we drained self.scripts above
// 4 - we drained self.deferreds above
|
||||
page.documentIsComplete();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isDone(self: *const ScriptManager) bool {
|
||||
return self.asyncs.first == null and // there are no more async scripts
|
||||
self.static_scripts_done and // and we've finished parsing the HTML to queue all <scripts>
|
||||
self.scripts.first == null and // and there are no more <script src=> to wait for
|
||||
self.deferreds.first == null; // and there are no more <script defer src=> to wait for
|
||||
}
|
||||
|
||||
fn startCallback(transfer: *Http.Transfer) !void {
|
||||
const script: *PendingScript = @ptrCast(@alignCast(transfer.ctx));
|
||||
script.startCallback(transfer) catch |err| {
|
||||
log.err(.http, "SM.startCallback", .{ .err = err, .transfer = transfer });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
fn headerCallback(transfer: *Http.Transfer) !void {
|
||||
const script: *PendingScript = @ptrCast(@alignCast(transfer.ctx));
|
||||
try script.headerCallback(transfer);
|
||||
}
|
||||
|
||||
fn dataCallback(transfer: *Http.Transfer, data: []const u8) !void {
|
||||
const script: *PendingScript = @ptrCast(@alignCast(transfer.ctx));
|
||||
script.dataCallback(transfer, data) catch |err| {
|
||||
log.err(.http, "SM.dataCallback", .{ .err = err, .transfer = transfer, .len = data.len });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
fn doneCallback(ctx: *anyopaque) !void {
|
||||
const script: *PendingScript = @ptrCast(@alignCast(ctx));
|
||||
script.doneCallback();
|
||||
}
|
||||
|
||||
fn errorCallback(ctx: *anyopaque, err: anyerror) void {
|
||||
const script: *PendingScript = @ptrCast(@alignCast(ctx));
|
||||
script.errorCallback(err);
|
||||
}
|
||||
|
||||
// A script which is pending execution.
|
||||
// It could be pending because:
|
||||
// (a) we're still downloading its content or
|
||||
// (b) this is a non-async script that has to be executed in order
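// For example (illustrative), given:
//   <script src="a.js"></script>
//   <script>console.log("b");</script>
// the inline "b" script is complete as soon as it is seen (case b), but it
// still sits in the queue until a.js has been downloaded and evaluated.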
|
||||
pub const PendingScript = struct {
|
||||
script: Script,
|
||||
complete: bool,
|
||||
node: OrderList.Node,
|
||||
manager: *ScriptManager,
|
||||
|
||||
fn deinit(self: *PendingScript) void {
|
||||
const script = &self.script;
|
||||
const manager = self.manager;
|
||||
|
||||
if (script.source == .remote) {
|
||||
manager.buffer_pool.release(script.source.remote);
|
||||
}
|
||||
self.getList().remove(&self.node);
|
||||
}
|
||||
|
||||
fn remove(self: *PendingScript) void {
|
||||
if (self.node) |*node| {
|
||||
self.getList().remove(node);
|
||||
self.node = null;
|
||||
}
|
||||
}
|
||||
|
||||
fn startCallback(self: *PendingScript, transfer: *Http.Transfer) !void {
|
||||
_ = self;
|
||||
log.debug(.http, "script fetch start", .{ .req = transfer });
|
||||
}
|
||||
|
||||
fn headerCallback(self: *PendingScript, transfer: *Http.Transfer) !void {
|
||||
const header = &transfer.response_header.?;
|
||||
if (header.status != 200) {
|
||||
log.info(.http, "script header", .{
|
||||
.req = transfer,
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
log.debug(.http, "script header", .{
|
||||
.req = transfer,
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
});
|
||||
|
||||
// If this isn't true, then we'll likely leak memory. If you don't
|
||||
// set `CURLOPT_SUPPRESS_CONNECT_HEADERS` and CONNECT to a proxy, this
|
||||
// will fail. This assertion exists to catch incorrect assumptions about
|
||||
// how libcurl works, or about how we've configured it.
|
||||
std.debug.assert(self.script.source.remote.capacity == 0);
|
||||
var buffer = self.manager.buffer_pool.get();
|
||||
if (transfer.getContentLength()) |cl| {
|
||||
try buffer.ensureTotalCapacity(self.manager.allocator, cl);
|
||||
}
|
||||
self.script.source = .{ .remote = buffer };
|
||||
}
|
||||
|
||||
fn dataCallback(self: *PendingScript, transfer: *Http.Transfer, data: []const u8) !void {
|
||||
_ = transfer;
|
||||
// too verbose
|
||||
// log.debug(.http, "script data chunk", .{
|
||||
// .req = transfer,
|
||||
// .len = data.len,
|
||||
// });
|
||||
|
||||
try self.script.source.remote.appendSlice(self.manager.allocator, data);
|
||||
}
|
||||
|
||||
fn doneCallback(self: *PendingScript) void {
|
||||
log.debug(.http, "script fetch complete", .{ .req = self.script.url });
|
||||
|
||||
const manager = self.manager;
|
||||
self.complete = true;
|
||||
if (self.script.is_async) {
|
||||
manager.asyncs.remove(&self.node);
|
||||
manager.asyncs_ready.append(&self.node);
|
||||
}
|
||||
manager.evaluate();
|
||||
}
|
||||
|
||||
fn errorCallback(self: *PendingScript, err: anyerror) void {
|
||||
log.warn(.http, "script fetch error", .{ .req = self.script.url, .err = err });
|
||||
|
||||
const manager = self.manager;
|
||||
|
||||
self.deinit();
|
||||
|
||||
if (manager.shutdown) {
|
||||
return;
|
||||
}
|
||||
|
||||
manager.evaluate();
|
||||
}
|
||||
|
||||
fn getList(self: *const PendingScript) *OrderList {
|
||||
// When a script has both the async and defer flag set, it should be
|
||||
// treated as async. Async is newer, so some websites use both so that
|
||||
// if async isn't known, it'll fall back to defer.
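// e.g. (illustrative) <script src="app.js" async defer></script> is tracked
// on the asyncs list, not on deferreds.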
|
||||
|
||||
const script = &self.script;
|
||||
if (script.is_async) {
|
||||
return if (self.complete) &self.manager.asyncs_ready else &self.manager.asyncs;
|
||||
}
|
||||
|
||||
if (script.is_defer) {
|
||||
return &self.manager.deferreds;
|
||||
}
|
||||
|
||||
return &self.manager.scripts;
|
||||
}
|
||||
};
|
||||
|
||||
const Script = struct {
|
||||
kind: Kind,
|
||||
url: []const u8,
|
||||
is_async: bool,
|
||||
is_defer: bool,
|
||||
source: Source,
|
||||
element: *parser.Element,
|
||||
|
||||
const Kind = enum {
|
||||
module,
|
||||
javascript,
|
||||
};
|
||||
|
||||
const Callback = union(enum) {
|
||||
string: []const u8,
|
||||
function: Env.Function,
|
||||
};
|
||||
|
||||
const Source = union(enum) {
|
||||
@"inline": []const u8,
|
||||
remote: std.ArrayListUnmanaged(u8),
|
||||
|
||||
fn content(self: Source) []const u8 {
|
||||
return switch (self) {
|
||||
.remote => |buf| buf.items,
|
||||
.@"inline" => |c| c,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
fn eval(self: *Script, page: *Page) void {
|
||||
page.setCurrentScript(@ptrCast(self.element)) catch |err| {
|
||||
log.err(.browser, "set document script", .{ .err = err });
|
||||
return;
|
||||
};
|
||||
|
||||
defer page.setCurrentScript(null) catch |err| {
|
||||
log.err(.browser, "clear document script", .{ .err = err });
|
||||
};
|
||||
|
||||
// inline scripts aren't cached. remote ones are.
|
||||
const cacheable = self.source == .remote;
|
||||
|
||||
const url = self.url;
|
||||
|
||||
log.info(.browser, "executing script", .{
|
||||
.src = url,
|
||||
.kind = self.kind,
|
||||
.cacheable = cacheable,
|
||||
});
|
||||
|
||||
const js_context = page.main_context;
|
||||
var try_catch: Env.TryCatch = undefined;
|
||||
try_catch.init(js_context);
|
||||
defer try_catch.deinit();
|
||||
|
||||
const success = blk: {
|
||||
const content = self.source.content();
|
||||
switch (self.kind) {
|
||||
.javascript => _ = js_context.eval(content, url) catch break :blk false,
|
||||
.module => {
|
||||
// We don't care about waiting for the evaluation here.
|
||||
_ = js_context.module(content, url, cacheable) catch break :blk false;
|
||||
},
|
||||
}
|
||||
break :blk true;
|
||||
};
|
||||
|
||||
if (success) {
|
||||
self.executeCallback("onload", page);
|
||||
return;
|
||||
}
|
||||
|
||||
if (page.delayed_navigation) {
|
||||
// If we're navigating to another page, an error is expected
|
||||
// since we probably terminated the script forcefully.
|
||||
return;
|
||||
}
|
||||
|
||||
const msg = try_catch.err(page.arena) catch |err| @errorName(err) orelse "unknown";
|
||||
log.warn(.user_script, "eval script", .{
|
||||
.url = url,
|
||||
.err = msg,
|
||||
.cacheable = cacheable,
|
||||
});
|
||||
|
||||
self.executeCallback("onerror", page);
|
||||
}
|
||||
|
||||
fn executeCallback(self: *const Script, comptime typ: []const u8, page: *Page) void {
|
||||
const callback = self.getCallback(typ, page) orelse return;
|
||||
|
||||
switch (callback) {
|
||||
.string => |str| {
|
||||
var try_catch: Env.TryCatch = undefined;
|
||||
try_catch.init(page.main_context);
|
||||
defer try_catch.deinit();
|
||||
|
||||
_ = page.main_context.exec(str, typ) catch |err| {
|
||||
const msg = try_catch.err(page.arena) catch @errorName(err) orelse "unknown";
|
||||
log.warn(.user_script, "script callback", .{
|
||||
.url = self.url,
|
||||
.err = msg,
|
||||
.type = typ,
|
||||
.@"inline" = true,
|
||||
});
|
||||
};
|
||||
},
|
||||
.function => |f| {
|
||||
const Event = @import("events/event.zig").Event;
|
||||
const loadevt = parser.eventCreate() catch |err| {
|
||||
log.err(.browser, "SM event creation", .{ .err = err });
|
||||
return;
|
||||
};
|
||||
defer parser.eventDestroy(loadevt);
|
||||
|
||||
var result: Env.Function.Result = undefined;
|
||||
const iface = Event.toInterface(loadevt);
|
||||
f.tryCall(void, .{iface}, &result) catch {
|
||||
log.warn(.user_script, "script callback", .{
|
||||
.url = self.url,
|
||||
.type = typ,
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.@"inline" = false,
|
||||
});
|
||||
};
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn getCallback(self: *const Script, comptime typ: []const u8, page: *Page) ?Callback {
|
||||
const element = self.element;
|
||||
// first we check if there was an el.onload set directly on the
|
||||
// element in JavaScript (if so, it'd be stored in the node state)
|
||||
if (page.getNodeState(@ptrCast(element))) |se| {
|
||||
if (@field(se, typ)) |function| {
|
||||
return .{ .function = function };
|
||||
}
|
||||
}
|
||||
// if we have no node state, or if the node state has no onload/onerror
|
||||
// then check for the onload/onerror attribute
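// For example (illustrative), both of these are picked up for the same element:
//   el.onload = () => { ... };            // node state, returned as .function
//   <script src="a.js" onload="done()">   // attribute, returned as .string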
|
||||
if (parser.elementGetAttribute(element, typ) catch null) |string| {
|
||||
return .{ .string = string };
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
const BufferPool = struct {
|
||||
count: usize,
|
||||
available: List = .{},
|
||||
allocator: Allocator,
|
||||
max_concurrent_transfers: u8,
|
||||
mem_pool: std.heap.MemoryPool(Container),
|
||||
|
||||
const List = std.DoublyLinkedList;
|
||||
|
||||
const Container = struct {
|
||||
node: List.Node,
|
||||
buf: std.ArrayListUnmanaged(u8),
|
||||
};
|
||||
|
||||
fn init(allocator: Allocator, max_concurrent_transfers: u8) BufferPool {
|
||||
return .{
|
||||
.available = .{},
|
||||
.count = 0,
|
||||
.allocator = allocator,
|
||||
.max_concurrent_transfers = max_concurrent_transfers,
|
||||
.mem_pool = std.heap.MemoryPool(Container).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
fn deinit(self: *BufferPool) void {
|
||||
const allocator = self.allocator;
|
||||
|
||||
var node = self.available.first;
|
||||
while (node) |n| {
|
||||
const container: *Container = @fieldParentPtr("node", n);
|
||||
container.buf.deinit(allocator);
|
||||
node = n.next;
|
||||
}
|
||||
self.mem_pool.deinit();
|
||||
}
|
||||
|
||||
fn get(self: *BufferPool) std.ArrayListUnmanaged(u8) {
|
||||
const node = self.available.popFirst() orelse {
|
||||
// return a new buffer
|
||||
return .{};
|
||||
};
|
||||
|
||||
self.count -= 1;
|
||||
const container: *Container = @fieldParentPtr("node", node);
|
||||
defer self.mem_pool.destroy(container);
|
||||
return container.buf;
|
||||
}
|
||||
|
||||
fn release(self: *BufferPool, buffer: ArrayListUnmanaged(u8)) void {
|
||||
// create mutable copy
|
||||
var b = buffer;
|
||||
|
||||
if (self.count == self.max_concurrent_transfers) {
|
||||
b.deinit(self.allocator);
|
||||
return;
|
||||
}
|
||||
|
||||
const container = self.mem_pool.create() catch |err| {
|
||||
b.deinit(self.allocator);
|
||||
log.err(.http, "SM BufferPool release", .{ .err = err });
|
||||
return;
|
||||
};
|
||||
|
||||
b.clearRetainingCapacity();
|
||||
container.* = .{ .buf = b, .node = .{} };
|
||||
self.count += 1;
|
||||
self.available.append(&container.node);
|
||||
}
|
||||
};
|
||||
|
||||
const Blocking = struct {
|
||||
allocator: Allocator,
|
||||
buffer_pool: *BufferPool,
|
||||
state: State = .{ .running = {} },
|
||||
buffer: std.ArrayListUnmanaged(u8) = .{},
|
||||
|
||||
const State = union(enum) {
|
||||
running: void,
|
||||
err: anyerror,
|
||||
done: BlockingResult,
|
||||
};
|
||||
|
||||
fn startCallback(transfer: *Http.Transfer) !void {
|
||||
log.debug(.http, "script fetch start", .{ .req = transfer, .blocking = true });
|
||||
}
|
||||
|
||||
fn headerCallback(transfer: *Http.Transfer) !void {
|
||||
const header = &transfer.response_header.?;
|
||||
log.debug(.http, "script header", .{
|
||||
.req = transfer,
|
||||
.blocking = true,
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
});
|
||||
|
||||
if (header.status != 200) {
|
||||
return error.InvalidStatusCode;
|
||||
}
|
||||
|
||||
var self: *Blocking = @ptrCast(@alignCast(transfer.ctx));
|
||||
self.buffer = self.buffer_pool.get();
|
||||
}
|
||||
|
||||
fn dataCallback(transfer: *Http.Transfer, data: []const u8) !void {
|
||||
// too verbose
|
||||
// log.debug(.http, "script data chunk", .{
|
||||
// .req = transfer,
|
||||
// .blocking = true,
|
||||
// });
|
||||
|
||||
var self: *Blocking = @ptrCast(@alignCast(transfer.ctx));
|
||||
self.buffer.appendSlice(self.allocator, data) catch |err| {
|
||||
log.err(.http, "SM.dataCallback", .{
|
||||
.err = err,
|
||||
.len = data.len,
|
||||
.blocking = true,
|
||||
.transfer = transfer,
|
||||
});
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
fn doneCallback(ctx: *anyopaque) !void {
|
||||
var self: *Blocking = @ptrCast(@alignCast(ctx));
|
||||
self.state = .{ .done = .{
|
||||
.buffer = self.buffer,
|
||||
.buffer_pool = self.buffer_pool,
|
||||
} };
|
||||
}
|
||||
|
||||
fn errorCallback(ctx: *anyopaque, err: anyerror) void {
|
||||
var self: *Blocking = @ptrCast(@alignCast(ctx));
|
||||
self.state = .{ .err = err };
|
||||
self.buffer_pool.release(self.buffer);
|
||||
}
|
||||
};
|
||||
|
||||
pub const BlockingResult = struct {
|
||||
buffer: std.ArrayListUnmanaged(u8),
|
||||
buffer_pool: *BufferPool,
|
||||
|
||||
pub fn deinit(self: *BlockingResult) void {
|
||||
self.buffer_pool.release(self.buffer);
|
||||
}
|
||||
|
||||
pub fn src(self: *const BlockingResult) []const u8 {
|
||||
return self.buffer.items;
|
||||
}
|
||||
};
|
||||
@@ -1,77 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
// Sometimes we need to extend libdom. For example, its HTMLDocument doesn't
|
||||
// have a readyState. We have a couple different options, such as making the
|
||||
// correction in libdom directly. Another option stems from the fact that every
|
||||
// libdom node has an opaque embedder_data field. This is the struct that we
|
||||
// lazily load into that field.
|
||||
//
|
||||
// It didn't originally start off as a collection of every single extension, but
|
||||
// this quickly proved necessary, since different fields are needed on the same
|
||||
// data at different levels of the prototype chain. This isn't memory efficient.
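// Illustrative sketch of how callers reach this state (names taken from the
// script-manager code above; exact helpers may differ):
//   if (page.getNodeState(@ptrCast(element))) |state| {
//       if (state.onload) |f| {
//           // the element had el.onload assigned from JavaScript
//           _ = f;
//       }
//   }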
|
||||
|
||||
const Env = @import("env.zig").Env;
|
||||
const parser = @import("netsurf.zig");
|
||||
const DataSet = @import("html/DataSet.zig");
|
||||
const ShadowRoot = @import("dom/shadow_root.zig").ShadowRoot;
|
||||
const StyleSheet = @import("cssom/StyleSheet.zig");
|
||||
const CSSStyleDeclaration = @import("cssom/CSSStyleDeclaration.zig");
|
||||
|
||||
// for HTMLScript (but probably needs to be added to more)
|
||||
onload: ?Env.Function = null,
|
||||
onerror: ?Env.Function = null,
|
||||
|
||||
// for HTMLElement
|
||||
style: CSSStyleDeclaration = .empty,
|
||||
dataset: ?DataSet = null,
|
||||
template_content: ?*parser.DocumentFragment = null,
|
||||
|
||||
// For dom/element
|
||||
shadow_root: ?*ShadowRoot = null,
|
||||
|
||||
// for html/document
|
||||
ready_state: ReadyState = .loading,
|
||||
|
||||
// for html/HTMLStyleElement
|
||||
style_sheet: ?*StyleSheet = null,
|
||||
|
||||
// for dom/document
|
||||
active_element: ?*parser.Element = null,
|
||||
adopted_style_sheets: ?Env.JsObject = null,
|
||||
|
||||
// for HTMLSelectElement
|
||||
// By default, if no option is explicitly selected, the first option should
|
||||
// be selected. However, libdom doesn't do this, and it sets the
|
||||
// selectedIndex to -1, which is a valid value for "nothing selected".
|
||||
// Therefore, when libdom says the selectedIndex == -1, we don't know if
|
||||
// it means that nothing is selected, or if the first option is selected by
|
||||
// default.
|
||||
// There are cases where this won't work, but when selectedIndex is
|
||||
// explicitly set, we set this boolean flag. Then, when we're getting the
|
||||
// selectedIndex, if this flag is == false, which is to say that if
|
||||
// selectedIndex hasn't been explicitly set AND if we have at least 1 option
|
||||
// AND if it isn't a multi select, we can make the 1st item selected by
|
||||
// default (by returning selectedIndex == 0).
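// Illustrative outcomes of that rule:
//   <select><option>a</option><option>b</option></select>  -> selectedIndex == 0
//   <select multiple><option>a</option></select>           -> selectedIndex == -1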
|
||||
explicit_index_set: bool = false,
|
||||
|
||||
const ReadyState = enum {
|
||||
loading,
|
||||
interactive,
|
||||
complete,
|
||||
};
|
||||
@@ -17,100 +17,677 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
const Types = @import("root").Types;
|
||||
|
||||
const State = @import("State.zig");
|
||||
const Env = @import("env.zig").Env;
|
||||
const App = @import("../app.zig").App;
|
||||
const Session = @import("session.zig").Session;
|
||||
const Notification = @import("../notification.zig").Notification;
|
||||
const parser = @import("netsurf");
|
||||
const Loader = @import("loader.zig").Loader;
|
||||
const Dump = @import("dump.zig");
|
||||
const Mime = @import("mime.zig").Mime;
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const HttpClient = @import("../http/Client.zig");
|
||||
const jsruntime = @import("jsruntime");
|
||||
const Loop = jsruntime.Loop;
|
||||
const Env = jsruntime.Env;
|
||||
const Module = jsruntime.Module;
|
||||
|
||||
const apiweb = @import("../apiweb.zig");
|
||||
|
||||
const Window = @import("../html/window.zig").Window;
|
||||
const Walker = @import("../dom/walker.zig").WalkerDepthFirst;
|
||||
|
||||
const URL = @import("../url/url.zig").URL;
|
||||
const Location = @import("../html/location.zig").Location;
|
||||
|
||||
const storage = @import("../storage/storage.zig");
|
||||
|
||||
const FetchResult = @import("../http/Client.zig").Client.FetchResult;
|
||||
|
||||
const UserContext = @import("../user_context.zig").UserContext;
|
||||
const HttpClient = @import("asyncio").Client;
|
||||
|
||||
const polyfill = @import("../polyfill/polyfill.zig");
|
||||
|
||||
const log = std.log.scoped(.browser);
|
||||
|
||||
pub const user_agent = "Lightpanda/1.0";
|
||||
|
||||
// Browser is an instance of the browser.
|
||||
// You can create multiple browser instances.
|
||||
// A browser contains only one session.
|
||||
// TODO allow multiple sessions per browser.
|
||||
pub const Browser = struct {
|
||||
env: *Env,
|
||||
app: *App,
|
||||
session: ?Session,
|
||||
allocator: Allocator,
|
||||
http_client: *HttpClient,
|
||||
page_arena: ArenaAllocator,
|
||||
session_arena: ArenaAllocator,
|
||||
transfer_arena: ArenaAllocator,
|
||||
notification: *Notification,
|
||||
state_pool: std.heap.MemoryPool(State),
|
||||
session: Session = undefined,
|
||||
agent: []const u8 = user_agent,
|
||||
|
||||
pub fn init(app: *App) !Browser {
|
||||
const allocator = app.allocator;
|
||||
const uri = "about:blank";
|
||||
|
||||
const env = try Env.init(allocator, &app.platform, .{});
|
||||
errdefer env.deinit();
|
||||
pub fn init(self: *Browser, alloc: std.mem.Allocator, loop: *Loop, vm: jsruntime.VM) !void {
|
||||
// We want to ensure the caller initialised a VM, but the browser
|
||||
// doesn't use it directly...
|
||||
_ = vm;
|
||||
|
||||
const notification = try Notification.init(allocator, app.notification);
|
||||
app.http.client.notification = notification;
|
||||
app.http.client.next_request_id = 0; // Should we track ids in CDP only?
|
||||
errdefer notification.deinit();
|
||||
|
||||
return .{
|
||||
.app = app,
|
||||
.env = env,
|
||||
.session = null,
|
||||
.allocator = allocator,
|
||||
.notification = notification,
|
||||
.http_client = app.http.client,
|
||||
.page_arena = ArenaAllocator.init(allocator),
|
||||
.session_arena = ArenaAllocator.init(allocator),
|
||||
.transfer_arena = ArenaAllocator.init(allocator),
|
||||
.state_pool = std.heap.MemoryPool(State).init(allocator),
|
||||
};
|
||||
try Session.init(&self.session, alloc, loop, uri);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Browser) void {
|
||||
self.closeSession();
|
||||
self.env.deinit();
|
||||
self.page_arena.deinit();
|
||||
self.session_arena.deinit();
|
||||
self.transfer_arena.deinit();
|
||||
self.http_client.notification = null;
|
||||
self.notification.deinit();
|
||||
self.state_pool.deinit();
|
||||
self.session.deinit();
|
||||
}
|
||||
|
||||
pub fn newSession(self: *Browser) !*Session {
|
||||
self.closeSession();
|
||||
self.session = @as(Session, undefined);
|
||||
const session = &self.session.?;
|
||||
try Session.init(session, self);
|
||||
return session;
|
||||
pub fn newSession(
|
||||
self: *Browser,
|
||||
alloc: std.mem.Allocator,
|
||||
loop: *jsruntime.Loop,
|
||||
) !void {
|
||||
self.session.deinit();
|
||||
try Session.init(&self.session, alloc, loop, uri);
|
||||
}
|
||||
|
||||
pub fn closeSession(self: *Browser) void {
|
||||
if (self.session) |*session| {
|
||||
session.deinit();
|
||||
self.session = null;
|
||||
_ = self.session_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });
|
||||
self.env.lowMemoryNotification();
|
||||
}
|
||||
}
|
||||
pub fn currentPage(self: *Browser) ?*Page {
|
||||
if (self.session.page == null) return null;
|
||||
|
||||
pub fn runMicrotasks(self: *const Browser) void {
|
||||
self.env.runMicrotasks();
|
||||
}
|
||||
|
||||
pub fn runMessageLoop(self: *const Browser) void {
|
||||
while (self.env.pumpMessageLoop()) {
|
||||
log.debug(.browser, "pumpMessageLoop", .{});
|
||||
}
|
||||
self.env.runIdleTasks();
|
||||
return &self.session.page.?;
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../testing.zig");
|
||||
test "Browser" {
|
||||
try testing.htmlRunner("browser.html");
|
||||
}
|
||||
// Session is like a browser's tab.
|
||||
// It owns the js env and the loader for all the pages of the session.
|
||||
// You can create successively multiple pages for a session, but you must
|
||||
// deinit a page before running another one.
|
||||
pub const Session = struct {
|
||||
// allocator used to init the arena.
|
||||
alloc: std.mem.Allocator,
|
||||
|
||||
// The arena is used only to bound the js env init b/c it leaks memory.
|
||||
// see https://github.com/lightpanda-io/jsruntime-lib/issues/181
|
||||
//
|
||||
// The arena is initialised with self.alloc allocator.
|
||||
// All other Session deps use self.alloc directly, not the arena.
|
||||
arena: std.heap.ArenaAllocator,
|
||||
|
||||
uri: []const u8,
|
||||
|
||||
// TODO handle proxy
|
||||
loader: Loader,
|
||||
env: Env = undefined,
|
||||
inspector: ?jsruntime.Inspector = null,
|
||||
|
||||
window: Window,
|
||||
|
||||
// TODO move the shed to the browser?
|
||||
storageShed: storage.Shed,
|
||||
page: ?Page = null,
|
||||
httpClient: HttpClient,
|
||||
|
||||
jstypes: [Types.len]usize = undefined,
|
||||
|
||||
fn init(self: *Session, alloc: std.mem.Allocator, loop: *Loop, uri: []const u8) !void {
|
||||
self.* = Session{
|
||||
.uri = uri,
|
||||
.alloc = alloc,
|
||||
.arena = std.heap.ArenaAllocator.init(alloc),
|
||||
.window = Window.create(null, .{ .agent = user_agent }),
|
||||
.loader = Loader.init(alloc),
|
||||
.storageShed = storage.Shed.init(alloc),
|
||||
.httpClient = undefined,
|
||||
};
|
||||
|
||||
Env.init(&self.env, self.arena.allocator(), loop, null);
|
||||
self.httpClient = .{ .allocator = alloc };
|
||||
try self.env.load(&self.jstypes);
|
||||
}
|
||||
|
||||
fn fetchModule(ctx: *anyopaque, referrer: ?jsruntime.Module, specifier: []const u8) !jsruntime.Module {
|
||||
_ = referrer;
|
||||
|
||||
const self: *Session = @ptrCast(@alignCast(ctx));
|
||||
|
||||
if (self.page == null) return error.NoPage;
|
||||
|
||||
log.debug("fetch module: specifier: {s}", .{specifier});
|
||||
const alloc = self.arena.allocator();
|
||||
const body = try self.page.?.fetchData(alloc, specifier);
|
||||
defer alloc.free(body);
|
||||
|
||||
return self.env.compileModule(body, specifier);
|
||||
}
|
||||
|
||||
fn deinit(self: *Session) void {
|
||||
if (self.page) |*p| p.deinit();
|
||||
|
||||
if (self.inspector) |inspector| {
|
||||
inspector.deinit(self.alloc);
|
||||
}
|
||||
|
||||
self.env.deinit();
|
||||
self.arena.deinit();
|
||||
|
||||
self.httpClient.deinit();
|
||||
self.loader.deinit();
|
||||
self.storageShed.deinit();
|
||||
}
|
||||
|
||||
pub fn initInspector(
|
||||
self: *Session,
|
||||
ctx: anytype,
|
||||
onResp: jsruntime.InspectorOnResponseFn,
|
||||
onEvent: jsruntime.InspectorOnEventFn,
|
||||
) !void {
|
||||
const ctx_opaque = @as(*anyopaque, @ptrCast(ctx));
|
||||
self.inspector = try jsruntime.Inspector.init(self.alloc, self.env, ctx_opaque, onResp, onEvent);
|
||||
self.env.setInspector(self.inspector.?);
|
||||
}
|
||||
|
||||
pub fn callInspector(self: *Session, msg: []const u8) void {
|
||||
if (self.inspector) |inspector| {
|
||||
inspector.send(msg, self.env);
|
||||
} else {
|
||||
@panic("No Inspector");
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: the caller is not the owner of the returned value,
|
||||
// the pointer to Page is just returned as a convenience
|
||||
pub fn createPage(self: *Session) !*Page {
|
||||
if (self.page != null) return error.SessionPageExists;
|
||||
const p: Page = undefined;
|
||||
self.page = p;
|
||||
Page.init(&self.page.?, self.alloc, self);
|
||||
return &self.page.?;
|
||||
}
|
||||
};
|
||||
|
||||
// Page navigates to a URL.
// You can navigate multiple URLs with the same page, but you have to call
// end() to stop the previous navigation before starting a new one.
// The page handles all its memory in an arena allocator. The arena is reset
// when end() is called.
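// Minimal lifecycle sketch (illustrative only; `session` is assumed to be an
// initialised Session):
//   const page = try session.createPage();
//   try page.start(null);
//   try page.navigate("https://example.com", null);
//   page.end(); // reset the js env and the arena before navigating again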
|
||||
pub const Page = struct {
|
||||
arena: std.heap.ArenaAllocator,
|
||||
session: *Session,
|
||||
doc: ?*parser.Document = null,
|
||||
|
||||
// handle url
|
||||
rawuri: ?[]const u8 = null,
|
||||
uri: std.Uri = undefined,
|
||||
origin: ?[]const u8 = null,
|
||||
|
||||
// html url and location
|
||||
url: ?URL = null,
|
||||
location: Location = .{},
|
||||
|
||||
raw_data: ?[]const u8 = null,
|
||||
|
||||
fn init(
|
||||
self: *Page,
|
||||
alloc: std.mem.Allocator,
|
||||
session: *Session,
|
||||
) void {
|
||||
self.* = .{
|
||||
.arena = std.heap.ArenaAllocator.init(alloc),
|
||||
.session = session,
|
||||
};
|
||||
}
|
||||
|
||||
// start js env.
|
||||
// - auxData: extra data forwarded to the Inspector
|
||||
// see Inspector.contextCreated
|
||||
pub fn start(self: *Page, auxData: ?[]const u8) !void {
|
||||
// start JS env
|
||||
log.debug("start js env", .{});
|
||||
try self.session.env.start();
|
||||
|
||||
// register the module loader
|
||||
try self.session.env.setModuleLoadFn(self.session, Session.fetchModule);
|
||||
|
||||
// add global objects
|
||||
log.debug("setup global env", .{});
|
||||
try self.session.env.bindGlobal(&self.session.window);
|
||||
|
||||
// load polyfills
|
||||
try polyfill.load(self.arena.allocator(), self.session.env);
|
||||
|
||||
// inspector
|
||||
if (self.session.inspector) |inspector| {
|
||||
log.debug("inspector context created", .{});
|
||||
inspector.contextCreated(self.session.env, "", self.origin orelse "://", auxData);
|
||||
}
|
||||
}
|
||||
|
||||
// reset js env and mem arena.
|
||||
pub fn end(self: *Page) void {
|
||||
self.session.env.stop();
|
||||
// TODO unload document: https://html.spec.whatwg.org/#unloading-documents
|
||||
|
||||
if (self.url) |*u| u.deinit(self.arena.allocator());
|
||||
self.url = null;
|
||||
self.location.url = null;
|
||||
self.session.window.replaceLocation(&self.location) catch |e| {
|
||||
log.err("reset window location: {any}", .{e});
|
||||
};
|
||||
self.doc = null;
|
||||
|
||||
// clear netsurf memory arena.
|
||||
parser.deinit();
|
||||
|
||||
_ = self.arena.reset(.free_all);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Page) void {
|
||||
self.end();
|
||||
self.arena.deinit();
|
||||
self.session.page = null;
|
||||
}
|
||||
|
||||
// dump writes the page content into the given file.
|
||||
pub fn dump(self: *Page, out: std.fs.File) !void {
|
||||
|
||||
// if no HTML document pointer is available, dump the data content only.
|
||||
if (self.doc == null) {
|
||||
// no data loaded, nothing to do.
|
||||
if (self.raw_data == null) return;
|
||||
return try out.writeAll(self.raw_data.?);
|
||||
}
|
||||
|
||||
// if the page has a pointer to a document, dumps the HTML.
|
||||
try Dump.writeHTML(self.doc.?, out);
|
||||
}
|
||||
|
||||
pub fn wait(self: *Page) !void {
|
||||
|
||||
// try catch
|
||||
var try_catch: jsruntime.TryCatch = undefined;
|
||||
try_catch.init(self.session.env);
|
||||
defer try_catch.deinit();
|
||||
|
||||
self.session.env.wait() catch |err| {
|
||||
// the js env may not have been started if the document wasn't an HTML document.
|
||||
if (err == error.EnvNotStarted) return;
|
||||
|
||||
const alloc = self.arena.allocator();
|
||||
if (try try_catch.err(alloc, self.session.env)) |msg| {
|
||||
defer alloc.free(msg);
|
||||
log.info("wait error: {s}", .{msg});
|
||||
return;
|
||||
}
|
||||
};
|
||||
log.debug("wait: OK", .{});
|
||||
}
|
||||
|
||||
// spec reference: https://html.spec.whatwg.org/#document-lifecycle
|
||||
// - auxData: extra data forwarded to the Inspector
|
||||
// see Inspector.contextCreated
|
||||
pub fn navigate(self: *Page, uri: []const u8, auxData: ?[]const u8) !void {
|
||||
const alloc = self.arena.allocator();
|
||||
|
||||
log.debug("starting GET {s}", .{uri});
|
||||
|
||||
// if the uri is about:blank, nothing to do.
|
||||
if (std.mem.eql(u8, "about:blank", uri)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// own the url
|
||||
if (self.rawuri) |prev| alloc.free(prev);
|
||||
self.rawuri = try alloc.dupe(u8, uri);
|
||||
self.uri = std.Uri.parse(self.rawuri.?) catch try std.Uri.parseAfterScheme("", self.rawuri.?);
|
||||
|
||||
if (self.url) |*prev| prev.deinit(alloc);
|
||||
self.url = try URL.constructor(alloc, self.rawuri.?, null);
|
||||
self.location.url = &self.url.?;
|
||||
try self.session.window.replaceLocation(&self.location);
|
||||
|
||||
// prepare origin value.
|
||||
var buf = std.ArrayList(u8).init(alloc);
|
||||
defer buf.deinit();
|
||||
try self.uri.writeToStream(.{
|
||||
.scheme = true,
|
||||
.authority = true,
|
||||
}, buf.writer());
|
||||
self.origin = try buf.toOwnedSlice();
|
||||
|
||||
// TODO handle fragment in url.
|
||||
|
||||
// load the data
|
||||
var resp = try self.session.loader.get(alloc, self.uri);
|
||||
defer resp.deinit();
|
||||
|
||||
const req = resp.req;
|
||||
|
||||
log.info("GET {any} {d}", .{ self.uri, @intFromEnum(req.response.status) });
|
||||
|
||||
// TODO handle redirection
|
||||
log.debug("{?} {d} {s}", .{
|
||||
req.response.version,
|
||||
@intFromEnum(req.response.status),
|
||||
req.response.reason,
|
||||
// TODO log headers
|
||||
});
|
||||
|
||||
// TODO handle charset
|
||||
// https://html.spec.whatwg.org/#content-type
|
||||
var it = req.response.iterateHeaders();
|
||||
var ct: ?[]const u8 = null;
|
||||
while (true) {
|
||||
const h = it.next() orelse break;
|
||||
if (std.ascii.eqlIgnoreCase(h.name, "Content-Type")) {
|
||||
ct = try alloc.dupe(u8, h.value);
|
||||
}
|
||||
}
|
||||
if (ct == null) {
|
||||
// no content type in HTTP headers.
|
||||
// TODO try to sniff mime type from the body.
|
||||
log.info("no content-type HTTP header", .{});
|
||||
return;
|
||||
}
|
||||
defer alloc.free(ct.?);
|
||||
|
||||
log.debug("header content-type: {s}", .{ct.?});
|
||||
var mime = try Mime.parse(alloc, ct.?);
|
||||
defer mime.deinit();
|
||||
|
||||
if (mime.isHTML()) {
|
||||
try self.loadHTMLDoc(req.reader(), mime.charset orelse "utf-8", auxData);
|
||||
} else {
|
||||
log.info("non-HTML document: {s}", .{ct.?});
|
||||
|
||||
// save the body into the page.
|
||||
self.raw_data = try req.reader().readAllAlloc(alloc, 16 * 1024 * 1024);
|
||||
}
|
||||
}
|
||||
|
||||
// https://html.spec.whatwg.org/#read-html
|
||||
fn loadHTMLDoc(self: *Page, reader: anytype, charset: []const u8, auxData: ?[]const u8) !void {
|
||||
const alloc = self.arena.allocator();
|
||||
|
||||
// start netsurf memory arena.
|
||||
try parser.init();
|
||||
|
||||
log.debug("parse html with charset {s}", .{charset});
|
||||
|
||||
const ccharset = try alloc.dupeZ(u8, charset);
|
||||
defer alloc.free(ccharset);
|
||||
|
||||
const html_doc = try parser.documentHTMLParse(reader, ccharset);
|
||||
const doc = parser.documentHTMLToDocument(html_doc);
|
||||
|
||||
// save a document's pointer in the page.
|
||||
self.doc = doc;
|
||||
|
||||
// TODO set document.readyState to interactive
|
||||
// https://html.spec.whatwg.org/#reporting-document-loading-status
|
||||
|
||||
// inject the URL to the document including the fragment.
|
||||
try parser.documentSetDocumentURI(doc, self.rawuri orelse "about:blank");
|
||||
|
||||
// TODO set the referrer to the document.
|
||||
|
||||
try self.session.window.replaceDocument(html_doc);
|
||||
self.session.window.setStorageShelf(
|
||||
try self.session.storageShed.getOrPut(self.origin orelse "null"),
|
||||
);
|
||||
|
||||
// https://html.spec.whatwg.org/#read-html
|
||||
|
||||
// inspector
|
||||
if (self.session.inspector) |inspector| {
|
||||
inspector.contextCreated(self.session.env, "", self.origin.?, auxData);
|
||||
}
|
||||
|
||||
// replace the user context document with the new one.
|
||||
try self.session.env.setUserContext(.{
|
||||
.document = html_doc,
|
||||
.httpClient = &self.session.httpClient,
|
||||
});
|
||||
|
||||
// browse the DOM tree to retrieve scripts
|
||||
// TODO execute the synchronous scripts during the HTML parsing.
|
||||
// TODO fetch the script resources concurrently but execute them in the
|
||||
// declaration order for synchronous ones.
|
||||
|
||||
// sasync stores scripts which can be run asynchronously.
// for now they are just run after the non-async ones in order to
// dispatch DOMContentLoaded as soon as possible.
|
||||
var sasync = std.ArrayList(Script).init(alloc);
|
||||
defer sasync.deinit();
|
||||
|
||||
const root = parser.documentToNode(doc);
|
||||
const walker = Walker{};
|
||||
var next: ?*parser.Node = null;
|
||||
while (true) {
|
||||
next = try walker.get_next(root, next) orelse break;
|
||||
|
||||
// ignore non-element nodes.
|
||||
if (try parser.nodeType(next.?) != .element) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const e = parser.nodeToElement(next.?);
|
||||
|
||||
// ignore non-js script.
|
||||
const script = try Script.init(e) orelse continue;
|
||||
if (script.kind == .unknown) continue;
|
||||
|
||||
// Ignore the defer attribute b/c we analyze all scripts
|
||||
// after the document has been parsed.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script#defer
|
||||
|
||||
// TODO use fetchpriority
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script#fetchpriority
|
||||
|
||||
// > async
|
||||
// > For classic scripts, if the async attribute is present,
|
||||
// > then the classic script will be fetched in parallel to
|
||||
// > parsing and evaluated as soon as it is available.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script#async
|
||||
if (script.isasync) {
|
||||
try sasync.append(script);
|
||||
continue;
|
||||
}
|
||||
|
||||
// TODO handle for attribute
|
||||
// TODO handle event attribute
|
||||
|
||||
// TODO defer
|
||||
// > This Boolean attribute is set to indicate to a browser
|
||||
// > that the script is meant to be executed after the
|
||||
// > document has been parsed, but before firing
|
||||
// > DOMContentLoaded.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script#defer
|
||||
// defer allows us to load a script w/o blocking the rest of
|
||||
// evaluations.
|
||||
|
||||
// > Scripts without async, defer or type="module"
|
||||
// > attributes, as well as inline scripts without the
|
||||
// > type="module" attribute, are fetched and executed
|
||||
// > immediately before the browser continues to parse the
|
||||
// > page.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script#notes
|
||||
try parser.documentHTMLSetCurrentScript(html_doc, @ptrCast(e));
|
||||
self.evalScript(script) catch |err| log.warn("evaljs: {any}", .{err});
|
||||
try parser.documentHTMLSetCurrentScript(html_doc, null);
|
||||
}
|
||||
|
||||
// TODO wait for deferred scripts
|
||||
|
||||
// dispatch DOMContentLoaded before the transition to "complete",
|
||||
// at the point where all subresources apart from async script elements
|
||||
// have loaded.
|
||||
// https://html.spec.whatwg.org/#reporting-document-loading-status
|
||||
const evt = try parser.eventCreate();
|
||||
defer parser.eventDestroy(evt);
|
||||
|
||||
try parser.eventInit(evt, "DOMContentLoaded", .{ .bubbles = true, .cancelable = true });
|
||||
_ = try parser.eventTargetDispatchEvent(parser.toEventTarget(parser.DocumentHTML, html_doc), evt);
|
||||
|
||||
// eval async scripts.
|
||||
for (sasync.items) |s| {
|
||||
try parser.documentHTMLSetCurrentScript(html_doc, @ptrCast(s.element));
|
||||
self.evalScript(s) catch |err| log.warn("evaljs: {any}", .{err});
|
||||
try parser.documentHTMLSetCurrentScript(html_doc, null);
|
||||
}
|
||||
|
||||
// TODO wait for async scripts
|
||||
|
||||
// TODO set document.readyState to complete
|
||||
|
||||
// dispatch window.load event
|
||||
const loadevt = try parser.eventCreate();
|
||||
defer parser.eventDestroy(loadevt);
|
||||
|
||||
try parser.eventInit(loadevt, "load", .{});
|
||||
_ = try parser.eventTargetDispatchEvent(
|
||||
parser.toEventTarget(Window, &self.session.window),
|
||||
loadevt,
|
||||
);
|
||||
}
|
||||
|
||||
// evalScript evaluates the src in priority.
|
||||
// if no src is present, we evaluate the text source.
|
||||
// https://html.spec.whatwg.org/multipage/scripting.html#script-processing-model
|
||||
fn evalScript(self: *Page, s: Script) !void {
|
||||
const alloc = self.arena.allocator();
|
||||
|
||||
// https://html.spec.whatwg.org/multipage/webappapis.html#fetch-a-classic-script
|
||||
const opt_src = try parser.elementGetAttribute(s.element, "src");
|
||||
if (opt_src) |src| {
|
||||
log.debug("starting GET {s}", .{src});
|
||||
|
||||
self.fetchScript(s) catch |err| {
|
||||
switch (err) {
|
||||
FetchError.BadStatusCode => return err,
|
||||
|
||||
// TODO If el's result is null, then fire an event named error at
|
||||
// el, and return.
|
||||
FetchError.NoBody => return,
|
||||
|
||||
FetchError.JsErr => {}, // nothing to do here.
|
||||
else => return err,
|
||||
}
|
||||
};
|
||||
|
||||
// TODO If el's from an external file is true, then fire an event
|
||||
// named load at el.
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO handle charset attribute
|
||||
const opt_text = try parser.nodeTextContent(parser.elementToNode(s.element));
|
||||
if (opt_text) |text| {
|
||||
try s.eval(alloc, self.session.env, text);
|
||||
return;
|
||||
}
|
||||
|
||||
// nothing has been loaded.
|
||||
// TODO If el's result is null, then fire an event named error at
|
||||
// el, and return.
|
||||
}
|
||||
|
||||
const FetchError = error{
|
||||
BadStatusCode,
|
||||
NoBody,
|
||||
JsErr,
|
||||
};
|
||||
|
||||
// the caller owns the returned string
|
||||
fn fetchData(self: *Page, alloc: std.mem.Allocator, src: []const u8) ![]const u8 {
|
||||
log.debug("starting fetch {s}", .{src});
|
||||
|
||||
var buffer: [1024]u8 = undefined;
|
||||
var b: []u8 = buffer[0..];
|
||||
const u = try std.Uri.resolve_inplace(self.uri, src, &b);
|
||||
|
||||
var fetchres = try self.session.loader.get(alloc, u);
|
||||
defer fetchres.deinit();
|
||||
|
||||
const resp = fetchres.req.response;
|
||||
|
||||
log.info("fetch {any}: {d}", .{ u, resp.status });
|
||||
|
||||
if (resp.status != .ok) return FetchError.BadStatusCode;
|
||||
|
||||
// TODO check content-type
|
||||
const body = try fetchres.req.reader().readAllAlloc(alloc, 16 * 1024 * 1024);
|
||||
|
||||
// check no body
|
||||
if (body.len == 0) return FetchError.NoBody;
|
||||
|
||||
return body;
|
||||
}
|
||||
|
||||
// fetchScript sends a GET request to the src and executes the script
|
||||
// received.
|
||||
fn fetchScript(self: *Page, s: Script) !void {
|
||||
const alloc = self.arena.allocator();
|
||||
const body = try self.fetchData(alloc, s.src);
|
||||
defer alloc.free(body);
|
||||
|
||||
try s.eval(alloc, self.session.env, body);
|
||||
}
|
||||
|
||||
const Script = struct {
|
||||
element: *parser.Element,
|
||||
kind: Kind,
|
||||
isasync: bool,
|
||||
|
||||
src: []const u8,
|
||||
|
||||
const Kind = enum {
|
||||
unknown,
|
||||
javascript,
|
||||
module,
|
||||
};
|
||||
|
||||
fn init(e: *parser.Element) !?Script {
|
||||
// ignore non-script tags
|
||||
const tag = try parser.elementHTMLGetTagType(@as(*parser.ElementHTML, @ptrCast(e)));
|
||||
if (tag != .script) return null;
|
||||
|
||||
return .{
|
||||
.element = e,
|
||||
.kind = kind(try parser.elementGetAttribute(e, "type")),
|
||||
.isasync = try parser.elementGetAttribute(e, "async") != null,
|
||||
|
||||
.src = try parser.elementGetAttribute(e, "src") orelse "inline",
|
||||
};
|
||||
}
|
||||
|
||||
// > type
|
||||
// > Attribute is not set (default), an empty string, or a JavaScript MIME
|
||||
// > type indicates that the script is a "classic script", containing
|
||||
// > JavaScript code.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script#attribute_is_not_set_default_an_empty_string_or_a_javascript_mime_type
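// e.g. (illustrative) kind(null) == .javascript, kind("") == .javascript,
// kind("module") == .module, kind("application/json") == .unknown (the caller
// then skips the script).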
|
||||
fn kind(stype: ?[]const u8) Kind {
|
||||
if (stype == null or stype.?.len == 0) return .javascript;
|
||||
if (std.mem.eql(u8, stype.?, "application/javascript")) return .javascript;
|
||||
if (std.mem.eql(u8, stype.?, "module")) return .module;
|
||||
|
||||
return .unknown;
|
||||
}
|
||||
|
||||
fn eval(self: Script, alloc: std.mem.Allocator, env: Env, body: []const u8) !void {
|
||||
var try_catch: jsruntime.TryCatch = undefined;
|
||||
try_catch.init(env);
|
||||
defer try_catch.deinit();
|
||||
|
||||
const res = switch (self.kind) {
|
||||
.unknown => return error.UnknownScript,
|
||||
.javascript => env.exec(body, self.src),
|
||||
.module => env.module(body, self.src),
|
||||
} catch {
|
||||
if (try try_catch.err(alloc, env)) |msg| {
|
||||
defer alloc.free(msg);
|
||||
log.info("eval script {s}: {s}", .{ self.src, msg });
|
||||
}
|
||||
return FetchError.JsErr;
|
||||
};
|
||||
|
||||
if (builtin.mode == .Debug) {
|
||||
const msg = try res.toString(alloc, env);
|
||||
defer alloc.free(msg);
|
||||
log.debug("eval script {s}: {s}", .{ self.src, msg });
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,167 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
const JsObject = @import("../env.zig").Env.JsObject;
|
||||
|
||||
pub const Console = struct {
|
||||
// TODO: configurable writer
|
||||
timers: std.StringHashMapUnmanaged(u32) = .{},
|
||||
counts: std.StringHashMapUnmanaged(u32) = .{},
|
||||
|
||||
pub fn _lp(values: []JsObject, page: *Page) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
log.fatal(.console, "lightpanda", .{ .args = try serializeValues(values, page) });
|
||||
}
|
||||
|
||||
pub fn _log(values: []JsObject, page: *Page) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
log.info(.console, "info", .{ .args = try serializeValues(values, page) });
|
||||
}
|
||||
|
||||
pub fn _info(values: []JsObject, page: *Page) !void {
|
||||
return _log(values, page);
|
||||
}
|
||||
|
||||
pub fn _debug(values: []JsObject, page: *Page) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
log.debug(.console, "debug", .{ .args = try serializeValues(values, page) });
|
||||
}
|
||||
|
||||
pub fn _warn(values: []JsObject, page: *Page) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
log.warn(.console, "warn", .{ .args = try serializeValues(values, page) });
|
||||
}
|
||||
|
||||
pub fn _error(values: []JsObject, page: *Page) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
log.warn(.console, "error", .{
|
||||
.args = try serializeValues(values, page),
|
||||
.stack = page.stackTrace() catch "???",
|
||||
});
|
||||
}
|
||||
|
||||
pub fn _clear() void {}
|
||||
|
||||
pub fn _count(self: *Console, label_: ?[]const u8, page: *Page) !void {
|
||||
const label = label_ orelse "default";
|
||||
const gop = try self.counts.getOrPut(page.arena, label);
|
||||
|
||||
var current: u32 = 0;
|
||||
if (gop.found_existing) {
|
||||
current = gop.value_ptr.*;
|
||||
} else {
|
||||
gop.key_ptr.* = try page.arena.dupe(u8, label);
|
||||
}
|
||||
|
||||
const count = current + 1;
|
||||
gop.value_ptr.* = count;
|
||||
|
||||
log.info(.console, "count", .{ .label = label, .count = count });
|
||||
}
|
||||
|
||||
pub fn _countReset(self: *Console, label_: ?[]const u8) !void {
|
||||
const label = label_ orelse "default";
|
||||
const kv = self.counts.fetchRemove(label) orelse {
|
||||
log.info(.console, "invalid counter", .{ .label = label });
|
||||
return;
|
||||
};
|
||||
log.info(.console, "count reset", .{ .label = label, .count = kv.value });
|
||||
}
|
||||
|
||||
pub fn _time(self: *Console, label_: ?[]const u8, page: *Page) !void {
|
||||
const label = label_ orelse "default";
|
||||
const gop = try self.timers.getOrPut(page.arena, label);
|
||||
|
||||
if (gop.found_existing) {
|
||||
log.info(.console, "duplicate timer", .{ .label = label });
|
||||
return;
|
||||
}
|
||||
gop.key_ptr.* = try page.arena.dupe(u8, label);
|
||||
gop.value_ptr.* = timestamp();
|
||||
}
|
||||
|
||||
pub fn _timeLog(self: *Console, label_: ?[]const u8) void {
|
||||
const elapsed = timestamp();
|
||||
const label = label_ orelse "default";
|
||||
const start = self.timers.get(label) orelse {
|
||||
log.info(.console, "invalid timer", .{ .label = label });
|
||||
return;
|
||||
};
|
||||
log.info(.console, "timer", .{ .label = label, .elapsed = elapsed - start });
|
||||
}
|
||||
|
||||
pub fn _timeStop(self: *Console, label_: ?[]const u8) void {
|
||||
const elapsed = timestamp();
|
||||
const label = label_ orelse "default";
|
||||
const kv = self.timers.fetchRemove(label) orelse {
|
||||
log.info(.console, "invalid timer", .{ .label = label });
|
||||
return;
|
||||
};
|
||||
|
||||
log.warn(.console, "timer stop", .{ .label = label, .elapsed = elapsed - kv.value });
|
||||
}
|
||||
|
||||
pub fn _assert(assertion: JsObject, values: []JsObject, page: *Page) !void {
|
||||
if (assertion.isTruthy()) {
|
||||
return;
|
||||
}
|
||||
var serialized_values: []const u8 = "";
|
||||
if (values.len > 0) {
|
||||
serialized_values = try serializeValues(values, page);
|
||||
}
|
||||
log.info(.console, "assertion failed", .{ .values = serialized_values });
|
||||
}
    fn serializeValues(values: []JsObject, page: *Page) ![]const u8 {
        if (values.len == 0) {
            return "";
        }

        const arena = page.call_arena;
        const separator = log.separator();
        var arr: std.ArrayListUnmanaged(u8) = .{};

        for (values, 1..) |value, i| {
            try arr.appendSlice(arena, separator);
            try arr.writer(arena).print("{d}: ", .{i});
            const serialized = if (builtin.mode == .Debug) value.toDetailString() else value.toString();
            try arr.appendSlice(arena, try serialized);
        }
        return arr.items;
    }
};

fn timestamp() u32 {
    return @import("../../datetime.zig").timestamp();
}
@@ -1,71 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const Env = @import("../env.zig").Env;
const uuidv4 = @import("../../id.zig").uuidv4;

// https://w3c.github.io/webcrypto/#crypto-interface
pub const Crypto = struct {
    _not_empty: bool = true,

    pub fn _getRandomValues(_: *const Crypto, js_obj: Env.JsObject) !Env.JsObject {
        var into = try js_obj.toZig(Crypto, "getRandomValues", RandomValues);
        const buf = into.asBuffer();
        if (buf.len > 65_536) {
            return error.QuotaExceededError;
        }
        std.crypto.random.bytes(buf);
        return js_obj;
    }

    pub fn _randomUUID(_: *const Crypto) [36]u8 {
        var hex: [36]u8 = undefined;
        uuidv4(&hex);
        return hex;
    }
};
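
// RandomValues models the TypedArray views getRandomValues can fill; asBuffer
// reinterprets each typed slice as raw bytes (element count times element
// size) so std.crypto.random.bytes can write the whole view. Views larger
// than 65,536 bytes are rejected with QuotaExceededError, as the Web Crypto
// spec requires.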
const RandomValues = union(enum) {
    int8: []i8,
    uint8: []u8,
    int16: []i16,
    uint16: []u16,
    int32: []i32,
    uint32: []u32,
    int64: []i64,
    uint64: []u64,

    fn asBuffer(self: RandomValues) []u8 {
        return switch (self) {
            .int8 => |b| (@as([]u8, @ptrCast(b)))[0..b.len],
            .uint8 => |b| (@as([]u8, @ptrCast(b)))[0..b.len],
            .int16 => |b| (@as([]u8, @ptrCast(b)))[0 .. b.len * 2],
            .uint16 => |b| (@as([]u8, @ptrCast(b)))[0 .. b.len * 2],
            .int32 => |b| (@as([]u8, @ptrCast(b)))[0 .. b.len * 4],
            .uint32 => |b| (@as([]u8, @ptrCast(b)))[0 .. b.len * 4],
            .int64 => |b| (@as([]u8, @ptrCast(b)))[0 .. b.len * 8],
            .uint64 => |b| (@as([]u8, @ptrCast(b)))[0 .. b.len * 8],
        };
    }
};

const testing = @import("../../testing.zig");
test "Browser: Crypto" {
    try testing.htmlRunner("crypto.html");
}
File diff suppressed because it is too large
@@ -1,289 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const Allocator = std.mem.Allocator;

const CSSConstants = struct {
    const IMPORTANT = "!important";
    const URL_PREFIX = "url(";
};

const CSSParserState = enum {
    seek_name,
    in_name,
    seek_colon,
    seek_value,
    in_value,
    in_quoted_value,
    in_single_quoted_value,
    in_url,
    in_important,
};

const CSSDeclaration = struct {
    name: []const u8,
    value: []const u8,
    is_important: bool,
};

const CSSParser = @This();
state: CSSParserState,
name_start: usize,
name_end: usize,
value_start: usize,
position: usize,
paren_depth: usize,
escape_next: bool,

pub fn init() CSSParser {
    return .{
        .state = .seek_name,
        .name_start = 0,
        .name_end = 0,
        .value_start = 0,
        .position = 0,
        .paren_depth = 0,
        .escape_next = false,
    };
}
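
// Single pass over the declaration text: a ';' only terminates a declaration
// at paren depth 0 and outside quoted strings or url(...), a backslash
// escapes the next character, and finalize() flushes a trailing declaration
// that has no closing ';'.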
pub fn parseDeclarations(arena: Allocator, text: []const u8) ![]CSSDeclaration {
    var parser = init();
    var declarations: std.ArrayListUnmanaged(CSSDeclaration) = .empty;

    while (parser.position < text.len) {
        const c = text[parser.position];

        switch (parser.state) {
            .seek_name => {
                if (!std.ascii.isWhitespace(c)) {
                    parser.name_start = parser.position;
                    parser.state = .in_name;
                    continue;
                }
            },
            .in_name => {
                if (c == ':') {
                    parser.name_end = parser.position;
                    parser.state = .seek_value;
                } else if (std.ascii.isWhitespace(c)) {
                    parser.name_end = parser.position;
                    parser.state = .seek_colon;
                }
            },
            .seek_colon => {
                if (c == ':') {
                    parser.state = .seek_value;
                } else if (!std.ascii.isWhitespace(c)) {
                    parser.state = .seek_name;
                    continue;
                }
            },
            .seek_value => {
                if (!std.ascii.isWhitespace(c)) {
                    parser.value_start = parser.position;
                    if (c == '"') {
                        parser.state = .in_quoted_value;
                    } else if (c == '\'') {
                        parser.state = .in_single_quoted_value;
                    } else if (c == 'u' and parser.position + CSSConstants.URL_PREFIX.len <= text.len and std.mem.startsWith(u8, text[parser.position..], CSSConstants.URL_PREFIX)) {
                        parser.state = .in_url;
                        parser.paren_depth = 1;
                        parser.position += 3;
                    } else {
                        parser.state = .in_value;
                        continue;
                    }
                }
            },
            .in_value => {
                if (parser.escape_next) {
                    parser.escape_next = false;
                } else if (c == '\\') {
                    parser.escape_next = true;
                } else if (c == '(') {
                    parser.paren_depth += 1;
                } else if (c == ')' and parser.paren_depth > 0) {
                    parser.paren_depth -= 1;
                } else if (c == ';' and parser.paren_depth == 0) {
                    try parser.finishDeclaration(arena, &declarations, text);
                    parser.state = .seek_name;
                }
            },
            .in_quoted_value => {
                if (parser.escape_next) {
                    parser.escape_next = false;
                } else if (c == '\\') {
                    parser.escape_next = true;
                } else if (c == '"') {
                    parser.state = .in_value;
                }
            },
            .in_single_quoted_value => {
                if (parser.escape_next) {
                    parser.escape_next = false;
                } else if (c == '\\') {
                    parser.escape_next = true;
                } else if (c == '\'') {
                    parser.state = .in_value;
                }
            },
            .in_url => {
                if (parser.escape_next) {
                    parser.escape_next = false;
                } else if (c == '\\') {
                    parser.escape_next = true;
                } else if (c == '(') {
                    parser.paren_depth += 1;
                } else if (c == ')') {
                    parser.paren_depth -= 1;
                    if (parser.paren_depth == 0) {
                        parser.state = .in_value;
                    }
                }
            },
            .in_important => {},
        }

        parser.position += 1;
    }

    try parser.finalize(arena, &declarations, text);

    return declarations.items;
}

fn finishDeclaration(self: *CSSParser, arena: Allocator, declarations: *std.ArrayListUnmanaged(CSSDeclaration), text: []const u8) !void {
    const name = std.mem.trim(u8, text[self.name_start..self.name_end], &std.ascii.whitespace);
    if (name.len == 0) return;

    const raw_value = text[self.value_start..self.position];
    const value = std.mem.trim(u8, raw_value, &std.ascii.whitespace);

    var final_value = value;
    var is_important = false;

    if (std.mem.endsWith(u8, value, CSSConstants.IMPORTANT)) {
        is_important = true;
        final_value = std.mem.trimRight(u8, value[0 .. value.len - CSSConstants.IMPORTANT.len], &std.ascii.whitespace);
    }

    try declarations.append(arena, .{
        .name = name,
        .value = final_value,
        .is_important = is_important,
    });
}

fn finalize(self: *CSSParser, arena: Allocator, declarations: *std.ArrayListUnmanaged(CSSDeclaration), text: []const u8) !void {
    if (self.state != .in_value) {
        return;
    }
    return self.finishDeclaration(arena, declarations, text);
}

const testing = @import("../../testing.zig");
test "Browser: CSS.Parser - Simple property" {
    defer testing.reset();

    const text = "color: red;";
    const allocator = testing.arena_allocator;

    const declarations = try CSSParser.parseDeclarations(allocator, text);

    try testing.expectEqual(1, declarations.len);
    try testing.expectEqual("color", declarations[0].name);
    try testing.expectEqual("red", declarations[0].value);
    try testing.expectEqual(false, declarations[0].is_important);
}

test "Browser: CSS.Parser - Property with !important" {
    defer testing.reset();
    const text = "margin: 10px !important;";
    const allocator = testing.arena_allocator;

    const declarations = try CSSParser.parseDeclarations(allocator, text);

    try testing.expectEqual(1, declarations.len);
    try testing.expectEqual("margin", declarations[0].name);
    try testing.expectEqual("10px", declarations[0].value);
    try testing.expectEqual(true, declarations[0].is_important);
}

test "Browser: CSS.Parser - Multiple properties" {
    defer testing.reset();
    const text = "color: red; font-size: 12px; margin: 5px !important;";
    const allocator = testing.arena_allocator;

    const declarations = try CSSParser.parseDeclarations(allocator, text);

    try testing.expect(declarations.len == 3);

    try testing.expectEqual("color", declarations[0].name);
    try testing.expectEqual("red", declarations[0].value);
    try testing.expectEqual(false, declarations[0].is_important);

    try testing.expectEqual("font-size", declarations[1].name);
    try testing.expectEqual("12px", declarations[1].value);
    try testing.expectEqual(false, declarations[1].is_important);

    try testing.expectEqual("margin", declarations[2].name);
    try testing.expectEqual("5px", declarations[2].value);
    try testing.expectEqual(true, declarations[2].is_important);
}

test "Browser: CSS.Parser - Quoted value with semicolon" {
    defer testing.reset();
    const text = "content: \"Hello; world!\";";
    const allocator = testing.arena_allocator;

    const declarations = try CSSParser.parseDeclarations(allocator, text);

    try testing.expectEqual(1, declarations.len);
    try testing.expectEqual("content", declarations[0].name);
    try testing.expectEqual("\"Hello; world!\"", declarations[0].value);
    try testing.expectEqual(false, declarations[0].is_important);
}

test "Browser: CSS.Parser - URL value" {
    defer testing.reset();
    const text = "background-image: url(\"test.png\");";
    const allocator = testing.arena_allocator;

    const declarations = try CSSParser.parseDeclarations(allocator, text);

    try testing.expectEqual(1, declarations.len);
    try testing.expectEqual("background-image", declarations[0].name);
    try testing.expectEqual("url(\"test.png\")", declarations[0].value);
    try testing.expectEqual(false, declarations[0].is_important);
}

test "Browser: CSS.Parser - Whitespace handling" {
    defer testing.reset();
    const text = " color : purple ; margin : 10px ; ";
    const allocator = testing.arena_allocator;

    const declarations = try CSSParser.parseDeclarations(allocator, text);

    try testing.expectEqual(2, declarations.len);
    try testing.expectEqual("color", declarations[0].name);
    try testing.expectEqual("purple", declarations[0].value);
    try testing.expectEqual("margin", declarations[1].name);
    try testing.expectEqual("10px", declarations[1].value);
}
@@ -1,41 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const CSSStyleSheet = @import("CSSStyleSheet.zig");

pub const Interfaces = .{
    CSSRule,
    CSSImportRule,
};

// https://developer.mozilla.org/en-US/docs/Web/API/CSSRule
const CSSRule = @This();
css_text: []const u8,
parent_rule: ?*CSSRule = null,
parent_stylesheet: ?*CSSStyleSheet = null,

pub const CSSImportRule = struct {
    pub const prototype = *CSSRule;
    href: []const u8,
    layer_name: ?[]const u8,
    media: void,
    style_sheet: CSSStyleSheet,
    supports_text: ?[]const u8,
};
@@ -1,51 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const CSSRule = @import("CSSRule.zig");

const CSSImportRule = CSSRule.CSSImportRule;

const CSSRuleList = @This();
list: std.ArrayListUnmanaged([]const u8),

pub fn constructor() CSSRuleList {
    return .{ .list = .empty };
}

pub fn _item(self: *CSSRuleList, _index: u32) ?CSSRule {
    const index: usize = @intCast(_index);

    if (index > self.list.items.len) {
        return null;
    }

    // todo: for now, just return null.
    // this depends on properly parsing CSSRule
    return null;
}

pub fn get_length(self: *CSSRuleList) u32 {
    return @intCast(self.list.items.len);
}

const testing = @import("../../testing.zig");
test "Browser: CSS.CSSRuleList" {
    try testing.htmlRunner("cssom/css_rule_list.html");
}
@@ -1,958 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const Page = @import("../page.zig").Page;
const CSSRule = @import("CSSRule.zig");
const CSSParser = @import("CSSParser.zig");

const Property = struct {
    value: []const u8,
    priority: bool,
};

const CSSStyleDeclaration = @This();

properties: std.StringArrayHashMapUnmanaged(Property),

pub const empty: CSSStyleDeclaration = .{
    .properties = .empty,
};

pub fn get_cssFloat(self: *const CSSStyleDeclaration) []const u8 {
    return self._getPropertyValue("float");
}

pub fn set_cssFloat(self: *CSSStyleDeclaration, value: ?[]const u8, page: *Page) !void {
    const final_value = value orelse "";
    return self._setProperty("float", final_value, null, page);
}

pub fn get_cssText(self: *const CSSStyleDeclaration, page: *Page) ![]const u8 {
    var buffer: std.ArrayListUnmanaged(u8) = .empty;
    const writer = buffer.writer(page.call_arena);
    var it = self.properties.iterator();
    while (it.next()) |entry| {
        const name = entry.key_ptr.*;
        const property = entry.value_ptr;
        const escaped = try escapeCSSValue(page.call_arena, property.value);
        try writer.print("{s}: {s}", .{ name, escaped });
        if (property.priority) {
            try writer.writeAll(" !important; ");
        } else {
            try writer.writeAll("; ");
        }
    }
    return buffer.items;
}

// TODO Propagate also upward to parent node
pub fn set_cssText(self: *CSSStyleDeclaration, text: []const u8, page: *Page) !void {
    self.properties.clearRetainingCapacity();

    // call_arena is safe here, because _setProperty will dupe the name
    // using the page's longer-living arena.
    const declarations = try CSSParser.parseDeclarations(page.call_arena, text);

    for (declarations) |decl| {
        if (!isValidPropertyName(decl.name)) {
            continue;
        }
        const priority: ?[]const u8 = if (decl.is_important) "important" else null;
        try self._setProperty(decl.name, decl.value, priority, page);
    }
}

pub fn get_length(self: *const CSSStyleDeclaration) usize {
    return self.properties.count();
}

pub fn get_parentRule(_: *const CSSStyleDeclaration) ?CSSRule {
    return null;
}

pub fn _getPropertyPriority(self: *const CSSStyleDeclaration, name: []const u8) []const u8 {
    const property = self.properties.getPtr(name) orelse return "";
    return if (property.priority) "important" else "";
}

// TODO should handle properly shorthand properties and canonical forms
pub fn _getPropertyValue(self: *const CSSStyleDeclaration, name: []const u8) []const u8 {
    if (self.properties.getPtr(name)) |property| {
        return property.value;
    }

    // default to everything being visible (unless it's been explicitly set)
    if (std.mem.eql(u8, name, "visibility")) {
        return "visible";
    }

    return "";
}

pub fn _item(self: *const CSSStyleDeclaration, index: usize) []const u8 {
    const values = self.properties.entries.items(.key);
    if (index >= values.len) {
        return "";
    }
    return values[index];
}

pub fn _removeProperty(self: *CSSStyleDeclaration, name: []const u8) ![]const u8 {
    const property = self.properties.fetchOrderedRemove(name) orelse return "";
    return property.value.value;
}

pub fn _setProperty(self: *CSSStyleDeclaration, name: []const u8, value: []const u8, priority: ?[]const u8, page: *Page) !void {
    const gop = try self.properties.getOrPut(page.arena, name);
    if (!gop.found_existing) {
        const owned_name = try page.arena.dupe(u8, name);
        gop.key_ptr.* = owned_name;
    }

    const owned_value = try page.arena.dupe(u8, value);
    const is_important = priority != null and std.ascii.eqlIgnoreCase(priority.?, "important");
    gop.value_ptr.* = .{ .value = owned_value, .priority = is_important };
}

pub fn named_get(self: *const CSSStyleDeclaration, name: []const u8, _: *bool) []const u8 {
    return self._getPropertyValue(name);
}

pub fn named_set(self: *CSSStyleDeclaration, name: []const u8, value: []const u8, _: *bool, page: *Page) !void {
    return self._setProperty(name, value, null, page);
}
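
// Accepts an optional sign, digits with at most one decimal point, an
// optional e/E exponent, and then either nothing or a unit suffix that must
// appear in CSSKeywords.UNITS. Leading or trailing whitespace makes the
// value invalid.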
fn isNumericWithUnit(value: []const u8) bool {
    if (value.len == 0) {
        return false;
    }

    const first = value[0];

    if (!std.ascii.isDigit(first) and first != '+' and first != '-' and first != '.') {
        return false;
    }

    var i: usize = 0;
    var has_digit = false;
    var decimal_point = false;

    while (i < value.len) : (i += 1) {
        const c = value[i];
        if (std.ascii.isDigit(c)) {
            has_digit = true;
        } else if (c == '.' and !decimal_point) {
            decimal_point = true;
        } else if ((c == 'e' or c == 'E') and has_digit) {
            if (i + 1 >= value.len) return false;
            if (value[i + 1] != '+' and value[i + 1] != '-' and !std.ascii.isDigit(value[i + 1])) break;
            i += 1;
            if (value[i] == '+' or value[i] == '-') {
                i += 1;
            }
            var has_exp_digits = false;
            while (i < value.len and std.ascii.isDigit(value[i])) : (i += 1) {
                has_exp_digits = true;
            }
            if (!has_exp_digits) return false;
            break;
        } else if (c != '-' and c != '+') {
            break;
        }
    }

    if (!has_digit) {
        return false;
    }

    if (i == value.len) {
        return true;
    }

    const unit = value[i..];
    return CSSKeywords.isValidUnit(unit);
}

fn isHexColor(value: []const u8) bool {
    if (value.len == 0) {
        return false;
    }
    if (value[0] != '#') {
        return false;
    }

    const hex_part = value[1..];
    if (hex_part.len != 3 and hex_part.len != 6 and hex_part.len != 8) {
        return false;
    }

    for (hex_part) |c| {
        if (!std.ascii.isHex(c)) {
            return false;
        }
    }

    return true;
}

fn isMultiValueProperty(value: []const u8) bool {
    var parts = std.mem.splitAny(u8, value, " ");
    var multi_value_parts: usize = 0;
    var all_parts_valid = true;

    while (parts.next()) |part| {
        if (part.len == 0) continue;
        multi_value_parts += 1;

        if (isNumericWithUnit(part)) {
            continue;
        }
        if (isHexColor(part)) {
            continue;
        }
        if (CSSKeywords.isKnownKeyword(part)) {
            continue;
        }
        if (CSSKeywords.startsWithFunction(part)) {
            continue;
        }

        all_parts_valid = false;
        break;
    }

    return multi_value_parts >= 2 and all_parts_valid;
}

fn isAlreadyQuoted(value: []const u8) bool {
    return value.len >= 2 and ((value[0] == '"' and value[value.len - 1] == '"') or
        (value[0] == '\'' and value[value.len - 1] == '\''));
}

fn isValidPropertyName(name: []const u8) bool {
    if (name.len == 0) return false;

    if (std.mem.startsWith(u8, name, "--")) {
        if (name.len == 2) return false;
        for (name[2..]) |c| {
            if (!std.ascii.isAlphanumeric(c) and c != '-' and c != '_') {
                return false;
            }
        }
        return true;
    }

    const first_char = name[0];
    if (!std.ascii.isAlphabetic(first_char) and first_char != '-') {
        return false;
    }

    if (first_char == '-') {
        if (name.len < 2) return false;

        if (!std.ascii.isAlphabetic(name[1])) {
            return false;
        }

        for (name[2..]) |c| {
            if (!std.ascii.isAlphanumeric(c) and c != '-') {
                return false;
            }
        }
    } else {
        for (name[1..]) |c| {
            if (!std.ascii.isAlphanumeric(c) and c != '-') {
                return false;
            }
        }
    }

    return true;
}

fn extractImportant(value: []const u8) struct { value: []const u8, is_important: bool } {
    const trimmed = std.mem.trim(u8, value, &std.ascii.whitespace);

    if (std.mem.endsWith(u8, trimmed, "!important")) {
        const clean_value = std.mem.trimRight(u8, trimmed[0 .. trimmed.len - 10], &std.ascii.whitespace);
        return .{ .value = clean_value, .is_important = true };
    }

    return .{ .value = trimmed, .is_important = false };
}

fn needsQuotes(value: []const u8) bool {
    if (value.len == 0) return true;
    if (isAlreadyQuoted(value)) return false;

    if (CSSKeywords.containsSpecialChar(value)) {
        return true;
    }

    if (std.mem.indexOfScalar(u8, value, ' ') == null) {
        return false;
    }

    const is_url = std.mem.startsWith(u8, value, "url(");
    const is_function = CSSKeywords.startsWithFunction(value);

    return !isMultiValueProperty(value) and
        !is_url and
        !is_function;
}
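
// Wraps the value in double quotes when needsQuotes() says so: '"' and '\'
// are backslash-escaped, newline/CR/tab become the CSS escapes "\A ", "\D "
// and "\9 ", and other control characters are written as "\{hex}", followed
// by a space when the next character is a hex digit.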
fn escapeCSSValue(arena: std.mem.Allocator, value: []const u8) ![]const u8 {
    if (!needsQuotes(value)) {
        return value;
    }
    var out: std.ArrayListUnmanaged(u8) = .empty;

    // We'll need at least this much space, +2 for the quotes
    try out.ensureTotalCapacity(arena, value.len + 2);
    const writer = out.writer(arena);

    try writer.writeByte('"');

    for (value, 0..) |c, i| {
        switch (c) {
            '"' => try writer.writeAll("\\\""),
            '\\' => try writer.writeAll("\\\\"),
            '\n' => try writer.writeAll("\\A "),
            '\r' => try writer.writeAll("\\D "),
            '\t' => try writer.writeAll("\\9 "),
            0...8, 11, 12, 14...31, 127 => {
                try writer.print("\\{x}", .{c});
                if (i + 1 < value.len and std.ascii.isHex(value[i + 1])) {
                    try writer.writeByte(' ');
                }
            },
            else => try writer.writeByte(c),
        }
    }

    try writer.writeByte('"');
    return out.items;
}

fn isKnownKeyword(value: []const u8) bool {
    return CSSKeywords.isKnownKeyword(value);
}

fn containsSpecialChar(value: []const u8) bool {
    return CSSKeywords.containsSpecialChar(value);
}

const CSSKeywords = struct {
    const BORDER_STYLES = [_][]const u8{
        "none", "solid", "dotted", "dashed", "double", "groove", "ridge", "inset", "outset",
    };

    const COLOR_NAMES = [_][]const u8{
        "black", "white", "red", "green", "blue", "yellow", "purple", "gray", "transparent",
        "currentColor", "inherit",
    };

    const POSITION_KEYWORDS = [_][]const u8{
        "auto", "center", "left", "right", "top", "bottom",
    };

    const BACKGROUND_REPEAT = [_][]const u8{
        "repeat", "no-repeat", "repeat-x", "repeat-y", "space", "round",
    };

    const FONT_STYLES = [_][]const u8{
        "normal", "italic", "oblique", "bold", "bolder", "lighter",
    };

    const FONT_SIZES = [_][]const u8{
        "xx-small", "x-small", "small", "medium", "large", "x-large", "xx-large",
        "smaller", "larger",
    };

    const FONT_FAMILIES = [_][]const u8{
        "serif", "sans-serif", "monospace", "cursive", "fantasy", "system-ui",
    };

    const CSS_GLOBAL = [_][]const u8{
        "initial", "inherit", "unset", "revert",
    };

    const DISPLAY_VALUES = [_][]const u8{
        "block", "inline", "inline-block", "flex", "grid", "none",
    };

    const UNITS = [_][]const u8{
        // LENGTH
        "px", "em", "rem", "vw", "vh", "vmin", "vmax", "%", "pt", "pc", "in", "cm", "mm",
        "ex", "ch", "fr",

        // ANGLE
        "deg", "rad", "grad", "turn",

        // TIME
        "s", "ms",

        // FREQUENCY
        "hz", "khz",

        // RESOLUTION
        "dpi", "dpcm",
        "dppx",
    };

    const SPECIAL_CHARS = [_]u8{
        '"', '\'', ';', '{', '}', '\\', '<', '>', '/', '\n', '\t', '\r', '\x00', '\x7F',
    };

    const FUNCTIONS = [_][]const u8{
        "rgb(", "rgba(", "hsl(", "hsla(", "url(", "calc(", "var(", "attr(",
        "linear-gradient(", "radial-gradient(", "conic-gradient(", "translate(", "rotate(", "scale(", "skew(", "matrix(",
    };

    const KEYWORDS = BORDER_STYLES ++ COLOR_NAMES ++ POSITION_KEYWORDS ++
        BACKGROUND_REPEAT ++ FONT_STYLES ++ FONT_SIZES ++ FONT_FAMILIES ++
        CSS_GLOBAL ++ DISPLAY_VALUES;

    const MAX_KEYWORD_LEN = lengthOfLongestValue(&KEYWORDS);

    pub fn isKnownKeyword(value: []const u8) bool {
        if (value.len > MAX_KEYWORD_LEN) {
            return false;
        }
        var buf: [MAX_KEYWORD_LEN]u8 = undefined;
        const normalized = std.ascii.lowerString(&buf, value);

        for (KEYWORDS) |keyword| {
            if (std.ascii.eqlIgnoreCase(normalized, keyword)) {
                return true;
            }
        }

        return false;
    }

    pub fn containsSpecialChar(value: []const u8) bool {
        return std.mem.indexOfAny(u8, value, &SPECIAL_CHARS) != null;
    }

    const MAX_UNIT_LEN = lengthOfLongestValue(&UNITS);

    pub fn isValidUnit(unit: []const u8) bool {
        if (unit.len > MAX_UNIT_LEN) {
            return false;
        }
        var buf: [MAX_UNIT_LEN]u8 = undefined;
        const normalized = std.ascii.lowerString(&buf, unit);

        for (UNITS) |u| {
            if (std.mem.eql(u8, normalized, u)) {
                return true;
            }
        }
        return false;
    }

    pub fn startsWithFunction(value: []const u8) bool {
        const pos = std.mem.indexOfScalar(u8, value, '(') orelse return false;
        if (pos == 0) return false;

        if (std.mem.indexOfScalarPos(u8, value, pos, ')') == null) {
            return false;
        }
        const function_name = value[0..pos];
        return isValidFunctionName(function_name);
    }

    fn isValidFunctionName(name: []const u8) bool {
        if (name.len == 0) return false;

        const first = name[0];
        if (!std.ascii.isAlphabetic(first) and first != '_' and first != '-') {
            return false;
        }

        for (name[1..]) |c| {
            if (!std.ascii.isAlphanumeric(c) and c != '_' and c != '-') {
                return false;
            }
        }

        return true;
    }
};

fn lengthOfLongestValue(values: []const []const u8) usize {
    var max: usize = 0;
    for (values) |v| {
        max = @max(v.len, max);
    }
    return max;
}

const testing = @import("../../testing.zig");
test "Browser: CSS.StyleDeclaration" {
    try testing.htmlRunner("cssom/css_style_declaration.html");
}

test "Browser: CSS.StyleDeclaration: isNumericWithUnit - valid numbers with units" {
    try testing.expect(isNumericWithUnit("10px"));
    try testing.expect(isNumericWithUnit("3.14em"));
    try testing.expect(isNumericWithUnit("-5rem"));
    try testing.expect(isNumericWithUnit("+12.5%"));
    try testing.expect(isNumericWithUnit("0vh"));
    try testing.expect(isNumericWithUnit(".5vw"));
}

test "Browser: CSS.StyleDeclaration: isNumericWithUnit - scientific notation" {
    try testing.expect(isNumericWithUnit("1e5px"));
    try testing.expect(isNumericWithUnit("2.5E-3em"));
    try testing.expect(isNumericWithUnit("1e+2rem"));
    try testing.expect(isNumericWithUnit("-3.14e10px"));
}

test "Browser: CSS.StyleDeclaration: isNumericWithUnit - edge cases and invalid inputs" {
    try testing.expect(!isNumericWithUnit(""));

    try testing.expect(!isNumericWithUnit("px"));
    try testing.expect(!isNumericWithUnit("--px"));
    try testing.expect(!isNumericWithUnit(".px"));

    try testing.expect(!isNumericWithUnit("1e"));
    try testing.expect(!isNumericWithUnit("1epx"));
    try testing.expect(!isNumericWithUnit("1e+"));
    try testing.expect(!isNumericWithUnit("1e+px"));

    try testing.expect(!isNumericWithUnit("1.2.3px"));

    try testing.expect(!isNumericWithUnit("10xyz"));
    try testing.expect(!isNumericWithUnit("5invalid"));

    try testing.expect(isNumericWithUnit("10"));
    try testing.expect(isNumericWithUnit("3.14"));
    try testing.expect(isNumericWithUnit("-5"));
}

test "Browser: CSS.StyleDeclaration: isHexColor - valid hex colors" {
    try testing.expect(isHexColor("#000"));
    try testing.expect(isHexColor("#fff"));
    try testing.expect(isHexColor("#123456"));
    try testing.expect(isHexColor("#abcdef"));
    try testing.expect(isHexColor("#ABCDEF"));
    try testing.expect(isHexColor("#12345678"));
}

test "Browser: CSS.StyleDeclaration: isHexColor - invalid hex colors" {
    try testing.expect(!isHexColor(""));
    try testing.expect(!isHexColor("#"));
    try testing.expect(!isHexColor("000"));
    try testing.expect(!isHexColor("#00"));
    try testing.expect(!isHexColor("#0000"));
    try testing.expect(!isHexColor("#00000"));
    try testing.expect(!isHexColor("#0000000"));
    try testing.expect(!isHexColor("#000000000"));
    try testing.expect(!isHexColor("#gggggg"));
    try testing.expect(!isHexColor("#123xyz"));
}

test "Browser: CSS.StyleDeclaration: isMultiValueProperty - valid multi-value properties" {
    try testing.expect(isMultiValueProperty("10px 20px"));
    try testing.expect(isMultiValueProperty("solid red"));
    try testing.expect(isMultiValueProperty("#fff black"));
    try testing.expect(isMultiValueProperty("1em 2em 3em 4em"));
    try testing.expect(isMultiValueProperty("rgb(255,0,0) solid"));
}

test "Browser: CSS.StyleDeclaration: isMultiValueProperty - invalid multi-value properties" {
    try testing.expect(!isMultiValueProperty(""));
    try testing.expect(!isMultiValueProperty("10px"));
    try testing.expect(!isMultiValueProperty("invalid unknown"));
    try testing.expect(!isMultiValueProperty("10px invalid"));
    try testing.expect(!isMultiValueProperty(" "));
}

test "Browser: CSS.StyleDeclaration: isAlreadyQuoted - various quoting scenarios" {
    try testing.expect(isAlreadyQuoted("\"hello\""));
    try testing.expect(isAlreadyQuoted("'world'"));
    try testing.expect(isAlreadyQuoted("\"\""));
    try testing.expect(isAlreadyQuoted("''"));

    try testing.expect(!isAlreadyQuoted(""));
    try testing.expect(!isAlreadyQuoted("hello"));
    try testing.expect(!isAlreadyQuoted("\""));
    try testing.expect(!isAlreadyQuoted("'"));
    try testing.expect(!isAlreadyQuoted("\"hello'"));
    try testing.expect(!isAlreadyQuoted("'hello\""));
    try testing.expect(!isAlreadyQuoted("\"hello"));
    try testing.expect(!isAlreadyQuoted("hello\""));
}

test "Browser: CSS.StyleDeclaration: isValidPropertyName - valid property names" {
    try testing.expect(isValidPropertyName("color"));
    try testing.expect(isValidPropertyName("background-color"));
    try testing.expect(isValidPropertyName("-webkit-transform"));
    try testing.expect(isValidPropertyName("font-size"));
    try testing.expect(isValidPropertyName("margin-top"));
    try testing.expect(isValidPropertyName("z-index"));
    try testing.expect(isValidPropertyName("line-height"));
}

test "Browser: CSS.StyleDeclaration: isValidPropertyName - invalid property names" {
    try testing.expect(!isValidPropertyName(""));
    try testing.expect(!isValidPropertyName("123color"));
    try testing.expect(!isValidPropertyName("color!"));
    try testing.expect(!isValidPropertyName("color space"));
    try testing.expect(!isValidPropertyName("@color"));
    try testing.expect(!isValidPropertyName("color.test"));
    try testing.expect(!isValidPropertyName("color_test"));
}

test "Browser: CSS.StyleDeclaration: extractImportant - with and without !important" {
    var result = extractImportant("red !important");
    try testing.expect(result.is_important);
    try testing.expectEqual("red", result.value);

    result = extractImportant("blue");
    try testing.expect(!result.is_important);
    try testing.expectEqual("blue", result.value);

    result = extractImportant(" green !important ");
    try testing.expect(result.is_important);
    try testing.expectEqual("green", result.value);

    result = extractImportant("!important");
    try testing.expect(result.is_important);
    try testing.expectEqual("", result.value);

    result = extractImportant("important");
    try testing.expect(!result.is_important);
    try testing.expectEqual("important", result.value);
}

test "Browser: CSS.StyleDeclaration: needsQuotes - various scenarios" {
    try testing.expect(needsQuotes(""));
    try testing.expect(needsQuotes("hello world"));
    try testing.expect(needsQuotes("test;"));
    try testing.expect(needsQuotes("a{b}"));
    try testing.expect(needsQuotes("test\"quote"));

    try testing.expect(!needsQuotes("\"already quoted\""));
    try testing.expect(!needsQuotes("'already quoted'"));
    try testing.expect(!needsQuotes("url(image.png)"));
    try testing.expect(!needsQuotes("rgb(255, 0, 0)"));
    try testing.expect(!needsQuotes("10px 20px"));
    try testing.expect(!needsQuotes("simple"));
}

test "Browser: CSS.StyleDeclaration: escapeCSSValue - escaping various characters" {
    const allocator = testing.arena_allocator;

    var result = try escapeCSSValue(allocator, "simple");
    try testing.expectEqual("simple", result);

    result = try escapeCSSValue(allocator, "\"already quoted\"");
    try testing.expectEqual("\"already quoted\"", result);

    result = try escapeCSSValue(allocator, "test\"quote");
    try testing.expectEqual("\"test\\\"quote\"", result);

    result = try escapeCSSValue(allocator, "test\nline");
    try testing.expectEqual("\"test\\A line\"", result);

    result = try escapeCSSValue(allocator, "test\\back");
    try testing.expectEqual("\"test\\\\back\"", result);
}

test "Browser: CSS.StyleDeclaration: CSSKeywords.isKnownKeyword - case sensitivity" {
    try testing.expect(CSSKeywords.isKnownKeyword("red"));
    try testing.expect(CSSKeywords.isKnownKeyword("solid"));
    try testing.expect(CSSKeywords.isKnownKeyword("center"));
    try testing.expect(CSSKeywords.isKnownKeyword("inherit"));

    try testing.expect(CSSKeywords.isKnownKeyword("RED"));
    try testing.expect(CSSKeywords.isKnownKeyword("Red"));
    try testing.expect(CSSKeywords.isKnownKeyword("SOLID"));
    try testing.expect(CSSKeywords.isKnownKeyword("Center"));

    try testing.expect(!CSSKeywords.isKnownKeyword("invalid"));
    try testing.expect(!CSSKeywords.isKnownKeyword("unknown"));
    try testing.expect(!CSSKeywords.isKnownKeyword(""));
}

test "Browser: CSS.StyleDeclaration: CSSKeywords.containsSpecialChar - various special characters" {
    try testing.expect(CSSKeywords.containsSpecialChar("test\"quote"));
    try testing.expect(CSSKeywords.containsSpecialChar("test'quote"));
    try testing.expect(CSSKeywords.containsSpecialChar("test;end"));
    try testing.expect(CSSKeywords.containsSpecialChar("test{brace"));
    try testing.expect(CSSKeywords.containsSpecialChar("test}brace"));
    try testing.expect(CSSKeywords.containsSpecialChar("test\\back"));
    try testing.expect(CSSKeywords.containsSpecialChar("test<angle"));
    try testing.expect(CSSKeywords.containsSpecialChar("test>angle"));
    try testing.expect(CSSKeywords.containsSpecialChar("test/slash"));

    try testing.expect(!CSSKeywords.containsSpecialChar("normal-text"));
    try testing.expect(!CSSKeywords.containsSpecialChar("text123"));
    try testing.expect(!CSSKeywords.containsSpecialChar(""));
}

test "Browser: CSS.StyleDeclaration: CSSKeywords.isValidUnit - various units" {
    try testing.expect(CSSKeywords.isValidUnit("px"));
    try testing.expect(CSSKeywords.isValidUnit("em"));
    try testing.expect(CSSKeywords.isValidUnit("rem"));
    try testing.expect(CSSKeywords.isValidUnit("%"));

    try testing.expect(CSSKeywords.isValidUnit("deg"));
    try testing.expect(CSSKeywords.isValidUnit("rad"));

    try testing.expect(CSSKeywords.isValidUnit("s"));
    try testing.expect(CSSKeywords.isValidUnit("ms"));

    try testing.expect(CSSKeywords.isValidUnit("PX"));

    try testing.expect(!CSSKeywords.isValidUnit("invalid"));
    try testing.expect(!CSSKeywords.isValidUnit(""));
}

test "Browser: CSS.StyleDeclaration: CSSKeywords.startsWithFunction - function detection" {
    try testing.expect(CSSKeywords.startsWithFunction("rgb(255, 0, 0)"));
    try testing.expect(CSSKeywords.startsWithFunction("rgba(255, 0, 0, 0.5)"));
    try testing.expect(CSSKeywords.startsWithFunction("url(image.png)"));
    try testing.expect(CSSKeywords.startsWithFunction("calc(100% - 20px)"));
    try testing.expect(CSSKeywords.startsWithFunction("var(--custom-property)"));
    try testing.expect(CSSKeywords.startsWithFunction("linear-gradient(to right, red, blue)"));

    try testing.expect(CSSKeywords.startsWithFunction("custom-function(args)"));
    try testing.expect(CSSKeywords.startsWithFunction("unknown(test)"));

    try testing.expect(!CSSKeywords.startsWithFunction("not-a-function"));
    try testing.expect(!CSSKeywords.startsWithFunction("missing-paren)"));
    try testing.expect(!CSSKeywords.startsWithFunction("missing-close("));
    try testing.expect(!CSSKeywords.startsWithFunction(""));
    try testing.expect(!CSSKeywords.startsWithFunction("rgb"));
}

test "Browser: CSS.StyleDeclaration: isNumericWithUnit - whitespace handling" {
    try testing.expect(!isNumericWithUnit(" 10px"));
    try testing.expect(!isNumericWithUnit("10 px"));
    try testing.expect(!isNumericWithUnit("10px "));
    try testing.expect(!isNumericWithUnit(" 10 px "));
}

test "Browser: CSS.StyleDeclaration: extractImportant - whitespace edge cases" {
    var result = extractImportant(" ");
    try testing.expect(!result.is_important);
    try testing.expectEqual("", result.value);

    result = extractImportant("\t\n\r !important\t\n");
    try testing.expect(result.is_important);
    try testing.expectEqual("", result.value);

    result = extractImportant("red\t!important");
    try testing.expect(result.is_important);
    try testing.expectEqual("red", result.value);
}

test "Browser: CSS.StyleDeclaration: isHexColor - mixed case handling" {
    try testing.expect(isHexColor("#AbC"));
    try testing.expect(isHexColor("#123aBc"));
    try testing.expect(isHexColor("#FFffFF"));
    try testing.expect(isHexColor("#000FFF"));
}

test "Browser: CSS.StyleDeclaration: edge case - very long inputs" {
    const long_valid = "a" ** 1000 ++ "px";
    try testing.expect(!isNumericWithUnit(long_valid)); // not numeric

    const long_property = "a-" ** 100 ++ "property";
    try testing.expect(isValidPropertyName(long_property));

    const long_hex = "#" ++ "a" ** 20;
    try testing.expect(!isHexColor(long_hex));
}

test "Browser: CSS.StyleDeclaration: boundary conditions - numeric parsing" {
    try testing.expect(isNumericWithUnit("0px"));
    try testing.expect(isNumericWithUnit("0.0px"));
    try testing.expect(isNumericWithUnit(".0px"));
    try testing.expect(isNumericWithUnit("0.px"));

    try testing.expect(isNumericWithUnit("999999999px"));
    try testing.expect(isNumericWithUnit("1.7976931348623157e+308px"));

    try testing.expect(isNumericWithUnit("0.000000001px"));
    try testing.expect(isNumericWithUnit("1e-100px"));
}

test "Browser: CSS.StyleDeclaration: extractImportant - malformed important declarations" {
    var result = extractImportant("red ! important");
    try testing.expect(!result.is_important);
    try testing.expectEqual("red ! important", result.value);

    result = extractImportant("red !Important");
    try testing.expect(!result.is_important);
    try testing.expectEqual("red !Important", result.value);

    result = extractImportant("red !IMPORTANT");
    try testing.expect(!result.is_important);
    try testing.expectEqual("red !IMPORTANT", result.value);

    result = extractImportant("!importantred");
    try testing.expect(!result.is_important);
    try testing.expectEqual("!importantred", result.value);

    result = extractImportant("red !important !important");
    try testing.expect(result.is_important);
    try testing.expectEqual("red !important", result.value);
}

test "Browser: CSS.StyleDeclaration: isMultiValueProperty - complex spacing scenarios" {
    try testing.expect(isMultiValueProperty("10px 20px"));
    try testing.expect(isMultiValueProperty("solid red"));

    try testing.expect(isMultiValueProperty(" 10px 20px "));

    try testing.expect(!isMultiValueProperty("10px\t20px"));
    try testing.expect(!isMultiValueProperty("10px\n20px"));

    try testing.expect(isMultiValueProperty("10px 20px 30px"));
}

test "Browser: CSS.StyleDeclaration: isAlreadyQuoted - edge cases with quotes" {
    try testing.expect(isAlreadyQuoted("\"'hello'\""));
    try testing.expect(isAlreadyQuoted("'\"hello\"'"));

    try testing.expect(isAlreadyQuoted("\"hello\\\"world\""));
    try testing.expect(isAlreadyQuoted("'hello\\'world'"));

    try testing.expect(!isAlreadyQuoted("\"hello"));
    try testing.expect(!isAlreadyQuoted("hello\""));
    try testing.expect(!isAlreadyQuoted("'hello"));
    try testing.expect(!isAlreadyQuoted("hello'"));

    try testing.expect(isAlreadyQuoted("\"a\""));
    try testing.expect(isAlreadyQuoted("'b'"));
}

test "Browser: CSS.StyleDeclaration: needsQuotes - function and URL edge cases" {
    try testing.expect(!needsQuotes("rgb(255, 0, 0)"));
    try testing.expect(!needsQuotes("calc(100% - 20px)"));

    try testing.expect(!needsQuotes("url(path with spaces.jpg)"));

    try testing.expect(!needsQuotes("linear-gradient(to right, red, blue)"));

    try testing.expect(needsQuotes("rgb(255, 0, 0"));
}

test "Browser: CSS.StyleDeclaration: escapeCSSValue - control characters and Unicode" {
    const allocator = testing.arena_allocator;

    var result = try escapeCSSValue(allocator, "test\ttab");
    try testing.expectEqual("\"test\\9 tab\"", result);

    result = try escapeCSSValue(allocator, "test\rreturn");
    try testing.expectEqual("\"test\\D return\"", result);

    result = try escapeCSSValue(allocator, "test\x00null");
    try testing.expectEqual("\"test\\0null\"", result);

    result = try escapeCSSValue(allocator, "test\x7Fdel");
    try testing.expectEqual("\"test\\7f del\"", result);

    result = try escapeCSSValue(allocator, "test\"quote\nline\\back");
    try testing.expectEqual("\"test\\\"quote\\A line\\\\back\"", result);
}

test "Browser: CSS.StyleDeclaration: isValidPropertyName - CSS custom properties and vendor prefixes" {
    try testing.expect(isValidPropertyName("--custom-color"));
    try testing.expect(isValidPropertyName("--my-variable"));
    try testing.expect(isValidPropertyName("--123"));

    try testing.expect(isValidPropertyName("-webkit-transform"));
    try testing.expect(isValidPropertyName("-moz-border-radius"));
    try testing.expect(isValidPropertyName("-ms-filter"));
    try testing.expect(isValidPropertyName("-o-transition"));

    try testing.expect(!isValidPropertyName("-123invalid"));
    try testing.expect(!isValidPropertyName("--"));
    try testing.expect(!isValidPropertyName("-"));
}

test "Browser: CSS.StyleDeclaration: startsWithFunction - case sensitivity and partial matches" {
    try testing.expect(CSSKeywords.startsWithFunction("RGB(255, 0, 0)"));
    try testing.expect(CSSKeywords.startsWithFunction("Rgb(255, 0, 0)"));
    try testing.expect(CSSKeywords.startsWithFunction("URL(image.png)"));

    try testing.expect(CSSKeywords.startsWithFunction("rg(something)"));
    try testing.expect(CSSKeywords.startsWithFunction("ur(something)"));

    try testing.expect(CSSKeywords.startsWithFunction("rgb(1,2,3)"));
    try testing.expect(CSSKeywords.startsWithFunction("rgba(1,2,3,4)"));

    try testing.expect(CSSKeywords.startsWithFunction("my-custom-function(args)"));
    try testing.expect(CSSKeywords.startsWithFunction("function-with-dashes(test)"));

    try testing.expect(!CSSKeywords.startsWithFunction("123function(test)"));
}

test "Browser: CSS.StyleDeclaration: isHexColor - Unicode and invalid characters" {
    try testing.expect(!isHexColor("#ghijkl"));
    try testing.expect(!isHexColor("#12345g"));
    try testing.expect(!isHexColor("#xyz"));

    try testing.expect(!isHexColor("#АВС"));

    try testing.expect(!isHexColor("#1234567g"));
    try testing.expect(!isHexColor("#g2345678"));
}

test "Browser: CSS.StyleDeclaration: complex integration scenarios" {
    const allocator = testing.arena_allocator;

    try testing.expect(isMultiValueProperty("rgb(255,0,0) url(bg.jpg)"));

    try testing.expect(!needsQuotes("calc(100% - 20px)"));

    const result = try escapeCSSValue(allocator, "fake(function with spaces");
    try testing.expectEqual("\"fake(function with spaces\"", result);

    const important_result = extractImportant("rgb(255,0,0) !important");
    try testing.expect(important_result.is_important);
    try testing.expectEqual("rgb(255,0,0)", important_result.value);
}

test "Browser: CSS.StyleDeclaration: performance edge cases - empty and minimal inputs" {
    try testing.expect(!isNumericWithUnit(""));
    try testing.expect(!isHexColor(""));
    try testing.expect(!isMultiValueProperty(""));
    try testing.expect(!isAlreadyQuoted(""));
    try testing.expect(!isValidPropertyName(""));
    try testing.expect(needsQuotes(""));
    try testing.expect(!CSSKeywords.isKnownKeyword(""));
    try testing.expect(!CSSKeywords.containsSpecialChar(""));
    try testing.expect(!CSSKeywords.isValidUnit(""));
    try testing.expect(!CSSKeywords.startsWithFunction(""));

    try testing.expect(!isNumericWithUnit("a"));
    try testing.expect(!isHexColor("a"));
    try testing.expect(!isMultiValueProperty("a"));
    try testing.expect(!isAlreadyQuoted("a"));
    try testing.expect(isValidPropertyName("a"));
    try testing.expect(!needsQuotes("a"));
}
@@ -1,97 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
const StyleSheet = @import("StyleSheet.zig");
const CSSRuleList = @import("CSSRuleList.zig");
const CSSImportRule = @import("CSSRule.zig").CSSImportRule;

const CSSStyleSheet = @This();
pub const prototype = *StyleSheet;

proto: StyleSheet,
css_rules: CSSRuleList,
owner_rule: ?*CSSImportRule,

const CSSStyleSheetOpts = struct {
    base_url: ?[]const u8 = null,
    // TODO: Support media
    disabled: bool = false,
};

pub fn constructor(_opts: ?CSSStyleSheetOpts) !CSSStyleSheet {
    const opts = _opts orelse CSSStyleSheetOpts{};
    return .{
        .proto = .{ .disabled = opts.disabled },
        .css_rules = .constructor(),
        .owner_rule = null,
    };
}

pub fn get_ownerRule(_: *CSSStyleSheet) ?*CSSImportRule {
    return null;
}

pub fn get_cssRules(self: *CSSStyleSheet) *CSSRuleList {
    return &self.css_rules;
}

pub fn _insertRule(self: *CSSStyleSheet, rule: []const u8, _index: ?usize, page: *Page) !usize {
    const index = _index orelse 0;
    if (index > self.css_rules.list.items.len) {
        return error.IndexSize;
    }

    const arena = page.arena;
    try self.css_rules.list.insert(arena, index, try arena.dupe(u8, rule));
    return index;
}

pub fn _deleteRule(self: *CSSStyleSheet, index: usize) !void {
    if (index > self.css_rules.list.items.len) {
        return error.IndexSize;
    }

    _ = self.css_rules.list.orderedRemove(index);
}

pub fn _replace(self: *CSSStyleSheet, text: []const u8, page: *Page) !Env.Promise {
    _ = self;
    _ = text;
    // TODO: clear self.css_rules
    // parse text and re-populate self.css_rules

    const resolver = page.main_context.createPromiseResolver();
    try resolver.resolve({});
    return resolver.promise();
}

pub fn _replaceSync(self: *CSSStyleSheet, text: []const u8) !void {
    _ = self;
    _ = text;
    // TODO: clear self.css_rules
    // parse text and re-populate self.css_rules
}

const testing = @import("../../testing.zig");
test "Browser: CSS.StyleSheet" {
    try testing.htmlRunner("cssom/css_stylesheet.html");
}
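Editor's note: the `_insertRule`/`_deleteRule` methods above follow a simple pattern: bounds-check the index, duplicate the rule text into the page arena, and keep rules in an unmanaged list. The snippet below is a minimal, self-contained sketch of that pattern using only the standard library; the names (`insertRule`, `rules`) are placeholders and it is not part of the repository's code.

const std = @import("std");

fn insertRule(arena: std.mem.Allocator, rules: *std.ArrayListUnmanaged([]const u8), rule: []const u8, index: usize) !usize {
    // Mirror the bounds check used above: inserting past the end is an IndexSize error.
    if (index > rules.items.len) return error.IndexSize;
    // Copy the rule text into the arena so the list owns arena-backed memory.
    try rules.insert(arena, index, try arena.dupe(u8, rule));
    return index;
}

test "insertRule sketch" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    var rules: std.ArrayListUnmanaged([]const u8) = .empty;
    _ = try insertRule(arena, &rules, "p { color: red; }", 0);
    try std.testing.expectError(error.IndexSize, insertRule(arena, &rules, "x", 2));
}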
@@ -1,55 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const parser = @import("../netsurf.zig");

// https://developer.mozilla.org/en-US/docs/Web/API/StyleSheet#specifications
const StyleSheet = @This();

disabled: bool = false,
href: []const u8 = "",
owner_node: ?*parser.Node = null,
parent_stylesheet: ?*StyleSheet = null,
title: []const u8 = "",
type: []const u8 = "text/css",

pub fn get_disabled(self: *const StyleSheet) bool {
    return self.disabled;
}

pub fn get_href(self: *const StyleSheet) []const u8 {
    return self.href;
}

// TODO: media

pub fn get_ownerNode(self: *const StyleSheet) ?*parser.Node {
    return self.owner_node;
}

pub fn get_parentStyleSheet(self: *const StyleSheet) ?*StyleSheet {
    return self.parent_stylesheet;
}

pub fn get_title(self: *const StyleSheet) []const u8 {
    return self.title;
}

pub fn get_type(self: *const StyleSheet) []const u8 {
    return self.type;
}
@@ -1,25 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

pub const Interfaces = .{
    @import("StyleSheet.zig"),
    @import("CSSStyleSheet.zig"),
    @import("CSSStyleDeclaration.zig"),
    @import("CSSRuleList.zig"),
    @import("CSSRule.zig").Interfaces,
};
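Editor's note: `Interfaces` above is a comptime tuple of types. A minimal, illustrative sketch of how such a tuple can be traversed is shown below; the `Fake*` types and `js_name` field are invented stand-ins, not part of the repository.

const std = @import("std");

const FakeStyleSheet = struct {
    pub const js_name = "StyleSheet";
};
const FakeCSSStyleSheet = struct {
    pub const js_name = "CSSStyleSheet";
};

const SketchInterfaces = .{ FakeStyleSheet, FakeCSSStyleSheet };

test "walk an interface tuple at comptime" {
    // inline for unrolls the loop over the tuple's types at compile time.
    inline for (SketchInterfaces) |T| {
        try std.testing.expect(T.js_name.len > 0);
    }
}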
@@ -1,109 +0,0 @@
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const Page = @import("../page.zig").Page;
const JsObject = @import("../env.zig").JsObject;
const Promise = @import("../env.zig").Promise;
const PromiseResolver = @import("../env.zig").PromiseResolver;

const Animation = @This();

effect: ?JsObject,
timeline: ?JsObject,
ready_resolver: ?PromiseResolver,
finished_resolver: ?PromiseResolver,

pub fn constructor(effect: ?JsObject, timeline: ?JsObject) !Animation {
    return .{
        .effect = if (effect) |eo| try eo.persist() else null,
        .timeline = if (timeline) |to| try to.persist() else null,
        .ready_resolver = null,
        .finished_resolver = null,
    };
}

pub fn get_playState(self: *const Animation) []const u8 {
    _ = self;
    return "finished";
}

pub fn get_pending(self: *const Animation) bool {
    _ = self;
    return false;
}

pub fn get_finished(self: *Animation, page: *Page) !Promise {
    if (self.finished_resolver == null) {
        const resolver = page.main_context.createPromiseResolver();
        try resolver.resolve(self);
        self.finished_resolver = resolver;
    }
    return self.finished_resolver.?.promise();
}

pub fn get_ready(self: *Animation, page: *Page) !Promise {
    // never resolved, because we're always "finished"
    if (self.ready_resolver == null) {
        const resolver = page.main_context.createPromiseResolver();
        self.ready_resolver = resolver;
    }
    return self.ready_resolver.?.promise();
}

pub fn get_effect(self: *const Animation) ?JsObject {
    return self.effect;
}

pub fn set_effect(self: *Animation, effect: JsObject) !void {
    self.effect = try effect.persist();
}

pub fn get_timeline(self: *const Animation) ?JsObject {
    return self.timeline;
}

pub fn set_timeline(self: *Animation, timeline: JsObject) !void {
    self.timeline = try timeline.persist();
}

pub fn _play(self: *const Animation) void {
    _ = self;
}

pub fn _pause(self: *const Animation) void {
    _ = self;
}

pub fn _cancel(self: *const Animation) void {
    _ = self;
}

pub fn _finish(self: *const Animation) void {
    _ = self;
}

pub fn _reverse(self: *const Animation) void {
    _ = self;
}

const testing = @import("../../testing.zig");
test "Browser: DOM.Animation" {
    try testing.htmlRunner("dom/animation.html");
}
@@ -1,290 +0,0 @@
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");

const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
const EventTarget = @import("../dom/event_target.zig").EventTarget;
const EventHandler = @import("../events/event.zig").EventHandler;

const JsObject = Env.JsObject;
const Function = Env.Function;
const Allocator = std.mem.Allocator;

const MAX_QUEUE_SIZE = 10;

pub const Interfaces = .{ MessageChannel, MessagePort };

const MessageChannel = @This();

port1: *MessagePort,
port2: *MessagePort,

pub fn constructor(page: *Page) !MessageChannel {
    // Why do we allocate this rather than storing directly in the struct?
    // https://github.com/lightpanda-io/project/discussions/165
    const port1 = try page.arena.create(MessagePort);
    const port2 = try page.arena.create(MessagePort);
    port1.* = .{
        .pair = port2,
    };
    port2.* = .{
        .pair = port1,
    };

    return .{
        .port1 = port1,
        .port2 = port2,
    };
}

pub fn get_port1(self: *const MessageChannel) *MessagePort {
    return self.port1;
}

pub fn get_port2(self: *const MessageChannel) *MessagePort {
    return self.port2;
}

pub const MessagePort = struct {
    pub const prototype = *EventTarget;

    proto: parser.EventTargetTBase = .{ .internal_target_type = .message_port },

    pair: *MessagePort,
    closed: bool = false,
    started: bool = false,
    onmessage_cbk: ?Function = null,
    onmessageerror_cbk: ?Function = null,
    // This is the queue of messages to dispatch to THIS MessagePort when the
    // MessagePort is started.
    queue: std.ArrayListUnmanaged(JsObject) = .empty,

    pub const PostMessageOption = union(enum) {
        transfer: JsObject,
        options: Opts,

        pub const Opts = struct {
            transfer: JsObject,
        };
    };

    pub fn _postMessage(self: *MessagePort, obj: JsObject, opts_: ?PostMessageOption, page: *Page) !void {
        if (self.closed) {
            return;
        }

        if (opts_ != null) {
            log.warn(.web_api, "not implemented", .{ .feature = "MessagePort postMessage options" });
        }

        try self.pair.dispatchOrQueue(obj, page.arena);
    }

    // Start impacts the ability to receive a message.
    // Given pair1 (started) and pair2 (not started), then:
    // pair2.postMessage('x'); // will be dispatched to pair1.onmessage
    // pair1.postMessage('x'); // will be queued until pair2 is started
    pub fn _start(self: *MessagePort) !void {
        if (self.started) {
            return;
        }
        self.started = true;
        for (self.queue.items) |data| {
            try self.dispatch(data);
        }
        // we'll never use this queue again, but it's allocated with an arena
        // we don't even need to clear it, but it seems a bit safer to do at
        // least that
        self.queue.clearRetainingCapacity();
    }

    // Closing seems to stop both the publishing and receiving of messages,
    // effectively rendering the channel useless. It cannot be reversed.
    pub fn _close(self: *MessagePort) void {
        self.closed = true;
        self.pair.closed = true;
    }

    pub fn get_onmessage(self: *MessagePort) ?Function {
        return self.onmessage_cbk;
    }
    pub fn get_onmessageerror(self: *MessagePort) ?Function {
        return self.onmessageerror_cbk;
    }

    pub fn set_onmessage(self: *MessagePort, listener: EventHandler.Listener, page: *Page) !void {
        if (self.onmessage_cbk) |cbk| {
            try self.unregister("message", cbk.id);
        }
        self.onmessage_cbk = try self.register(page.arena, "message", listener);

        // When onmessage is set directly, then it's like start() was called.
        // If addEventListener('message') is used, the app has to call start()
        // explicitly.
        try self._start();
    }

    pub fn set_onmessageerror(self: *MessagePort, listener: EventHandler.Listener, page: *Page) !void {
        if (self.onmessageerror_cbk) |cbk| {
            try self.unregister("messageerror", cbk.id);
        }
        self.onmessageerror_cbk = try self.register(page.arena, "messageerror", listener);
    }

    // called from our pair. If port1.postMessage("x") is called, then this
    // will be called on port2.
    fn dispatchOrQueue(self: *MessagePort, obj: JsObject, arena: Allocator) !void {
        // our pair should have checked this already
        std.debug.assert(self.closed == false);

        if (self.started) {
            return self.dispatch(try obj.persist());
        }

        if (self.queue.items.len > MAX_QUEUE_SIZE) {
            // This isn't part of the spec, but not putting a limit is reckless
            return error.MessageQueueLimit;
        }
        return self.queue.append(arena, try obj.persist());
    }

    fn dispatch(self: *MessagePort, obj: JsObject) !void {
        // obj is already persisted, don't use `MessageEvent.constructor`, but
        // go directly to `init`, which assumes persisted objects.
        var evt = try MessageEvent.init(.{ .data = obj });
        _ = try parser.eventTargetDispatchEvent(
            parser.toEventTarget(MessagePort, self),
            @as(*parser.Event, @ptrCast(&evt)),
        );
    }

    fn register(
        self: *MessagePort,
        alloc: Allocator,
        typ: []const u8,
        listener: EventHandler.Listener,
    ) !?Function {
        const target = @as(*parser.EventTarget, @ptrCast(self));
        const eh = (try EventHandler.register(alloc, target, typ, listener, null)) orelse unreachable;
        return eh.callback;
    }

    fn unregister(self: *MessagePort, typ: []const u8, cbk_id: usize) !void {
        const et = @as(*parser.EventTarget, @ptrCast(self));
        const lst = try parser.eventTargetHasListener(et, typ, false, cbk_id);
        if (lst == null) {
            return;
        }
        try parser.eventTargetRemoveEventListener(et, typ, lst.?, false);
    }
};

pub const MessageEvent = struct {
    const Event = @import("../events/event.zig").Event;
    const DOMException = @import("exceptions.zig").DOMException;

    pub const prototype = *Event;
    pub const Exception = DOMException;
    pub const union_make_copy = true;

    proto: parser.Event,
    data: ?JsObject,

    // You would think if port1 sends to port2, the source would be port2
    // (which is how I read the documentation), but it appears to always be
    // null. It can always be set explicitly via the constructor.
    source: ?JsObject,

    origin: []const u8,

    // This is used for Server-Sent events. Appears to always be an empty
    // string for MessagePort messages.
    last_event_id: []const u8,

    // This might be related to the "transfer" option of postMessage which
    // we don't yet support. For "normal" message, it's always an empty array.
    // Though it could be set explicitly via the constructor
    ports: []*MessagePort,

    const Options = struct {
        data: ?JsObject = null,
        source: ?JsObject = null,
        origin: []const u8 = "",
        lastEventId: []const u8 = "",
        ports: []*MessagePort = &.{},
    };

    pub fn constructor(opts: Options) !MessageEvent {
        return init(.{
            .data = if (opts.data) |obj| try obj.persist() else null,
            .source = if (opts.source) |obj| try obj.persist() else null,
            .ports = opts.ports,
            .origin = opts.origin,
            .lastEventId = opts.lastEventId,
        });
    }

    // This is like "constructor", but it assumes JsObjects have already been
    // persisted. Necessary because this `new MessageEvent()` can be called
    // directly from JS OR from a port.postMessage. In the latter case, data
    // may have already been persisted (as it might need to be queued).
    fn init(opts: Options) !MessageEvent {
        const event = try parser.eventCreate();
        defer parser.eventDestroy(event);
        try parser.eventInit(event, "message", .{});
        parser.eventSetInternalType(event, .message_event);

        return .{
            .proto = event.*,
            .data = opts.data,
            .source = opts.source,
            .ports = opts.ports,
            .origin = opts.origin,
            .last_event_id = opts.lastEventId,
        };
    }

    pub fn get_data(self: *const MessageEvent) !?JsObject {
        return self.data;
    }

    pub fn get_origin(self: *const MessageEvent) []const u8 {
        return self.origin;
    }

    pub fn get_source(self: *const MessageEvent) ?JsObject {
        return self.source;
    }

    pub fn get_ports(self: *const MessageEvent) []*MessagePort {
        return self.ports;
    }

    pub fn get_lastEventId(self: *const MessageEvent) []const u8 {
        return self.last_event_id;
    }
};

const testing = @import("../../testing.zig");
test "Browser: DOM.MessageChannel" {
    try testing.htmlRunner("dom/message_channel.html");
}
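Editor's note: the comments above describe the MessagePort behaviour of queueing messages until start() is called, with a hard queue limit. Below is a self-contained, std-only sketch of that pattern; `SketchPort`, `post` and `delivered` are invented names and this is not part of the diff.

const std = @import("std");

const MAX_QUEUE_SIZE = 10;

const SketchPort = struct {
    started: bool = false,
    queue: std.ArrayListUnmanaged([]const u8) = .empty,
    delivered: std.ArrayListUnmanaged([]const u8) = .empty,

    fn post(self: *SketchPort, arena: std.mem.Allocator, msg: []const u8) !void {
        // A started port delivers immediately; otherwise buffer, up to a limit.
        if (self.started) return self.delivered.append(arena, msg);
        if (self.queue.items.len > MAX_QUEUE_SIZE) return error.MessageQueueLimit;
        try self.queue.append(arena, msg);
    }

    fn start(self: *SketchPort, arena: std.mem.Allocator) !void {
        if (self.started) return;
        self.started = true;
        // Flush everything that was queued before start().
        for (self.queue.items) |msg| try self.delivered.append(arena, msg);
        self.queue.clearRetainingCapacity();
    }
};

test "messages are queued until the port is started" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    var port = SketchPort{};
    try port.post(arena, "hello");
    try std.testing.expectEqual(@as(usize, 0), port.delivered.items.len);
    try port.start(arena);
    try std.testing.expectEqual(@as(usize, 1), port.delivered.items.len);
}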
@@ -1,75 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const parser = @import("../netsurf.zig");

const Node = @import("node.zig").Node;

// WEB IDL https://dom.spec.whatwg.org/#attr
pub const Attr = struct {
    pub const Self = parser.Attribute;
    pub const prototype = *Node;
    pub const subtype = .node;

    pub fn get_namespaceURI(self: *parser.Attribute) ?[]const u8 {
        return parser.nodeGetNamespace(parser.attributeToNode(self));
    }

    pub fn get_prefix(self: *parser.Attribute) ?[]const u8 {
        return parser.nodeGetPrefix(parser.attributeToNode(self));
    }

    pub fn get_localName(self: *parser.Attribute) ![]const u8 {
        return parser.nodeLocalName(parser.attributeToNode(self));
    }

    pub fn get_name(self: *parser.Attribute) ![]const u8 {
        return parser.attributeGetName(self);
    }

    pub fn get_value(self: *parser.Attribute) !?[]const u8 {
        return parser.attributeGetValue(self);
    }

    pub fn set_value(self: *parser.Attribute, v: []const u8) !?[]const u8 {
        if (try parser.attributeGetOwnerElement(self)) |el| {
            // if possible, go through the element, as that triggers a
            // DOMAttrModified event (which MutationObserver cares about)
            const name = try parser.attributeGetName(self);
            try parser.elementSetAttribute(el, name, v);
        } else {
            try parser.attributeSetValue(self, v);
        }
        return v;
    }

    pub fn get_ownerElement(self: *parser.Attribute) !?*parser.Element {
        return try parser.attributeGetOwnerElement(self);
    }

    pub fn get_specified(_: *parser.Attribute) bool {
        return true;
    }
};

// Tests
// -----

const testing = @import("../../testing.zig");
test "Browser: DOM.Attribute" {
    try testing.htmlRunner("dom/attribute.html");
}
@@ -1,134 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const parser = @import("../netsurf.zig");

const Node = @import("node.zig").Node;
const Comment = @import("comment.zig").Comment;
const Text = @import("text.zig");
const ProcessingInstruction = @import("processing_instruction.zig").ProcessingInstruction;
const Element = @import("element.zig").Element;
const ElementUnion = @import("element.zig").Union;

// CharacterData interfaces
pub const Interfaces = .{
    Comment,
    Text.Text,
    Text.Interfaces,
    ProcessingInstruction,
};

// CharacterData implementation
pub const CharacterData = struct {
    pub const Self = parser.CharacterData;
    pub const prototype = *Node;
    pub const subtype = .node;

    // JS funcs
    // --------

    // Read attributes

    pub fn get_length(self: *parser.CharacterData) !u32 {
        return try parser.characterDataLength(self);
    }

    pub fn get_nextElementSibling(self: *parser.CharacterData) !?ElementUnion {
        const res = parser.nodeNextElementSibling(parser.characterDataToNode(self));
        if (res == null) {
            return null;
        }
        return try Element.toInterface(res.?);
    }

    pub fn get_previousElementSibling(self: *parser.CharacterData) !?ElementUnion {
        const res = parser.nodePreviousElementSibling(parser.characterDataToNode(self));
        if (res == null) {
            return null;
        }
        return try Element.toInterface(res.?);
    }

    // Read/Write attributes

    pub fn get_data(self: *parser.CharacterData) []const u8 {
        return parser.characterDataData(self);
    }

    pub fn set_data(self: *parser.CharacterData, data: []const u8) !void {
        return try parser.characterDataSetData(self, data);
    }

    // JS methods
    // ----------

    pub fn _appendData(self: *parser.CharacterData, data: []const u8) !void {
        return try parser.characterDataAppendData(self, data);
    }

    pub fn _deleteData(self: *parser.CharacterData, offset: u32, count: u32) !void {
        return try parser.characterDataDeleteData(self, offset, count);
    }

    pub fn _insertData(self: *parser.CharacterData, offset: u32, data: []const u8) !void {
        return try parser.characterDataInsertData(self, offset, data);
    }

    pub fn _replaceData(self: *parser.CharacterData, offset: u32, count: u32, data: []const u8) !void {
        return try parser.characterDataReplaceData(self, offset, count, data);
    }

    pub fn _substringData(self: *parser.CharacterData, offset: u32, count: u32) ![]const u8 {
        return parser.characterDataSubstringData(self, offset, count);
    }

    // netsurf's CharacterData (text, comment) doesn't implement the
    // dom_node_get_attributes and thus will crash if we try to call nodeIsEqualNode.
    pub fn _isEqualNode(self: *parser.CharacterData, other_node: *parser.Node) bool {
        if (parser.nodeType(@ptrCast(@alignCast(self))) != parser.nodeType(other_node)) {
            return false;
        }

        const other: *parser.CharacterData = @ptrCast(other_node);
        if (std.mem.eql(u8, get_data(self), get_data(other)) == false) {
            return false;
        }

        return true;
    }

    pub fn _before(self: *parser.CharacterData, nodes: []const Node.NodeOrText) !void {
        const ref_node = parser.characterDataToNode(self);
        return Node.before(ref_node, nodes);
    }

    pub fn _after(self: *parser.CharacterData, nodes: []const Node.NodeOrText) !void {
        const ref_node = parser.characterDataToNode(self);
        return Node.after(ref_node, nodes);
    }
};

// Tests
// -----

const testing = @import("../../testing.zig");
test "Browser: DOM.CharacterData" {
    try testing.htmlRunner("dom/character_data.html");
}
@@ -1,322 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;

const Node = @import("node.zig").Node;
const NodeList = @import("nodelist.zig").NodeList;
const NodeUnion = @import("node.zig").Union;

const collection = @import("html_collection.zig");
const css = @import("css.zig");

const Element = @import("element.zig").Element;
const ElementUnion = @import("element.zig").Union;
const TreeWalker = @import("tree_walker.zig").TreeWalker;
const CSSStyleSheet = @import("../cssom/CSSStyleSheet.zig");
const NodeIterator = @import("node_iterator.zig").NodeIterator;
const Range = @import("range.zig").Range;

const CustomEvent = @import("../events/custom_event.zig").CustomEvent;

const Env = @import("../env.zig").Env;

const DOMImplementation = @import("implementation.zig").DOMImplementation;

// WEB IDL https://dom.spec.whatwg.org/#document
pub const Document = struct {
    pub const Self = parser.Document;
    pub const prototype = *Node;
    pub const subtype = .node;

    pub fn constructor(page: *const Page) !*parser.DocumentHTML {
        const doc = try parser.documentCreateDocument(
            try parser.documentHTMLGetTitle(page.window.document),
        );

        // we have to work w/ document instead of html document.
        const ddoc = parser.documentHTMLToDocument(doc);
        const ccur = parser.documentHTMLToDocument(page.window.document);
        try parser.documentSetDocumentURI(ddoc, try parser.documentGetDocumentURI(ccur));
        try parser.documentSetInputEncoding(ddoc, try parser.documentGetInputEncoding(ccur));

        return doc;
    }

    // JS funcs
    // --------
    pub fn get_implementation(_: *parser.Document) DOMImplementation {
        return DOMImplementation{};
    }

    pub fn get_documentElement(self: *parser.Document) !?ElementUnion {
        const e = try parser.documentGetDocumentElement(self);
        if (e == null) return null;
        return try Element.toInterface(e.?);
    }

    pub fn get_documentURI(self: *parser.Document) ![]const u8 {
        return try parser.documentGetDocumentURI(self);
    }

    pub fn get_URL(self: *parser.Document) ![]const u8 {
        return try get_documentURI(self);
    }

    // TODO implement contentType
    pub fn get_contentType(self: *parser.Document) []const u8 {
        _ = self;
        return "text/html";
    }

    // TODO implement compatMode
    pub fn get_compatMode(self: *parser.Document) []const u8 {
        _ = self;
        return "CSS1Compat";
    }

    pub fn get_characterSet(self: *parser.Document) ![]const u8 {
        return try parser.documentGetInputEncoding(self);
    }

    // alias of get_characterSet
    pub fn get_charset(self: *parser.Document) ![]const u8 {
        return try get_characterSet(self);
    }

    // alias of get_characterSet
    pub fn get_inputEncoding(self: *parser.Document) ![]const u8 {
        return try get_characterSet(self);
    }

    pub fn get_doctype(self: *parser.Document) !?*parser.DocumentType {
        return try parser.documentGetDoctype(self);
    }

    pub fn _createEvent(_: *parser.Document, eventCstr: []const u8) !union(enum) {
        base: *parser.Event,
        custom: CustomEvent,
    } {
        const eqlIgnoreCase = std.ascii.eqlIgnoreCase;

        if (eqlIgnoreCase(eventCstr, "Event") or eqlIgnoreCase(eventCstr, "Events") or eqlIgnoreCase(eventCstr, "HTMLEvents")) {
            return .{ .base = try parser.eventCreate() };
        }

        // Not documented in MDN but supported in Chrome.
        // This is actually both instance of `Event` and `CustomEvent`.
        if (std.ascii.eqlIgnoreCase(eventCstr, "CustomEvent")) {
            return .{ .custom = try CustomEvent.constructor(eventCstr, null) };
        }

        return error.NotSupported;
    }

    pub fn _getElementById(self: *parser.Document, id: []const u8) !?ElementUnion {
        const e = try parser.documentGetElementById(self, id) orelse return null;
        return try Element.toInterface(e);
    }

    pub fn _createElement(self: *parser.Document, tag_name: []const u8) !ElementUnion {
        // The element’s namespace is the HTML namespace when document is an HTML document
        // https://dom.spec.whatwg.org/#ref-for-dom-document-createelement%E2%91%A0
        const e = try parser.documentCreateElementNS(self, "http://www.w3.org/1999/xhtml", tag_name);
        return Element.toInterface(e);
    }

    pub fn _createElementNS(self: *parser.Document, ns: []const u8, tag_name: []const u8) !ElementUnion {
        const e = try parser.documentCreateElementNS(self, ns, tag_name);
        return try Element.toInterface(e);
    }

    // We can't simply use libdom dom_document_get_elements_by_tag_name here.
    // Indeed, netsurf implemented a previous dom spec when
    // getElementsByTagName returned a NodeList.
    // But since
    // https://github.com/whatwg/dom/commit/190700b7c12ecfd3b5ebdb359ab1d6ea9cbf7749
    // the spec changed to return an HTMLCollection instead.
    // That's why we reimplemented getElementsByTagName by using an
    // HTMLCollection in zig here.
    pub fn _getElementsByTagName(self: *parser.Document, tag_name: Env.String) !collection.HTMLCollection {
        return collection.HTMLCollectionByTagName(parser.documentToNode(self), tag_name.string, .{
            .include_root = true,
        });
    }

    pub fn _getElementsByClassName(self: *parser.Document, class_names: Env.String) !collection.HTMLCollection {
        return collection.HTMLCollectionByClassName(parser.documentToNode(self), class_names.string, .{
            .include_root = true,
        });
    }

    pub fn _createDocumentFragment(self: *parser.Document) !*parser.DocumentFragment {
        return try parser.documentCreateDocumentFragment(self);
    }

    pub fn _createTextNode(self: *parser.Document, data: []const u8) !*parser.Text {
        return try parser.documentCreateTextNode(self, data);
    }

    pub fn _createCDATASection(self: *parser.Document, data: []const u8) !*parser.CDATASection {
        return try parser.documentCreateCDATASection(self, data);
    }

    pub fn _createComment(self: *parser.Document, data: []const u8) !*parser.Comment {
        return try parser.documentCreateComment(self, data);
    }

    pub fn _createProcessingInstruction(self: *parser.Document, target: []const u8, data: []const u8) !*parser.ProcessingInstruction {
        return try parser.documentCreateProcessingInstruction(self, target, data);
    }

    pub fn _importNode(self: *parser.Document, node: *parser.Node, deep: ?bool) !NodeUnion {
        const n = try parser.documentImportNode(self, node, deep orelse false);
        return try Node.toInterface(n);
    }

    pub fn _adoptNode(self: *parser.Document, node: *parser.Node) !NodeUnion {
        const n = try parser.documentAdoptNode(self, node);
        return try Node.toInterface(n);
    }

    pub fn _createAttribute(self: *parser.Document, name: []const u8) !*parser.Attribute {
        return try parser.documentCreateAttribute(self, name);
    }

    pub fn _createAttributeNS(self: *parser.Document, ns: []const u8, qname: []const u8) !*parser.Attribute {
        return try parser.documentCreateAttributeNS(self, ns, qname);
    }

    // ParentNode
    // https://dom.spec.whatwg.org/#parentnode
    pub fn get_children(self: *parser.Document) !collection.HTMLCollection {
        return collection.HTMLCollectionChildren(parser.documentToNode(self), .{
            .include_root = false,
        });
    }

    pub fn get_firstElementChild(self: *parser.Document) !?ElementUnion {
        const elt = try parser.documentGetDocumentElement(self) orelse return null;
        return try Element.toInterface(elt);
    }

    pub fn get_lastElementChild(self: *parser.Document) !?ElementUnion {
        const elt = try parser.documentGetDocumentElement(self) orelse return null;
        return try Element.toInterface(elt);
    }

    pub fn get_childElementCount(self: *parser.Document) !u32 {
        _ = try parser.documentGetDocumentElement(self) orelse return 0;
        return 1;
    }

    pub fn _querySelector(self: *parser.Document, selector: []const u8, page: *Page) !?ElementUnion {
        if (selector.len == 0) return null;

        const n = try css.querySelector(page.call_arena, parser.documentToNode(self), selector);

        if (n == null) return null;

        return try Element.toInterface(parser.nodeToElement(n.?));
    }

    pub fn _querySelectorAll(self: *parser.Document, selector: []const u8, page: *Page) !NodeList {
        return css.querySelectorAll(page.arena, parser.documentToNode(self), selector);
    }

    pub fn _prepend(self: *parser.Document, nodes: []const Node.NodeOrText) !void {
        return Node.prepend(parser.documentToNode(self), nodes);
    }

    pub fn _append(self: *parser.Document, nodes: []const Node.NodeOrText) !void {
        return Node.append(parser.documentToNode(self), nodes);
    }

    pub fn _replaceChildren(self: *parser.Document, nodes: []const Node.NodeOrText) !void {
        return Node.replaceChildren(parser.documentToNode(self), nodes);
    }

    pub fn _createTreeWalker(_: *parser.Document, root: *parser.Node, what_to_show: ?TreeWalker.WhatToShow, filter: ?TreeWalker.TreeWalkerOpts) !TreeWalker {
        return TreeWalker.init(root, what_to_show, filter);
    }

    pub fn _createNodeIterator(_: *parser.Document, root: *parser.Node, what_to_show: ?NodeIterator.WhatToShow, filter: ?NodeIterator.NodeIteratorOpts) !NodeIterator {
        return NodeIterator.init(root, what_to_show, filter);
    }

    pub fn getActiveElement(self: *parser.Document, page: *Page) !?*parser.Element {
        if (page.getNodeState(@ptrCast(@alignCast(self)))) |state| {
            if (state.active_element) |ae| {
                return ae;
            }
        }

        if (try parser.documentHTMLBody(page.window.document)) |body| {
            return @ptrCast(@alignCast(body));
        }

        return try parser.documentGetDocumentElement(self);
    }

    pub fn get_activeElement(self: *parser.Document, page: *Page) !?ElementUnion {
        const ae = (try getActiveElement(self, page)) orelse return null;
        return try Element.toInterface(ae);
    }

    // TODO: some elements can't be focused, like if they're disabled
    // but there doesn't seem to be a generic way to check this. For example
    // we could look for the "disabled" attribute, but that's only meaningful
    // on certain types, and libdom's vtable doesn't seem to expose this.
    pub fn setFocus(self: *parser.Document, e: *parser.ElementHTML, page: *Page) !void {
        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
        state.active_element = @ptrCast(e);
    }

    pub fn _createRange(_: *parser.Document, page: *Page) Range {
        return Range.constructor(page);
    }

    // TODO: dummy implementation
    pub fn get_styleSheets(_: *parser.Document) []CSSStyleSheet {
        return &.{};
    }

    pub fn get_adoptedStyleSheets(self: *parser.Document, page: *Page) !Env.JsObject {
        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
        if (state.adopted_style_sheets) |obj| {
            return obj;
        }

        const obj = try page.main_context.newArray(0).persist();
        state.adopted_style_sheets = obj;
        return obj;
    }

    pub fn set_adoptedStyleSheets(self: *parser.Document, sheets: Env.JsObject, page: *Page) !void {
        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
        state.adopted_style_sheets = try sheets.persist();
    }
};

const testing = @import("../../testing.zig");
test "Browser: DOM.Document" {
    try testing.htmlRunner("dom/document.html");
}
@@ -1,96 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const css = @import("css.zig");
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
const NodeList = @import("nodelist.zig").NodeList;
const Element = @import("element.zig").Element;
const ElementUnion = @import("element.zig").Union;
const collection = @import("html_collection.zig");

const Node = @import("node.zig").Node;

// WEB IDL https://dom.spec.whatwg.org/#documentfragment
pub const DocumentFragment = struct {
    pub const Self = parser.DocumentFragment;
    pub const prototype = *Node;
    pub const subtype = .node;

    pub fn constructor(page: *const Page) !*parser.DocumentFragment {
        return parser.documentCreateDocumentFragment(
            parser.documentHTMLToDocument(page.window.document),
        );
    }

    pub fn _isEqualNode(self: *parser.DocumentFragment, other_node: *parser.Node) bool {
        const other_type = parser.nodeType(other_node);
        if (other_type != .document_fragment) {
            return false;
        }
        _ = self;
        return true;
    }

    pub fn _prepend(self: *parser.DocumentFragment, nodes: []const Node.NodeOrText) !void {
        return Node.prepend(parser.documentFragmentToNode(self), nodes);
    }

    pub fn _append(self: *parser.DocumentFragment, nodes: []const Node.NodeOrText) !void {
        return Node.append(parser.documentFragmentToNode(self), nodes);
    }

    pub fn _replaceChildren(self: *parser.DocumentFragment, nodes: []const Node.NodeOrText) !void {
        return Node.replaceChildren(parser.documentFragmentToNode(self), nodes);
    }

    pub fn _querySelector(self: *parser.DocumentFragment, selector: []const u8, page: *Page) !?ElementUnion {
        if (selector.len == 0) return null;

        const n = try css.querySelector(page.call_arena, parser.documentFragmentToNode(self), selector);

        if (n == null) return null;

        return try Element.toInterface(parser.nodeToElement(n.?));
    }

    pub fn _querySelectorAll(self: *parser.DocumentFragment, selector: []const u8, page: *Page) !NodeList {
        return css.querySelectorAll(page.arena, parser.documentFragmentToNode(self), selector);
    }

    pub fn get_childElementCount(self: *parser.DocumentFragment) !u32 {
        var children = try get_children(self);
        return children.get_length();
    }

    pub fn get_children(self: *parser.DocumentFragment) !collection.HTMLCollection {
        return collection.HTMLCollectionChildren(parser.documentFragmentToNode(self), .{
            .include_root = false,
        });
    }

    pub fn _getElementById(self: *parser.DocumentFragment, id: []const u8) !?ElementUnion {
        const e = try parser.nodeGetElementById(@ptrCast(@alignCast(self)), id) orelse return null;
        return try Element.toInterface(e);
    }
};

const testing = @import("../../testing.zig");
test "Browser: DOM.DocumentFragment" {
    try testing.htmlRunner("dom/document_fragment.html");
}
@@ -1,67 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const parser = @import("../netsurf.zig");

const Node = @import("node.zig").Node;

// WEB IDL https://dom.spec.whatwg.org/#documenttype
pub const DocumentType = struct {
    pub const Self = parser.DocumentType;
    pub const prototype = *Node;
    pub const subtype = .node;

    pub fn get_name(self: *parser.DocumentType) ![]const u8 {
        return parser.documentTypeGetName(self);
    }

    pub fn get_publicId(self: *parser.DocumentType) []const u8 {
        return parser.documentTypeGetPublicId(self);
    }

    pub fn get_systemId(self: *parser.DocumentType) []const u8 {
        return parser.documentTypeGetSystemId(self);
    }

    // netsurf's DocumentType doesn't implement the dom_node_get_attributes
    // and thus will crash if we try to call nodeIsEqualNode.
    pub fn _isEqualNode(self: *parser.DocumentType, other_node: *parser.Node) !bool {
        if (parser.nodeType(other_node) != .document_type) {
            return false;
        }

        const other: *parser.DocumentType = @ptrCast(other_node);
        if (std.mem.eql(u8, try get_name(self), try get_name(other)) == false) {
            return false;
        }
        if (std.mem.eql(u8, get_publicId(self), get_publicId(other)) == false) {
            return false;
        }
        if (std.mem.eql(u8, get_systemId(self), get_systemId(other)) == false) {
            return false;
        }
        return true;
    }
};

const testing = @import("../../testing.zig");
test "Browser: DOM.DocumentType" {
    try testing.htmlRunner("dom/document_type.html");
}
@@ -1,608 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Page = @import("../page.zig").Page;
|
||||
|
||||
const css = @import("css.zig");
|
||||
const log = @import("../../log.zig");
|
||||
const dump = @import("../dump.zig");
|
||||
const collection = @import("html_collection.zig");
|
||||
|
||||
const Node = @import("node.zig").Node;
|
||||
const Walker = @import("walker.zig").WalkerDepthFirst;
|
||||
const NodeList = @import("nodelist.zig").NodeList;
|
||||
const HTMLElem = @import("../html/elements.zig");
|
||||
const ShadowRoot = @import("../dom/shadow_root.zig").ShadowRoot;
|
||||
|
||||
const Animation = @import("Animation.zig");
|
||||
const JsObject = @import("../env.zig").JsObject;
|
||||
|
||||
pub const Union = @import("../html/elements.zig").Union;
|
||||
|
||||
// WEB IDL https://dom.spec.whatwg.org/#element
|
||||
pub const Element = struct {
|
||||
pub const Self = parser.Element;
|
||||
pub const prototype = *Node;
|
||||
pub const subtype = .node;
|
||||
|
||||
pub const DOMRect = struct {
|
||||
x: f64,
|
||||
y: f64,
|
||||
width: f64,
|
||||
height: f64,
|
||||
bottom: f64,
|
||||
right: f64,
|
||||
top: f64,
|
||||
left: f64,
|
||||
};
|
||||
|
||||
pub fn toInterface(e: *parser.Element) !Union {
|
||||
return toInterfaceT(Union, e);
|
||||
}
|
||||
|
||||
pub fn toInterfaceT(comptime T: type, e: *parser.Element) !T {
|
||||
const tagname = try parser.elementGetTagName(e) orelse {
|
||||
// If the owner's document is HTML, assume we have an HTMLElement.
|
||||
const doc = parser.nodeOwnerDocument(parser.elementToNode(e));
|
||||
if (doc != null and !doc.?.is_html) {
|
||||
return .{ .HTMLElement = @as(*parser.ElementHTML, @ptrCast(e)) };
|
||||
}
|
||||
|
||||
return .{ .Element = e };
|
||||
};
|
||||
|
||||
// TODO SVGElement and MathML are not supported yet.
|
||||
|
||||
const tag = parser.Tag.fromString(tagname) catch {
|
||||
// If the owner's document is HTML, assume we have an HTMLElement.
|
||||
const doc = parser.nodeOwnerDocument(parser.elementToNode(e));
|
||||
if (doc != null and doc.?.is_html) {
|
||||
return .{ .HTMLElement = @as(*parser.ElementHTML, @ptrCast(e)) };
|
||||
}
|
||||
|
||||
return .{ .Element = e };
|
||||
};
|
||||
|
||||
return HTMLElem.toInterfaceFromTag(T, e, tag);
|
||||
}
|
||||
|
||||
// JS funcs
|
||||
// --------
|
||||
|
||||
pub fn get_namespaceURI(self: *parser.Element) ?[]const u8 {
|
||||
return parser.nodeGetNamespace(parser.elementToNode(self));
|
||||
}
|
||||
|
||||
pub fn get_prefix(self: *parser.Element) ?[]const u8 {
|
||||
return parser.nodeGetPrefix(parser.elementToNode(self));
|
||||
}
|
||||
|
||||
pub fn get_localName(self: *parser.Element) ![]const u8 {
|
||||
return try parser.nodeLocalName(parser.elementToNode(self));
|
||||
}
|
||||
|
||||
pub fn get_tagName(self: *parser.Element) ![]const u8 {
|
||||
return try parser.nodeName(parser.elementToNode(self));
|
||||
}
|
||||
|
||||
pub fn get_dir(self: *parser.Element) ![]const u8 {
|
||||
return try parser.elementGetAttribute(self, "dir") orelse "";
|
||||
}
|
||||
|
||||
pub fn set_dir(self: *parser.Element, dir: []const u8) !void {
|
||||
return parser.elementSetAttribute(self, "dir", dir);
|
||||
}
|
||||
|
||||
pub fn get_id(self: *parser.Element) ![]const u8 {
|
||||
return try parser.elementGetAttribute(self, "id") orelse "";
|
||||
}
|
||||
|
||||
pub fn set_id(self: *parser.Element, id: []const u8) !void {
|
||||
return try parser.elementSetAttribute(self, "id", id);
|
||||
}
|
||||
|
||||
pub fn get_className(self: *parser.Element) ![]const u8 {
|
||||
return try parser.elementGetAttribute(self, "class") orelse "";
|
||||
}
|
||||
|
||||
pub fn set_className(self: *parser.Element, class: []const u8) !void {
|
||||
return try parser.elementSetAttribute(self, "class", class);
|
||||
}
|
||||
|
||||
pub fn get_slot(self: *parser.Element) ![]const u8 {
|
||||
return try parser.elementGetAttribute(self, "slot") orelse "";
|
||||
}
|
||||
|
||||
pub fn set_slot(self: *parser.Element, slot: []const u8) !void {
|
||||
return try parser.elementSetAttribute(self, "slot", slot);
|
||||
}
|
||||
|
||||
pub fn get_classList(self: *parser.Element) !*parser.TokenList {
|
||||
return try parser.tokenListCreate(self, "class");
|
||||
}
|
||||
|
||||
pub fn get_attributes(self: *parser.Element) !*parser.NamedNodeMap {
|
||||
// An element must have non-nil attributes.
|
||||
return try parser.nodeGetAttributes(parser.elementToNode(self)) orelse unreachable;
|
||||
}
|
||||
|
||||
pub fn get_innerHTML(self: *parser.Element, page: *Page) ![]const u8 {
|
||||
var aw = std.Io.Writer.Allocating.init(page.call_arena);
|
||||
try dump.writeChildren(parser.elementToNode(self), .{}, &aw.writer);
|
||||
return aw.written();
|
||||
}
|
||||
|
||||
pub fn get_outerHTML(self: *parser.Element, page: *Page) ![]const u8 {
|
||||
var aw = std.Io.Writer.Allocating.init(page.call_arena);
|
||||
try dump.writeNode(parser.elementToNode(self), .{}, &aw.writer);
|
||||
return aw.written();
|
||||
}
|
||||
|
||||
pub fn set_innerHTML(self: *parser.Element, str: []const u8, page: *Page) !void {
|
||||
const node = parser.elementToNode(self);
|
||||
const doc = parser.nodeOwnerDocument(node) orelse return parser.DOMError.WrongDocument;
|
||||
// parse the fragment
|
||||
const fragment = try parser.documentParseFragmentFromStr(doc, str);
|
||||
|
||||
// remove existing children
|
||||
try Node.removeChildren(node);
|
||||
|
||||
const fragment_node = parser.documentFragmentToNode(fragment);
|
||||
|
||||
// I'm not sure what the exact behavior is supposed to be. Initially,
|
||||
// we were only copying the body of the document fragment. But it seems
|
||||
// like head elements should be copied too. Specifically, some sites
|
||||
// create script tags via innerHTML, which we need to capture.
|
||||
// If you play with this in a browser, you should notice that the
|
||||
// behavior is different depending on whether you're in a blank page
|
||||
// or an actual document. In a blank page, something like:
|
||||
// x.innerHTML = '<script></script>';
|
||||
// does _not_ create an empty script, but in a real page, it does. Weird.
|
||||
const html = parser.nodeFirstChild(fragment_node) orelse return;
|
||||
const head = parser.nodeFirstChild(html) orelse return;
|
||||
const body = parser.nodeNextSibling(head) orelse return;
|
||||
|
||||
if (try parser.elementTag(self) == .template) {
|
||||
// HTMLElementTemplate is special. We don't append these as children
|
||||
// of the template, but instead set its content as the body of the
|
||||
// fragment. Simpler to do this by copying the body children into
|
||||
// a new fragment
|
||||
const clean = try parser.documentCreateDocumentFragment(doc);
|
||||
const children = try parser.nodeGetChildNodes(body);
|
||||
const ln = parser.nodeListLength(children);
|
||||
for (0..ln) |_| {
|
||||
// always index 0, because nodeAppendChild moves the node out of
|
||||
// the nodeList and into the new tree
|
||||
const child = parser.nodeListItem(children, 0) orelse continue;
|
||||
_ = try parser.nodeAppendChild(@ptrCast(@alignCast(clean)), child);
|
||||
}
|
||||
|
||||
const state = try page.getOrCreateNodeState(node);
|
||||
state.template_content = clean;
|
||||
return;
|
||||
}
|
||||
|
||||
// For any node other than a template, we copy the head and body elements
|
||||
// as child nodes of the element
|
||||
{
|
||||
// First, copy some of the head element
|
||||
const children = try parser.nodeGetChildNodes(head);
|
||||
const ln = parser.nodeListLength(children);
|
||||
for (0..ln) |_| {
|
||||
// always index 0, because nodeAppendChild moves the node out of
|
||||
// the nodeList and into the new tree
|
||||
const child = parser.nodeListItem(children, 0) orelse continue;
|
||||
_ = try parser.nodeAppendChild(node, child);
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
const children = try parser.nodeGetChildNodes(body);
|
||||
const ln = parser.nodeListLength(children);
|
||||
for (0..ln) |_| {
|
||||
// always index 0, because nodeAppendChild moves the node out of
|
||||
// the nodeList and into the new tree
|
||||
const child = parser.nodeListItem(children, 0) orelse continue;
|
||||
_ = try parser.nodeAppendChild(node, child);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// The closest() method of the Element interface traverses the element and its parents (heading toward the document root) until it finds a node that matches the specified CSS selector.
|
||||
// Returns the closest ancestor Element or itself, which matches the selectors. If there are no such element, null.
|
||||
pub fn _closest(self: *parser.Element, selector: []const u8, page: *Page) !?*parser.Element {
|
||||
const cssParse = @import("../css/css.zig").parse;
|
||||
const CssNodeWrap = @import("../css/libdom.zig").Node;
|
||||
const select = try cssParse(page.call_arena, selector, .{});
|
||||
|
||||
var current: CssNodeWrap = .{ .node = parser.elementToNode(self) };
|
||||
while (true) {
|
||||
if (try select.match(current)) {
|
||||
if (!current.isElement()) {
|
||||
log.err(.browser, "closest invalid type", .{ .type = try current.tag() });
|
||||
return null;
|
||||
}
|
||||
return parser.nodeToElement(current.node);
|
||||
}
|
||||
current = current.parent() orelse return null;
|
||||
}
|
||||
}

    // don't use parser.nodeHasAttributes(...) because that returns true/false
    // based on the type, e.g. a node never has attributes, an element always has
    // attributes. But, Element.hasAttributes is supposed to return true only
    // if the element has at least 1 attribute.
    pub fn _hasAttributes(self: *parser.Element) !bool {
        // an element always has an attribute map (possibly empty), so this can't be null
        const node_map = try parser.nodeGetAttributes(parser.elementToNode(self)) orelse unreachable;
        return try parser.namedNodeMapGetLength(node_map) > 0;
    }

    pub fn _getAttribute(self: *parser.Element, qname: []const u8) !?[]const u8 {
        return try parser.elementGetAttribute(self, qname);
    }

    pub fn _getAttributeNS(self: *parser.Element, ns: []const u8, qname: []const u8) !?[]const u8 {
        return try parser.elementGetAttributeNS(self, ns, qname);
    }

    pub fn _setAttribute(self: *parser.Element, qname: []const u8, value: []const u8) !void {
        return try parser.elementSetAttribute(self, qname, value);
    }

    pub fn _setAttributeNS(self: *parser.Element, ns: []const u8, qname: []const u8, value: []const u8) !void {
        return try parser.elementSetAttributeNS(self, ns, qname, value);
    }

    pub fn _removeAttribute(self: *parser.Element, qname: []const u8) !void {
        return try parser.elementRemoveAttribute(self, qname);
    }

    pub fn _removeAttributeNS(self: *parser.Element, ns: []const u8, qname: []const u8) !void {
        return try parser.elementRemoveAttributeNS(self, ns, qname);
    }

    pub fn _hasAttribute(self: *parser.Element, qname: []const u8) !bool {
        return try parser.elementHasAttribute(self, qname);
    }

    pub fn _hasAttributeNS(self: *parser.Element, ns: []const u8, qname: []const u8) !bool {
        return try parser.elementHasAttributeNS(self, ns, qname);
    }

    // https://dom.spec.whatwg.org/#dom-element-toggleattribute
    pub fn _toggleAttribute(self: *parser.Element, qname: []u8, force: ?bool) !bool {
        _ = std.ascii.lowerString(qname, qname);
        const exists = try parser.elementHasAttribute(self, qname);

        // If attribute is null, then:
        if (!exists) {
            // If force is not given or is true, create an attribute whose
            // local name is qualifiedName, value is the empty string and node
            // document is this’s node document, then append this attribute to
            // this, and then return true.
            if (force == null or force.?) {
                try parser.elementSetAttribute(self, qname, "");
                return true;
            }
            if (try parser.validateName(qname) == false) {
                return parser.DOMError.InvalidCharacter;
            }

            // Return false.
            return false;
        }

        // Otherwise, if force is not given or is false, remove an attribute
        // given qualifiedName and this, and then return false.
        if (force == null or !force.?) {
            try parser.elementRemoveAttribute(self, qname);
            return false;
        }

        // Return true.
        return true;
    }
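
    // The force parameter above follows the spec's truth table: when the attribute
    // is absent, toggleAttribute adds it unless force is false; when it is present,
    // it removes it unless force is true, and the return value is always "is the
    // attribute present afterwards". A hedged, pure-function sketch of that table:
    fn toggledPresence(exists: bool, force: ?bool) bool {
        if (!exists) return force orelse true;
        return force orelse false;
    }

    test "toggleAttribute force semantics" {
        try std.testing.expect(toggledPresence(false, null)); // absent, no force => added
        try std.testing.expect(toggledPresence(false, true)); // absent, force=true => added
        try std.testing.expect(!toggledPresence(false, false)); // absent, force=false => stays absent
        try std.testing.expect(!toggledPresence(true, null)); // present, no force => removed
        try std.testing.expect(toggledPresence(true, true)); // present, force=true => stays
        try std.testing.expect(!toggledPresence(true, false)); // present, force=false => removed
    }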

    pub fn _getAttributeNames(self: *parser.Element, page: *Page) ![]const []const u8 {
        const attributes = try parser.nodeGetAttributes(@ptrCast(self)) orelse return &.{};
        const ln = try parser.namedNodeMapGetLength(attributes);

        const names = try page.call_arena.alloc([]const u8, ln);
        var at: usize = 0;

        for (0..ln) |i| {
            const attribute = try parser.namedNodeMapItem(attributes, @intCast(i)) orelse break;
            names[at] = try parser.attributeGetName(attribute);
            at += 1;
        }

        return names[0..at];
    }

    pub fn _getAttributeNode(self: *parser.Element, name: []const u8) !?*parser.Attribute {
        return try parser.elementGetAttributeNode(self, name);
    }

    pub fn _getAttributeNodeNS(self: *parser.Element, ns: []const u8, name: []const u8) !?*parser.Attribute {
        return try parser.elementGetAttributeNodeNS(self, ns, name);
    }

    pub fn _setAttributeNode(self: *parser.Element, attr: *parser.Attribute) !?*parser.Attribute {
        return try parser.elementSetAttributeNode(self, attr);
    }

    pub fn _setAttributeNodeNS(self: *parser.Element, attr: *parser.Attribute) !?*parser.Attribute {
        return try parser.elementSetAttributeNodeNS(self, attr);
    }

    pub fn _removeAttributeNode(self: *parser.Element, attr: *parser.Attribute) !*parser.Attribute {
        return try parser.elementRemoveAttributeNode(self, attr);
    }

    pub fn _getElementsByTagName(self: *parser.Element, tag_name: Env.String) !collection.HTMLCollection {
        return collection.HTMLCollectionByTagName(
            parser.elementToNode(self),
            tag_name.string,
            .{ .include_root = false },
        );
    }

    pub fn _getElementsByClassName(self: *parser.Element, class_names: Env.String) !collection.HTMLCollection {
        return try collection.HTMLCollectionByClassName(
            parser.elementToNode(self),
            class_names.string,
            .{ .include_root = false },
        );
    }

    // ParentNode
    // https://dom.spec.whatwg.org/#parentnode
    pub fn get_children(self: *parser.Element) !collection.HTMLCollection {
        return collection.HTMLCollectionChildren(parser.elementToNode(self), .{
            .include_root = false,
        });
    }

    pub fn get_firstElementChild(self: *parser.Element) !?Union {
        var children = try get_children(self);
        return try children._item(0);
    }

    pub fn get_lastElementChild(self: *parser.Element) !?Union {
        // TODO we could check the last child node first, if it's an element,
        // we can return it directly instead of looping twice over the
        // children.
        var children = try get_children(self);
        const ln = try children.get_length();
        if (ln == 0) return null;
        return try children._item(ln - 1);
    }

    pub fn get_childElementCount(self: *parser.Element) !u32 {
        var children = try get_children(self);
        return try children.get_length();
    }

    // NonDocumentTypeChildNode
    // https://dom.spec.whatwg.org/#interface-nondocumenttypechildnode
    pub fn get_previousElementSibling(self: *parser.Element) !?Union {
        const res = parser.nodePreviousElementSibling(parser.elementToNode(self));
        if (res == null) return null;
        return try toInterface(res.?);
    }

    pub fn get_nextElementSibling(self: *parser.Element) !?Union {
        const res = parser.nodeNextElementSibling(parser.elementToNode(self));
        if (res == null) return null;
        return try toInterface(res.?);
    }

    fn getElementById(self: *parser.Element, id: []const u8) !?*parser.Node {
        // walk over the node tree to find the node by id.
        const root = parser.elementToNode(self);
        const walker = Walker{};
        var next: ?*parser.Node = null;
        while (true) {
            next = try walker.get_next(root, next) orelse return null;
            // ignore non-element nodes.
            if (parser.nodeType(next.?) != .element) {
                continue;
            }
            const e = parser.nodeToElement(next.?);
            if (std.mem.eql(u8, id, try get_id(e))) return next;
        }
    }

    pub fn _querySelector(self: *parser.Element, selector: []const u8, page: *Page) !?Union {
        if (selector.len == 0) return null;

        const n = try css.querySelector(page.call_arena, parser.elementToNode(self), selector);

        if (n == null) return null;

        return try toInterface(parser.nodeToElement(n.?));
    }

    pub fn _querySelectorAll(self: *parser.Element, selector: []const u8, page: *Page) !NodeList {
        return css.querySelectorAll(page.arena, parser.elementToNode(self), selector);
    }

    pub fn _prepend(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
        return Node.prepend(parser.elementToNode(self), nodes);
    }

    pub fn _append(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
        return Node.append(parser.elementToNode(self), nodes);
    }

    pub fn _before(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
        const ref_node = parser.elementToNode(self);
        return Node.before(ref_node, nodes);
    }

    pub fn _after(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
        const ref_node = parser.elementToNode(self);
        return Node.after(ref_node, nodes);
    }

    pub fn _replaceChildren(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
        return Node.replaceChildren(parser.elementToNode(self), nodes);
    }

    // A DOMRect object providing information about the size of an element and its position relative to the viewport.
    // Returns an all-zero DOMRect if the element is detached from the main window.
    pub fn _getBoundingClientRect(self: *parser.Element, page: *Page) !DOMRect {
        // Since we render lazily, we need this check. We could store the renderer in a viewport so that it could cache these rects, but that would require tracking changes.
        if (!page.isNodeAttached(parser.elementToNode(self))) {
            return DOMRect{
                .x = 0,
                .y = 0,
                .width = 0,
                .height = 0,
                .bottom = 0,
                .right = 0,
                .top = 0,
                .left = 0,
            };
        }
        return page.renderer.getRect(self);
    }

    // Returns a collection of DOMRect objects that indicate the bounding rectangles for each CSS border box in a client.
    // We do not render, so this always returns only the element's bounding rect.
    // Returns an empty array if the element is detached from the main window.
    pub fn _getClientRects(self: *parser.Element, page: *Page) ![]DOMRect {
        if (!page.isNodeAttached(parser.elementToNode(self))) {
            return &.{};
        }
        const heap_ptr = try page.call_arena.create(DOMRect);
        heap_ptr.* = try page.renderer.getRect(self);
        return heap_ptr[0..1];
    }

    pub fn get_clientWidth(_: *parser.Element, page: *Page) u32 {
        return page.renderer.width();
    }

    pub fn get_clientHeight(_: *parser.Element, page: *Page) u32 {
        return page.renderer.height();
    }

    pub fn _matches(self: *parser.Element, selectors: []const u8, page: *Page) !bool {
        const cssParse = @import("../css/css.zig").parse;
        const CssNodeWrap = @import("../css/libdom.zig").Node;
        const s = try cssParse(page.call_arena, selectors, .{});
        return s.match(CssNodeWrap{ .node = parser.elementToNode(self) });
    }

    pub fn _scrollIntoViewIfNeeded(_: *parser.Element, center_if_needed: ?bool) void {
        _ = center_if_needed;
    }

    const CheckVisibilityOpts = struct {
        contentVisibilityAuto: bool,
        opacityProperty: bool,
        visibilityProperty: bool,
    };

    pub fn _checkVisibility(self: *parser.Element, opts: ?CheckVisibilityOpts) bool {
        _ = self;
        _ = opts;
        return true;
    }

    const AttachShadowOpts = struct {
        mode: []const u8, // must be specified
    };
    pub fn _attachShadow(self: *parser.Element, opts: AttachShadowOpts, page: *Page) !*ShadowRoot {
        const mode = std.meta.stringToEnum(ShadowRoot.Mode, opts.mode) orelse return error.InvalidArgument;
        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
        if (state.shadow_root) |sr| {
            if (mode != sr.mode) {
                // this is the behavior per the spec
                return error.NotSupportedError;
            }

            try Node.removeChildren(@ptrCast(@alignCast(sr.proto)));
            return sr;
        }

        // Not sure what to do if there is no owner document
        const doc = parser.nodeOwnerDocument(@ptrCast(self)) orelse return error.InvalidArgument;
        const fragment = try parser.documentCreateDocumentFragment(doc);
        const sr = try page.arena.create(ShadowRoot);
        sr.* = .{
            .host = self,
            .mode = mode,
            .proto = fragment,
        };
        state.shadow_root = sr;
        parser.documentFragmentSetHost(sr.proto, @ptrCast(@alignCast(self)));

        // Storing the ShadowRoot on the element makes sense: it's the ShadowRoot's
        // parent. When we render, we go top-down, so we'll have the element, get
        // its ShadowRoot, and go on. That's what the above code does.
        // But we sometimes need to go bottom-up, e.g. when we have a slot element
        // and want to find the containing parent. Unfortunately, we don't have
        // that link, so we need to create it. In the DOM, the ShadowRoot is
        // represented by this DocumentFragment (it's the ShadowRoot's base prototype),
        // so we can also store the ShadowRoot in the DocumentFragment's state.
        const fragment_state = try page.getOrCreateNodeState(@ptrCast(@alignCast(fragment)));
        fragment_state.shadow_root = sr;

        return sr;
    }
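
    // The mode string is mapped onto ShadowRoot.Mode with std.meta.stringToEnum,
    // so anything other than an exact enum tag name is rejected. A hedged, std-only
    // sketch with a stand-in enum (the real one lives in shadow_root.zig):
    test "attachShadow mode parsing via stringToEnum" {
        const Mode = enum { open, closed };
        try std.testing.expectEqual(Mode.open, std.meta.stringToEnum(Mode, "open").?);
        try std.testing.expectEqual(Mode.closed, std.meta.stringToEnum(Mode, "closed").?);
        // Unknown and differently-cased strings yield null, which _attachShadow
        // turns into error.InvalidArgument.
        try std.testing.expectEqual(@as(?Mode, null), std.meta.stringToEnum(Mode, "Open"));
    }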

    pub fn get_shadowRoot(self: *parser.Element, page: *Page) ?*ShadowRoot {
        const state = page.getNodeState(@ptrCast(@alignCast(self))) orelse return null;
        const sr = state.shadow_root orelse return null;
        if (sr.mode == .closed) {
            return null;
        }
        return sr;
    }

    pub fn _animate(self: *parser.Element, effect: JsObject, opts: JsObject) !Animation {
        _ = self;
        _ = opts;
        return Animation.constructor(effect, null);
    }

    pub fn _remove(self: *parser.Element) !void {
        // TODO: This hasn't been tested to make sure all references to this
        // node are properly updated. A lot of libdom is lazy and will look
        // for related elements JIT by walking the tree, but there could be
        // cases in libdom or the Zig WebAPI where this reference is kept
        const as_node: *parser.Node = @ptrCast(self);
        const parent = parser.nodeParentNode(as_node) orelse return;
        _ = try Node._removeChild(parent, as_node);
    }
};

// Tests
// -----

const testing = @import("../../testing.zig");
test "Browser: DOM.Element" {
    try testing.htmlRunner("dom/element.html");
}
@@ -1,165 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;

const EventHandler = @import("../events/event.zig").EventHandler;

const DOMException = @import("exceptions.zig").DOMException;
const nod = @import("node.zig");

pub const Union = union(enum) {
    node: nod.Union,
    xhr: *@import("../xhr/xhr.zig").XMLHttpRequest,
    plain: *parser.EventTarget,
    message_port: *@import("MessageChannel.zig").MessagePort,
    screen: *@import("../html/screen.zig").Screen,
    screen_orientation: *@import("../html/screen.zig").ScreenOrientation,
    performance: *@import("performance.zig").Performance,
    media_query_list: *@import("../html/media_query_list.zig").MediaQueryList,
};

// EventTarget implementation
pub const EventTarget = struct {
    pub const Self = parser.EventTarget;
    pub const Exception = DOMException;

    // Extend libdom event target for pure zig struct.
    base: parser.EventTargetTBase = parser.EventTargetTBase{ .internal_target_type = .plain },

    pub fn toInterface(et: *parser.EventTarget, page: *Page) !Union {
        // libdom assumes that all event targets are libdom nodes. They are not.

        switch (parser.eventTargetInternalType(et)) {
            .libdom_node => {
                return .{ .node = try nod.Node.toInterface(@as(*parser.Node, @ptrCast(et))) };
            },
            .plain => return .{ .plain = et },
            .abort_signal => {
                // AbortSignal is a special case: it has its own internal type.
                // We return it as a node, but we need to handle it differently.
                return .{ .node = .{ .AbortSignal = @fieldParentPtr("proto", @as(*parser.EventTargetTBase, @ptrCast(et))) } };
            },
            .window => {
                // The window is a common non-node target, but it's easy to handle as it's a singleton.
                std.debug.assert(@intFromPtr(et) == @intFromPtr(&page.window.base));
                return .{ .node = .{ .Window = &page.window } };
            },
            .xhr => {
                const XMLHttpRequestEventTarget = @import("../xhr/event_target.zig").XMLHttpRequestEventTarget;
                const base: *XMLHttpRequestEventTarget = @fieldParentPtr("base", @as(*parser.EventTargetTBase, @ptrCast(et)));
                return .{ .xhr = @fieldParentPtr("proto", base) };
            },
            .message_port => {
                return .{ .message_port = @fieldParentPtr("proto", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
            .screen => {
                return .{ .screen = @fieldParentPtr("proto", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
            .screen_orientation => {
                return .{ .screen_orientation = @fieldParentPtr("proto", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
            .performance => {
                return .{ .performance = @fieldParentPtr("base", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
            .media_query_list => {
                return .{ .media_query_list = @fieldParentPtr("base", @as(*parser.EventTargetTBase, @ptrCast(et))) };
            },
        }
    }
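
    // toInterface above relies on @fieldParentPtr: given a pointer to the embedded
    // EventTargetTBase field, it recovers the wrapper struct that contains it.
    // A hedged, self-contained sketch of that pattern with toy types:
    test "recovering a wrapper via @fieldParentPtr" {
        const Base = struct { id: u32 };
        const Wrapper = struct {
            name: []const u8,
            base: Base,
        };

        var w = Wrapper{ .name = "xhr", .base = .{ .id = 42 } };
        const base_ptr: *Base = &w.base;

        // walk back from the field pointer to the containing struct
        const recovered: *Wrapper = @fieldParentPtr("base", base_ptr);
        try std.testing.expectEqual(&w, recovered);
        try std.testing.expectEqualStrings("xhr", recovered.name);
    }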

    // JS funcs
    // --------
    pub fn constructor(page: *Page) !*parser.EventTarget {
        const et = try page.arena.create(EventTarget);
        return @ptrCast(&et.base);
    }

    pub fn _addEventListener(
        self: *parser.EventTarget,
        typ: []const u8,
        listener: EventHandler.Listener,
        opts: ?EventHandler.Opts,
        page: *Page,
    ) !void {
        _ = try EventHandler.register(page.arena, self, typ, listener, opts);
    }

    const RemoveEventListenerOpts = union(enum) {
        opts: Opts,
        capture: bool,

        const Opts = struct {
            capture: ?bool,
        };
    };

    pub fn _removeEventListener(
        self: *parser.EventTarget,
        typ: []const u8,
        listener: EventHandler.Listener,
        opts_: ?RemoveEventListenerOpts,
    ) !void {
        var capture = false;
        if (opts_) |opts| {
            capture = switch (opts) {
                .capture => |c| c,
                .opts => |o| o.capture orelse false,
            };
        }

        const cbk = (try listener.callback(self)) orelse return;

        // check if the event target already has this listener
        const lst = try parser.eventTargetHasListener(
            self,
            typ,
            capture,
            cbk.id,
        );
        if (lst == null) {
            return;
        }

        // remove listener
        try parser.eventTargetRemoveEventListener(
            self,
            typ,
            lst.?,
            capture,
        );
    }
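
    // removeEventListener's third argument can be either a boolean or an options
    // object, which is why RemoveEventListenerOpts is a union(enum) that gets
    // normalized to a single capture flag. A hedged, std-only sketch of that
    // normalization with a stand-in union:
    test "normalizing a bool-or-options capture argument" {
        const Arg = union(enum) {
            capture: bool,
            opts: struct { capture: ?bool },
        };

        const normalize = struct {
            fn capture(arg: ?Arg) bool {
                const a = arg orelse return false;
                return switch (a) {
                    .capture => |c| c,
                    .opts => |o| o.capture orelse false,
                };
            }
        };

        try std.testing.expect(normalize.capture(Arg{ .capture = true }));
        try std.testing.expect(!normalize.capture(Arg{ .opts = .{ .capture = null } }));
        try std.testing.expect(!normalize.capture(null));
    }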

    pub fn _dispatchEvent(self: *parser.EventTarget, event: *parser.Event, page: *Page) !bool {
        const res = try parser.eventTargetDispatchEvent(self, event);

        if (!parser.eventBubbles(event) or parser.eventIsStopped(event)) {
            return res;
        }

        try page.window.dispatchForDocumentTarget(event);
        return true;
    }
};

const testing = @import("../../testing.zig");
test "Browser: DOM.EventTarget" {
    try testing.htmlRunner("dom/event_target.html");
}
@@ -1,456 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
|
||||
const Element = @import("element.zig").Element;
|
||||
const Union = @import("element.zig").Union;
|
||||
const JsThis = @import("../env.zig").JsThis;
|
||||
const Walker = @import("walker.zig").Walker;
|
||||
|
||||
const Matcher = union(enum) {
|
||||
matchByName: MatchByName,
|
||||
matchByTagName: MatchByTagName,
|
||||
matchByClassName: MatchByClassName,
|
||||
matchByLinks: MatchByLinks,
|
||||
matchByAnchors: MatchByAnchors,
|
||||
matchTrue: struct {},
|
||||
matchFalse: struct {},
|
||||
|
||||
pub fn match(self: Matcher, node: *parser.Node) !bool {
|
||||
switch (self) {
|
||||
.matchTrue => return true,
|
||||
.matchFalse => return false,
|
||||
.matchByLinks => return MatchByLinks.match(node),
|
||||
.matchByAnchors => return MatchByAnchors.match(node),
|
||||
inline else => |m| return m.match(node),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub const MatchByTagName = struct {
|
||||
// tag is used to select node against their name.
|
||||
// tag comparison is case insensitive.
|
||||
tag: []const u8,
|
||||
is_wildcard: bool,
|
||||
|
||||
fn init(tag_name: []const u8) MatchByTagName {
|
||||
if (std.mem.eql(u8, tag_name, "*")) {
|
||||
return .{ .tag = "*", .is_wildcard = true };
|
||||
}
|
||||
|
||||
return .{
|
||||
.tag = tag_name,
|
||||
.is_wildcard = false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn match(self: MatchByTagName, node: *parser.Node) !bool {
|
||||
return self.is_wildcard or std.ascii.eqlIgnoreCase(self.tag, try parser.nodeName(node));
|
||||
}
|
||||
};
|
||||
|
||||
pub fn HTMLCollectionByTagName(
|
||||
root: ?*parser.Node,
|
||||
tag_name: []const u8,
|
||||
opts: Opts,
|
||||
) HTMLCollection {
|
||||
return .{
|
||||
.root = root,
|
||||
.walker = .{ .walkerDepthFirst = .{} },
|
||||
.matcher = .{ .matchByTagName = MatchByTagName.init(tag_name) },
|
||||
.mutable = opts.mutable,
|
||||
.include_root = opts.include_root,
|
||||
};
|
||||
}
|
||||
|
||||
pub const MatchByClassName = struct {
|
||||
class_names: []const u8,
|
||||
|
||||
fn init(class_names: []const u8) !MatchByClassName {
|
||||
return .{
|
||||
.class_names = class_names,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn match(self: MatchByClassName, node: *parser.Node) !bool {
|
||||
const e = parser.nodeToElement(node);
|
||||
|
||||
var it = std.mem.splitScalar(u8, self.class_names, ' ');
|
||||
while (it.next()) |c| {
|
||||
if (!try parser.elementHasClass(e, c)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
pub fn HTMLCollectionByClassName(
|
||||
root: ?*parser.Node,
|
||||
class_names: []const u8,
|
||||
opts: Opts,
|
||||
) !HTMLCollection {
|
||||
return HTMLCollection{
|
||||
.root = root,
|
||||
.walker = .{ .walkerDepthFirst = .{} },
|
||||
.matcher = .{ .matchByClassName = try MatchByClassName.init(class_names) },
|
||||
.mutable = opts.mutable,
|
||||
.include_root = opts.include_root,
|
||||
};
|
||||
}
|
||||
|
||||
pub const MatchByName = struct {
|
||||
name: []const u8,
|
||||
|
||||
fn init(name: []const u8) !MatchByName {
|
||||
return .{ .name = name };
|
||||
}
|
||||
|
||||
pub fn match(self: MatchByName, node: *parser.Node) !bool {
|
||||
const e = parser.nodeToElement(node);
|
||||
const nname = try parser.elementGetAttribute(e, "name") orelse return false;
|
||||
return std.mem.eql(u8, self.name, nname);
|
||||
}
|
||||
};
|
||||
|
||||
pub fn HTMLCollectionByName(
|
||||
root: ?*parser.Node,
|
||||
name: []const u8,
|
||||
opts: Opts,
|
||||
) !HTMLCollection {
|
||||
return HTMLCollection{
|
||||
.root = root,
|
||||
.walker = .{ .walkerDepthFirst = .{} },
|
||||
.matcher = .{ .matchByName = try MatchByName.init(name) },
|
||||
.mutable = opts.mutable,
|
||||
.include_root = opts.include_root,
|
||||
};
|
||||
}
|
||||
|
||||
// HTMLAllCollection is a special type: instances of it are falsy. It's the only
|
||||
// object in the WebAPI that behaves like this - in fact, it's even a special
|
||||
// case in the JavaScript spec.
|
||||
// This is important, because a lot of browser detection rely on this behavior
|
||||
// to determine what browser is running.
|
||||
|
||||
// It's also possible to use an instance like a function:
|
||||
// document.all(3)
|
||||
// document.all('some_id')
|
||||
pub const HTMLAllCollection = struct {
|
||||
pub const prototype = *HTMLCollection;
|
||||
|
||||
proto: HTMLCollection,
|
||||
|
||||
pub const mark_as_undetectable = true;
|
||||
|
||||
pub fn init(root: ?*parser.Node) HTMLAllCollection {
|
||||
return .{ .proto = .{
|
||||
.root = root,
|
||||
.walker = .{ .walkerDepthFirst = .{} },
|
||||
.matcher = .{ .matchTrue = .{} },
|
||||
.include_root = true,
|
||||
} };
|
||||
}
|
||||
|
||||
const CAllAsFunctionArg = union(enum) {
|
||||
index: u32,
|
||||
id: []const u8,
|
||||
};
|
||||
|
||||
pub fn jsCallAsFunction(self: *HTMLAllCollection, arg: CAllAsFunctionArg) !?Union {
|
||||
return switch (arg) {
|
||||
.index => |i| self.proto._item(i),
|
||||
.id => |id| self.proto._namedItem(id),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub fn HTMLCollectionChildren(
|
||||
root: ?*parser.Node,
|
||||
opts: Opts,
|
||||
) HTMLCollection {
|
||||
return HTMLCollection{
|
||||
.root = root,
|
||||
.walker = .{ .walkerChildren = .{} },
|
||||
.matcher = .{ .matchTrue = .{} },
|
||||
.mutable = opts.mutable,
|
||||
.include_root = opts.include_root,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn HTMLCollectionEmpty() HTMLCollection {
|
||||
return .{
|
||||
.root = null,
|
||||
.walker = .{ .walkerNone = .{} },
|
||||
.matcher = .{ .matchFalse = .{} },
|
||||
.include_root = false,
|
||||
};
|
||||
}
|
||||
|
||||
// MatchByLinks matches the a and area elements in the Document that have href
|
||||
// attributes.
|
||||
// https://html.spec.whatwg.org/#dom-document-links
|
||||
pub const MatchByLinks = struct {
|
||||
pub fn match(node: *parser.Node) !bool {
|
||||
const tag = try parser.nodeName(node);
|
||||
if (!std.ascii.eqlIgnoreCase(tag, "a") and !std.ascii.eqlIgnoreCase(tag, "area")) {
|
||||
return false;
|
||||
}
|
||||
const elem = @as(*parser.Element, @ptrCast(node));
|
||||
return parser.elementHasAttribute(elem, "href");
|
||||
}
|
||||
};
|
||||
|
||||
pub fn HTMLCollectionByLinks(root: ?*parser.Node, opts: Opts) HTMLCollection {
|
||||
return .{
|
||||
.root = root,
|
||||
.walker = .{ .walkerDepthFirst = .{} },
|
||||
.matcher = .{ .matchByLinks = .{} },
|
||||
.mutable = opts.mutable,
|
||||
.include_root = opts.include_root,
|
||||
};
|
||||
}
|
||||
|
||||
// MatchByAnchors matches the a elements in the Document that have name
|
||||
// attributes.
|
||||
// https://html.spec.whatwg.org/#dom-document-anchors
|
||||
pub const MatchByAnchors = struct {
|
||||
pub fn match(node: *parser.Node) !bool {
|
||||
const tag = try parser.nodeName(node);
|
||||
if (!std.ascii.eqlIgnoreCase(tag, "a")) return false;
|
||||
|
||||
const elem = @as(*parser.Element, @ptrCast(node));
|
||||
return parser.elementHasAttribute(elem, "name");
|
||||
}
|
||||
};
|
||||
|
||||
pub fn HTMLCollectionByAnchors(root: ?*parser.Node, opts: Opts) HTMLCollection {
|
||||
return .{
|
||||
.root = root,
|
||||
.walker = .{ .walkerDepthFirst = .{} },
|
||||
.matcher = .{ .matchByAnchors = .{} },
|
||||
.mutable = opts.mutable,
|
||||
.include_root = opts.include_root,
|
||||
};
|
||||
}
|
||||
|
||||
pub const HTMLCollectionIterator = struct {
|
||||
coll: *HTMLCollection,
|
||||
index: u32 = 0,
|
||||
|
||||
pub const Return = struct {
|
||||
value: ?Union,
|
||||
done: bool,
|
||||
};
|
||||
|
||||
pub fn _next(self: *HTMLCollectionIterator) !Return {
|
||||
const e = try self.coll._item(self.index);
|
||||
if (e == null) {
|
||||
return Return{
|
||||
.value = null,
|
||||
.done = true,
|
||||
};
|
||||
}
|
||||
|
||||
self.index += 1;
|
||||
return Return{
|
||||
.value = e,
|
||||
.done = false,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const Opts = struct {
|
||||
include_root: bool,
|
||||
mutable: bool = false,
|
||||
};
|
||||
|
||||
// WEB IDL https://dom.spec.whatwg.org/#htmlcollection
|
||||
// HTMLCollection is re implemented in zig here because libdom
|
||||
// dom_html_collection expects a comparison function callback as arguement.
|
||||
// But we wanted a dynamically comparison here, according to the match tagname.
|
||||
pub const HTMLCollection = struct {
|
||||
matcher: Matcher,
|
||||
walker: Walker,
|
||||
|
||||
root: ?*parser.Node,
|
||||
|
||||
// By default the HTMLCollection walk on the root's descendant only.
|
||||
// But on somes cases, like for dom document, we want to walk over the root
|
||||
// itself.
|
||||
include_root: bool = false,
|
||||
|
||||
mutable: bool = false,
|
||||
|
||||
// save a state for the collection to improve the _item speed.
|
||||
cur_idx: ?u32 = null,
|
||||
cur_node: ?*parser.Node = null,
|
||||
|
||||
// start returns the first node to walk on.
|
||||
fn start(self: *const HTMLCollection) !?*parser.Node {
|
||||
if (self.root == null) return null;
|
||||
|
||||
if (self.include_root) {
|
||||
return self.root.?;
|
||||
}
|
||||
|
||||
return try self.walker.get_next(self.root.?, null);
|
||||
}
|
||||
|
||||
pub fn _symbol_iterator(self: *HTMLCollection) HTMLCollectionIterator {
|
||||
return HTMLCollectionIterator{
|
||||
.coll = self,
|
||||
};
|
||||
}
|
||||
|
||||
/// get_length computes the collection's length dynamically according to
|
||||
/// the current root structure.
|
||||
// TODO: nodes retrieved must be de-referenced.
|
||||
pub fn get_length(self: *HTMLCollection) !u32 {
|
||||
if (self.root == null) return 0;
|
||||
|
||||
var len: u32 = 0;
|
||||
var node = try self.start() orelse return 0;
|
||||
|
||||
while (true) {
|
||||
if (parser.nodeType(node) == .element) {
|
||||
if (try self.matcher.match(node)) {
|
||||
len += 1;
|
||||
}
|
||||
}
|
||||
|
||||
node = try self.walker.get_next(self.root.?, node) orelse break;
|
||||
}
|
||||
|
||||
return len;
|
||||
}
|
||||
|
||||
pub fn item(self: *HTMLCollection, index: u32) !?*parser.Node {
|
||||
if (self.root == null) return null;
|
||||
|
||||
var i: u32 = 0;
|
||||
var node: *parser.Node = undefined;
|
||||
|
||||
// Use the current state to improve speed if possible.
|
||||
if (self.mutable == false and self.cur_idx != null and index >= self.cur_idx.?) {
|
||||
i = self.cur_idx.?;
|
||||
node = self.cur_node.?;
|
||||
} else {
|
||||
node = try self.start() orelse return null;
|
||||
}
|
||||
|
||||
while (true) {
|
||||
if (parser.nodeType(node) == .element) {
|
||||
if (try self.matcher.match(node)) {
|
||||
// check if we found the searched element.
|
||||
if (i == index) {
|
||||
// save the current state
|
||||
self.cur_node = node;
|
||||
self.cur_idx = i;
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
node = try self.walker.get_next(self.root.?, node) orelse break;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _item(self: *HTMLCollection, index: u32) !?Union {
|
||||
const node = try self.item(index) orelse return null;
|
||||
const e = @as(*parser.Element, @ptrCast(node));
|
||||
return try Element.toInterface(e);
|
||||
}
|
||||
|
||||
pub fn _namedItem(self: *const HTMLCollection, name: []const u8) !?Union {
|
||||
if (self.root == null) return null;
|
||||
if (name.len == 0) return null;
|
||||
|
||||
var node = try self.start() orelse return null;
|
||||
|
||||
while (true) {
|
||||
if (parser.nodeType(node) == .element) {
|
||||
if (try self.matcher.match(node)) {
|
||||
const elem = @as(*parser.Element, @ptrCast(node));
|
||||
|
||||
var attr = try parser.elementGetAttribute(elem, "id");
|
||||
// check if the node id corresponds to the name argument.
|
||||
if (attr != null and std.mem.eql(u8, name, attr.?)) {
|
||||
return try Element.toInterface(elem);
|
||||
}
|
||||
|
||||
attr = try parser.elementGetAttribute(elem, "name");
|
||||
// check if the node id corresponds to the name argument.
|
||||
if (attr != null and std.mem.eql(u8, name, attr.?)) {
|
||||
return try Element.toInterface(elem);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
node = try self.walker.get_next(self.root.?, node) orelse break;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn item_name(elt: *parser.Element) !?[]const u8 {
|
||||
if (try parser.elementGetAttribute(elt, "id")) |v| {
|
||||
return v;
|
||||
}
|
||||
if (try parser.elementGetAttribute(elt, "name")) |v| {
|
||||
return v;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn postAttach(self: *HTMLCollection, js_this: JsThis) !void {
|
||||
const len = try self.get_length();
|
||||
for (0..len) |i| {
|
||||
const node = try self.item(@intCast(i)) orelse unreachable;
|
||||
const e = @as(*parser.Element, @ptrCast(node));
|
||||
const as_interface = try Element.toInterface(e);
|
||||
try js_this.setIndex(@intCast(i), as_interface, .{});
|
||||
|
||||
if (try item_name(e)) |name| {
|
||||
// Even though an entry might have an empty id, the spec says
|
||||
// that namedItem("") should always return null
|
||||
if (name.len > 0) {
|
||||
// Named fields should not be enumerable (it is defined with
|
||||
// the LegacyUnenumerableNamedProperties flag.)
|
||||
try js_this.set(name, as_interface, .{ .DONT_ENUM = true });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: DOM.HTMLCollection" {
|
||||
try testing.htmlRunner("dom/html_collection.html");
|
||||
}
|
||||
@@ -1,56 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const parser = @import("../netsurf.zig");
const DOMException = @import("exceptions.zig").DOMException;

// WEB IDL https://dom.spec.whatwg.org/#domimplementation
pub const DOMImplementation = struct {
    pub const Exception = DOMException;

    pub fn _createDocumentType(
        _: *DOMImplementation,
        qname: [:0]const u8,
        publicId: [:0]const u8,
        systemId: [:0]const u8,
    ) !*parser.DocumentType {
        return try parser.domImplementationCreateDocumentType(qname, publicId, systemId);
    }

    pub fn _createDocument(
        _: *DOMImplementation,
        namespace: ?[:0]const u8,
        qname: ?[:0]const u8,
        doctype: ?*parser.DocumentType,
    ) !*parser.Document {
        return try parser.domImplementationCreateDocument(namespace, qname, doctype);
    }

    pub fn _createHTMLDocument(_: *DOMImplementation, title: ?[]const u8) !*parser.DocumentHTML {
        return try parser.domImplementationCreateHTMLDocument(title);
    }

    pub fn _hasFeature(_: *DOMImplementation) bool {
        return true;
    }
};

const testing = @import("../../testing.zig");
test "Browser: DOM.Implementation" {
    try testing.htmlRunner("dom/implementation.html");
}
@@ -1,186 +0,0 @@
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;

const Env = @import("../env.zig").Env;
const Element = @import("element.zig").Element;

pub const Interfaces = .{
    IntersectionObserver,
    IntersectionObserverEntry,
};

// This is supposed to listen to changes between the root and the observation targets.
// However, our renderer stores everything as 1-pixel boxes in a long row that never changes.
// As such, there are no changes to intersections between the root and any target.
// Instead we keep a list of all entries that are being observed.
// The callback is called with all entries every time a new entry is added (observed).
// Potentially we should also call the callback at a regular interval.
// The returned entries are phony: they always indicate full intersection.
// https://developer.mozilla.org/en-US/docs/Web/API/IntersectionObserver
pub const IntersectionObserver = struct {
    page: *Page,
    callback: Env.Function,
    options: IntersectionObserverOptions,

    observed_entries: std.ArrayListUnmanaged(IntersectionObserverEntry),

    // new IntersectionObserver(callback)
    // new IntersectionObserver(callback, options) [not supported yet]
    pub fn constructor(callback: Env.Function, options_: ?IntersectionObserverOptions, page: *Page) !IntersectionObserver {
        var options = IntersectionObserverOptions{
            .root = parser.documentToNode(parser.documentHTMLToDocument(page.window.document)),
            .rootMargin = "0px 0px 0px 0px",
            .threshold = .{ .single = 0.0 },
        };
        if (options_) |*o| {
            if (o.root) |root| {
                options.root = root;
            } // Other properties are not used due to the way we render
        }

        return .{
            .page = page,
            .callback = callback,
            .options = options,
            .observed_entries = .{},
        };
    }

    pub fn _disconnect(self: *IntersectionObserver) !void {
        self.observed_entries = .{}; // We don't free as it is on an arena
    }

    pub fn _observe(self: *IntersectionObserver, target_element: *parser.Element) !void {
        for (self.observed_entries.items) |*observer| {
            if (observer.target == target_element) {
                return; // Already observed
            }
        }

        try self.observed_entries.append(self.page.arena, .{
            .page = self.page,
            .target = target_element,
            .options = &self.options,
        });

        var result: Env.Function.Result = undefined;
        self.callback.tryCall(void, .{self.observed_entries.items}, &result) catch {
            log.debug(.user_script, "callback error", .{
                .err = result.exception,
                .stack = result.stack,
                .source = "intersection observer",
            });
        };
    }
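
    // _observe appends to an ArrayListUnmanaged backed by the page arena and never
    // frees individual entries; de-duplication is a linear scan over .items. A
    // hedged, std-only sketch of that append-if-absent pattern on an arena:
    test "append-if-absent on an arena-backed unmanaged list" {
        var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
        defer arena_state.deinit();
        const arena = arena_state.allocator();

        var targets: std.ArrayListUnmanaged(u32) = .{};
        const wanted = [_]u32{ 1, 2, 1, 3, 2 };
        outer: for (wanted) |t| {
            for (targets.items) |existing| {
                if (existing == t) continue :outer; // already observed
            }
            try targets.append(arena, t);
        }
        try std.testing.expectEqualSlices(u32, &.{ 1, 2, 3 }, targets.items);
    }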

    pub fn _unobserve(self: *IntersectionObserver, target: *parser.Element) !void {
        for (self.observed_entries.items, 0..) |*observer, index| {
            if (observer.target == target) {
                _ = self.observed_entries.swapRemove(index);
                break;
            }
        }
    }

    pub fn _takeRecords(self: *IntersectionObserver) []IntersectionObserverEntry {
        return self.observed_entries.items;
    }
};

const IntersectionObserverOptions = struct {
    root: ?*parser.Node, // Element or Document
    rootMargin: ?[]const u8,
    threshold: ?Threshold,

    const Threshold = union(enum) {
        single: f32,
        list: []const f32,
    };
};

// https://developer.mozilla.org/en-US/docs/Web/API/IntersectionObserverEntry
// https://w3c.github.io/IntersectionObserver/#intersection-observer-entry
pub const IntersectionObserverEntry = struct {
    page: *Page,
    target: *parser.Element,
    options: *IntersectionObserverOptions,

    // Returns the bounds rectangle of the target element as a DOMRectReadOnly. The bounds are computed as described in the documentation for Element.getBoundingClientRect().
    pub fn get_boundingClientRect(self: *const IntersectionObserverEntry) !Element.DOMRect {
        return Element._getBoundingClientRect(self.target, self.page);
    }

    // Returns the ratio of the intersectionRect to the boundingClientRect.
    pub fn get_intersectionRatio(_: *const IntersectionObserverEntry) f32 {
        return 1.0;
    }

    // Returns a DOMRectReadOnly representing the target's visible area.
    pub fn get_intersectionRect(self: *const IntersectionObserverEntry) !Element.DOMRect {
        return Element._getBoundingClientRect(self.target, self.page);
    }

    // A Boolean value which is true if the target element intersects with the
    // intersection observer's root. If this is true, then the
    // IntersectionObserverEntry describes a transition into a state of
    // intersection; if it's false, then you know the transition is from
    // intersecting to not-intersecting.
    pub fn get_isIntersecting(_: *const IntersectionObserverEntry) bool {
        return true;
    }

    // Returns a DOMRectReadOnly for the intersection observer's root.
    pub fn get_rootBounds(self: *const IntersectionObserverEntry) !Element.DOMRect {
        const root = self.options.root.?;
        if (@intFromPtr(root) == @intFromPtr(self.page.window.document)) {
            return self.page.renderer.boundingRect();
        }

        const root_type = parser.nodeType(root);

        var element: *parser.Element = undefined;
        switch (root_type) {
            .element => element = parser.nodeToElement(root),
            .document => {
                const doc = parser.nodeToDocument(root);
                element = (try parser.documentGetDocumentElement(doc)).?;
            },
            else => return error.InvalidState,
        }

        return Element._getBoundingClientRect(element, self.page);
    }

    // The Element whose intersection with the root changed.
    pub fn get_target(self: *const IntersectionObserverEntry) *parser.Element {
        return self.target;
    }

    // TODO: pub fn get_time(self: *const IntersectionObserverEntry)
};

const testing = @import("../../testing.zig");
test "Browser: DOM.IntersectionObserver" {
    try testing.htmlRunner("dom/intersection_observer.html");
}
@@ -1,357 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const NodeList = @import("nodelist.zig").NodeList;
|
||||
|
||||
pub const Interfaces = .{
|
||||
MutationObserver,
|
||||
MutationRecord,
|
||||
};
|
||||
|
||||
const Walker = @import("../dom/walker.zig").WalkerChildren;
|
||||
|
||||
// WEB IDL https://dom.spec.whatwg.org/#interface-mutationobserver
|
||||
pub const MutationObserver = struct {
|
||||
page: *Page,
|
||||
cbk: Env.Function,
|
||||
connected: bool,
|
||||
scheduled: bool,
|
||||
|
||||
// List of records which were observed. When the call scope ends, we need to
|
||||
// execute our callback with it.
|
||||
observed: std.ArrayListUnmanaged(MutationRecord),
|
||||
|
||||
pub fn constructor(cbk: Env.Function, page: *Page) !MutationObserver {
|
||||
return .{
|
||||
.cbk = cbk,
|
||||
.page = page,
|
||||
.observed = .{},
|
||||
.connected = true,
|
||||
.scheduled = false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn _observe(self: *MutationObserver, node: *parser.Node, options_: ?Options) !void {
|
||||
const arena = self.page.arena;
|
||||
var options = options_ orelse Options{};
|
||||
if (options.attributeFilter.len > 0) {
|
||||
options.attributeFilter = try arena.dupe([]const u8, options.attributeFilter);
|
||||
}
|
||||
|
||||
const observer = try arena.create(Observer);
|
||||
observer.* = .{
|
||||
.node = node,
|
||||
.options = options,
|
||||
.mutation_observer = self,
|
||||
.event_node = .{ .id = self.cbk.id, .func = Observer.handle },
|
||||
};
|
||||
|
||||
// register node's events
|
||||
if (options.childList or options.subtree) {
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMNodeInserted",
|
||||
&observer.event_node,
|
||||
false,
|
||||
);
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMNodeRemoved",
|
||||
&observer.event_node,
|
||||
false,
|
||||
);
|
||||
}
|
||||
if (options.attr()) {
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMAttrModified",
|
||||
&observer.event_node,
|
||||
false,
|
||||
);
|
||||
}
|
||||
if (options.cdata()) {
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMCharacterDataModified",
|
||||
&observer.event_node,
|
||||
false,
|
||||
);
|
||||
}
|
||||
if (options.subtree) {
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMSubtreeModified",
|
||||
&observer.event_node,
|
||||
false,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn callback(ctx: *anyopaque) ?u32 {
|
||||
const self: *MutationObserver = @ptrCast(@alignCast(ctx));
|
||||
if (self.connected == false) {
|
||||
self.scheduled = true;
|
||||
return null;
|
||||
}
|
||||
self.scheduled = false;
|
||||
|
||||
const records = self.observed.items;
|
||||
if (records.len == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
defer self.observed.clearRetainingCapacity();
|
||||
|
||||
var result: Env.Function.Result = undefined;
|
||||
self.cbk.tryCall(void, .{records}, &result) catch {
|
||||
log.debug(.user_script, "callback error", .{
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.source = "mutation observer",
|
||||
});
|
||||
};
|
||||
return null;
|
||||
}
|
||||
|
||||
// TODO
|
||||
pub fn _disconnect(self: *MutationObserver) !void {
|
||||
self.connected = false;
|
||||
}
|
||||
|
||||
// TODO
|
||||
pub fn _takeRecords(_: *const MutationObserver) ?[]const u8 {
|
||||
return &[_]u8{};
|
||||
}
|
||||
};
|
||||
|
||||
pub const MutationRecord = struct {
|
||||
type: []const u8,
|
||||
target: *parser.Node,
|
||||
added_nodes: NodeList = .{},
|
||||
removed_nodes: NodeList = .{},
|
||||
previous_sibling: ?*parser.Node = null,
|
||||
next_sibling: ?*parser.Node = null,
|
||||
attribute_name: ?[]const u8 = null,
|
||||
attribute_namespace: ?[]const u8 = null,
|
||||
old_value: ?[]const u8 = null,
|
||||
|
||||
pub fn get_type(self: *const MutationRecord) []const u8 {
|
||||
return self.type;
|
||||
}
|
||||
|
||||
pub fn get_addedNodes(self: *MutationRecord) *NodeList {
|
||||
return &self.added_nodes;
|
||||
}
|
||||
|
||||
pub fn get_removedNodes(self: *MutationRecord) *NodeList {
|
||||
return &self.removed_nodes;
|
||||
}
|
||||
|
||||
pub fn get_target(self: *const MutationRecord) *parser.Node {
|
||||
return self.target;
|
||||
}
|
||||
|
||||
pub fn get_attributeName(self: *const MutationRecord) ?[]const u8 {
|
||||
return self.attribute_name;
|
||||
}
|
||||
|
||||
pub fn get_attributeNamespace(self: *const MutationRecord) ?[]const u8 {
|
||||
return self.attribute_namespace;
|
||||
}
|
||||
|
||||
pub fn get_previousSibling(self: *const MutationRecord) ?*parser.Node {
|
||||
return self.previous_sibling;
|
||||
}
|
||||
|
||||
pub fn get_nextSibling(self: *const MutationRecord) ?*parser.Node {
|
||||
return self.next_sibling;
|
||||
}
|
||||
|
||||
pub fn get_oldValue(self: *const MutationRecord) ?[]const u8 {
|
||||
return self.old_value;
|
||||
}
|
||||
};
|
||||
|
||||
const Options = struct {
|
||||
childList: bool = false,
|
||||
attributes: bool = false,
|
||||
characterData: bool = false,
|
||||
subtree: bool = false,
|
||||
attributeOldValue: bool = false,
|
||||
characterDataOldValue: bool = false,
|
||||
attributeFilter: [][]const u8 = &.{},
|
||||
|
||||
fn attr(self: Options) bool {
|
||||
return self.attributes or self.attributeOldValue or self.attributeFilter.len > 0;
|
||||
}
|
||||
|
||||
fn cdata(self: Options) bool {
|
||||
return self.characterData or self.characterDataOldValue;
|
||||
}
|
||||
};
|
||||
|
||||
const Observer = struct {
|
||||
node: *parser.Node,
|
||||
options: Options,
|
||||
|
||||
// reference back to the MutationObserver so that we can access the arena
|
||||
// and batch the mutation records.
|
||||
mutation_observer: *MutationObserver,
|
||||
|
||||
event_node: parser.EventNode,
|
||||
|
||||
fn appliesTo(
|
||||
self: *const Observer,
|
||||
target: *parser.Node,
|
||||
event_type: MutationEventType,
|
||||
event: *parser.MutationEvent,
|
||||
) !bool {
|
||||
if (event_type == .DOMAttrModified and self.options.attributeFilter.len > 0) {
|
||||
const attribute_name = try parser.mutationEventAttributeName(event);
|
||||
for (self.options.attributeFilter) |needle| blk: {
|
||||
if (std.mem.eql(u8, attribute_name, needle)) {
|
||||
break :blk;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// mutation on any target is always ok.
|
||||
if (self.options.subtree) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// if target equals node, alway ok.
|
||||
if (target == self.node) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// no subtree, no same target and no childlist, always noky.
|
||||
if (!self.options.childList) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// target must be a child of o.node
|
||||
const walker = Walker{};
|
||||
var next: ?*parser.Node = null;
|
||||
while (true) {
|
||||
next = walker.get_next(self.node, next) catch break orelse break;
|
||||
if (next.? == target) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
fn handle(en: *parser.EventNode, event: *parser.Event) void {
|
||||
const self: *Observer = @fieldParentPtr("event_node", en);
|
||||
self._handle(event) catch |err| {
|
||||
log.err(.web_api, "handle error", .{ .err = err, .source = "mutation observer" });
|
||||
};
|
||||
}
|
||||
|
||||
fn _handle(self: *Observer, event: *parser.Event) !void {
|
||||
var mutation_observer = self.mutation_observer;
|
||||
|
||||
const node = blk: {
|
||||
const event_target = parser.eventTarget(event) orelse return;
|
||||
break :blk parser.eventTargetToNode(event_target);
|
||||
};
|
||||
|
||||
const mutation_event = parser.eventToMutationEvent(event);
|
||||
const event_type = blk: {
|
||||
const t = parser.eventType(event);
|
||||
break :blk std.meta.stringToEnum(MutationEventType, t) orelse return;
|
||||
};
|
||||
|
||||
if (try self.appliesTo(node, event_type, mutation_event) == false) {
|
||||
return;
|
||||
}
|
||||
|
||||
var record = MutationRecord{
|
||||
.target = self.node,
|
||||
.type = event_type.recordType(),
|
||||
};
|
||||
|
||||
const arena = mutation_observer.page.arena;
|
||||
switch (event_type) {
|
||||
.DOMAttrModified => {
|
||||
record.attribute_name = parser.mutationEventAttributeName(mutation_event) catch null;
|
||||
if (self.options.attributeOldValue) {
|
||||
record.old_value = parser.mutationEventPrevValue(mutation_event);
|
||||
}
|
||||
},
|
||||
.DOMCharacterDataModified => {
|
||||
if (self.options.characterDataOldValue) {
|
||||
record.old_value = parser.mutationEventPrevValue(mutation_event);
|
||||
}
|
||||
},
|
||||
.DOMNodeInserted => {
|
||||
if (parser.mutationEventRelatedNode(mutation_event) catch null) |related_node| {
|
||||
try record.added_nodes.append(arena, related_node);
|
||||
}
|
||||
},
|
||||
.DOMNodeRemoved => {
|
||||
if (parser.mutationEventRelatedNode(mutation_event) catch null) |related_node| {
|
||||
try record.removed_nodes.append(arena, related_node);
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
try mutation_observer.observed.append(arena, record);
|
||||
|
||||
if (mutation_observer.scheduled == false) {
|
||||
mutation_observer.scheduled = true;
|
||||
try mutation_observer.page.scheduler.add(
|
||||
mutation_observer,
|
||||
MutationObserver.callback,
|
||||
0,
|
||||
.{ .name = "mutation_observer" },
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const MutationEventType = enum {
|
||||
DOMAttrModified,
|
||||
DOMCharacterDataModified,
|
||||
DOMNodeInserted,
|
||||
DOMNodeRemoved,
|
||||
|
||||
fn recordType(self: MutationEventType) []const u8 {
|
||||
return switch (self) {
|
||||
.DOMAttrModified => "attributes",
|
||||
.DOMCharacterDataModified => "characterData",
|
||||
.DOMNodeInserted => "childList",
|
||||
.DOMNodeRemoved => "childList",
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: DOM.MutationObserver" {
|
||||
try testing.htmlRunner("dom/mutation_observer.html");
|
||||
}
|
||||
@@ -1,637 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
const generate = @import("../../runtime/generate.zig");

const Page = @import("../page.zig").Page;
const EventTarget = @import("event_target.zig").EventTarget;

// DOM
const Attr = @import("attribute.zig").Attr;
const CData = @import("character_data.zig");
const Element = @import("element.zig").Element;
const ElementUnion = @import("element.zig").Union;
const NodeList = @import("nodelist.zig").NodeList;
const Document = @import("document.zig").Document;
const DocumentType = @import("document_type.zig").DocumentType;
const DocumentFragment = @import("document_fragment.zig").DocumentFragment;
const HTMLCollection = @import("html_collection.zig").HTMLCollection;
const HTMLAllCollection = @import("html_collection.zig").HTMLAllCollection;
const HTMLCollectionIterator = @import("html_collection.zig").HTMLCollectionIterator;
const ShadowRoot = @import("shadow_root.zig").ShadowRoot;
const Walker = @import("walker.zig").WalkerDepthFirst;

// HTML
const HTML = @import("../html/html.zig");

// Node interfaces
pub const Interfaces = .{
    Attr,
    CData.CharacterData,
    CData.Interfaces,
    Element,
    Document,
    DocumentType,
    DocumentFragment,
    HTMLCollection,
    HTMLAllCollection,
    HTMLCollectionIterator,
    HTML.Interfaces,
};

pub const Union = generate.Union(Interfaces);

// Node implementation
pub const Node = struct {
    pub const Self = parser.Node;
    pub const prototype = *EventTarget;
    pub const subtype = .node;

    pub fn toInterface(node: *parser.Node) !Union {
        return switch (parser.nodeType(node)) {
            .element => try Element.toInterfaceT(
                Union,
                @as(*parser.Element, @ptrCast(node)),
            ),
            .comment => .{ .Comment = @as(*parser.Comment, @ptrCast(node)) },
            .text => .{ .Text = @as(*parser.Text, @ptrCast(node)) },
            .cdata_section => .{ .CDATASection = @as(*parser.CDATASection, @ptrCast(node)) },
            .processing_instruction => .{ .ProcessingInstruction = @as(*parser.ProcessingInstruction, @ptrCast(node)) },
            .document => blk: {
                const doc: *parser.Document = @ptrCast(node);
                if (doc.is_html) {
                    break :blk .{ .HTMLDocument = @as(*parser.DocumentHTML, @ptrCast(node)) };
                }

                break :blk .{ .Document = doc };
            },
            .document_type => .{ .DocumentType = @as(*parser.DocumentType, @ptrCast(node)) },
            .attribute => .{ .Attr = @as(*parser.Attribute, @ptrCast(node)) },
            .document_fragment => .{ .DocumentFragment = @as(*parser.DocumentFragment, @ptrCast(node)) },
            else => @panic("node type not handled"), // TODO
        };
    }
|
||||
|
||||
// class attributes
|
||||
|
||||
pub const _ELEMENT_NODE = @intFromEnum(parser.NodeType.element);
|
||||
pub const _ATTRIBUTE_NODE = @intFromEnum(parser.NodeType.attribute);
|
||||
pub const _TEXT_NODE = @intFromEnum(parser.NodeType.text);
|
||||
pub const _CDATA_SECTION_NODE = @intFromEnum(parser.NodeType.cdata_section);
|
||||
pub const _PROCESSING_INSTRUCTION_NODE = @intFromEnum(parser.NodeType.processing_instruction);
|
||||
pub const _COMMENT_NODE = @intFromEnum(parser.NodeType.comment);
|
||||
pub const _DOCUMENT_NODE = @intFromEnum(parser.NodeType.document);
|
||||
pub const _DOCUMENT_TYPE_NODE = @intFromEnum(parser.NodeType.document_type);
|
||||
pub const _DOCUMENT_FRAGMENT_NODE = @intFromEnum(parser.NodeType.document_fragment);
|
||||
|
||||
// These 3 are deprecated, but both Chrome and Firefox still expose them
|
||||
pub const _ENTITY_REFERENCE_NODE = @intFromEnum(parser.NodeType.entity_reference);
|
||||
pub const _ENTITY_NODE = @intFromEnum(parser.NodeType.entity);
|
||||
pub const _NOTATION_NODE = @intFromEnum(parser.NodeType.notation);
|
||||
|
||||
pub const _DOCUMENT_POSITION_DISCONNECTED = @intFromEnum(parser.DocumentPosition.disconnected);
|
||||
pub const _DOCUMENT_POSITION_PRECEDING = @intFromEnum(parser.DocumentPosition.preceding);
|
||||
pub const _DOCUMENT_POSITION_FOLLOWING = @intFromEnum(parser.DocumentPosition.following);
|
||||
pub const _DOCUMENT_POSITION_CONTAINS = @intFromEnum(parser.DocumentPosition.contains);
|
||||
pub const _DOCUMENT_POSITION_CONTAINED_BY = @intFromEnum(parser.DocumentPosition.contained_by);
|
||||
pub const _DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC = @intFromEnum(parser.DocumentPosition.implementation_specific);
|
||||
|
||||
// JS funcs
|
||||
// --------
|
||||
|
||||
// Read-only attributes
|
||||
pub fn get_baseURI(_: *parser.Node, page: *Page) ![]const u8 {
|
||||
return page.url.raw;
|
||||
}
|
||||
|
||||
pub fn get_firstChild(self: *parser.Node) !?Union {
|
||||
const res = parser.nodeFirstChild(self);
|
||||
if (res == null) {
|
||||
return null;
|
||||
}
|
||||
return try Node.toInterface(res.?);
|
||||
}
|
||||
|
||||
pub fn get_lastChild(self: *parser.Node) !?Union {
|
||||
const res = parser.nodeLastChild(self);
|
||||
if (res == null) {
|
||||
return null;
|
||||
}
|
||||
return try Node.toInterface(res.?);
|
||||
}
|
||||
|
||||
pub fn get_nextSibling(self: *parser.Node) !?Union {
|
||||
const res = parser.nodeNextSibling(self);
|
||||
if (res == null) {
|
||||
return null;
|
||||
}
|
||||
return try Node.toInterface(res.?);
|
||||
}
|
||||
|
||||
pub fn get_previousSibling(self: *parser.Node) !?Union {
|
||||
const res = parser.nodePreviousSibling(self);
|
||||
if (res == null) {
|
||||
return null;
|
||||
}
|
||||
return try Node.toInterface(res.?);
|
||||
}
|
||||
|
||||
pub fn get_parentNode(self: *parser.Node) !?Union {
|
||||
const res = parser.nodeParentNode(self);
|
||||
if (res == null) {
|
||||
return null;
|
||||
}
|
||||
return try Node.toInterface(res.?);
|
||||
}
|
||||
|
||||
pub fn get_parentElement(self: *parser.Node) !?ElementUnion {
|
||||
const res = parser.nodeParentElement(self);
|
||||
if (res == null) {
|
||||
return null;
|
||||
}
|
||||
return try Element.toInterface(res.?);
|
||||
}
|
||||
|
||||
pub fn get_nodeName(self: *parser.Node) ![]const u8 {
|
||||
return try parser.nodeName(self);
|
||||
}
|
||||
|
||||
pub fn get_nodeType(self: *parser.Node) !u8 {
|
||||
return @intFromEnum(parser.nodeType(self));
|
||||
}
|
||||
|
||||
pub fn get_ownerDocument(self: *parser.Node) !?*parser.DocumentHTML {
|
||||
const res = parser.nodeOwnerDocument(self);
|
||||
if (res == null) {
|
||||
return null;
|
||||
}
|
||||
return @as(*parser.DocumentHTML, @ptrCast(res.?));
|
||||
}
|
||||
|
||||
pub fn get_isConnected(self: *parser.Node) !bool {
|
||||
var node = self;
|
||||
while (true) {
|
||||
const node_type = parser.nodeType(node);
|
||||
if (node_type == .document) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (parser.nodeParentNode(node)) |parent| {
|
||||
// didn't find a document, but the node has a parent; let's see
// if the parent is connected.
|
||||
node = parent;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (node_type != .document_fragment) {
|
||||
// doesn't have a parent and isn't a document_fragment
|
||||
// can't be connected
|
||||
return false;
|
||||
}
|
||||
|
||||
if (parser.documentFragmentGetHost(@ptrCast(node))) |host| {
|
||||
// node doesn't have a parent, but it's a document fragment
|
||||
// with a host. The host is like the parent, but we only want to
|
||||
// traverse up (or down) to it in specific cases, like isConnected.
|
||||
node = host;
|
||||
continue;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Read/Write attributes
|
||||
|
||||
pub fn get_nodeValue(self: *parser.Node) !?[]const u8 {
|
||||
return parser.nodeValue(self);
|
||||
}
|
||||
|
||||
pub fn set_nodeValue(self: *parser.Node, data: []u8) !void {
|
||||
try parser.nodeSetValue(self, data);
|
||||
}
|
||||
|
||||
pub fn get_textContent(self: *parser.Node) ?[]const u8 {
|
||||
return parser.nodeTextContent(self);
|
||||
}
|
||||
|
||||
pub fn set_textContent(self: *parser.Node, data: []u8) !void {
|
||||
return try parser.nodeSetTextContent(self, data);
|
||||
}
|
||||
|
||||
// Methods
|
||||
|
||||
pub fn _appendChild(self: *parser.Node, child: *parser.Node) !Union {
|
||||
const self_owner = parser.nodeOwnerDocument(self);
|
||||
const child_owner = parser.nodeOwnerDocument(child);
|
||||
|
||||
// If the node to be inserted has a different ownerDocument than the parent node,
|
||||
// modern browsers automatically adopt the node and its descendants into
|
||||
// the parent's ownerDocument.
|
||||
// This process is known as adoption.
|
||||
// (7.1) https://dom.spec.whatwg.org/#concept-node-insert
|
||||
if (child_owner == null or (self_owner != null and child_owner.? != self_owner.?)) {
|
||||
const w = Walker{};
|
||||
var current = child;
|
||||
while (true) {
|
||||
current.owner = self_owner;
|
||||
current = try w.get_next(child, current) orelse break;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: DocumentFragment special case
|
||||
const res = try parser.nodeAppendChild(self, child);
|
||||
return try Node.toInterface(res);
|
||||
}
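// Illustrative note (not part of the original file): the walker loop above is a
// minimal form of the spec's "adopt" step. It re-points the owner document of
// the inserted node and every node in its subtree before the actual insert, so
// nodes created in another document end up owned by the parent's document.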
|
||||
|
||||
pub fn _cloneNode(self: *parser.Node, deep: ?bool) !Union {
|
||||
const clone = try parser.nodeCloneNode(self, deep orelse false);
|
||||
return try Node.toInterface(clone);
|
||||
}
|
||||
|
||||
pub fn _compareDocumentPosition(self: *parser.Node, other: *parser.Node) !u32 {
|
||||
if (self == other) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const docself = parser.nodeOwnerDocument(self) orelse blk: {
|
||||
if (parser.nodeType(self) == .document) {
|
||||
break :blk @as(*parser.Document, @ptrCast(self));
|
||||
}
|
||||
break :blk null;
|
||||
};
|
||||
const docother = parser.nodeOwnerDocument(other) orelse blk: {
|
||||
if (parser.nodeType(other) == .document) {
|
||||
break :blk @as(*parser.Document, @ptrCast(other));
|
||||
}
|
||||
break :blk null;
|
||||
};
|
||||
|
||||
// Both are in different documents.
|
||||
if (docself == null or docother == null or docself.? != docother.?) {
|
||||
return @intFromEnum(parser.DocumentPosition.disconnected) +
|
||||
@intFromEnum(parser.DocumentPosition.implementation_specific) +
|
||||
@intFromEnum(parser.DocumentPosition.preceding);
|
||||
}
|
||||
|
||||
if (@intFromPtr(self) == @intFromPtr(docself.?)) {
|
||||
// if self is the document, and we already know other is in the
|
||||
// document, then other is contained by and following self.
|
||||
return @intFromEnum(parser.DocumentPosition.following) +
|
||||
@intFromEnum(parser.DocumentPosition.contained_by);
|
||||
}
|
||||
|
||||
const rootself = parser.nodeGetRootNode(self);
|
||||
const rootother = parser.nodeGetRootNode(other);
|
||||
if (rootself != rootother) {
|
||||
return @intFromEnum(parser.DocumentPosition.disconnected) +
|
||||
@intFromEnum(parser.DocumentPosition.implementation_specific) +
|
||||
@intFromEnum(parser.DocumentPosition.preceding);
|
||||
}
|
||||
|
||||
// TODO: handle the case where both are in different trees within the same document.
|
||||
|
||||
const w = Walker{};
|
||||
var next: ?*parser.Node = null;
|
||||
|
||||
// Is other a descendant of self?
|
||||
while (true) {
|
||||
next = try w.get_next(self, next) orelse break;
|
||||
if (other == next) {
|
||||
return @intFromEnum(parser.DocumentPosition.following) +
|
||||
@intFromEnum(parser.DocumentPosition.contained_by);
|
||||
}
|
||||
}
|
||||
|
||||
// Is self a descendant of other?
|
||||
next = null;
|
||||
while (true) {
|
||||
next = try w.get_next(other, next) orelse break;
|
||||
if (self == next) {
|
||||
return @intFromEnum(parser.DocumentPosition.contains) +
|
||||
@intFromEnum(parser.DocumentPosition.preceding);
|
||||
}
|
||||
}
|
||||
|
||||
next = null;
|
||||
while (true) {
|
||||
next = try w.get_next(parser.documentToNode(docself.?), next) orelse break;
|
||||
if (other == next) {
|
||||
// other precedes self.
|
||||
return @intFromEnum(parser.DocumentPosition.preceding);
|
||||
}
|
||||
if (self == next) {
|
||||
// other follows self.
|
||||
return @intFromEnum(parser.DocumentPosition.following);
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
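// Illustrative sketch (not part of the original file): the returned value is a
// bitmask built from the DOCUMENT_POSITION_* constants above, so a hypothetical
// caller checks individual bits rather than testing for equality, e.g.:
//
//   const mask = try Node._compareDocumentPosition(parent_node, child_node);
//   if (mask & Node._DOCUMENT_POSITION_CONTAINED_BY != 0) {
//       // child_node is inside parent_node
//   }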
|
||||
|
||||
pub fn _contains(self: *parser.Node, other: *parser.Node) bool {
|
||||
return parser.nodeContains(self, other);
|
||||
}
|
||||
|
||||
// Returns itself or ancestor object inheriting from Node.
|
||||
// - An Element inside a standard web page will return an HTMLDocument object representing the entire page (or <iframe>).
|
||||
// - An Element inside a shadow DOM will return the associated ShadowRoot.
|
||||
// - An Element that is not attached to a document or a shadow tree will return the root of the DOM tree it belongs to
|
||||
const GetRootNodeResult = union(enum) {
|
||||
shadow_root: *ShadowRoot,
|
||||
node: Union,
|
||||
};
|
||||
pub fn _getRootNode(self: *parser.Node, options: ?struct { composed: bool = false }, page: *Page) !GetRootNodeResult {
|
||||
if (options) |options_| if (options_.composed) {
|
||||
log.warn(.web_api, "not implemented", .{ .feature = "getRootNode composed" });
|
||||
};
|
||||
|
||||
const root = parser.nodeGetRootNode(self);
|
||||
if (page.getNodeState(root)) |state| {
|
||||
if (state.shadow_root) |sr| {
|
||||
return .{ .shadow_root = sr };
|
||||
}
|
||||
}
|
||||
|
||||
return .{ .node = try Node.toInterface(root) };
|
||||
}
|
||||
|
||||
pub fn _hasChildNodes(self: *parser.Node) bool {
|
||||
return parser.nodeHasChildNodes(self);
|
||||
}
|
||||
|
||||
pub fn get_childNodes(self: *parser.Node, page: *Page) !NodeList {
|
||||
const allocator = page.arena;
|
||||
var list: NodeList = .{};
|
||||
|
||||
var n = parser.nodeFirstChild(self) orelse return list;
|
||||
while (true) {
|
||||
try list.append(allocator, n);
|
||||
n = parser.nodeNextSibling(n) orelse return list;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn _insertBefore(self: *parser.Node, new_node: *parser.Node, ref_node_: ?*parser.Node) !Union {
|
||||
if (ref_node_ == null) {
|
||||
return _appendChild(self, new_node);
|
||||
}
|
||||
|
||||
const self_owner = parser.nodeOwnerDocument(self);
|
||||
const new_node_owner = parser.nodeOwnerDocument(new_node);
|
||||
|
||||
// If the node to be inserted has a different ownerDocument than the parent node,
|
||||
// modern browsers automatically adopt the node and its descendants into
|
||||
// the parent's ownerDocument.
|
||||
// This process is known as adoption.
|
||||
// (7.1) https://dom.spec.whatwg.org/#concept-node-insert
|
||||
if (new_node_owner == null or (self_owner != null and new_node_owner.? != self_owner.?)) {
|
||||
const w = Walker{};
|
||||
var current = new_node;
|
||||
while (true) {
|
||||
current.owner = self_owner;
|
||||
current = try w.get_next(new_node, current) orelse break;
|
||||
}
|
||||
}
|
||||
|
||||
return Node.toInterface(try parser.nodeInsertBefore(self, new_node, ref_node_.?));
|
||||
}
|
||||
|
||||
pub fn _isDefaultNamespace(self: *parser.Node, namespace: ?[]const u8) !bool {
|
||||
return parser.nodeIsDefaultNamespace(self, namespace);
|
||||
}
|
||||
|
||||
pub fn _isEqualNode(self: *parser.Node, other: *parser.Node) !bool {
|
||||
// TODO: other is not an optional parameter, but can be null.
|
||||
return try parser.nodeIsEqualNode(self, other);
|
||||
}
|
||||
|
||||
pub fn _isSameNode(self: *parser.Node, other: *parser.Node) bool {
|
||||
// TODO: other is not an optional parameter, but can be null.
|
||||
// NOTE: there is no need to use isSameNode(); instead use the === strict equality operator
|
||||
return parser.nodeIsSameNode(self, other);
|
||||
}
|
||||
|
||||
pub fn _lookupPrefix(self: *parser.Node, namespace: ?[]const u8) !?[]const u8 {
|
||||
// TODO: other is not an optional parameter, but can be null.
|
||||
if (namespace == null) {
|
||||
return null;
|
||||
}
|
||||
if (std.mem.eql(u8, namespace.?, "")) {
|
||||
return null;
|
||||
}
|
||||
return try parser.nodeLookupPrefix(self, namespace.?);
|
||||
}
|
||||
|
||||
pub fn _lookupNamespaceURI(self: *parser.Node, prefix: ?[]const u8) !?[]const u8 {
|
||||
// TODO: other is not an optional parameter, but can be null.
|
||||
return try parser.nodeLookupNamespaceURI(self, prefix);
|
||||
}
|
||||
|
||||
pub fn _normalize(self: *parser.Node) !void {
|
||||
return try parser.nodeNormalize(self);
|
||||
}
|
||||
|
||||
pub fn _removeChild(self: *parser.Node, child: *parser.Node) !Union {
|
||||
const res = try parser.nodeRemoveChild(self, child);
|
||||
return try Node.toInterface(res);
|
||||
}
|
||||
|
||||
pub fn _replaceChild(self: *parser.Node, new_child: *parser.Node, old_child: *parser.Node) !Union {
|
||||
const res = try parser.nodeReplaceChild(self, new_child, old_child);
|
||||
return try Node.toInterface(res);
|
||||
}
|
||||
|
||||
// Check that the node tree hierarchy constraints are respected.
// For now, it only checks that none of the new nodes is self.
// TODO: implement the other constraints.
// see https://dom.spec.whatwg.org/#concept-node-tree
|
||||
pub fn hierarchy(self: *parser.Node, nodes: []const NodeOrText) bool {
|
||||
for (nodes) |n| {
|
||||
if (n.is(self)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn prepend(self: *parser.Node, nodes: []const NodeOrText) !void {
|
||||
if (nodes.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// check hierarchy
|
||||
if (!hierarchy(self, nodes)) {
|
||||
return parser.DOMError.HierarchyRequest;
|
||||
}
|
||||
|
||||
const doc = (parser.nodeOwnerDocument(self)) orelse return;
|
||||
|
||||
if (parser.nodeFirstChild(self)) |first| {
|
||||
for (nodes) |node| {
|
||||
_ = try parser.nodeInsertBefore(self, try node.toNode(doc), first);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
for (nodes) |node| {
|
||||
_ = try parser.nodeAppendChild(self, try node.toNode(doc));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn append(self: *parser.Node, nodes: []const NodeOrText) !void {
|
||||
if (nodes.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// check hierarchy
|
||||
if (!hierarchy(self, nodes)) {
|
||||
return parser.DOMError.HierarchyRequest;
|
||||
}
|
||||
|
||||
const doc = (parser.nodeOwnerDocument(self)) orelse return;
|
||||
for (nodes) |node| {
|
||||
_ = try parser.nodeAppendChild(self, try node.toNode(doc));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn replaceChildren(self: *parser.Node, nodes: []const NodeOrText) !void {
|
||||
if (nodes.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// check hierarchy
|
||||
if (!hierarchy(self, nodes)) {
|
||||
return parser.DOMError.HierarchyRequest;
|
||||
}
|
||||
|
||||
// remove existing children
|
||||
try removeChildren(self);
|
||||
|
||||
const doc = (parser.nodeOwnerDocument(self)) orelse return;
|
||||
// add new children
|
||||
for (nodes) |node| {
|
||||
_ = try parser.nodeAppendChild(self, try node.toNode(doc));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn removeChildren(self: *parser.Node) !void {
|
||||
if (!parser.nodeHasChildNodes(self)) return;
|
||||
|
||||
const children = try parser.nodeGetChildNodes(self);
|
||||
const ln = parser.nodeListLength(children);
|
||||
var i: u32 = 0;
|
||||
while (i < ln) {
|
||||
defer i += 1;
|
||||
// we always retrieve the child at index 0 on purpose: libdom nodelists
// are dynamic, so the next child to remove is always at position 0.
|
||||
const child = parser.nodeListItem(children, 0) orelse continue;
|
||||
_ = try parser.nodeRemoveChild(self, child);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn before(self: *parser.Node, nodes: []const NodeOrText) !void {
|
||||
const parent = parser.nodeParentNode(self) orelse return;
|
||||
const doc = (parser.nodeOwnerDocument(parent)) orelse return;
|
||||
|
||||
var sibling: ?*parser.Node = self;
|
||||
// have to find the first sibling that isn't in nodes
|
||||
CHECK: while (sibling) |s| {
|
||||
for (nodes) |n| {
|
||||
if (n.is(s)) {
|
||||
sibling = parser.nodePreviousSibling(s);
|
||||
continue :CHECK;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
if (sibling == null) {
|
||||
sibling = parser.nodeFirstChild(parent);
|
||||
}
|
||||
|
||||
if (sibling) |ref_node| {
|
||||
for (nodes) |node| {
|
||||
_ = try parser.nodeInsertBefore(parent, try node.toNode(doc), ref_node);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
return Node.prepend(self, nodes);
|
||||
}
|
||||
|
||||
pub fn after(self: *parser.Node, nodes: []const NodeOrText) !void {
|
||||
const parent = parser.nodeParentNode(self) orelse return;
|
||||
const doc = (parser.nodeOwnerDocument(parent)) orelse return;
|
||||
|
||||
// have to find the first sibling that isn't in nodes
|
||||
var sibling = parser.nodeNextSibling(self);
|
||||
CHECK: while (sibling) |s| {
|
||||
for (nodes) |n| {
|
||||
if (n.is(s)) {
|
||||
sibling = parser.nodeNextSibling(s);
|
||||
continue :CHECK;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
if (sibling) |ref_node| {
|
||||
for (nodes) |node| {
|
||||
_ = try parser.nodeInsertBefore(parent, try node.toNode(doc), ref_node);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
for (nodes) |node| {
|
||||
_ = try parser.nodeAppendChild(parent, try node.toNode(doc));
|
||||
}
|
||||
}
|
||||
|
||||
// A lot of functions take either a node or text input.
|
||||
// The text input is to be converted into a Text node.
|
||||
pub const NodeOrText = union(enum) {
|
||||
text: []const u8,
|
||||
node: *parser.Node,
|
||||
|
||||
fn toNode(self: NodeOrText, doc: *parser.Document) !*parser.Node {
|
||||
return switch (self) {
|
||||
.node => |n| n,
|
||||
.text => |txt| @ptrCast(@alignCast(try parser.documentCreateTextNode(doc, txt))),
|
||||
};
|
||||
}
|
||||
|
||||
// Whether the node represented by the NodeOrText is the same as the
// given Node. Always false for text values, as these represent
// not-yet-created Text nodes.
|
||||
fn is(self: NodeOrText, other: *parser.Node) bool {
|
||||
return switch (self) {
|
||||
.text => false,
|
||||
.node => |n| n == other,
|
||||
};
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: DOM.Node" {
|
||||
try testing.htmlRunner("dom/node.html");
|
||||
try testing.htmlRunner("dom/node_owner.html");
|
||||
}
|
||||
@@ -1,80 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Node = @import("node.zig").Node;
|
||||
|
||||
pub const NodeFilter = struct {
pub const _FILTER_ACCEPT: u16 = 1;
pub const _FILTER_REJECT: u16 = 2;
pub const _FILTER_SKIP: u16 = 3;

pub const _SHOW_ALL: u32 = std.math.maxInt(u32);
pub const _SHOW_ELEMENT: u32 = 0b1;
pub const _SHOW_ATTRIBUTE: u32 = 0b10;
pub const _SHOW_TEXT: u32 = 0b100;
pub const _SHOW_CDATA_SECTION: u32 = 0b1000;
pub const _SHOW_ENTITY_REFERENCE: u32 = 0b10000;
pub const _SHOW_ENTITY: u32 = 0b100000;
pub const _SHOW_PROCESSING_INSTRUCTION: u32 = 0b1000000;
pub const _SHOW_COMMENT: u32 = 0b10000000;
pub const _SHOW_DOCUMENT: u32 = 0b100000000;
pub const _SHOW_DOCUMENT_TYPE: u32 = 0b1000000000;
pub const _SHOW_DOCUMENT_FRAGMENT: u32 = 0b10000000000;
pub const _SHOW_NOTATION: u32 = 0b100000000000;
};
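// Illustrative sketch (not part of the original file): whatToShow values are
// bitmasks, so callers OR the _SHOW_* constants together and verify() below
// tests membership with a bitwise AND. A minimal check using std.testing:
test "NodeFilter: whatToShow is a bitmask (sketch)" {
    const show: u32 = NodeFilter._SHOW_ELEMENT | NodeFilter._SHOW_TEXT;
    try std.testing.expect(show & NodeFilter._SHOW_ELEMENT != 0);
    try std.testing.expect(show & NodeFilter._SHOW_COMMENT == 0);
}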
|
||||
|
||||
const VerifyResult = enum { accept, skip, reject };
|
||||
|
||||
pub fn verify(what_to_show: u32, filter: ?Env.Function, node: *parser.Node) !VerifyResult {
|
||||
const node_type = parser.nodeType(node);
|
||||
|
||||
// Verify that we can show this node type.
|
||||
if (!switch (node_type) {
|
||||
.attribute => what_to_show & NodeFilter._SHOW_ATTRIBUTE != 0,
|
||||
.cdata_section => what_to_show & NodeFilter._SHOW_CDATA_SECTION != 0,
|
||||
.comment => what_to_show & NodeFilter._SHOW_COMMENT != 0,
|
||||
.document => what_to_show & NodeFilter._SHOW_DOCUMENT != 0,
|
||||
.document_fragment => what_to_show & NodeFilter._SHOW_DOCUMENT_FRAGMENT != 0,
|
||||
.document_type => what_to_show & NodeFilter._SHOW_DOCUMENT_TYPE != 0,
|
||||
.element => what_to_show & NodeFilter._SHOW_ELEMENT != 0,
|
||||
.entity => what_to_show & NodeFilter._SHOW_ENTITY != 0,
|
||||
.entity_reference => what_to_show & NodeFilter._SHOW_ENTITY_REFERENCE != 0,
|
||||
.notation => what_to_show & NodeFilter._SHOW_NOTATION != 0,
|
||||
.processing_instruction => what_to_show & NodeFilter._SHOW_PROCESSING_INSTRUCTION != 0,
|
||||
.text => what_to_show & NodeFilter._SHOW_TEXT != 0,
|
||||
}) return .reject;
|
||||
|
||||
// Verify that we aren't filtering it out.
|
||||
if (filter) |f| {
|
||||
const acceptance = try f.call(u16, .{try Node.toInterface(node)});
|
||||
return switch (acceptance) {
|
||||
NodeFilter._FILTER_ACCEPT => .accept,
|
||||
NodeFilter._FILTER_REJECT => .reject,
|
||||
NodeFilter._FILTER_SKIP => .skip,
|
||||
else => .reject,
|
||||
};
|
||||
} else return .accept;
|
||||
}
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: DOM.NodeFilter" {
|
||||
try testing.htmlRunner("dom/node_filter.html");
|
||||
}
|
||||
@@ -1,273 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Env = @import("../env.zig").Env;
|
||||
const NodeFilter = @import("node_filter.zig");
|
||||
const Node = @import("node.zig").Node;
|
||||
const NodeUnion = @import("node.zig").Union;
|
||||
const DOMException = @import("exceptions.zig").DOMException;
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/NodeIterator
|
||||
// While this is similar to TreeWalker, it has its own implementation as there are several subtle differences.
// For example:
// - nextNode returns the reference node, whereas TreeWalker returns the next node
// - skip and reject are equivalent for NodeIterator, whereas for TreeWalker they differ
|
||||
pub const NodeIterator = struct {
|
||||
pub const Exception = DOMException;
|
||||
|
||||
root: *parser.Node,
|
||||
reference_node: *parser.Node,
|
||||
what_to_show: u32,
|
||||
filter: ?NodeIteratorOpts,
|
||||
filter_func: ?Env.Function,
|
||||
pointer_before_current: bool = true,
|
||||
// used to track / block recursive filters
|
||||
is_in_callback: bool = false,
|
||||
|
||||
// One of the few cases where null and undefined resolve to different defaults.
// We need the raw JsObject so that we can probe the tri-state:
// null, undefined or i32.
|
||||
pub const WhatToShow = Env.JsObject;
|
||||
|
||||
pub const NodeIteratorOpts = union(enum) {
|
||||
function: Env.Function,
|
||||
object: struct { acceptNode: Env.Function },
|
||||
};
|
||||
|
||||
pub fn init(node: *parser.Node, what_to_show_: ?WhatToShow, filter: ?NodeIteratorOpts) !NodeIterator {
|
||||
var filter_func: ?Env.Function = null;
|
||||
if (filter) |f| {
|
||||
filter_func = switch (f) {
|
||||
.function => |func| func,
|
||||
.object => |o| o.acceptNode,
|
||||
};
|
||||
}
|
||||
|
||||
var what_to_show: u32 = undefined;
|
||||
if (what_to_show_) |wts| {
|
||||
switch (try wts.triState(NodeIterator, "what_to_show", u32)) {
|
||||
.null => what_to_show = 0,
|
||||
.undefined => what_to_show = NodeFilter.NodeFilter._SHOW_ALL,
|
||||
.value => |v| what_to_show = v,
|
||||
}
|
||||
} else {
|
||||
what_to_show = NodeFilter.NodeFilter._SHOW_ALL;
|
||||
}
|
||||
|
||||
return .{
|
||||
.root = node,
|
||||
.reference_node = node,
|
||||
.what_to_show = what_to_show,
|
||||
.filter = filter,
|
||||
.filter_func = filter_func,
|
||||
};
|
||||
}
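// Illustrative note (not part of the original file): per the tri-state probe
// above, an omitted or undefined whatToShow defaults to _SHOW_ALL, an explicit
// null collapses to 0 (show nothing), and any numeric value is used as-is.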
|
||||
|
||||
pub fn get_filter(self: *const NodeIterator) ?NodeIteratorOpts {
|
||||
return self.filter;
|
||||
}
|
||||
|
||||
pub fn get_pointerBeforeReferenceNode(self: *const NodeIterator) bool {
|
||||
return self.pointer_before_current;
|
||||
}
|
||||
|
||||
pub fn get_referenceNode(self: *const NodeIterator) !NodeUnion {
|
||||
return try Node.toInterface(self.reference_node);
|
||||
}
|
||||
|
||||
pub fn get_root(self: *const NodeIterator) !NodeUnion {
|
||||
return try Node.toInterface(self.root);
|
||||
}
|
||||
|
||||
pub fn get_whatToShow(self: *const NodeIterator) u32 {
|
||||
return self.what_to_show;
|
||||
}
|
||||
|
||||
pub fn _nextNode(self: *NodeIterator) !?NodeUnion {
|
||||
try self.callbackStart();
|
||||
defer self.callbackEnd();
|
||||
|
||||
if (self.pointer_before_current) {
|
||||
// Unlike TreeWalker, NodeIterator starts at the first node
|
||||
if (.accept == try NodeFilter.verify(self.what_to_show, self.filter_func, self.reference_node)) {
|
||||
self.pointer_before_current = false;
|
||||
return try Node.toInterface(self.reference_node);
|
||||
}
|
||||
}
|
||||
|
||||
if (try self.firstChild(self.reference_node)) |child| {
|
||||
self.reference_node = child;
|
||||
return try Node.toInterface(child);
|
||||
}
|
||||
|
||||
var current = self.reference_node;
|
||||
while (current != self.root) {
|
||||
if (try self.nextSibling(current)) |sibling| {
|
||||
self.reference_node = sibling;
|
||||
return try Node.toInterface(sibling);
|
||||
}
|
||||
|
||||
current = (parser.nodeParentNode(current)) orelse break;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _previousNode(self: *NodeIterator) !?NodeUnion {
|
||||
try self.callbackStart();
|
||||
defer self.callbackEnd();
|
||||
|
||||
if (!self.pointer_before_current) {
|
||||
if (.accept == try NodeFilter.verify(self.what_to_show, self.filter_func, self.reference_node)) {
|
||||
self.pointer_before_current = true;
|
||||
// Still need to verify as last may be first as well
|
||||
return try Node.toInterface(self.reference_node);
|
||||
}
|
||||
}
|
||||
if (self.reference_node == self.root) {
|
||||
return null;
|
||||
}
|
||||
|
||||
var current = self.reference_node;
|
||||
while (parser.nodePreviousSibling(current)) |previous| {
|
||||
current = previous;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||
.accept => {
|
||||
// Get last child if it has one.
|
||||
if (try self.lastChild(current)) |child| {
|
||||
self.reference_node = child;
|
||||
return try Node.toInterface(child);
|
||||
}
|
||||
|
||||
// Otherwise, this node is our previous one.
|
||||
self.reference_node = current;
|
||||
return try Node.toInterface(current);
|
||||
},
|
||||
.reject, .skip => {
|
||||
// Get last child if it has one.
|
||||
if (try self.lastChild(current)) |child| {
|
||||
self.reference_node = child;
|
||||
return try Node.toInterface(child);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (current != self.root) {
|
||||
if (try self.parentNode(current)) |parent| {
|
||||
self.reference_node = parent;
|
||||
return try Node.toInterface(parent);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _detach(self: *const NodeIterator) void {
|
||||
// no-op as per spec
|
||||
_ = self;
|
||||
}
|
||||
|
||||
fn firstChild(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
|
||||
const children = try parser.nodeGetChildNodes(node);
|
||||
const child_count = parser.nodeListLength(children);
|
||||
|
||||
for (0..child_count) |i| {
|
||||
const index: u32 = @intCast(i);
|
||||
const child = (parser.nodeListItem(children, index)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
|
||||
.accept => return child, // NOTE: Skip and reject are equivalent for NodeIterator, this is different from TreeWalker
|
||||
.reject, .skip => if (try self.firstChild(child)) |gchild| return gchild,
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn lastChild(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
|
||||
const children = try parser.nodeGetChildNodes(node);
|
||||
const child_count = parser.nodeListLength(children);
|
||||
|
||||
var index: u32 = child_count;
|
||||
while (index > 0) {
|
||||
index -= 1;
|
||||
const child = (parser.nodeListItem(children, index)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
|
||||
.accept => return child, // NOTE: Skip and reject are equivalent for NodeIterator, this is different from TreeWalker
|
||||
.reject, .skip => if (try self.lastChild(child)) |gchild| return gchild,
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// This implementation is actually the same as TreeWalker's
|
||||
fn parentNode(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
|
||||
if (self.root == node) return null;
|
||||
|
||||
var current = node;
|
||||
while (true) {
|
||||
if (current == self.root) return null;
|
||||
current = (parser.nodeParentNode(current)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||
.accept => return current,
|
||||
.reject, .skip => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This implementation is actually the same as TreeWalker's
|
||||
fn nextSibling(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
|
||||
var current = node;
|
||||
|
||||
while (true) {
|
||||
current = (parser.nodeNextSibling(current)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||
.accept => return current,
|
||||
.skip, .reject => continue,
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn callbackStart(self: *NodeIterator) !void {
|
||||
if (self.is_in_callback) {
|
||||
// this is the correct DOMException
|
||||
return error.InvalidState;
|
||||
}
|
||||
self.is_in_callback = true;
|
||||
}
|
||||
|
||||
fn callbackEnd(self: *NodeIterator) void {
|
||||
self.is_in_callback = false;
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: DOM.NodeIterator" {
|
||||
try testing.htmlRunner("dom/node_iterator.html");
|
||||
}
|
||||
@@ -1,206 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Page = @import("../page.zig").Page;
|
||||
|
||||
const milliTimestamp = @import("../../datetime.zig").milliTimestamp;
|
||||
|
||||
pub const Interfaces = .{
|
||||
Performance,
|
||||
PerformanceEntry,
|
||||
PerformanceMark,
|
||||
};
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/Performance
|
||||
pub const Performance = struct {
|
||||
pub const prototype = *EventTarget;
|
||||
|
||||
// Extend libdom event target for pure zig struct.
|
||||
base: parser.EventTargetTBase = parser.EventTargetTBase{ .internal_target_type = .performance },
|
||||
|
||||
time_origin: u64,
|
||||
// if (Window.crossOriginIsolated) -> Resolution in isolated contexts: 5 microseconds
|
||||
// else -> Resolution in non-isolated contexts: 100 microseconds
|
||||
const ms_resolution = 100;
|
||||
|
||||
pub fn init() Performance {
|
||||
return .{
|
||||
.time_origin = milliTimestamp(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get_timeOrigin(self: *const Performance) u64 {
|
||||
return self.time_origin;
|
||||
}
|
||||
|
||||
pub fn reset(self: *Performance) void {
|
||||
self.time_origin = milliTimestamp();
|
||||
}
|
||||
|
||||
pub fn _now(self: *const Performance) u64 {
|
||||
return milliTimestamp() - self.time_origin;
|
||||
}
|
||||
|
||||
pub fn _mark(_: *Performance, name: Env.String, _options: ?PerformanceMark.Options, page: *Page) !PerformanceMark {
|
||||
const mark: PerformanceMark = try .constructor(name, _options, page);
|
||||
// TODO: Should store this in an entries list
|
||||
return mark;
|
||||
}
|
||||
|
||||
// TODO: fn _mark should record the marks in a lookup
|
||||
pub fn _clearMarks(_: *Performance, name: ?[]const u8) void {
|
||||
_ = name;
|
||||
}
|
||||
|
||||
// TODO: fn _measures should record the marks in a lookup
|
||||
pub fn _clearMeasures(_: *Performance, name: ?[]const u8) void {
|
||||
_ = name;
|
||||
}
|
||||
|
||||
// TODO: fn _measures should record the marks in a lookup
|
||||
pub fn _getEntriesByName(_: *Performance, name: []const u8, typ: ?[]const u8) []PerformanceEntry {
|
||||
_ = name;
|
||||
_ = typ;
|
||||
return &.{};
|
||||
}
|
||||
|
||||
// TODO: fn _measures should record the marks in a lookup
|
||||
pub fn _getEntriesByType(_: *Performance, typ: []const u8) []PerformanceEntry {
|
||||
_ = typ;
|
||||
return &.{};
|
||||
}
|
||||
};
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry
|
||||
pub const PerformanceEntry = struct {
|
||||
const PerformanceEntryType = enum {
|
||||
element,
|
||||
event,
|
||||
first_input,
|
||||
largest_contentful_paint,
|
||||
layout_shift,
|
||||
long_animation_frame,
|
||||
longtask,
|
||||
mark,
|
||||
measure,
|
||||
navigation,
|
||||
paint,
|
||||
resource,
|
||||
taskattribution,
|
||||
visibility_state,
|
||||
|
||||
pub fn toString(self: PerformanceEntryType) []const u8 {
|
||||
return switch (self) {
|
||||
.first_input => "first-input",
|
||||
.largest_contentful_paint => "largest-contentful-paint",
|
||||
.layout_shift => "layout-shift",
|
||||
.long_animation_frame => "long-animation-frame",
|
||||
.visibility_state => "visibility-state",
|
||||
else => @tagName(self),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
duration: f64 = 0.0,
|
||||
entry_type: PerformanceEntryType,
|
||||
name: []const u8,
|
||||
start_time: f64 = 0.0,
|
||||
|
||||
pub fn get_duration(self: *const PerformanceEntry) f64 {
|
||||
return self.duration;
|
||||
}
|
||||
|
||||
pub fn get_entryType(self: *const PerformanceEntry) PerformanceEntryType {
|
||||
return self.entry_type;
|
||||
}
|
||||
|
||||
pub fn get_name(self: *const PerformanceEntry) []const u8 {
|
||||
return self.name;
|
||||
}
|
||||
|
||||
pub fn get_startTime(self: *const PerformanceEntry) f64 {
|
||||
return self.start_time;
|
||||
}
|
||||
};
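// Illustrative sketch (not part of the original file): entryType strings come
// from the enum tags above, with multi-word types spelled with dashes. A small
// check, assuming std.testing:
test "PerformanceEntryType: toString (sketch)" {
    const T = PerformanceEntry.PerformanceEntryType;
    try std.testing.expectEqualStrings("first-input", T.first_input.toString());
    try std.testing.expectEqualStrings("mark", T.mark.toString());
    try std.testing.expectEqualStrings("navigation", T.navigation.toString());
}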
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/PerformanceMark
|
||||
pub const PerformanceMark = struct {
|
||||
pub const prototype = *PerformanceEntry;
|
||||
|
||||
proto: PerformanceEntry,
|
||||
detail: ?Env.JsObject,
|
||||
|
||||
const Options = struct {
|
||||
detail: ?Env.JsObject = null,
|
||||
startTime: ?f64 = null,
|
||||
};
|
||||
|
||||
pub fn constructor(name: Env.String, _options: ?Options, page: *Page) !PerformanceMark {
|
||||
const perf = &page.window.performance;
|
||||
|
||||
const options = _options orelse Options{};
|
||||
const start_time = options.startTime orelse @as(f64, @floatFromInt(perf._now()));
|
||||
|
||||
if (start_time < 0.0) {
|
||||
return error.TypeError;
|
||||
}
|
||||
|
||||
const detail = if (options.detail) |d| try d.persist() else null;
|
||||
const proto = PerformanceEntry{ .name = name.string, .entry_type = .mark, .start_time = start_time };
|
||||
|
||||
return .{ .proto = proto, .detail = detail };
|
||||
}
|
||||
|
||||
pub fn get_detail(self: *const PerformanceMark) ?Env.JsObject {
|
||||
return self.detail;
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("./../../testing.zig");
|
||||
|
||||
test "Performance: get_timeOrigin" {
|
||||
var perf = Performance.init();
|
||||
const time_origin = perf.get_timeOrigin();
|
||||
try testing.expect(time_origin >= 0);
|
||||
}
|
||||
|
||||
test "Performance: now" {
|
||||
var perf = Performance.init();
|
||||
|
||||
// Monotonically increasing
|
||||
var now = perf._now();
|
||||
while (now <= 0) { // Loop until now is non-zero
|
||||
try testing.expectEqual(now, 0);
|
||||
now = perf._now();
|
||||
}
|
||||
|
||||
var after = perf._now();
|
||||
while (after <= now) { // Loop until after > now
|
||||
try testing.expectEqual(after, now);
|
||||
after = perf._now();
|
||||
}
|
||||
}
|
||||
|
||||
test "Browser: Performance.Mark" {
|
||||
try testing.htmlRunner("dom/performance.html");
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Env = @import("../env.zig").Env;
|
||||
|
||||
const PerformanceEntry = @import("performance.zig").PerformanceEntry;
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/PerformanceObserver
|
||||
pub const PerformanceObserver = struct {
|
||||
pub const _supportedEntryTypes = [0][]const u8{};
|
||||
|
||||
pub fn constructor(cbk: Env.Function) PerformanceObserver {
|
||||
_ = cbk;
|
||||
return .{};
|
||||
}
|
||||
|
||||
pub fn _observe(self: *const PerformanceObserver, options_: ?Options) void {
|
||||
_ = self;
|
||||
_ = options_;
|
||||
return;
|
||||
}
|
||||
|
||||
pub fn _disconnect(self: *PerformanceObserver) void {
|
||||
_ = self;
|
||||
}
|
||||
|
||||
pub fn _takeRecords(_: *const PerformanceObserver) []PerformanceEntry {
|
||||
return &[_]PerformanceEntry{};
|
||||
}
|
||||
};
|
||||
|
||||
const Options = struct {
|
||||
buffered: ?bool = null,
|
||||
durationThreshold: ?f64 = null,
|
||||
entryTypes: ?[]const []const u8 = null,
|
||||
type: ?[]const u8 = null,
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: DOM.PerformanceObserver" {
|
||||
try testing.htmlRunner("dom/performance_observer.html");
|
||||
}
|
||||
@@ -1,92 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Node = @import("node.zig").Node;
|
||||
const Page = @import("../page.zig").Page;
|
||||
|
||||
// https://dom.spec.whatwg.org/#processinginstruction
|
||||
pub const ProcessingInstruction = struct {
|
||||
pub const Self = parser.ProcessingInstruction;
|
||||
|
||||
// TODO: in libdom, ProcessingInstruction inherits from Node,
// but the spec says it must inherit from CharacterData.
|
||||
pub const prototype = *Node;
|
||||
pub const subtype = .node;
|
||||
|
||||
pub fn get_target(self: *parser.ProcessingInstruction) ![]const u8 {
|
||||
// libdom stores the ProcessingInstruction target in the node's name.
|
||||
return try parser.nodeName(parser.processingInstructionToNode(self));
|
||||
}
|
||||
|
||||
// There's something wrong when we try to clone a ProcessingInstruction normally.
// The resulting object can't be cast back into a node (it crashes). This is
// a simple workaround.
|
||||
pub fn _cloneNode(self: *parser.ProcessingInstruction, _: ?bool, page: *Page) !*parser.ProcessingInstruction {
|
||||
return try parser.documentCreateProcessingInstruction(
|
||||
@ptrCast(page.window.document),
|
||||
try get_target(self),
|
||||
(try get_data(self)) orelse "",
|
||||
);
|
||||
}
|
||||
|
||||
pub fn get_data(self: *parser.ProcessingInstruction) !?[]const u8 {
|
||||
return parser.nodeValue(parser.processingInstructionToNode(self));
|
||||
}
|
||||
|
||||
pub fn set_data(self: *parser.ProcessingInstruction, data: []u8) !void {
|
||||
try parser.nodeSetValue(parser.processingInstructionToNode(self), data);
|
||||
}
|
||||
|
||||
// netsurf's ProcessingInstruction doesn't implement dom_node_get_attributes
// and thus will crash if we try to call nodeIsEqualNode.
|
||||
pub fn _isEqualNode(self: *parser.ProcessingInstruction, other_node: *parser.Node) !bool {
|
||||
if (parser.nodeType(other_node) != .processing_instruction) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const other: *parser.ProcessingInstruction = @ptrCast(other_node);
|
||||
|
||||
if (std.mem.eql(u8, try get_target(self), try get_target(other)) == false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
{
|
||||
const self_data = try get_data(self);
|
||||
const other_data = try get_data(other);
|
||||
if (self_data == null and other_data != null) {
|
||||
return false;
|
||||
}
|
||||
if (self_data != null and other_data == null) {
|
||||
return false;
|
||||
}
|
||||
// both null means equal data; only compare when data is present on both sides
if (self_data != null and std.mem.eql(u8, self_data.?, other_data.?) == false) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: DOM.ProcessingInstruction" {
|
||||
try testing.htmlRunner("dom/processing_instruction.html");
|
||||
}
|
||||
@@ -1,390 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
|
||||
const Node = @import("node.zig").Node;
|
||||
const NodeUnion = @import("node.zig").Union;
|
||||
const DOMException = @import("exceptions.zig").DOMException;
|
||||
|
||||
pub const Interfaces = .{
|
||||
AbstractRange,
|
||||
Range,
|
||||
};
|
||||
|
||||
pub const AbstractRange = struct {
|
||||
collapsed: bool,
|
||||
end_node: *parser.Node,
|
||||
end_offset: u32,
|
||||
start_node: *parser.Node,
|
||||
start_offset: u32,
|
||||
|
||||
pub fn updateCollapsed(self: *AbstractRange) void {
|
||||
// TODO: Eventually, compare properly.
|
||||
self.collapsed = false;
|
||||
}
|
||||
|
||||
pub fn get_collapsed(self: *const AbstractRange) bool {
|
||||
return self.collapsed;
|
||||
}
|
||||
|
||||
pub fn get_endContainer(self: *const AbstractRange) !NodeUnion {
|
||||
return Node.toInterface(self.end_node);
|
||||
}
|
||||
|
||||
pub fn get_endOffset(self: *const AbstractRange) u32 {
|
||||
return self.end_offset;
|
||||
}
|
||||
|
||||
pub fn get_startContainer(self: *const AbstractRange) !NodeUnion {
|
||||
return Node.toInterface(self.start_node);
|
||||
}
|
||||
|
||||
pub fn get_startOffset(self: *const AbstractRange) u32 {
|
||||
return self.start_offset;
|
||||
}
|
||||
};
|
||||
|
||||
pub const Range = struct {
|
||||
pub const Exception = DOMException;
|
||||
pub const prototype = *AbstractRange;
|
||||
|
||||
proto: AbstractRange,
|
||||
|
||||
pub const _START_TO_START = 0;
|
||||
pub const _START_TO_END = 1;
|
||||
pub const _END_TO_END = 2;
|
||||
pub const _END_TO_START = 3;
|
||||
|
||||
// The Range() constructor returns a newly created Range object whose start
|
||||
// and end is the global Document object.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/Range/Range
|
||||
pub fn constructor(page: *Page) Range {
|
||||
const proto: AbstractRange = .{
|
||||
.collapsed = true,
|
||||
.end_node = parser.documentHTMLToNode(page.window.document),
|
||||
.end_offset = 0,
|
||||
.start_node = parser.documentHTMLToNode(page.window.document),
|
||||
.start_offset = 0,
|
||||
};
|
||||
|
||||
return .{ .proto = proto };
|
||||
}
|
||||
|
||||
pub fn _setStart(self: *Range, node: *parser.Node, offset_: i32) !void {
|
||||
try ensureValidOffset(node, offset_);
|
||||
const offset: u32 = @intCast(offset_);
|
||||
const position = compare(node, offset, self.proto.start_node, self.proto.start_offset) catch |err| switch (err) {
|
||||
error.WrongDocument => blk: {
|
||||
// allow a node with a different root than the current, or
|
||||
// a disconnected one. Treat it as if it's "after", so that
|
||||
// we also update the end_offset and end_node.
|
||||
break :blk 1;
|
||||
},
|
||||
else => return err,
|
||||
};
|
||||
|
||||
if (position == 1) {
|
||||
// if we're setting the node after the current start, the end must
|
||||
// be set too.
|
||||
self.proto.end_offset = offset;
|
||||
self.proto.end_node = node;
|
||||
}
|
||||
self.proto.start_node = node;
|
||||
self.proto.start_offset = offset;
|
||||
self.proto.updateCollapsed();
|
||||
}
|
||||
|
||||
pub fn _setStartBefore(self: *Range, node: *parser.Node) !void {
|
||||
const parent, const index = try getParentAndIndex(node);
|
||||
self.proto.start_node = parent;
|
||||
self.proto.start_offset = index;
|
||||
}
|
||||
|
||||
pub fn _setStartAfter(self: *Range, node: *parser.Node) !void {
|
||||
const parent, const index = try getParentAndIndex(node);
|
||||
self.proto.start_node = parent;
|
||||
self.proto.start_offset = index + 1;
|
||||
}
|
||||
|
||||
pub fn _setEnd(self: *Range, node: *parser.Node, offset_: i32) !void {
|
||||
try ensureValidOffset(node, offset_);
|
||||
const offset: u32 = @intCast(offset_);
|
||||
|
||||
const position = compare(node, offset, self.proto.start_node, self.proto.start_offset) catch |err| switch (err) {
|
||||
error.WrongDocument => blk: {
|
||||
// allow a node with a different root than the current, or
|
||||
// a disconnected one. Treat it as if it's "before", so that
|
||||
// we also update the start_offset and start_node.
|
||||
break :blk -1;
|
||||
},
|
||||
else => return err,
|
||||
};
|
||||
|
||||
if (position == -1) {
|
||||
// if we're setting the node before the current start, the start
|
||||
// must be set too.
|
||||
self.proto.start_offset = offset;
|
||||
self.proto.start_node = node;
|
||||
}
|
||||
|
||||
self.proto.end_node = node;
|
||||
self.proto.end_offset = offset;
|
||||
self.proto.updateCollapsed();
|
||||
}
|
||||
|
||||
pub fn _setEndBefore(self: *Range, node: *parser.Node) !void {
|
||||
const parent, const index = try getParentAndIndex(node);
|
||||
self.proto.end_node = parent;
|
||||
self.proto.end_offset = index;
|
||||
}
|
||||
|
||||
pub fn _setEndAfter(self: *Range, node: *parser.Node) !void {
|
||||
const parent, const index = try getParentAndIndex(node);
|
||||
self.proto.end_node = parent;
|
||||
self.proto.end_offset = index + 1;
|
||||
}
|
||||
|
||||
pub fn _createContextualFragment(_: *Range, fragment: []const u8, page: *Page) !*parser.DocumentFragment {
|
||||
const document_html = page.window.document;
|
||||
const document = parser.documentHTMLToDocument(document_html);
|
||||
const doc_frag = try parser.documentParseFragmentFromStr(document, fragment);
|
||||
return doc_frag;
|
||||
}
|
||||
|
||||
pub fn _selectNodeContents(self: *Range, node: *parser.Node) !void {
|
||||
self.proto.start_node = node;
|
||||
self.proto.start_offset = 0;
|
||||
self.proto.end_node = node;
|
||||
|
||||
// Set end_offset
|
||||
switch (parser.nodeType(node)) {
|
||||
.text, .cdata_section, .comment, .processing_instruction => {
|
||||
// For text-like nodes, end_offset should be the length of the text data
|
||||
if (parser.nodeValue(node)) |text_data| {
|
||||
self.proto.end_offset = @intCast(text_data.len);
|
||||
} else {
|
||||
self.proto.end_offset = 0;
|
||||
}
|
||||
},
|
||||
else => {
|
||||
// For element and other nodes, end_offset is the number of children
|
||||
const child_nodes = try parser.nodeGetChildNodes(node);
|
||||
const child_count = parser.nodeListLength(child_nodes);
|
||||
self.proto.end_offset = @intCast(child_count);
|
||||
},
|
||||
}
|
||||
|
||||
self.proto.updateCollapsed();
|
||||
}
|
||||
|
||||
// creates a copy
|
||||
pub fn _cloneRange(self: *const Range) Range {
|
||||
return .{
|
||||
.proto = .{
|
||||
.collapsed = self.proto.collapsed,
|
||||
.end_node = self.proto.end_node,
|
||||
.end_offset = self.proto.end_offset,
|
||||
.start_node = self.proto.start_node,
|
||||
.start_offset = self.proto.start_offset,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn _comparePoint(self: *const Range, node: *parser.Node, offset_: i32) !i32 {
|
||||
const start = self.proto.start_node;
|
||||
if (parser.nodeGetRootNode(start) != parser.nodeGetRootNode(node)) {
|
||||
// WPT really wants this error to be first. Later, when we check
|
||||
// if the relative position is 'disconnected', it'll also catch this
|
||||
// case, but WPT will complain because it sometimes also sends
|
||||
// invalid offsets, and it wants WrongDocument to be raised.
|
||||
return error.WrongDocument;
|
||||
}
|
||||
|
||||
if (parser.nodeType(node) == .document_type) {
|
||||
return error.InvalidNodeType;
|
||||
}
|
||||
|
||||
try ensureValidOffset(node, offset_);
|
||||
|
||||
const offset: u32 = @intCast(offset_);
|
||||
if (try compare(node, offset, start, self.proto.start_offset) == -1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (try compare(node, offset, self.proto.end_node, self.proto.end_offset) == 1) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
pub fn _isPointInRange(self: *const Range, node: *parser.Node, offset_: i32) !bool {
|
||||
return self._comparePoint(node, offset_) catch |err| switch (err) {
|
||||
error.WrongDocument => return false,
|
||||
else => return err,
|
||||
} == 0;
|
||||
}
|
||||
|
||||
pub fn _intersectsNode(self: *const Range, node: *parser.Node) !bool {
|
||||
const start_root = parser.nodeGetRootNode(self.proto.start_node);
|
||||
const node_root = parser.nodeGetRootNode(node);
|
||||
if (start_root != node_root) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const parent, const index = getParentAndIndex(node) catch |err| switch (err) {
|
||||
error.InvalidNodeType => return true, // if node has no parent, we return true.
|
||||
else => return err,
|
||||
};
|
||||
|
||||
if (try compare(parent, index + 1, self.proto.start_node, self.proto.start_offset) != 1) {
|
||||
// node isn't after start, can't intersect
|
||||
return false;
|
||||
}
|
||||
|
||||
if (try compare(parent, index, self.proto.end_node, self.proto.end_offset) != -1) {
|
||||
// node isn't before end, can't intersect
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn _compareBoundaryPoints(self: *const Range, how: i32, other: *const Range) !i32 {
|
||||
return switch (how) {
|
||||
_START_TO_START => compare(self.proto.start_node, self.proto.start_offset, other.proto.start_node, other.proto.start_offset),
|
||||
_START_TO_END => compare(self.proto.start_node, self.proto.start_offset, other.proto.end_node, other.proto.end_offset),
|
||||
_END_TO_END => compare(self.proto.end_node, self.proto.end_offset, other.proto.end_node, other.proto.end_offset),
|
||||
_END_TO_START => compare(self.proto.end_node, self.proto.end_offset, other.proto.start_node, other.proto.start_offset),
|
||||
else => error.NotSupported, // this is the correct DOM Exception to return
|
||||
};
|
||||
}
|
||||
|
||||
// The Range.detach() method does nothing. It used to disable the Range
|
||||
// object and enable the browser to release associated resources. The
|
||||
// method has been kept for compatibility.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/Range/detach
|
||||
pub fn _detach(_: *Range) void {}
|
||||
};
|
||||
|
||||
fn ensureValidOffset(node: *parser.Node, offset: i32) !void {
|
||||
if (offset < 0) {
|
||||
return error.IndexSize;
|
||||
}
|
||||
|
||||
// not >= because 0 seems to represent the node itself.
|
||||
if (offset > try nodeLength(node)) {
|
||||
return error.IndexSize;
|
||||
}
|
||||
}
|
||||
|
||||
fn nodeLength(node: *parser.Node) !usize {
|
||||
switch (try isTextual(node)) {
|
||||
true => return ((parser.nodeTextContent(node)) orelse "").len,
|
||||
false => {
|
||||
const children = try parser.nodeGetChildNodes(node);
|
||||
return @intCast(parser.nodeListLength(children));
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn isTextual(node: *parser.Node) !bool {
|
||||
return switch (parser.nodeType(node)) {
|
||||
.text, .comment, .cdata_section => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
fn getParentAndIndex(child: *parser.Node) !struct { *parser.Node, u32 } {
|
||||
const parent = (parser.nodeParentNode(child)) orelse return error.InvalidNodeType;
|
||||
const children = try parser.nodeGetChildNodes(parent);
|
||||
const ln = parser.nodeListLength(children);
|
||||
var i: u32 = 0;
|
||||
while (i < ln) {
|
||||
defer i += 1;
|
||||
const c = parser.nodeListItem(children, i) orelse continue;
|
||||
if (c == child) {
|
||||
return .{ parent, i };
|
||||
}
|
||||
}
|
||||
|
||||
// should not be possible to reach this point
|
||||
return error.InvalidNodeType;
|
||||
}
|
||||
|
||||
// implementation is largely copied from the WPT helper called getPosition in
|
||||
// the common.js of the dom folder.
|
||||
fn compare(node_a: *parser.Node, offset_a: u32, node_b: *parser.Node, offset_b: u32) !i32 {
|
||||
if (node_a == node_b) {
|
||||
// This is a simple and common case, where the two nodes are the same
|
||||
// We just need to compare their offsets
|
||||
if (offset_a == offset_b) {
|
||||
return 0;
|
||||
}
|
||||
return if (offset_a < offset_b) -1 else 1;
|
||||
}
|
||||
|
||||
// We're probably comparing two different nodes. "Probably", because the
// case above only compared offsets when the two nodes were the same as-is;
// once the offsets are taken into account, the two boundary points could
// still turn out to refer to the same position.
|
||||
const position = try Node._compareDocumentPosition(node_b, node_a);
|
||||
if (position & @intFromEnum(parser.DocumentPosition.disconnected) == @intFromEnum(parser.DocumentPosition.disconnected)) {
|
||||
return error.WrongDocument;
|
||||
}
|
||||
|
||||
if (position & @intFromEnum(parser.DocumentPosition.following) == @intFromEnum(parser.DocumentPosition.following)) {
|
||||
return switch (try compare(node_b, offset_b, node_a, offset_a)) {
|
||||
-1 => 1,
|
||||
1 => -1,
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
if (position & @intFromEnum(parser.DocumentPosition.contains) == @intFromEnum(parser.DocumentPosition.contains)) {
|
||||
// node_a contains node_b
|
||||
var child = node_b;
|
||||
while (parser.nodeParentNode(child)) |parent| {
|
||||
if (parent == node_a) {
|
||||
// child.parentNode == node_a
|
||||
break;
|
||||
}
|
||||
child = parent;
|
||||
} else {
|
||||
// this should not happen, because Node._compareDocumentPosition
// has told us that node_a contains node_b, so one of node_b's
// parents MUST be node_a. But somehow we do end up here sometimes.
|
||||
return -1;
|
||||
}
|
||||
|
||||
const child_parent, const child_index = try getParentAndIndex(child);
|
||||
std.debug.assert(node_a == child_parent);
|
||||
return if (child_index < offset_a) -1 else 1;
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
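
// Hedged summary of the contract implemented by compare() above: it returns
// -1 when boundary point (node_a, offset_a) comes before (node_b, offset_b)
// in document order, 0 when the two points are equal, 1 when it comes after,
// and error.WrongDocument when the nodes live in disconnected trees. For
// example, within a single text node `t` (a hypothetical node):
//   compare(t, 1, t, 3) == -1
//   compare(t, 3, t, 3) == 0
//   compare(t, 4, t, 3) == 1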
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: Range" {
|
||||
try testing.htmlRunner("dom/range.html");
|
||||
}
|
||||
@@ -1,54 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const parser = @import("../netsurf.zig");
|
||||
|
||||
pub const Interfaces = .{
|
||||
ResizeObserver,
|
||||
};
|
||||
|
||||
// WEB IDL https://drafts.csswg.org/resize-observer/#resize-observer-interface
|
||||
pub const ResizeObserver = struct {
|
||||
pub fn constructor(cbk: Env.Function) ResizeObserver {
|
||||
_ = cbk;
|
||||
return .{};
|
||||
}
|
||||
|
||||
pub fn _observe(self: *const ResizeObserver, element: *parser.Element, options_: ?Options) void {
|
||||
_ = self;
|
||||
_ = element;
|
||||
_ = options_;
|
||||
return;
|
||||
}
|
||||
|
||||
pub fn _unobserve(self: *const ResizeObserver, element: *parser.Element) void {
|
||||
_ = self;
|
||||
_ = element;
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO
|
||||
pub fn _disconnect(self: *ResizeObserver) void {
|
||||
_ = self;
|
||||
}
|
||||
};
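
// Hedged usage sketch: ResizeObserver is a no-op stub here, so the calls below
// are accepted but the callback never fires. The element `el` is hypothetical.
//
//   const ro = new ResizeObserver(() => {});
//   ro.observe(el, { box: "border-box" });
//   ro.unobserve(el);
//   ro.disconnect();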
|
||||
|
||||
const Options = struct {
|
||||
box: []const u8,
|
||||
};
|
||||
@@ -1,101 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const dump = @import("../dump.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Page = @import("../page.zig").Page;
|
||||
const Node = @import("node.zig").Node;
|
||||
const Element = @import("element.zig").Element;
|
||||
const ElementUnion = @import("element.zig").Union;
|
||||
|
||||
// WEB IDL https://dom.spec.whatwg.org/#interface-shadowroot
|
||||
pub const ShadowRoot = struct {
|
||||
pub const prototype = *parser.DocumentFragment;
|
||||
pub const subtype = .node;
|
||||
|
||||
mode: Mode,
|
||||
host: *parser.Element,
|
||||
proto: *parser.DocumentFragment,
|
||||
adopted_style_sheets: ?Env.JsObject = null,
|
||||
|
||||
pub const Mode = enum {
|
||||
open,
|
||||
closed,
|
||||
};
|
||||
|
||||
pub fn get_host(self: *const ShadowRoot) !ElementUnion {
|
||||
return Element.toInterface(self.host);
|
||||
}
|
||||
|
||||
pub fn get_adoptedStyleSheets(self: *ShadowRoot, page: *Page) !Env.JsObject {
|
||||
if (self.adopted_style_sheets) |obj| {
|
||||
return obj;
|
||||
}
|
||||
|
||||
const obj = try page.main_context.newArray(0).persist();
|
||||
self.adopted_style_sheets = obj;
|
||||
return obj;
|
||||
}
|
||||
|
||||
pub fn set_adoptedStyleSheets(self: *ShadowRoot, sheets: Env.JsObject) !void {
|
||||
self.adopted_style_sheets = try sheets.persist();
|
||||
}
|
||||
|
||||
pub fn get_innerHTML(self: *ShadowRoot, page: *Page) ![]const u8 {
|
||||
var aw = std.Io.Writer.Allocating.init(page.call_arena);
|
||||
try dump.writeChildren(parser.documentFragmentToNode(self.proto), .{}, &aw.writer);
|
||||
return aw.written();
|
||||
}
|
||||
|
||||
pub fn set_innerHTML(self: *ShadowRoot, str_: ?[]const u8) !void {
|
||||
const sr_doc = parser.documentFragmentToNode(self.proto);
|
||||
const doc = parser.nodeOwnerDocument(sr_doc) orelse return parser.DOMError.WrongDocument;
|
||||
try Node.removeChildren(sr_doc);
|
||||
const str = str_ orelse return;
|
||||
|
||||
const fragment = try parser.documentParseFragmentFromStr(doc, str);
|
||||
const fragment_node = parser.documentFragmentToNode(fragment);
|
||||
|
||||
// Element.set_innerHTML also has some weirdness here. It isn't clear
// what should and shouldn't be set. Whatever string you pass to libdom,
// it always creates a full HTML document, with an html, head and body
// element.
// For a ShadowRoot, it appears that only the children within the body
// should be set.
|
||||
const html = parser.nodeFirstChild(fragment_node) orelse return;
|
||||
const head = parser.nodeFirstChild(html) orelse return;
|
||||
const body = parser.nodeNextSibling(head) orelse return;
|
||||
|
||||
const children = try parser.nodeGetChildNodes(body);
|
||||
const ln = parser.nodeListLength(children);
|
||||
for (0..ln) |_| {
|
||||
// always index 0, because nodeAppendChild moves the node out of
|
||||
// the nodeList and into the new tree
|
||||
const child = parser.nodeListItem(children, 0) orelse continue;
|
||||
_ = try parser.nodeAppendChild(sr_doc, child);
|
||||
}
|
||||
}
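
// Hedged JS-side sketch of the innerHTML getter/setter pair above; `el` is a
// hypothetical host element:
//
//   const sr = el.attachShadow({ mode: "open" });
//   sr.innerHTML = "<span>hi</span>";
//   sr.innerHTML; // "<span>hi</span>"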
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: DOM.ShadowRoot" {
|
||||
try testing.htmlRunner("dom/shadow_root.html");
|
||||
}
|
||||
@@ -1,280 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const parser = @import("../netsurf.zig");
|
||||
|
||||
const NodeFilter = @import("node_filter.zig");
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Node = @import("node.zig").Node;
|
||||
const NodeUnion = @import("node.zig").Union;
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/TreeWalker
|
||||
pub const TreeWalker = struct {
|
||||
root: *parser.Node,
|
||||
current_node: *parser.Node,
|
||||
what_to_show: u32,
|
||||
filter: ?TreeWalkerOpts,
|
||||
filter_func: ?Env.Function,
|
||||
|
||||
// One of the few cases where null and undefined resolve to different defaults.
// We need the raw JsObject so that we can probe the tri-state:
// null, undefined or i32.
|
||||
pub const WhatToShow = Env.JsObject;
|
||||
|
||||
pub const TreeWalkerOpts = union(enum) {
|
||||
function: Env.Function,
|
||||
object: struct { acceptNode: Env.Function },
|
||||
};
|
||||
|
||||
pub fn init(node: *parser.Node, what_to_show_: ?WhatToShow, filter: ?TreeWalkerOpts) !TreeWalker {
|
||||
var filter_func: ?Env.Function = null;
|
||||
|
||||
if (filter) |f| {
|
||||
filter_func = switch (f) {
|
||||
.function => |func| func,
|
||||
.object => |o| o.acceptNode,
|
||||
};
|
||||
}
|
||||
|
||||
var what_to_show: u32 = undefined;
|
||||
if (what_to_show_) |wts| {
|
||||
switch (try wts.triState(TreeWalker, "what_to_show", u32)) {
|
||||
.null => what_to_show = 0,
|
||||
.undefined => what_to_show = NodeFilter.NodeFilter._SHOW_ALL,
|
||||
.value => |v| what_to_show = v,
|
||||
}
|
||||
} else {
|
||||
what_to_show = NodeFilter.NodeFilter._SHOW_ALL;
|
||||
}
|
||||
|
||||
return .{
|
||||
.root = node,
|
||||
.current_node = node,
|
||||
.what_to_show = what_to_show,
|
||||
.filter = filter,
|
||||
.filter_func = filter_func,
|
||||
};
|
||||
}
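
// Hedged sketch of how the whatToShow tri-state above resolves, as seen from
// the JS side (`root` is a hypothetical node, NodeFilter.SHOW_ALL and
// SHOW_ELEMENT are the standard constants):
//
//   document.createTreeWalker(root)                           // undefined -> SHOW_ALL
//   document.createTreeWalker(root, null)                     // null      -> 0
//   document.createTreeWalker(root, NodeFilter.SHOW_ELEMENT)  // value     -> 1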
|
||||
|
||||
pub fn get_root(self: *TreeWalker) !NodeUnion {
|
||||
return try Node.toInterface(self.root);
|
||||
}
|
||||
|
||||
pub fn get_currentNode(self: *TreeWalker) !NodeUnion {
|
||||
return try Node.toInterface(self.current_node);
|
||||
}
|
||||
|
||||
pub fn get_whatToShow(self: *TreeWalker) u32 {
|
||||
return self.what_to_show;
|
||||
}
|
||||
|
||||
pub fn get_filter(self: *TreeWalker) ?TreeWalkerOpts {
|
||||
return self.filter;
|
||||
}
|
||||
|
||||
pub fn set_currentNode(self: *TreeWalker, node: *parser.Node) !void {
|
||||
self.current_node = node;
|
||||
}
|
||||
|
||||
fn firstChild(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||
const children = try parser.nodeGetChildNodes(node);
|
||||
const child_count = parser.nodeListLength(children);
|
||||
|
||||
for (0..child_count) |i| {
|
||||
const index: u32 = @intCast(i);
|
||||
const child = (parser.nodeListItem(children, index)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
|
||||
.accept => return child,
|
||||
.reject => continue,
|
||||
.skip => if (try self.firstChild(child)) |gchild| return gchild,
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn lastChild(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||
const children = try parser.nodeGetChildNodes(node);
|
||||
const child_count = parser.nodeListLength(children);
|
||||
|
||||
var index: u32 = child_count;
|
||||
while (index > 0) {
|
||||
index -= 1;
|
||||
const child = (parser.nodeListItem(children, index)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
|
||||
.accept => return child,
|
||||
.reject => continue,
|
||||
.skip => if (try self.lastChild(child)) |gchild| return gchild,
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn nextSibling(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||
var current = node;
|
||||
|
||||
while (true) {
|
||||
current = (parser.nodeNextSibling(current)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||
.accept => return current,
|
||||
.skip, .reject => continue,
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn previousSibling(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||
var current = node;
|
||||
|
||||
while (true) {
|
||||
current = (parser.nodePreviousSibling(current)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||
.accept => return current,
|
||||
.skip, .reject => continue,
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn parentNode(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
|
||||
if (self.root == node) return null;
|
||||
|
||||
var current = node;
|
||||
while (true) {
|
||||
if (current == self.root) return null;
|
||||
current = (parser.nodeParentNode(current)) orelse return null;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||
.accept => return current,
|
||||
.reject, .skip => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn _firstChild(self: *TreeWalker) !?NodeUnion {
|
||||
if (try self.firstChild(self.current_node)) |child| {
|
||||
self.current_node = child;
|
||||
return try Node.toInterface(child);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _lastChild(self: *TreeWalker) !?NodeUnion {
|
||||
if (try self.lastChild(self.current_node)) |child| {
|
||||
self.current_node = child;
|
||||
return try Node.toInterface(child);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _nextNode(self: *TreeWalker) !?NodeUnion {
|
||||
if (try self.firstChild(self.current_node)) |child| {
|
||||
self.current_node = child;
|
||||
return try Node.toInterface(child);
|
||||
}
|
||||
|
||||
var current = self.current_node;
|
||||
while (current != self.root) {
|
||||
if (try self.nextSibling(current)) |sibling| {
|
||||
self.current_node = sibling;
|
||||
return try Node.toInterface(sibling);
|
||||
}
|
||||
|
||||
current = (parser.nodeParentNode(current)) orelse break;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _nextSibling(self: *TreeWalker) !?NodeUnion {
|
||||
if (try self.nextSibling(self.current_node)) |sibling| {
|
||||
self.current_node = sibling;
|
||||
return try Node.toInterface(sibling);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _parentNode(self: *TreeWalker) !?NodeUnion {
|
||||
if (try self.parentNode(self.current_node)) |parent| {
|
||||
self.current_node = parent;
|
||||
return try Node.toInterface(parent);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _previousNode(self: *TreeWalker) !?NodeUnion {
|
||||
if (self.current_node == self.root) return null;
|
||||
|
||||
var current = self.current_node;
|
||||
while (parser.nodePreviousSibling(current)) |previous| {
|
||||
current = previous;
|
||||
|
||||
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
|
||||
.accept => {
|
||||
// Get last child if it has one.
|
||||
if (try self.lastChild(current)) |child| {
|
||||
self.current_node = child;
|
||||
return try Node.toInterface(child);
|
||||
}
|
||||
|
||||
// Otherwise, this node is our previous one.
|
||||
self.current_node = current;
|
||||
return try Node.toInterface(current);
|
||||
},
|
||||
.reject => continue,
|
||||
.skip => {
|
||||
// Get last child if it has one.
|
||||
if (try self.lastChild(current)) |child| {
|
||||
self.current_node = child;
|
||||
return try Node.toInterface(child);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (current != self.root) {
|
||||
if (try self.parentNode(current)) |parent| {
|
||||
self.current_node = parent;
|
||||
return try Node.toInterface(parent);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn _previousSibling(self: *TreeWalker) !?NodeUnion {
|
||||
if (try self.previousSibling(self.current_node)) |sibling| {
|
||||
self.current_node = sibling;
|
||||
return try Node.toInterface(sibling);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
@@ -17,105 +17,49 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const File = std.fs.File;
|
||||
|
||||
const parser = @import("netsurf.zig");
|
||||
const Page = @import("page.zig").Page;
|
||||
const Walker = @import("dom/walker.zig").WalkerChildren;
|
||||
|
||||
pub const Opts = struct {
|
||||
// set to include element shadowroots in the dump
|
||||
page: ?*const Page = null,
|
||||
|
||||
strip_mode: StripMode = .{},
|
||||
|
||||
pub const StripMode = struct {
|
||||
js: bool = false,
|
||||
ui: bool = false,
|
||||
css: bool = false,
|
||||
};
|
||||
};
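
// A minimal usage sketch (hedged), assuming an existing *parser.Document `doc`
// and a *std.Io.Writer `w`; this strips <script> tags and stylesheets from the
// dump produced by writeHTML below:
//
//   try writeHTML(doc, .{ .strip_mode = .{ .js = true, .css = true } }, w);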
|
||||
const parser = @import("netsurf");
|
||||
const Walker = @import("../dom/walker.zig").WalkerChildren;
|
||||
|
||||
// writer must be a std.io.Writer
|
||||
pub fn writeHTML(doc: *parser.Document, opts: Opts, writer: *std.Io.Writer) !void {
|
||||
pub fn writeHTML(doc: *parser.Document, writer: anytype) !void {
|
||||
try writer.writeAll("<!DOCTYPE html>\n");
|
||||
try writeChildren(parser.documentToNode(doc), opts, writer);
|
||||
try writeChildren(parser.documentToNode(doc), writer);
|
||||
try writer.writeAll("\n");
|
||||
}
|
||||
|
||||
// Spec: https://www.w3.org/TR/xml/#sec-prolog-dtd
|
||||
pub fn writeDocType(doc_type: *parser.DocumentType, writer: *std.Io.Writer) !void {
|
||||
try writer.writeAll("<!DOCTYPE ");
|
||||
try writer.writeAll(try parser.documentTypeGetName(doc_type));
|
||||
|
||||
const public_id = parser.documentTypeGetPublicId(doc_type);
|
||||
const system_id = parser.documentTypeGetSystemId(doc_type);
|
||||
if (public_id.len != 0 and system_id.len != 0) {
|
||||
try writer.writeAll(" PUBLIC \"");
|
||||
try writeEscapedAttributeValue(writer, public_id);
|
||||
try writer.writeAll("\" \"");
|
||||
try writeEscapedAttributeValue(writer, system_id);
|
||||
try writer.writeAll("\"");
|
||||
} else if (public_id.len != 0) {
|
||||
try writer.writeAll(" PUBLIC \"");
|
||||
try writeEscapedAttributeValue(writer, public_id);
|
||||
try writer.writeAll("\"");
|
||||
} else if (system_id.len != 0) {
|
||||
try writer.writeAll(" SYSTEM \"");
|
||||
try writeEscapedAttributeValue(writer, system_id);
|
||||
try writer.writeAll("\"");
|
||||
}
|
||||
// Internal subset is not implemented
|
||||
try writer.writeAll(">");
|
||||
}
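
// Hedged example of the output produced by writeDocType above for a legacy
// public/system doctype (the ids shown are the standard HTML 4.01 Strict ones):
//
//   <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">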
|
||||
|
||||
pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerror!void {
|
||||
switch (parser.nodeType(node)) {
|
||||
pub fn writeNode(node: *parser.Node, writer: anytype) anyerror!void {
|
||||
switch (try parser.nodeType(node)) {
|
||||
.element => {
|
||||
// open the tag
|
||||
const tag_type = try parser.nodeHTMLGetTagType(node) orelse .undef;
|
||||
if (try isStripped(tag_type, node, opts.strip_mode)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const tag = try parser.nodeLocalName(node);
|
||||
try writer.writeAll("<");
|
||||
try writer.writeAll(tag);
|
||||
|
||||
// write the attributes
|
||||
const _map = try parser.nodeGetAttributes(node);
|
||||
if (_map) |map| {
|
||||
const ln = try parser.namedNodeMapGetLength(map);
|
||||
for (0..ln) |i| {
|
||||
const attr = try parser.namedNodeMapItem(map, @intCast(i)) orelse break;
|
||||
try writer.writeAll(" ");
|
||||
try writer.writeAll(try parser.attributeGetName(attr));
|
||||
try writer.writeAll("=\"");
|
||||
const attribute_value = try parser.attributeGetValue(attr) orelse "";
|
||||
try writeEscapedAttributeValue(writer, attribute_value);
|
||||
try writer.writeAll("\"");
|
||||
}
|
||||
const map = try parser.nodeGetAttributes(node);
|
||||
const ln = try parser.namedNodeMapGetLength(map);
|
||||
var i: u32 = 0;
|
||||
while (i < ln) {
|
||||
const attr = try parser.namedNodeMapItem(map, i) orelse break;
|
||||
try writer.writeAll(" ");
|
||||
try writer.writeAll(try parser.attributeGetName(attr));
|
||||
try writer.writeAll("=\"");
|
||||
const attribute_value = try parser.attributeGetValue(attr) orelse "";
|
||||
try writeEscapedAttributeValue(writer, attribute_value);
|
||||
try writer.writeAll("\"");
|
||||
i += 1;
|
||||
}
|
||||
|
||||
try writer.writeAll(">");
|
||||
|
||||
if (opts.page) |page| {
|
||||
if (page.getNodeState(node)) |state| {
|
||||
if (state.shadow_root) |sr| {
|
||||
try writeChildren(@ptrCast(@alignCast(sr.proto)), opts, writer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// void elements can't have any content.
|
||||
if (try isVoid(parser.nodeToElement(node))) return;
|
||||
|
||||
if (tag_type == .script) {
|
||||
try writer.writeAll(parser.nodeTextContent(node) orelse "");
|
||||
} else {
|
||||
// write the children
|
||||
// TODO avoid recursion
|
||||
try writeChildren(node, opts, writer);
|
||||
}
|
||||
// write the children
|
||||
// TODO avoid recursion
|
||||
try writeChildren(node, writer);
|
||||
|
||||
// close the tag
|
||||
try writer.writeAll("</");
|
||||
@@ -123,17 +67,17 @@ pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerro
|
||||
try writer.writeAll(">");
|
||||
},
|
||||
.text => {
|
||||
const v = parser.nodeValue(node) orelse return;
|
||||
const v = try parser.nodeValue(node) orelse return;
|
||||
try writeEscapedTextNode(writer, v);
|
||||
},
|
||||
.cdata_section => {
|
||||
const v = parser.nodeValue(node) orelse return;
|
||||
const v = try parser.nodeValue(node) orelse return;
|
||||
try writer.writeAll("<![CDATA[");
|
||||
try writer.writeAll(v);
|
||||
try writer.writeAll("]]>");
|
||||
},
|
||||
.comment => {
|
||||
const v = parser.nodeValue(node) orelse return;
|
||||
const v = try parser.nodeValue(node) orelse return;
|
||||
try writer.writeAll("<!--");
|
||||
try writer.writeAll(v);
|
||||
try writer.writeAll("-->");
|
||||
@@ -145,7 +89,7 @@ pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerro
|
||||
.document_fragment => return,
|
||||
// document will never be called, but required for completeness.
|
||||
.document => return,
|
||||
// done globally instead, but required for completeness. Only the outer DOCTYPE should be written
|
||||
// done globally instead, but required for completeness.
|
||||
.document_type => return,
|
||||
// deprecated
|
||||
.attribute => return,
|
||||
@@ -156,79 +100,19 @@ pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerro
|
||||
}
|
||||
|
||||
// writer must be a std.io.Writer
|
||||
pub fn writeChildren(root: *parser.Node, opts: Opts, writer: *std.Io.Writer) !void {
|
||||
pub fn writeChildren(root: *parser.Node, writer: anytype) !void {
|
||||
const walker = Walker{};
|
||||
var next: ?*parser.Node = null;
|
||||
while (true) {
|
||||
next = try walker.get_next(root, next) orelse break;
|
||||
try writeNode(next.?, opts, writer);
|
||||
try writeNode(next.?, writer);
|
||||
}
|
||||
}
|
||||
|
||||
fn isStripped(tag_type: parser.Tag, node: *parser.Node, strip_mode: Opts.StripMode) !bool {
|
||||
if (strip_mode.js and try isJsRelated(tag_type, node)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (strip_mode.css and try isCssRelated(tag_type, node)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (strip_mode.ui and try isUIRelated(tag_type, node)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
fn isJsRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
|
||||
if (tag_type == .script) {
|
||||
return true;
|
||||
}
|
||||
if (tag_type == .link) {
|
||||
const el = parser.nodeToElement(node);
|
||||
const as = try parser.elementGetAttribute(el, "as") orelse return false;
|
||||
if (!std.ascii.eqlIgnoreCase(as, "script")) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const rel = try parser.elementGetAttribute(el, "rel") orelse return false;
|
||||
return std.ascii.eqlIgnoreCase(rel, "preload");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
fn isCssRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
|
||||
if (tag_type == .style) {
|
||||
return true;
|
||||
}
|
||||
if (tag_type == .link) {
|
||||
const el = parser.nodeToElement(node);
|
||||
const rel = try parser.elementGetAttribute(el, "rel") orelse return false;
|
||||
return std.ascii.eqlIgnoreCase(rel, "stylesheet");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
fn isUIRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
|
||||
if (try isCssRelated(tag_type, node)) {
|
||||
return true;
|
||||
}
|
||||
if (tag_type == .img or tag_type == .picture or tag_type == .video) {
|
||||
return true;
|
||||
}
|
||||
if (tag_type == .undef) {
|
||||
const name = try parser.nodeLocalName(node);
|
||||
if (std.mem.eql(u8, name, "svg")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// area, base, br, col, embed, hr, img, input, link, meta, source, track, wbr
|
||||
// https://html.spec.whatwg.org/#void-elements
|
||||
fn isVoid(elem: *parser.Element) !bool {
|
||||
const tag = try parser.elementTag(elem);
|
||||
const tag = try parser.elementHTMLGetTagType(@as(*parser.ElementHTML, @ptrCast(elem)));
|
||||
return switch (tag) {
|
||||
.area, .base, .br, .col, .embed, .hr, .img, .input, .link => true,
|
||||
.meta, .source, .track, .wbr => true,
|
||||
@@ -273,9 +157,6 @@ fn writeEscapedAttributeValue(writer: anytype, value: []const u8) !void {
|
||||
|
||||
const testing = std.testing;
|
||||
test "dump.writeHTML" {
|
||||
parser.init();
|
||||
defer parser.deinit();
|
||||
|
||||
try testWriteHTML(
|
||||
"<div id=\"content\">Over 9000!</div>",
|
||||
"<div id=\"content\">Over 9000!</div>",
|
||||
@@ -302,11 +183,6 @@ test "dump.writeHTML" {
|
||||
\\</head><body>9000</body></html>
|
||||
\\
|
||||
, "<html><title>It's over what?</title><meta name=a value=\"b\">\n<body>9000");
|
||||
|
||||
try testWriteHTML(
|
||||
"<p>hi</p><script>alert(power > 9000)</script>",
|
||||
"<p>hi</p><script>alert(power > 9000)</script>",
|
||||
);
|
||||
}
|
||||
|
||||
fn testWriteHTML(comptime expected_body: []const u8, src: []const u8) !void {
|
||||
@@ -318,13 +194,13 @@ fn testWriteHTML(comptime expected_body: []const u8, src: []const u8) !void {
|
||||
}
|
||||
|
||||
fn testWriteFullHTML(comptime expected: []const u8, src: []const u8) !void {
|
||||
var aw = std.Io.Writer.Allocating.init(testing.allocator);
|
||||
defer aw.deinit();
|
||||
var buf = std.ArrayListUnmanaged(u8){};
|
||||
defer buf.deinit(testing.allocator);
|
||||
|
||||
const doc_html = try parser.documentHTMLParseFromStr(src);
|
||||
defer parser.documentHTMLClose(doc_html) catch {};
|
||||
|
||||
const doc = parser.documentHTMLToDocument(doc_html);
|
||||
try writeHTML(doc, .{}, &aw.writer);
|
||||
try testing.expectEqualStrings(expected, aw.written());
|
||||
try writeHTML(doc, buf.writer(testing.allocator));
|
||||
try testing.expectEqualStrings(expected, buf.items);
|
||||
}
|
||||
|
||||
@@ -1,102 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
|
||||
// https://encoding.spec.whatwg.org/#interface-textdecoder
|
||||
const TextDecoder = @This();
|
||||
|
||||
const SupportedLabels = enum {
|
||||
utf8,
|
||||
@"utf-8",
|
||||
@"unicode-1-1-utf-8",
|
||||
};
|
||||
|
||||
const Options = struct {
|
||||
fatal: bool = false,
|
||||
ignoreBOM: bool = false,
|
||||
};
|
||||
|
||||
fatal: bool,
|
||||
ignore_bom: bool,
|
||||
stream: std.ArrayList(u8),
|
||||
|
||||
pub fn constructor(label_: ?[]const u8, opts_: ?Options) !TextDecoder {
|
||||
if (label_) |l| {
|
||||
_ = std.meta.stringToEnum(SupportedLabels, l) orelse {
|
||||
log.warn(.web_api, "not implemented", .{ .feature = "TextDecoder label", .label = l });
|
||||
return error.NotImplemented;
|
||||
};
|
||||
}
|
||||
const opts = opts_ orelse Options{};
|
||||
return .{
|
||||
.stream = .empty,
|
||||
.fatal = opts.fatal,
|
||||
.ignore_bom = opts.ignoreBOM,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get_encoding(_: *const TextDecoder) []const u8 {
|
||||
return "utf-8";
|
||||
}
|
||||
|
||||
pub fn get_ignoreBOM(self: *const TextDecoder) bool {
|
||||
return self.ignore_bom;
|
||||
}
|
||||
|
||||
pub fn get_fatal(self: *const TextDecoder) bool {
|
||||
return self.fatal;
|
||||
}
|
||||
|
||||
const DecodeOptions = struct {
|
||||
stream: bool = false,
|
||||
};
|
||||
pub fn _decode(self: *TextDecoder, input_: ?[]const u8, opts_: ?DecodeOptions, page: *Page) ![]const u8 {
|
||||
var str = input_ orelse return "";
|
||||
const opts: DecodeOptions = opts_ orelse .{};
|
||||
|
||||
if (self.stream.items.len > 0) {
|
||||
try self.stream.appendSlice(page.arena, str);
|
||||
str = self.stream.items;
|
||||
}
|
||||
|
||||
if (self.fatal and !std.unicode.utf8ValidateSlice(str)) {
|
||||
if (opts.stream) {
|
||||
if (self.stream.items.len == 0) {
|
||||
try self.stream.appendSlice(page.arena, str);
|
||||
}
|
||||
return "";
|
||||
}
|
||||
return error.InvalidUtf8;
|
||||
}
|
||||
|
||||
self.stream.clearRetainingCapacity();
|
||||
if (self.ignore_bom == false and std.mem.startsWith(u8, str, &.{ 0xEF, 0xBB, 0xBF })) {
|
||||
return str[3..];
|
||||
}
|
||||
|
||||
return str;
|
||||
}
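
// Hedged JS-side sketch of the streaming behaviour implemented above: with
// fatal set, an incomplete UTF-8 sequence passed with { stream: true } is
// buffered and "" is returned; the next call completes it.
//
//   const td = new TextDecoder("utf-8", { fatal: true });
//   td.decode(new Uint8Array([0xE2, 0x82]), { stream: true }); // ""
//   td.decode(new Uint8Array([0xAC]));                         // "€"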
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: Encoding.TextDecoder" {
|
||||
try testing.htmlRunner("encoding/decoder.html");
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
|
||||
// https://encoding.spec.whatwg.org/#interface-textencoder
|
||||
const TextEncoder = @This();
|
||||
|
||||
pub fn constructor() !TextEncoder {
|
||||
return .{};
|
||||
}
|
||||
|
||||
pub fn get_encoding(_: *const TextEncoder) []const u8 {
|
||||
return "utf-8";
|
||||
}
|
||||
|
||||
pub fn _encode(_: *const TextEncoder, v: []const u8) !Env.TypedArray(u8) {
|
||||
// Ensure the input is valid UTF-8.
// Note: Chrome seems to accept invalid UTF-8 sequences, but we reject them.
//
|
||||
if (!std.unicode.utf8ValidateSlice(v)) {
|
||||
return error.InvalidUtf8;
|
||||
}
|
||||
|
||||
return .{ .values = v };
|
||||
}
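
// Hedged JS-side sketch of the behaviour above:
//
//   new TextEncoder().encode("abc");  // Uint8Array([97, 98, 99])
//   new TextEncoder().encoding;       // "utf-8"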
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: Encoding.TextEncoder" {
|
||||
try testing.htmlRunner("encoding/encoder.html");
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
const std = @import("std");
|
||||
|
||||
const Page = @import("page.zig").Page;
|
||||
const js = @import("../runtime/js.zig");
|
||||
const generate = @import("../runtime/generate.zig");
|
||||
|
||||
const WebApis = struct {
|
||||
// Wrapped like this for debug ergonomics.
|
||||
// When we create our Env, a few lines down, we define it as:
|
||||
// pub const Env = js.Env(*Page, WebApis);
|
||||
//
|
||||
// If there's a compile-time error with the Env, its type will be readable,
// i.e.: runtime.js.Env(*browser.env.Page, browser.env.WebApis)
//
// But if we didn't wrap it in the struct, as was once the case, and defined
// env as:
//     pub const Env = js.Env(*Page, Interfaces);
//
// Because Interfaces is an anonymous type, it doesn't have a friendly name
// and errors would be something like:
//     runtime.js.Env(*browser.Page, .{...A HUNDRED TYPES...})
|
||||
pub const Interfaces = generate.Tuple(.{
|
||||
@import("crypto/crypto.zig").Crypto,
|
||||
@import("console/console.zig").Console,
|
||||
@import("css/css.zig").Interfaces,
|
||||
@import("cssom/cssom.zig").Interfaces,
|
||||
@import("dom/dom.zig").Interfaces,
|
||||
@import("dom/shadow_root.zig").ShadowRoot,
|
||||
@import("encoding/encoding.zig").Interfaces,
|
||||
@import("events/event.zig").Interfaces,
|
||||
@import("html/html.zig").Interfaces,
|
||||
@import("iterator/iterator.zig").Interfaces,
|
||||
@import("storage/storage.zig").Interfaces,
|
||||
@import("url/url.zig").Interfaces,
|
||||
@import("xhr/xhr.zig").Interfaces,
|
||||
@import("xhr/form_data.zig").Interfaces,
|
||||
@import("xhr/File.zig"),
|
||||
@import("xmlserializer/xmlserializer.zig").Interfaces,
|
||||
@import("fetch/fetch.zig").Interfaces,
|
||||
@import("streams/streams.zig").Interfaces,
|
||||
});
|
||||
};
|
||||
|
||||
pub const JsThis = Env.JsThis;
|
||||
pub const JsObject = Env.JsObject;
|
||||
pub const Function = Env.Function;
|
||||
pub const Promise = Env.Promise;
|
||||
pub const PromiseResolver = Env.PromiseResolver;
|
||||
|
||||
pub const Env = js.Env(*Page, WebApis);
|
||||
pub const Global = @import("html/window.zig").Window;
|
||||
@@ -1,85 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Event = @import("event.zig").Event;
|
||||
const JsObject = @import("../env.zig").JsObject;
|
||||
const netsurf = @import("../netsurf.zig");
|
||||
|
||||
// https://dom.spec.whatwg.org/#interface-customevent
|
||||
pub const CustomEvent = struct {
|
||||
pub const prototype = *Event;
|
||||
pub const union_make_copy = true;
|
||||
|
||||
proto: parser.Event,
|
||||
detail: ?JsObject,
|
||||
|
||||
const CustomEventInit = struct {
|
||||
bubbles: bool = false,
|
||||
cancelable: bool = false,
|
||||
composed: bool = false,
|
||||
detail: ?JsObject = null,
|
||||
};
|
||||
|
||||
pub fn constructor(event_type: []const u8, opts_: ?CustomEventInit) !CustomEvent {
|
||||
const opts = opts_ orelse CustomEventInit{};
|
||||
|
||||
const event = try parser.eventCreate();
|
||||
defer parser.eventDestroy(event);
|
||||
try parser.eventInit(event, event_type, .{
|
||||
.bubbles = opts.bubbles,
|
||||
.cancelable = opts.cancelable,
|
||||
.composed = opts.composed,
|
||||
});
|
||||
|
||||
return .{
|
||||
.proto = event.*,
|
||||
.detail = if (opts.detail) |d| try d.persist() else null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get_detail(self: *CustomEvent) ?JsObject {
|
||||
return self.detail;
|
||||
}
|
||||
|
||||
// Initializes an already created `CustomEvent`.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent/initCustomEvent
|
||||
pub fn _initCustomEvent(
|
||||
self: *CustomEvent,
|
||||
event_type: []const u8,
|
||||
can_bubble: bool,
|
||||
cancelable: bool,
|
||||
maybe_detail: ?JsObject,
|
||||
) !void {
|
||||
// This function can only be called after the constructor has been called,
// so we assume proto has already been initialized by the constructor.
|
||||
self.proto.type = try netsurf.strFromData(event_type);
|
||||
self.proto.bubble = can_bubble;
|
||||
self.proto.cancelable = cancelable;
|
||||
self.proto.is_initialised = true;
|
||||
// Detail is stored separately.
|
||||
if (maybe_detail) |detail| {
|
||||
self.detail = try detail.persist();
|
||||
}
|
||||
}
|
||||
};
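
// Hedged JS-side sketch of the CustomEvent pieces above; the detail object is
// persisted and handed back by the `detail` getter:
//
//   const e = new CustomEvent("ping", { bubbles: true, detail: { n: 1 } });
//   e.detail.n;  // 1
//   e.bubbles;   // true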
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: Events.Custom" {
|
||||
try testing.htmlRunner("events/custom.html");
|
||||
}
|
||||
@@ -1,400 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const generate = @import("../../runtime/generate.zig");
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
const Node = @import("../dom/node.zig").Node;
|
||||
const DOMException = @import("../dom/exceptions.zig").DOMException;
|
||||
const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||
const EventTargetUnion = @import("../dom/event_target.zig").Union;
|
||||
const AbortSignal = @import("../html/AbortController.zig").AbortSignal;
|
||||
|
||||
const CustomEvent = @import("custom_event.zig").CustomEvent;
|
||||
const ProgressEvent = @import("../xhr/progress_event.zig").ProgressEvent;
|
||||
const MouseEvent = @import("mouse_event.zig").MouseEvent;
|
||||
const KeyboardEvent = @import("keyboard_event.zig").KeyboardEvent;
|
||||
const ErrorEvent = @import("../html/error_event.zig").ErrorEvent;
|
||||
const MessageEvent = @import("../dom/MessageChannel.zig").MessageEvent;
|
||||
|
||||
// Event interfaces
|
||||
pub const Interfaces = .{
|
||||
Event,
|
||||
CustomEvent,
|
||||
ProgressEvent,
|
||||
MouseEvent,
|
||||
KeyboardEvent,
|
||||
ErrorEvent,
|
||||
MessageEvent,
|
||||
};
|
||||
|
||||
pub const Union = generate.Union(Interfaces);
|
||||
|
||||
// https://dom.spec.whatwg.org/#event
|
||||
pub const Event = struct {
|
||||
pub const Self = parser.Event;
|
||||
pub const Exception = DOMException;
|
||||
|
||||
pub const EventInit = parser.EventInit;
|
||||
|
||||
// JS
|
||||
// --
|
||||
|
||||
pub const _CAPTURING_PHASE = 1;
|
||||
pub const _AT_TARGET = 2;
|
||||
pub const _BUBBLING_PHASE = 3;
|
||||
|
||||
pub fn toInterface(evt: *parser.Event) Union {
|
||||
return switch (parser.eventGetInternalType(evt)) {
|
||||
.event, .abort_signal, .xhr_event => .{ .Event = evt },
|
||||
.custom_event => .{ .CustomEvent = @as(*CustomEvent, @ptrCast(evt)).* },
|
||||
.progress_event => .{ .ProgressEvent = @as(*ProgressEvent, @ptrCast(evt)).* },
|
||||
.mouse_event => .{ .MouseEvent = @as(*parser.MouseEvent, @ptrCast(evt)) },
|
||||
.error_event => .{ .ErrorEvent = @as(*ErrorEvent, @ptrCast(evt)).* },
|
||||
.message_event => .{ .MessageEvent = @as(*MessageEvent, @ptrCast(evt)).* },
|
||||
.keyboard_event => .{ .KeyboardEvent = @as(*parser.KeyboardEvent, @ptrCast(evt)) },
|
||||
};
|
||||
}
|
||||
|
||||
pub fn constructor(event_type: []const u8, opts: ?EventInit) !*parser.Event {
|
||||
const event = try parser.eventCreate();
|
||||
try parser.eventInit(event, event_type, opts orelse EventInit{});
|
||||
return event;
|
||||
}
|
||||
|
||||
// Getters
|
||||
|
||||
pub fn get_type(self: *parser.Event) []const u8 {
|
||||
return parser.eventType(self);
|
||||
}
|
||||
|
||||
pub fn get_target(self: *parser.Event, page: *Page) !?EventTargetUnion {
|
||||
const et = parser.eventTarget(self);
|
||||
if (et == null) return null;
|
||||
return try EventTarget.toInterface(et.?, page);
|
||||
}
|
||||
|
||||
pub fn get_currentTarget(self: *parser.Event, page: *Page) !?EventTargetUnion {
|
||||
const et = parser.eventCurrentTarget(self);
|
||||
if (et == null) return null;
|
||||
return try EventTarget.toInterface(et.?, page);
|
||||
}
|
||||
|
||||
pub fn get_eventPhase(self: *parser.Event) u8 {
|
||||
return parser.eventPhase(self);
|
||||
}
|
||||
|
||||
pub fn get_bubbles(self: *parser.Event) bool {
|
||||
return parser.eventBubbles(self);
|
||||
}
|
||||
|
||||
pub fn get_cancelable(self: *parser.Event) bool {
|
||||
return parser.eventCancelable(self);
|
||||
}
|
||||
|
||||
pub fn get_defaultPrevented(self: *parser.Event) bool {
|
||||
return parser.eventDefaultPrevented(self);
|
||||
}
|
||||
|
||||
pub fn get_isTrusted(self: *parser.Event) bool {
|
||||
return parser.eventIsTrusted(self);
|
||||
}
|
||||
|
||||
// Even though this is supposed to provide microsecond resolution, browsers
// return coarser values to protect against fingerprinting. libdom returns
// seconds, which is good enough.
|
||||
pub fn get_timeStamp(self: *parser.Event) u64 {
|
||||
return parser.eventTimestamp(self);
|
||||
}
|
||||
|
||||
// Methods
|
||||
|
||||
pub fn _initEvent(
|
||||
self: *parser.Event,
|
||||
eventType: []const u8,
|
||||
bubbles: ?bool,
|
||||
cancelable: ?bool,
|
||||
) !void {
|
||||
const opts = EventInit{
|
||||
.bubbles = bubbles orelse false,
|
||||
.cancelable = cancelable orelse false,
|
||||
};
|
||||
return try parser.eventInit(self, eventType, opts);
|
||||
}
|
||||
|
||||
pub fn _stopPropagation(self: *parser.Event) !void {
|
||||
return parser.eventStopPropagation(self);
|
||||
}
|
||||
|
||||
pub fn _stopImmediatePropagation(self: *parser.Event) !void {
|
||||
return parser.eventStopImmediatePropagation(self);
|
||||
}
|
||||
|
||||
pub fn _preventDefault(self: *parser.Event) !void {
|
||||
return parser.eventPreventDefault(self);
|
||||
}
|
||||
|
||||
pub fn _composedPath(self: *parser.Event, page: *Page) ![]const EventTargetUnion {
|
||||
const et_ = parser.eventTarget(self);
|
||||
const et = et_ orelse return &.{};
|
||||
|
||||
var node: ?*parser.Node = switch (parser.eventTargetInternalType(et)) {
|
||||
.libdom_node => @as(*parser.Node, @ptrCast(et)),
|
||||
.plain => parser.eventTargetToNode(et),
|
||||
else => {
|
||||
// Window, XHR, MessagePort, etc...no path beyond the event itself
|
||||
return &.{try EventTarget.toInterface(et, page)};
|
||||
},
|
||||
};
|
||||
|
||||
const arena = page.call_arena;
|
||||
var path: std.ArrayListUnmanaged(EventTargetUnion) = .empty;
|
||||
while (node) |n| {
|
||||
try path.append(arena, .{
|
||||
.node = try Node.toInterface(n),
|
||||
});
|
||||
|
||||
node = parser.nodeParentNode(n);
|
||||
if (node == null and parser.nodeType(n) == .document_fragment) {
|
||||
// we have a non-continuous hook from a shadowroot to its host
// (its parent element). libdom doesn't really support ShadowRoots
// and, for the most part, that works out well since it naturally
// provides isolation. But events don't follow the same
// shadowroot isolation as most other things, so, if this is
// a parent-less document fragment, we need to check if it has
// a host.
|
||||
if (parser.documentFragmentGetHost(@ptrCast(n))) |host| {
|
||||
node = host;
|
||||
|
||||
// If a document fragment has a host, then that host
|
||||
// _has_ to have a state and that state _has_ to have
|
||||
// a shadow_root field. All of this is set in Element._attachShadow
|
||||
if (page.getNodeState(host).?.shadow_root.?.mode == .closed) {
|
||||
// if the shadow root is closed, then the composedPath
|
||||
// starts at the host element.
|
||||
path.clearRetainingCapacity();
|
||||
}
|
||||
} else {
|
||||
// Our document fragment has no parent and no host, so we
// can break out of the loop.
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (path.getLastOrNull()) |last| {
|
||||
// the Window isn't part of the DOM hierarchy, but for events, it
|
||||
// is, so we need to glue it on.
|
||||
if (last.node == .HTMLDocument and last.node.HTMLDocument == page.window.document) {
|
||||
try path.append(arena, .{ .node = .{ .Window = &page.window } });
|
||||
}
|
||||
}
|
||||
return path.items;
|
||||
}
|
||||
};
|
||||
|
||||
pub const EventHandler = struct {
|
||||
once: bool,
|
||||
capture: bool,
|
||||
callback: Function,
|
||||
node: parser.EventNode,
|
||||
listener: *parser.EventListener,
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Function = Env.Function;
|
||||
|
||||
pub const Listener = union(enum) {
|
||||
function: Function,
|
||||
object: Env.JsObject,
|
||||
|
||||
pub fn callback(self: Listener, target: *parser.EventTarget) !?Function {
|
||||
return switch (self) {
|
||||
.function => |func| try func.withThis(target),
|
||||
.object => |obj| blk: {
|
||||
const func = (try obj.getFunction("handleEvent")) orelse return null;
|
||||
break :blk try func.withThis(try obj.persist());
|
||||
},
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub const Opts = union(enum) {
|
||||
flags: Flags,
|
||||
capture: bool,
|
||||
|
||||
const Flags = struct {
|
||||
once: ?bool,
|
||||
capture: ?bool,
|
||||
// We ignore this property. It seems to be largely used to help the
|
||||
// browser make certain performance tweaks (i.e. the browser knows
|
||||
// that the listener won't call preventDefault() and thus can safely
|
||||
// run the default as needed).
|
||||
passive: ?bool,
|
||||
signal: ?*AbortSignal, // currently does nothing
|
||||
};
|
||||
};
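
// Hedged sketch of the two option shapes handled by register() below, as they
// would arrive from JS (`target` and `cb` are hypothetical):
//
//   target.addEventListener("click", cb, true);                          // -> .capture
//   target.addEventListener("click", cb, { once: true, capture: true }); // -> .flags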
|
||||
|
||||
pub fn register(
|
||||
allocator: Allocator,
|
||||
target: *parser.EventTarget,
|
||||
typ: []const u8,
|
||||
listener: Listener,
|
||||
opts_: ?Opts,
|
||||
) !?*EventHandler {
|
||||
var once = false;
|
||||
var capture = false;
|
||||
var signal: ?*AbortSignal = null;
|
||||
|
||||
if (opts_) |opts| {
|
||||
switch (opts) {
|
||||
.capture => |c| capture = c,
|
||||
.flags => |f| {
|
||||
once = f.once orelse false;
|
||||
signal = f.signal orelse null;
|
||||
capture = f.capture orelse false;
|
||||
},
|
||||
}
|
||||
}
|
||||
const callback = (try listener.callback(target)) orelse return null;
|
||||
|
||||
if (signal) |s| {
|
||||
const signal_target = parser.toEventTarget(AbortSignal, s);
|
||||
|
||||
const scb = try allocator.create(SignalCallback);
|
||||
scb.* = .{
|
||||
.target = target,
|
||||
.capture = capture,
|
||||
.callback_id = callback.id,
|
||||
.typ = try allocator.dupe(u8, typ),
|
||||
.signal_target = signal_target,
|
||||
.signal_listener = undefined,
|
||||
.node = .{ .func = SignalCallback.handle },
|
||||
};
|
||||
|
||||
scb.signal_listener = try parser.eventTargetAddEventListener(
|
||||
signal_target,
|
||||
"abort",
|
||||
&scb.node,
|
||||
false,
|
||||
);
|
||||
}
|
||||
|
||||
// check if event target has already this listener
|
||||
if (try parser.eventTargetHasListener(target, typ, capture, callback.id) != null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const eh = try allocator.create(EventHandler);
|
||||
eh.* = .{
|
||||
.once = once,
|
||||
.capture = capture,
|
||||
.callback = callback,
|
||||
.node = .{
|
||||
.id = callback.id,
|
||||
.func = handle,
|
||||
},
|
||||
.listener = undefined,
|
||||
};
|
||||
|
||||
eh.listener = try parser.eventTargetAddEventListener(
|
||||
target,
|
||||
typ,
|
||||
&eh.node,
|
||||
capture,
|
||||
);
|
||||
return eh;
|
||||
}
|
||||
|
||||
fn handle(node: *parser.EventNode, event: *parser.Event) void {
|
||||
const ievent = Event.toInterface(event);
|
||||
const self: *EventHandler = @fieldParentPtr("node", node);
|
||||
var result: Function.Result = undefined;
|
||||
self.callback.tryCall(void, .{ievent}, &result) catch {
|
||||
log.debug(.user_script, "callback error", .{
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.source = "event handler",
|
||||
});
|
||||
};
|
||||
|
||||
if (self.once) {
|
||||
const target = parser.eventTarget(event).?;
|
||||
const typ = parser.eventType(event);
|
||||
parser.eventTargetRemoveEventListener(
|
||||
target,
|
||||
typ,
|
||||
self.listener,
|
||||
self.capture,
|
||||
) catch {};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const SignalCallback = struct {
|
||||
typ: []const u8,
|
||||
capture: bool,
|
||||
callback_id: usize,
|
||||
node: parser.EventNode,
|
||||
target: *parser.EventTarget,
|
||||
signal_target: *parser.EventTarget,
|
||||
signal_listener: *parser.EventListener,
|
||||
|
||||
fn handle(node: *parser.EventNode, _: *parser.Event) void {
|
||||
const self: *SignalCallback = @fieldParentPtr("node", node);
|
||||
self._handle() catch |err| {
|
||||
log.err(.app, "event signal handler", .{ .err = err });
|
||||
};
|
||||
}
|
||||
|
||||
fn _handle(self: *SignalCallback) !void {
|
||||
const lst = try parser.eventTargetHasListener(
|
||||
self.target,
|
||||
self.typ,
|
||||
self.capture,
|
||||
self.callback_id,
|
||||
);
|
||||
if (lst == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
try parser.eventTargetRemoveEventListener(
|
||||
self.target,
|
||||
self.typ,
|
||||
lst.?,
|
||||
self.capture,
|
||||
);
|
||||
|
||||
// remove the abort signal listener itself
|
||||
try parser.eventTargetRemoveEventListener(
|
||||
self.signal_target,
|
||||
"abort",
|
||||
self.signal_listener,
|
||||
false,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: Event" {
|
||||
try testing.htmlRunner("events/event.html");
|
||||
}
|
||||
@@ -1,159 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");
const builtin = @import("builtin");

const parser = @import("../netsurf.zig");
const Event = @import("event.zig").Event;

// TODO: We currently don't have a UIEvent interface so we skip it in the prototype chain.
// https://developer.mozilla.org/en-US/docs/Web/API/UIEvent
const UIEvent = Event;

// https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent
pub const KeyboardEvent = struct {
    pub const Self = parser.KeyboardEvent;
    pub const prototype = *UIEvent;

    pub const ConstructorOptions = struct {
        key: []const u8 = "",
        code: []const u8 = "",
        location: parser.KeyboardEventOpts.LocationCode = .standard,
        repeat: bool = false,
        isComposing: bool = false,
        // Currently not supported, but accepted as an argument.
        charCode: u32 = 0,
        // Currently not supported, but accepted as an argument.
        keyCode: u32 = 0,
        // Currently not supported, but accepted as an argument.
        which: u32 = 0,
        ctrlKey: bool = false,
        shiftKey: bool = false,
        altKey: bool = false,
        metaKey: bool = false,
    };

    pub fn constructor(event_type: []const u8, maybe_options: ?ConstructorOptions) !*parser.KeyboardEvent {
        const options: ConstructorOptions = maybe_options orelse .{};

        var event = try parser.keyboardEventCreate();
        parser.eventSetInternalType(@ptrCast(&event), .keyboard_event);

        try parser.keyboardEventInit(
            event,
            event_type,
            .{
                .key = options.key,
                .code = options.code,
                .location = options.location,
                .repeat = options.repeat,
                .is_composing = options.isComposing,
                .ctrl_key = options.ctrlKey,
                .shift_key = options.shiftKey,
                .alt_key = options.altKey,
                .meta_key = options.metaKey,
            },
        );

        return event;
    }

    // Returns the modifier state for the given modifier key.
    pub fn _getModifierState(self: *Self, key: []const u8) bool {
        // Chrome and Firefox do a case-sensitive match; we do the same here.
        if (std.mem.eql(u8, key, "Alt")) {
            return get_altKey(self);
        }

        if (std.mem.eql(u8, key, "AltGraph")) {
            return (get_altKey(self) and get_ctrlKey(self));
        }

        if (std.mem.eql(u8, key, "Control")) {
            return get_ctrlKey(self);
        }

        if (std.mem.eql(u8, key, "Shift")) {
            return get_shiftKey(self);
        }

        if (std.mem.eql(u8, key, "Meta") or std.mem.eql(u8, key, "OS")) {
            return get_metaKey(self);
        }

        // Special case for IE.
        if (comptime builtin.os.tag == .windows) {
            if (std.mem.eql(u8, key, "Win")) {
                return get_metaKey(self);
            }
        }

        // getModifierState() also accepts a deprecated virtual modifier named "Accel".
        // event.getModifierState("Accel") returns true when at least one of
        // KeyboardEvent.ctrlKey or KeyboardEvent.metaKey is true.
        //
        // https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/getModifierState#accel_virtual_modifier
        if (std.mem.eql(u8, key, "Accel")) {
            return (get_ctrlKey(self) or get_metaKey(self));
        }

        // TODO: Add support for "CapsLock", "ScrollLock".
        return false;
    }

    // Getters.

    pub fn get_altKey(self: *Self) bool {
        return parser.keyboardEventKeyIsSet(self, .alt);
    }

    pub fn get_ctrlKey(self: *Self) bool {
        return parser.keyboardEventKeyIsSet(self, .ctrl);
    }

    pub fn get_metaKey(self: *Self) bool {
        return parser.keyboardEventKeyIsSet(self, .meta);
    }

    pub fn get_shiftKey(self: *Self) bool {
        return parser.keyboardEventKeyIsSet(self, .shift);
    }

    pub fn get_isComposing(self: *Self) bool {
        return self.is_composing;
    }

    pub fn get_location(self: *Self) u32 {
        return self.location;
    }

    pub fn get_key(self: *Self) ![]const u8 {
        return parser.keyboardEventGetKey(self);
    }

    pub fn get_repeat(self: *Self) bool {
        return self.repeat;
    }
};

const testing = @import("../../testing.zig");
test "Browser: Events.Keyboard" {
    try testing.htmlRunner("events/keyboard.html");
}
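
The `_getModifierState` dispatch above boils down to exact-case string matching plus the deprecated "Accel" composite. A standalone sketch of that logic, assuming a hypothetical `Modifiers` struct in place of the real keyboard state:

const std = @import("std");

// Hypothetical flags standing in for the real KeyboardEvent state.
const Modifiers = struct {
    ctrl: bool = false,
    meta: bool = false,
    alt: bool = false,
    shift: bool = false,

    // Same idea as _getModifierState above: case-sensitive match on the
    // modifier name, with "Accel" meaning ctrl OR meta.
    fn state(self: Modifiers, key: []const u8) bool {
        if (std.mem.eql(u8, key, "Control")) return self.ctrl;
        if (std.mem.eql(u8, key, "Shift")) return self.shift;
        if (std.mem.eql(u8, key, "Alt")) return self.alt;
        if (std.mem.eql(u8, key, "Meta")) return self.meta;
        if (std.mem.eql(u8, key, "Accel")) return self.ctrl or self.meta;
        return false;
    }
};

test "Accel virtual modifier" {
    const mods = Modifiers{ .meta = true };
    try std.testing.expect(mods.state("Accel"));
    try std.testing.expect(!mods.state("Control"));
}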
@@ -1,111 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");

const parser = @import("../netsurf.zig");
const Event = @import("event.zig").Event;

// TODO: We currently don't have a UIEvent interface so we skip it in the prototype chain.
// https://developer.mozilla.org/en-US/docs/Web/API/UIEvent
const UIEvent = Event;

// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent
pub const MouseEvent = struct {
    pub const Self = parser.MouseEvent;
    pub const prototype = *UIEvent;

    const MouseButton = enum(u16) {
        main_button = 0,
        auxillary_button = 1,
        secondary_button = 2,
        fourth_button = 3,
        fifth_button = 4,
    };

    const MouseEventInit = struct {
        screenX: i32 = 0,
        screenY: i32 = 0,
        clientX: i32 = 0,
        clientY: i32 = 0,
        ctrlKey: bool = false,
        shiftKey: bool = false,
        altKey: bool = false,
        metaKey: bool = false,
        button: MouseButton = .main_button,
    };

    pub fn constructor(event_type: []const u8, opts_: ?MouseEventInit) !*parser.MouseEvent {
        const opts = opts_ orelse MouseEventInit{};

        var mouse_event = try parser.mouseEventCreate();
        parser.eventSetInternalType(@ptrCast(&mouse_event), .mouse_event);

        try parser.mouseEventInit(mouse_event, event_type, .{
            .x = opts.clientX,
            .y = opts.clientY,
            .ctrl = opts.ctrlKey,
            .shift = opts.shiftKey,
            .alt = opts.altKey,
            .meta = opts.metaKey,
            .button = @intFromEnum(opts.button),
        });

        if (!std.mem.eql(u8, event_type, "click")) {
            log.warn(.mouse_event, "unsupported mouse event", .{ .event = event_type });
        }

        return mouse_event;
    }

    pub fn get_button(self: *parser.MouseEvent) u16 {
        return self.button;
    }

    // This is just an alias for clientX.
    pub fn get_x(self: *parser.MouseEvent) i32 {
        return self.cx;
    }

    // This is just an alias for clientY.
    pub fn get_y(self: *parser.MouseEvent) i32 {
        return self.cy;
    }

    pub fn get_clientX(self: *parser.MouseEvent) i32 {
        return self.cx;
    }

    pub fn get_clientY(self: *parser.MouseEvent) i32 {
        return self.cy;
    }

    pub fn get_screenX(self: *parser.MouseEvent) i32 {
        return self.sx;
    }

    pub fn get_screenY(self: *parser.MouseEvent) i32 {
        return self.sy;
    }
};

const testing = @import("../../testing.zig");
test "Browser: Events.Mouse" {
    try testing.htmlRunner("events/mouse.html");
}
@@ -1,227 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");
const URL = @import("../../url.zig").URL;
const Page = @import("../page.zig").Page;

const iterator = @import("../iterator/iterator.zig");

const v8 = @import("v8");
const Env = @import("../env.zig").Env;

// https://developer.mozilla.org/en-US/docs/Web/API/Headers
const Headers = @This();

// Case-Insensitive String HashMap.
// This allows us to avoid having to allocate lowercase keys all the time.
const HeaderHashMap = std.HashMapUnmanaged([]const u8, []const u8, struct {
    pub fn hash(_: @This(), s: []const u8) u64 {
        var buf: [64]u8 = undefined;
        var hasher = std.hash.Wyhash.init(s.len);

        var key = s;
        while (key.len >= 64) {
            const lower = std.ascii.lowerString(buf[0..], key[0..64]);
            hasher.update(lower);
            key = key[64..];
        }

        if (key.len > 0) {
            const lower = std.ascii.lowerString(buf[0..key.len], key);
            hasher.update(lower);
        }

        return hasher.final();
    }

    pub fn eql(_: @This(), a: []const u8, b: []const u8) bool {
        return std.ascii.eqlIgnoreCase(a, b);
    }
}, 80);

headers: HeaderHashMap = .empty,

// They can either be:
//
// 1. An array of string pairs.
// 2. An object with string keys to string values.
// 3. Another Headers object.
pub const HeadersInit = union(enum) {
    // List of Pairs of []const u8
    strings: []const [2][]const u8,
    // Headers
    headers: *Headers,
    // Mappings
    object: Env.JsObject,
};

pub fn constructor(_init: ?HeadersInit, page: *Page) !Headers {
    const arena = page.arena;
    var headers: HeaderHashMap = .empty;

    if (_init) |init| {
        switch (init) {
            .strings => |kvs| {
                for (kvs) |pair| {
                    const key = try arena.dupe(u8, pair[0]);
                    const value = try arena.dupe(u8, pair[1]);

                    try headers.put(arena, key, value);
                }
            },
            .headers => |hdrs| {
                var iter = hdrs.headers.iterator();
                while (iter.next()) |entry| {
                    try headers.put(arena, entry.key_ptr.*, entry.value_ptr.*);
                }
            },
            .object => |obj| {
                var iter = obj.nameIterator();
                while (try iter.next()) |name_value| {
                    const name = try name_value.toString(arena);
                    const value = try obj.get(name);
                    const value_string = try value.toString(arena);

                    try headers.put(arena, name, value_string);
                }
            },
        }
    }

    return .{
        .headers = headers,
    };
}

pub fn append(self: *Headers, name: []const u8, value: []const u8, allocator: std.mem.Allocator) !void {
    const key = try allocator.dupe(u8, name);
    const gop = try self.headers.getOrPut(allocator, key);

    if (gop.found_existing) {
        // If we found it, append the value.
        const new_value = try std.fmt.allocPrint(allocator, "{s}, {s}", .{ gop.value_ptr.*, value });
        gop.value_ptr.* = new_value;
    } else {
        // Otherwise, we should just put it in.
        gop.value_ptr.* = try allocator.dupe(u8, value);
    }
}

pub fn _append(self: *Headers, name: []const u8, value: []const u8, page: *Page) !void {
    const arena = page.arena;
    try self.append(name, value, arena);
}

pub fn _delete(self: *Headers, name: []const u8) void {
    _ = self.headers.remove(name);
}

pub const HeadersEntryIterator = struct {
    slot: [2][]const u8,
    iter: HeaderHashMap.Iterator,

    // TODO: these SHOULD be in lexicographical order, but I'm not sure how
    // important that actually is.
    pub fn _next(self: *HeadersEntryIterator) ?[2][]const u8 {
        if (self.iter.next()) |entry| {
            self.slot[0] = entry.key_ptr.*;
            self.slot[1] = entry.value_ptr.*;
            return self.slot;
        } else {
            return null;
        }
    }
};

pub fn _entries(self: *const Headers) HeadersEntryIterable {
    return .{
        .inner = .{
            .slot = undefined,
            .iter = self.headers.iterator(),
        },
    };
}

pub fn _forEach(self: *Headers, callback_fn: Env.Function, this_arg: ?Env.JsObject) !void {
    var iter = self.headers.iterator();

    const cb = if (this_arg) |this| try callback_fn.withThis(this) else callback_fn;

    while (iter.next()) |entry| {
        try cb.call(void, .{ entry.key_ptr.*, entry.value_ptr.*, self });
    }
}

pub fn _get(self: *const Headers, name: []const u8) ?[]const u8 {
    return self.headers.get(name);
}

pub fn _has(self: *const Headers, name: []const u8) bool {
    return self.headers.contains(name);
}

pub const HeadersKeyIterator = struct {
    iter: HeaderHashMap.KeyIterator,

    pub fn _next(self: *HeadersKeyIterator) ?[]const u8 {
        if (self.iter.next()) |key| {
            return key.*;
        } else {
            return null;
        }
    }
};

pub fn _keys(self: *const Headers) HeadersKeyIterable {
    return .{ .inner = .{ .iter = self.headers.keyIterator() } };
}

pub fn _set(self: *Headers, name: []const u8, value: []const u8, page: *Page) !void {
    const arena = page.arena;

    const key = try arena.dupe(u8, name);
    const gop = try self.headers.getOrPut(arena, key);
    gop.value_ptr.* = try arena.dupe(u8, value);
}

pub const HeadersValueIterator = struct {
    iter: HeaderHashMap.ValueIterator,

    pub fn _next(self: *HeadersValueIterator) ?[]const u8 {
        if (self.iter.next()) |value| {
            return value.*;
        } else {
            return null;
        }
    }
};

pub fn _values(self: *const Headers) HeadersValueIterable {
    return .{ .inner = .{ .iter = self.headers.valueIterator() } };
}

pub const HeadersKeyIterable = iterator.Iterable(HeadersKeyIterator, "HeadersKeyIterator");
pub const HeadersValueIterable = iterator.Iterable(HeadersValueIterator, "HeadersValueIterator");
pub const HeadersEntryIterable = iterator.Iterable(HeadersEntryIterator, "HeadersEntryIterator");

const testing = @import("../../testing.zig");
test "fetch: Headers" {
    try testing.htmlRunner("fetch/headers.html");
}
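
The custom hash/eql context above is what makes header lookups case-insensitive without allocating lowercased keys. A runnable sketch of the same technique in isolation, assuming nothing from the browser code (type names here are illustrative):

const std = @import("std");

const CaseInsensitiveContext = struct {
    pub fn hash(_: @This(), s: []const u8) u64 {
        var buf: [64]u8 = undefined;
        var hasher = std.hash.Wyhash.init(s.len);
        var rest = s;
        // Lowercase in fixed-size chunks so no allocation is needed.
        while (rest.len > 0) {
            const n = @min(rest.len, buf.len);
            hasher.update(std.ascii.lowerString(buf[0..n], rest[0..n]));
            rest = rest[n..];
        }
        return hasher.final();
    }

    pub fn eql(_: @This(), a: []const u8, b: []const u8) bool {
        return std.ascii.eqlIgnoreCase(a, b);
    }
};

const CaseInsensitiveMap = std.HashMapUnmanaged([]const u8, []const u8, CaseInsensitiveContext, 80);

test "case-insensitive header lookup" {
    var map: CaseInsensitiveMap = .empty;
    defer map.deinit(std.testing.allocator);

    try map.put(std.testing.allocator, "Content-Type", "text/html");
    try std.testing.expectEqualStrings("text/html", map.get("content-type").?);
}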
@@ -1,298 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");

const URL = @import("../../url.zig").URL;
const Page = @import("../page.zig").Page;

const Response = @import("./Response.zig");
const Http = @import("../../http/Http.zig");
const ReadableStream = @import("../streams/ReadableStream.zig");

const v8 = @import("v8");
const Env = @import("../env.zig").Env;

const Headers = @import("Headers.zig");
const HeadersInit = @import("Headers.zig").HeadersInit;

pub const RequestInput = union(enum) {
    string: []const u8,
    request: *Request,
};

pub const RequestCache = enum {
    default,
    @"no-store",
    reload,
    @"no-cache",
    @"force-cache",
    @"only-if-cached",

    pub fn fromString(str: []const u8) ?RequestCache {
        for (std.enums.values(RequestCache)) |cache| {
            if (std.ascii.eqlIgnoreCase(str, @tagName(cache))) {
                return cache;
            }
        } else {
            return null;
        }
    }

    pub fn toString(self: RequestCache) []const u8 {
        return @tagName(self);
    }
};

pub const RequestCredentials = enum {
    omit,
    @"same-origin",
    include,

    pub fn fromString(str: []const u8) ?RequestCredentials {
        for (std.enums.values(RequestCredentials)) |cache| {
            if (std.ascii.eqlIgnoreCase(str, @tagName(cache))) {
                return cache;
            }
        } else {
            return null;
        }
    }

    pub fn toString(self: RequestCredentials) []const u8 {
        return @tagName(self);
    }
};

pub const RequestMode = enum {
    cors,
    @"no-cors",
    @"same-origin",
    navigate,

    pub fn fromString(str: []const u8) ?RequestMode {
        for (std.enums.values(RequestMode)) |cache| {
            if (std.ascii.eqlIgnoreCase(str, @tagName(cache))) {
                return cache;
            }
        } else {
            return null;
        }
    }

    pub fn toString(self: RequestMode) []const u8 {
        return @tagName(self);
    }
};

// https://developer.mozilla.org/en-US/docs/Web/API/RequestInit
pub const RequestInit = struct {
    body: ?[]const u8 = null,
    cache: ?[]const u8 = null,
    credentials: ?[]const u8 = null,
    headers: ?HeadersInit = null,
    integrity: ?[]const u8 = null,
    method: ?[]const u8 = null,
    mode: ?[]const u8 = null,
};

// https://developer.mozilla.org/en-US/docs/Web/API/Request/Request
const Request = @This();

method: Http.Method,
url: [:0]const u8,
cache: RequestCache,
credentials: RequestCredentials,
// no-cors is the default when the Request is not built via the constructor.
mode: RequestMode = .@"no-cors",
headers: Headers,
body: ?[]const u8,
body_used: bool = false,
integrity: []const u8,

pub fn constructor(input: RequestInput, _options: ?RequestInit, page: *Page) !Request {
    const arena = page.arena;
    const options: RequestInit = _options orelse .{};

    const url: [:0]const u8 = blk: switch (input) {
        .string => |str| {
            break :blk try URL.stitch(arena, str, page.url.raw, .{ .null_terminated = true });
        },
        .request => |req| {
            break :blk try arena.dupeZ(u8, req.url);
        },
    };

    const cache = (if (options.cache) |cache| RequestCache.fromString(cache) else null) orelse RequestCache.default;
    const credentials = (if (options.credentials) |creds| RequestCredentials.fromString(creds) else null) orelse RequestCredentials.@"same-origin";
    const integrity = if (options.integrity) |integ| try arena.dupe(u8, integ) else "";
    const headers: Headers = if (options.headers) |hdrs| try Headers.constructor(hdrs, page) else .{};
    const mode = (if (options.mode) |mode| RequestMode.fromString(mode) else null) orelse RequestMode.cors;

    const method: Http.Method = blk: {
        if (options.method) |given_method| {
            for (std.enums.values(Http.Method)) |method| {
                if (std.ascii.eqlIgnoreCase(given_method, @tagName(method))) {
                    break :blk method;
                }
            } else {
                return error.TypeError;
            }
        } else {
            break :blk Http.Method.GET;
        }
    };

    // Can't have a body on .GET or .HEAD.
    const body: ?[]const u8 = blk: {
        if (method == .GET or method == .HEAD) {
            break :blk null;
        } else break :blk if (options.body) |body| try arena.dupe(u8, body) else null;
    };

    return .{
        .method = method,
        .url = url,
        .cache = cache,
        .credentials = credentials,
        .mode = mode,
        .headers = headers,
        .body = body,
        .integrity = integrity,
    };
}

pub fn get_body(self: *const Request, page: *Page) !?*ReadableStream {
    if (self.body) |body| {
        const stream = try ReadableStream.constructor(null, null, page);
        try stream.queue.append(page.arena, body);
        return stream;
    } else return null;
}

pub fn get_bodyUsed(self: *const Request) bool {
    return self.body_used;
}

pub fn get_cache(self: *const Request) RequestCache {
    return self.cache;
}

pub fn get_credentials(self: *const Request) RequestCredentials {
    return self.credentials;
}

pub fn get_headers(self: *Request) *Headers {
    return &self.headers;
}

pub fn get_integrity(self: *const Request) []const u8 {
    return self.integrity;
}

// TODO: If we ever support the Navigation API, we need isHistoryNavigation
// https://developer.mozilla.org/en-US/docs/Web/API/Request/isHistoryNavigation

pub fn get_method(self: *const Request) []const u8 {
    return @tagName(self.method);
}

pub fn get_mode(self: *const Request) RequestMode {
    return self.mode;
}

pub fn get_url(self: *const Request) []const u8 {
    return self.url;
}

pub fn _clone(self: *Request) !Request {
    // Not allowed to clone if the body was used.
    if (self.body_used) {
        return error.TypeError;
    }

    // OK to just return the same fields BECAUSE
    // all of these fields are read-only and can't be modified.
    return Request{
        .body = self.body,
        .body_used = self.body_used,
        .cache = self.cache,
        .credentials = self.credentials,
        .headers = self.headers,
        .method = self.method,
        .integrity = self.integrity,
        .url = self.url,
    };
}

pub fn _bytes(self: *Request, page: *Page) !Env.Promise {
    if (self.body_used) {
        return error.TypeError;
    }

    const resolver = page.main_context.createPromiseResolver();

    try resolver.resolve(self.body);
    self.body_used = true;
    return resolver.promise();
}

pub fn _json(self: *Request, page: *Page) !Env.Promise {
    if (self.body_used) {
        return error.TypeError;
    }

    const resolver = page.main_context.createPromiseResolver();

    if (self.body) |body| {
        const p = std.json.parseFromSliceLeaky(
            std.json.Value,
            page.call_arena,
            body,
            .{},
        ) catch |e| {
            log.info(.browser, "invalid json", .{ .err = e, .source = "Request" });
            return error.SyntaxError;
        };

        try resolver.resolve(p);
    } else {
        try resolver.resolve(null);
    }

    self.body_used = true;
    return resolver.promise();
}

pub fn _text(self: *Request, page: *Page) !Env.Promise {
    if (self.body_used) {
        return error.TypeError;
    }

    const resolver = page.main_context.createPromiseResolver();

    try resolver.resolve(self.body);
    self.body_used = true;
    return resolver.promise();
}

const testing = @import("../../testing.zig");
test "fetch: Request" {
    try testing.htmlRunner("fetch/request.html");
}
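
RequestCache, RequestCredentials and RequestMode all share the same case-insensitive tag-name lookup via std.enums.values. A standalone sketch of that pattern (the Mode enum here is illustrative, not the production type):

const std = @import("std");

const Mode = enum {
    cors,
    @"no-cors",
    @"same-origin",
    navigate,

    // Match an arbitrary string against the enum's tag names, ignoring case.
    fn fromString(str: []const u8) ?Mode {
        for (std.enums.values(Mode)) |mode| {
            if (std.ascii.eqlIgnoreCase(str, @tagName(mode))) {
                return mode;
            }
        }
        return null;
    }
};

test "case-insensitive enum lookup" {
    try std.testing.expectEqual(Mode.@"no-cors", Mode.fromString("No-Cors").?);
    try std.testing.expect(Mode.fromString("bogus") == null);
}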
@@ -1,225 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");

const v8 = @import("v8");

const HttpClient = @import("../../http/Client.zig");
const Http = @import("../../http/Http.zig");
const URL = @import("../../url.zig").URL;

const ReadableStream = @import("../streams/ReadableStream.zig");
const Headers = @import("Headers.zig");
const HeadersInit = @import("Headers.zig").HeadersInit;

const Env = @import("../env.zig").Env;
const Mime = @import("../mime.zig").Mime;
const Page = @import("../page.zig").Page;

// https://developer.mozilla.org/en-US/docs/Web/API/Response
const Response = @This();

status: u16 = 200,
status_text: []const u8 = "",
headers: Headers,
mime: ?Mime = null,
url: []const u8 = "",
body: ?[]const u8 = null,
body_used: bool = false,
redirected: bool = false,
type: ResponseType = .basic,

const ResponseBody = union(enum) {
    string: []const u8,
};

const ResponseOptions = struct {
    status: u16 = 200,
    statusText: ?[]const u8 = null,
    headers: ?HeadersInit = null,
};

pub const ResponseType = enum {
    basic,
    cors,
    @"error",
    @"opaque",
    opaqueredirect,

    pub fn fromString(str: []const u8) ?ResponseType {
        for (std.enums.values(ResponseType)) |cache| {
            if (std.ascii.eqlIgnoreCase(str, @tagName(cache))) {
                return cache;
            }
        } else {
            return null;
        }
    }

    pub fn toString(self: ResponseType) []const u8 {
        return @tagName(self);
    }
};

pub fn constructor(_input: ?ResponseBody, _options: ?ResponseOptions, page: *Page) !Response {
    const arena = page.arena;

    const options: ResponseOptions = _options orelse .{};

    const body = blk: {
        if (_input) |input| {
            switch (input) {
                .string => |str| {
                    break :blk try arena.dupe(u8, str);
                },
            }
        } else {
            break :blk null;
        }
    };

    const headers: Headers = if (options.headers) |hdrs| try Headers.constructor(hdrs, page) else .{};
    const status_text = if (options.statusText) |st| try arena.dupe(u8, st) else "";

    return .{
        .body = body,
        .headers = headers,
        .status = options.status,
        .status_text = status_text,
    };
}

pub fn get_body(self: *const Response, page: *Page) !*ReadableStream {
    const stream = try ReadableStream.constructor(null, null, page);
    if (self.body) |body| {
        try stream.queue.append(page.arena, body);
    }
    return stream;
}

pub fn get_bodyUsed(self: *const Response) bool {
    return self.body_used;
}

pub fn get_headers(self: *Response) *Headers {
    return &self.headers;
}

pub fn get_ok(self: *const Response) bool {
    return self.status >= 200 and self.status <= 299;
}

pub fn get_redirected(self: *const Response) bool {
    return self.redirected;
}

pub fn get_status(self: *const Response) u16 {
    return self.status;
}

pub fn get_statusText(self: *const Response) []const u8 {
    return self.status_text;
}

pub fn get_type(self: *const Response) ResponseType {
    return self.type;
}

pub fn get_url(self: *const Response) []const u8 {
    return self.url;
}

pub fn _clone(self: *const Response) !Response {
    if (self.body_used) {
        return error.TypeError;
    }

    // OK to just return the same fields BECAUSE
    // all of these fields are read-only and can't be modified.
    return Response{
        .body = self.body,
        .body_used = self.body_used,
        .mime = self.mime,
        .headers = self.headers,
        .redirected = self.redirected,
        .status = self.status,
        .url = self.url,
        .type = self.type,
    };
}

pub fn _bytes(self: *Response, page: *Page) !Env.Promise {
    if (self.body_used) {
        return error.TypeError;
    }

    const resolver = Env.PromiseResolver{
        .js_context = page.main_context,
        .resolver = v8.PromiseResolver.init(page.main_context.v8_context),
    };

    try resolver.resolve(self.body);
    self.body_used = true;
    return resolver.promise();
}

pub fn _json(self: *Response, page: *Page) !Env.Promise {
    if (self.body_used) {
        return error.TypeError;
    }

    const resolver = page.main_context.createPromiseResolver();

    if (self.body) |body| {
        const p = std.json.parseFromSliceLeaky(
            std.json.Value,
            page.call_arena,
            body,
            .{},
        ) catch |e| {
            log.info(.browser, "invalid json", .{ .err = e, .source = "Response" });
            return error.SyntaxError;
        };

        try resolver.resolve(p);
    } else {
        try resolver.resolve(null);
    }

    self.body_used = true;
    return resolver.promise();
}

pub fn _text(self: *Response, page: *Page) !Env.Promise {
    if (self.body_used) {
        return error.TypeError;
    }

    const resolver = page.main_context.createPromiseResolver();

    try resolver.resolve(self.body);
    self.body_used = true;
    return resolver.promise();
}

const testing = @import("../../testing.zig");
test "fetch: Response" {
    try testing.htmlRunner("fetch/response.html");
}
@@ -1,240 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");

const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;

const Http = @import("../../http/Http.zig");
const HttpClient = @import("../../http/Client.zig");
const Mime = @import("../mime.zig").Mime;

const Headers = @import("Headers.zig");

const RequestInput = @import("Request.zig").RequestInput;
const RequestInit = @import("Request.zig").RequestInit;
const Request = @import("Request.zig");
const Response = @import("Response.zig");

pub const Interfaces = .{
    @import("Headers.zig"),
    @import("Headers.zig").HeadersEntryIterable,
    @import("Headers.zig").HeadersKeyIterable,
    @import("Headers.zig").HeadersValueIterable,
    @import("Request.zig"),
    @import("Response.zig"),
};

pub const FetchContext = struct {
    arena: std.mem.Allocator,
    js_ctx: *Env.JsContext,
    promise_resolver: Env.PersistentPromiseResolver,

    method: Http.Method,
    url: []const u8,
    body: std.ArrayListUnmanaged(u8) = .empty,
    headers: std.ArrayListUnmanaged([]const u8) = .empty,
    status: u16 = 0,
    mime: ?Mime = null,
    mode: Request.RequestMode,
    transfer: ?*HttpClient.Transfer = null,

    /// This effectively takes ownership of the FetchContext.
    ///
    /// We just return the underlying slices used for `headers`
    /// and for `body` here to avoid an allocation.
    pub fn toResponse(self: *const FetchContext) !Response {
        var headers: Headers = .{};

        // If the mode is "no-cors", we need to return this opaque/stripped Response.
        // https://developer.mozilla.org/en-US/docs/Web/API/Response/type
        if (self.mode == .@"no-cors") {
            return Response{
                .status = 0,
                .headers = headers,
                .mime = self.mime,
                .body = null,
                .url = self.url,
                .type = .@"opaque",
            };
        }

        // convert into Headers
        for (self.headers.items) |hdr| {
            var iter = std.mem.splitScalar(u8, hdr, ':');
            const name = iter.next() orelse "";
            const value = iter.next() orelse "";
            try headers.append(name, value, self.arena);
        }

        const resp_type: Response.ResponseType = blk: {
            if (std.mem.startsWith(u8, self.url, "data:")) {
                break :blk .basic;
            }

            break :blk switch (self.mode) {
                .cors => .cors,
                .@"same-origin", .navigate => .basic,
                .@"no-cors" => unreachable,
            };
        };

        return Response{
            .status = self.status,
            .headers = headers,
            .mime = self.mime,
            .body = self.body.items,
            .url = self.url,
            .type = resp_type,
        };
    }
};

// https://developer.mozilla.org/en-US/docs/Web/API/Window/fetch
pub fn fetch(input: RequestInput, options: ?RequestInit, page: *Page) !Env.Promise {
    const arena = page.arena;

    const req = try Request.constructor(input, options, page);
    var headers = try page.http_client.newHeaders();

    // Copy our headers into the HTTP headers.
    var header_iter = req.headers.headers.iterator();
    while (header_iter.next()) |entry| {
        const combined = try std.fmt.allocPrintSentinel(
            page.arena,
            "{s}: {s}",
            .{ entry.key_ptr.*, entry.value_ptr.* },
            0,
        );
        try headers.add(combined.ptr);
    }

    try page.requestCookie(.{}).headersForRequest(arena, req.url, &headers);

    const resolver = try page.main_context.createPersistentPromiseResolver(.page);

    const fetch_ctx = try arena.create(FetchContext);
    fetch_ctx.* = .{
        .arena = arena,
        .js_ctx = page.main_context,
        .promise_resolver = resolver,
        .method = req.method,
        .url = req.url,
        .mode = req.mode,
    };

    try page.http_client.request(.{
        .ctx = @ptrCast(fetch_ctx),
        .url = req.url,
        .method = req.method,
        .headers = headers,
        .body = req.body,
        .cookie_jar = page.cookie_jar,
        .resource_type = .fetch,

        .start_callback = struct {
            fn startCallback(transfer: *HttpClient.Transfer) !void {
                const self: *FetchContext = @ptrCast(@alignCast(transfer.ctx));
                log.debug(.fetch, "request start", .{ .method = self.method, .url = self.url, .source = "fetch" });

                self.transfer = transfer;
            }
        }.startCallback,
        .header_callback = struct {
            fn headerCallback(transfer: *HttpClient.Transfer) !void {
                const self: *FetchContext = @ptrCast(@alignCast(transfer.ctx));

                const header = &transfer.response_header.?;

                log.debug(.fetch, "request header", .{
                    .source = "fetch",
                    .method = self.method,
                    .url = self.url,
                    .status = header.status,
                });

                if (header.contentType()) |ct| {
                    self.mime = Mime.parse(ct) catch {
                        return error.MimeParsing;
                    };
                }

                if (transfer.getContentLength()) |cl| {
                    try self.body.ensureTotalCapacity(self.arena, cl);
                }

                var it = transfer.responseHeaderIterator();
                while (it.next()) |hdr| {
                    const joined = try std.fmt.allocPrint(self.arena, "{s}: {s}", .{ hdr.name, hdr.value });
                    try self.headers.append(self.arena, joined);
                }

                self.status = header.status;
            }
        }.headerCallback,
        .data_callback = struct {
            fn dataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
                const self: *FetchContext = @ptrCast(@alignCast(transfer.ctx));
                try self.body.appendSlice(self.arena, data);
            }
        }.dataCallback,
        .done_callback = struct {
            fn doneCallback(ctx: *anyopaque) !void {
                const self: *FetchContext = @ptrCast(@alignCast(ctx));
                self.transfer = null;

                log.info(.fetch, "request complete", .{
                    .source = "fetch",
                    .method = self.method,
                    .url = self.url,
                    .status = self.status,
                });

                const response = try self.toResponse();
                try self.promise_resolver.resolve(response);
            }
        }.doneCallback,
        .error_callback = struct {
            fn errorCallback(ctx: *anyopaque, err: anyerror) void {
                const self: *FetchContext = @ptrCast(@alignCast(ctx));
                self.transfer = null;

                log.err(.fetch, "error", .{
                    .url = self.url,
                    .err = err,
                    .source = "fetch error",
                });

                // We throw an Abort error when the page is getting closed so,
                // in this case, we don't need to reject the promise.
                if (err != error.Abort) {
                    self.promise_resolver.reject(@errorName(err)) catch unreachable;
                }
            }
        }.errorCallback,
    });

    return resolver.promise();
}

const testing = @import("../../testing.zig");
test "fetch: fetch" {
    try testing.htmlRunner("fetch/fetch.html");
}
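
Each HTTP callback above recovers its FetchContext from a type-erased *anyopaque pointer. A minimal sketch of that round-trip, with a hypothetical Ctx standing in for FetchContext and a plain function standing in for the client's done callback:

const std = @import("std");

const Ctx = struct {
    status: u16 = 0,
};

// A callback that only sees *anyopaque, as the HTTP client callbacks do above.
fn onDone(ctx: *anyopaque) void {
    const self: *Ctx = @ptrCast(@alignCast(ctx));
    self.status = 200;
}

test "anyopaque context round-trip" {
    var ctx = Ctx{};
    onDone(@ptrCast(&ctx));
    try std.testing.expectEqual(@as(u16, 200), ctx.status);
}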
@@ -1,143 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
const EventTarget = @import("../dom/event_target.zig").EventTarget;

pub const Interfaces = .{
    AbortController,
    AbortSignal,
};

const AbortController = @This();

signal: *AbortSignal,

pub fn constructor(page: *Page) !AbortController {
    // Why do we allocate this rather than storing directly in the struct?
    // https://github.com/lightpanda-io/project/discussions/165
    const signal = try page.arena.create(AbortSignal);
    signal.* = .init;

    return .{
        .signal = signal,
    };
}

pub fn get_signal(self: *AbortController) *AbortSignal {
    return self.signal;
}

pub fn _abort(self: *AbortController, reason_: ?[]const u8) !void {
    return self.signal.abort(reason_);
}

pub const AbortSignal = struct {
    const DEFAULT_REASON = "AbortError";

    pub const prototype = *EventTarget;
    proto: parser.EventTargetTBase = .{ .internal_target_type = .abort_signal },

    aborted: bool,
    reason: ?[]const u8,

    pub const init: AbortSignal = .{
        .reason = null,
        .aborted = false,
    };

    pub fn static_abort(reason_: ?[]const u8) AbortSignal {
        return .{
            .aborted = true,
            .reason = reason_ orelse DEFAULT_REASON,
        };
    }

    pub fn static_timeout(delay: u32, page: *Page) !*AbortSignal {
        const callback = try page.arena.create(TimeoutCallback);
        callback.* = .{
            .signal = .init,
        };

        try page.scheduler.add(callback, TimeoutCallback.run, delay, .{ .name = "abort_signal" });
        return &callback.signal;
    }

    pub fn get_aborted(self: *const AbortSignal) bool {
        return self.aborted;
    }

    fn abort(self: *AbortSignal, reason_: ?[]const u8) !void {
        self.aborted = true;
        self.reason = reason_ orelse DEFAULT_REASON;

        const abort_event = try parser.eventCreate();
        parser.eventSetInternalType(abort_event, .abort_signal);

        defer parser.eventDestroy(abort_event);
        try parser.eventInit(abort_event, "abort", .{});
        _ = try parser.eventTargetDispatchEvent(
            parser.toEventTarget(AbortSignal, self),
            abort_event,
        );
    }

    const Reason = union(enum) {
        reason: []const u8,
        undefined: void,
    };
    pub fn get_reason(self: *const AbortSignal) Reason {
        if (self.reason) |r| {
            return .{ .reason = r };
        }
        return .{ .undefined = {} };
    }

    const ThrowIfAborted = union(enum) {
        exception: Env.Exception,
        undefined: void,
    };
    pub fn _throwIfAborted(self: *const AbortSignal, page: *Page) ThrowIfAborted {
        if (self.aborted) {
            const ex = page.main_context.throw(self.reason orelse DEFAULT_REASON);
            return .{ .exception = ex };
        }
        return .{ .undefined = {} };
    }
};

const TimeoutCallback = struct {
    signal: AbortSignal,

    fn run(ctx: *anyopaque) ?u32 {
        const self: *TimeoutCallback = @ptrCast(@alignCast(ctx));
        self.signal.abort("TimeoutError") catch |err| {
            log.warn(.app, "abort signal timeout", .{ .err = err });
        };
        return null;
    }
};

const testing = @import("../../testing.zig");
test "Browser: HTML.AbortController" {
    try testing.htmlRunner("html/abort_controller.html");
}
@@ -1,81 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const parser = @import("../netsurf.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;

const Allocator = std.mem.Allocator;

const DataSet = @This();

element: *parser.Element,

pub fn named_get(self: *const DataSet, name: []const u8, _: *bool, page: *Page) !Env.UndefinedOr([]const u8) {
    const normalized_name = try normalize(page.call_arena, name);
    if (try parser.elementGetAttribute(self.element, normalized_name)) |value| {
        return .{ .value = value };
    }
    return .undefined;
}

pub fn named_set(self: *DataSet, name: []const u8, value: []const u8, _: *bool, page: *Page) !void {
    const normalized_name = try normalize(page.call_arena, name);
    try parser.elementSetAttribute(self.element, normalized_name, value);
}

pub fn named_delete(self: *DataSet, name: []const u8, _: *bool, page: *Page) !void {
    const normalized_name = try normalize(page.call_arena, name);
    try parser.elementRemoveAttribute(self.element, normalized_name);
}

fn normalize(allocator: Allocator, name: []const u8) ![]const u8 {
    var upper_count: usize = 0;
    for (name) |c| {
        if (std.ascii.isUpper(c)) {
            upper_count += 1;
        }
    }
    // for every upper-case letter, we'll probably need a dash before it
    // and we need the 'data-' prefix
    var normalized = try allocator.alloc(u8, name.len + upper_count + 5);

    @memcpy(normalized[0..5], "data-");
    if (upper_count == 0) {
        @memcpy(normalized[5..], name);
        return normalized;
    }

    var pos: usize = 5;
    for (name) |c| {
        if (std.ascii.isUpper(c)) {
            normalized[pos] = '-';
            pos += 1;
            normalized[pos] = c + 32;
        } else {
            normalized[pos] = c;
        }
        pos += 1;
    }
    return normalized;
}

const testing = @import("../../testing.zig");
test "Browser: HTML.DataSet" {
    try testing.htmlRunner("html/dataset.html");
}
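
The normalize helper above maps camelCase dataset names to their data-* attribute form. A quick, standalone check of that mapping, reusing the same approach; toAttributeName is a hypothetical name used only for this sketch:

const std = @import("std");

// Same mapping as DataSet.normalize: camelCase -> "data-" + kebab-case.
fn toAttributeName(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    var upper_count: usize = 0;
    for (name) |c| {
        if (std.ascii.isUpper(c)) upper_count += 1;
    }

    // "data-" prefix plus one dash per upper-case letter.
    var out = try allocator.alloc(u8, name.len + upper_count + 5);
    @memcpy(out[0..5], "data-");

    var pos: usize = 5;
    for (name) |c| {
        if (std.ascii.isUpper(c)) {
            out[pos] = '-';
            pos += 1;
            out[pos] = std.ascii.toLower(c);
        } else {
            out[pos] = c;
        }
        pos += 1;
    }
    return out;
}

test "dataset name normalization" {
    const attr = try toAttributeName(std.testing.allocator, "fooBarBaz");
    defer std.testing.allocator.free(attr);
    try std.testing.expectEqualStrings("data-foo-bar-baz", attr);
}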
@@ -1,322 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;

const Window = @import("window.zig").Window;
const Element = @import("../dom/element.zig").Element;
const ElementUnion = @import("../dom/element.zig").Union;
const Document = @import("../dom/document.zig").Document;
const NodeList = @import("../dom/nodelist.zig").NodeList;
const Location = @import("location.zig").Location;

const collection = @import("../dom/html_collection.zig");
const Walker = @import("../dom/walker.zig").WalkerDepthFirst;
const Cookie = @import("../storage/cookie.zig").Cookie;

// WEB IDL https://html.spec.whatwg.org/#the-document-object
pub const HTMLDocument = struct {
    pub const Self = parser.DocumentHTML;
    pub const prototype = *Document;
    pub const subtype = .node;

    // JS funcs
    // --------

    pub fn get_domain(self: *parser.DocumentHTML, page: *Page) ![]const u8 {
        // libdom's document_html get_domain always returns null. This is
        // the way MDN recommends getting the domain anyway, since
        // document.domain is deprecated.
        const location = try parser.documentHTMLGetLocation(Location, self) orelse return "";
        return location.get_host(page);
    }

    pub fn set_domain(_: *parser.DocumentHTML, _: []const u8) ![]const u8 {
        return error.NotImplemented;
    }

    pub fn get_referrer(self: *parser.DocumentHTML) ![]const u8 {
        return try parser.documentHTMLGetReferrer(self);
    }

    pub fn set_referrer(_: *parser.DocumentHTML, _: []const u8) ![]const u8 {
        return error.NotImplemented;
    }

    pub fn get_body(self: *parser.DocumentHTML) !?*parser.Body {
        return try parser.documentHTMLBody(self);
    }

    pub fn set_body(self: *parser.DocumentHTML, elt: ?*parser.ElementHTML) !?*parser.Body {
        try parser.documentHTMLSetBody(self, elt);
        return try get_body(self);
    }

    pub fn get_head(self: *parser.DocumentHTML) !?*parser.Head {
        const root = parser.documentHTMLToNode(self);
        const walker = Walker{};
        var next: ?*parser.Node = null;
        while (true) {
            next = try walker.get_next(root, next) orelse return null;
            if (std.ascii.eqlIgnoreCase("head", try parser.nodeName(next.?))) {
                return @as(*parser.Head, @ptrCast(next.?));
            }
        }
    }

    pub fn get_cookie(_: *parser.DocumentHTML, page: *Page) ![]const u8 {
        var buf: std.ArrayListUnmanaged(u8) = .{};
        try page.cookie_jar.forRequest(&page.url.uri, buf.writer(page.arena), .{
            .is_http = false,
            .is_navigation = true,
        });
        return buf.items;
    }

    pub fn set_cookie(_: *parser.DocumentHTML, cookie_str: []const u8, page: *Page) ![]const u8 {
        // we use the cookie jar's allocator to parse the cookie because it
        // outlives the page's arena.
        const c = try Cookie.parse(page.cookie_jar.allocator, &page.url.uri, cookie_str);
        errdefer c.deinit();
        if (c.http_only) {
            c.deinit();
            return ""; // HttpOnly cookies cannot be set from JS
        }
        try page.cookie_jar.add(c, std.time.timestamp());
        return cookie_str;
    }

    pub fn get_title(self: *parser.DocumentHTML) ![]const u8 {
        return try parser.documentHTMLGetTitle(self);
    }

    pub fn set_title(self: *parser.DocumentHTML, v: []const u8) ![]const u8 {
        try parser.documentHTMLSetTitle(self, v);
        return v;
    }

    pub fn _getElementsByName(self: *parser.DocumentHTML, name: []const u8, page: *Page) !NodeList {
        var list: NodeList = .{};

        if (name.len == 0) {
            return list;
        }

        const root = parser.documentHTMLToNode(self);
        var c = try collection.HTMLCollectionByName(root, name, .{
            .include_root = false,
        });

        const ln = try c.get_length();
        try list.ensureTotalCapacity(page.arena, ln);

        var i: u32 = 0;
        while (i < ln) : (i += 1) {
            const n = try c.item(i) orelse break;
            list.appendAssumeCapacity(n);
        }

        return list;
    }

    pub fn get_images(self: *parser.DocumentHTML) collection.HTMLCollection {
        return collection.HTMLCollectionByTagName(parser.documentHTMLToNode(self), "img", .{
            .include_root = false,
        });
    }

    pub fn get_embeds(self: *parser.DocumentHTML) collection.HTMLCollection {
        return collection.HTMLCollectionByTagName(parser.documentHTMLToNode(self), "embed", .{
            .include_root = false,
        });
    }

    pub fn get_plugins(self: *parser.DocumentHTML) collection.HTMLCollection {
        return get_embeds(self);
    }

    pub fn get_forms(self: *parser.DocumentHTML) collection.HTMLCollection {
        return collection.HTMLCollectionByTagName(parser.documentHTMLToNode(self), "form", .{
            .include_root = false,
        });
    }

    pub fn get_scripts(self: *parser.DocumentHTML) collection.HTMLCollection {
        return collection.HTMLCollectionByTagName(parser.documentHTMLToNode(self), "script", .{
            .include_root = false,
        });
    }

    pub fn get_applets(_: *parser.DocumentHTML) collection.HTMLCollection {
        return collection.HTMLCollectionEmpty();
    }

    pub fn get_links(self: *parser.DocumentHTML) collection.HTMLCollection {
        return collection.HTMLCollectionByLinks(parser.documentHTMLToNode(self), .{
            .include_root = false,
        });
    }

    pub fn get_anchors(self: *parser.DocumentHTML) collection.HTMLCollection {
        return collection.HTMLCollectionByAnchors(parser.documentHTMLToNode(self), .{
            .include_root = false,
        });
    }

    pub fn get_all(self: *parser.DocumentHTML) collection.HTMLAllCollection {
        return collection.HTMLAllCollection.init(parser.documentHTMLToNode(self));
    }

    pub fn get_currentScript(self: *parser.DocumentHTML) !?*parser.Script {
        return try parser.documentHTMLGetCurrentScript(self);
    }

    pub fn get_location(self: *parser.DocumentHTML) !?*Location {
        return try parser.documentHTMLGetLocation(Location, self);
    }

    pub fn set_location(_: *const parser.DocumentHTML, url: []const u8, page: *Page) !void {
        return page.navigateFromWebAPI(url, .{ .reason = .script });
    }

    pub fn get_designMode(_: *parser.DocumentHTML) []const u8 {
        return "off";
    }

    pub fn set_designMode(_: *parser.DocumentHTML, _: []const u8) []const u8 {
        return "off";
    }

    pub fn get_defaultView(_: *parser.DocumentHTML, page: *Page) *Window {
        return &page.window;
    }

    pub fn get_readyState(self: *parser.DocumentHTML, page: *Page) ![]const u8 {
        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
        return @tagName(state.ready_state);
    }

    // noop legacy functions
    // https://html.spec.whatwg.org/#Document-partial
    pub fn _clear(_: *parser.DocumentHTML) void {}
    pub fn _captureEvents(_: *parser.DocumentHTML) void {}
    pub fn _releaseEvents(_: *parser.DocumentHTML) void {}

    pub fn get_fgColor(_: *parser.DocumentHTML) []const u8 {
        return "";
    }
    pub fn set_fgColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
        return "";
    }
    pub fn get_linkColor(_: *parser.DocumentHTML) []const u8 {
        return "";
    }
    pub fn set_linkColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
        return "";
    }
    pub fn get_vlinkColor(_: *parser.DocumentHTML) []const u8 {
        return "";
    }
    pub fn set_vlinkColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
        return "";
    }
    pub fn get_alinkColor(_: *parser.DocumentHTML) []const u8 {
        return "";
    }
    pub fn set_alinkColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
        return "";
    }
    pub fn get_bgColor(_: *parser.DocumentHTML) []const u8 {
        return "";
    }
    pub fn set_bgColor(_: *parser.DocumentHTML, _: []const u8) []const u8 {
        return "";
    }

    // Returns the topmost Element at the specified coordinates (relative to the viewport).
    // Since Lightpanda requires the client to know what they are clicking on, we do not
    // return the underlying element at this moment. This can currently only happen if the
    // first pixel is clicked without having rendered any element. This will change when
    // css properties are supported.
    // This returns an ElementUnion instead of a *parser.Element in case the element somehow
    // hasn't passed through the js runtime yet.
    // While x and y should be f32, here we take i32 since that's what our
    // "renderer" uses. By specifying i32 here, rather than f32 and doing the
    // conversion ourself, we rely on v8's type conversion which is both more
    // flexible (e.g. handles NaN) and will be more consistent with a browser.
    pub fn _elementFromPoint(_: *parser.DocumentHTML, x: i32, y: i32, page: *Page) !?ElementUnion {
        const element = page.renderer.getElementAtPosition(x, y) orelse return null;
        // TODO if pointer-events is set to none, the underlying element should be returned
        // (parser.documentGetDocumentElement(self.document)?)
        return try Element.toInterface(element);
    }

    // Returns an array of all elements at the specified coordinates (relative to the
    // viewport). The elements are ordered from the topmost to the bottommost box of
    // the viewport.
    // While x and y should be f32, here we take i32 since that's what our
    // "renderer" uses. By specifying i32 here, rather than f32 and doing the
    // conversion ourself, we rely on v8's type conversion which is both more
    // flexible (e.g. handles NaN) and will be more consistent with a browser.
    pub fn _elementsFromPoint(_: *parser.DocumentHTML, x: i32, y: i32, page: *Page) ![]ElementUnion {
|
||||
const element = page.renderer.getElementAtPosition(x, y) orelse return &.{};
|
||||
// TODO if pointer-events set to none the underlying element should be returned (parser.documentGetDocumentElement(self.document);?)
|
||||
|
||||
var list: std.ArrayListUnmanaged(ElementUnion) = .empty;
|
||||
try list.ensureTotalCapacity(page.call_arena, 3);
|
||||
list.appendAssumeCapacity(try Element.toInterface(element));
|
||||
|
||||
// Since we are using a flat renderer there is no hierarchy of elements. What we do know is that the element is part of the main document.
|
||||
// Thus we can add the HtmlHtmlElement and it's child HTMLBodyElement to the returned list.
|
||||
// TBD Should we instead return every parent that is an element? Note that a child does not physically need to be overlapping the parent.
|
||||
// Should we do a render pass on demand?
|
||||
const doc_elem = try parser.documentGetDocumentElement(parser.documentHTMLToDocument(page.window.document)) orelse {
|
||||
return list.items;
|
||||
};
|
||||
if (try parser.documentHTMLBody(page.window.document)) |body| {
|
||||
list.appendAssumeCapacity(try Element.toInterface(parser.bodyToElement(body)));
|
||||
}
|
||||
list.appendAssumeCapacity(try Element.toInterface(doc_elem));
|
||||
return list.items;
|
||||
}
|
||||
|
||||
pub fn documentIsLoaded(self: *parser.DocumentHTML, page: *Page) !void {
|
||||
const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
|
||||
state.ready_state = .interactive;
|
||||
|
||||
log.debug(.script_event, "dispatch event", .{
|
||||
.type = "DOMContentLoaded",
|
||||
.source = "document",
|
||||
});
|
||||
|
||||
const evt = try parser.eventCreate();
|
||||
defer parser.eventDestroy(evt);
|
||||
try parser.eventInit(evt, "DOMContentLoaded", .{ .bubbles = true, .cancelable = true });
|
||||
_ = try parser.eventTargetDispatchEvent(parser.toEventTarget(parser.DocumentHTML, self), evt);
|
||||
|
||||
try page.window.dispatchForDocumentTarget(evt);
|
||||
}
|
||||
|
||||
pub fn documentIsComplete(self: *parser.DocumentHTML, page: *Page) !void {
|
||||
const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
|
||||
state.ready_state = .complete;
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: HTML.Document" {
|
||||
try testing.htmlRunner("html/document.html");
|
||||
}
|
||||
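The comments in _elementFromPoint/_elementsFromPoint above describe hit-testing against a flat renderer rather than a real layout tree. A minimal sketch of that idea, using a hypothetical grid-based renderer (FlatRenderer and its cell layout are illustrative, not the project's actual renderer, although getElementAtPosition mirrors the call used above):

const std = @import("std");

// Hypothetical flat renderer: each rendered element occupies one 1x1 cell,
// so hit-testing is a bounds check plus an array lookup, with no hierarchy.
const FlatRenderer = struct {
    width: i32,
    cells: []const ?u32, // element ids, row-major

    fn getElementAtPosition(self: FlatRenderer, x: i32, y: i32) ?u32 {
        if (x < 0 or y < 0 or x >= self.width) return null;
        const index: usize = @intCast(y * self.width + x);
        if (index >= self.cells.len) return null;
        return self.cells[index];
    }
};

test "flat hit-test" {
    const cells = [_]?u32{ 1, null, 2, null };
    const r = FlatRenderer{ .width = 2, .cells = &cells };
    try std.testing.expectEqual(@as(?u32, 1), r.getElementAtPosition(0, 0));
    try std.testing.expectEqual(@as(?u32, null), r.getElementAtPosition(1, 1));
    try std.testing.expectEqual(@as(?u32, null), r.getElementAtPosition(5, 5));
}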
File diff suppressed because it is too large
@@ -1,86 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const Env = @import("../env.zig").Env;
const parser = @import("../netsurf.zig");

// https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent
pub const ErrorEvent = struct {
    pub const prototype = *parser.Event;
    pub const union_make_copy = true;

    proto: parser.Event,
    message: []const u8,
    filename: []const u8,
    lineno: i32,
    colno: i32,
    @"error": ?Env.JsObject,

    const ErrorEventInit = struct {
        message: []const u8 = "",
        filename: []const u8 = "",
        lineno: i32 = 0,
        colno: i32 = 0,
        @"error": ?Env.JsObject = null,
    };

    pub fn constructor(event_type: []const u8, opts: ?ErrorEventInit) !ErrorEvent {
        const event = try parser.eventCreate();
        defer parser.eventDestroy(event);
        try parser.eventInit(event, event_type, .{});
        parser.eventSetInternalType(event, .event);

        const o = opts orelse ErrorEventInit{};

        return .{
            .proto = event.*,
            .message = o.message,
            .filename = o.filename,
            .lineno = o.lineno,
            .colno = o.colno,
            .@"error" = if (o.@"error") |e| try e.persist() else null,
        };
    }

    pub fn get_message(self: *const ErrorEvent) []const u8 {
        return self.message;
    }

    pub fn get_filename(self: *const ErrorEvent) []const u8 {
        return self.filename;
    }

    pub fn get_lineno(self: *const ErrorEvent) i32 {
        return self.lineno;
    }

    pub fn get_colno(self: *const ErrorEvent) i32 {
        return self.colno;
    }

    pub fn get_error(self: *const ErrorEvent) Env.UndefinedOr(Env.JsObject) {
        if (self.@"error") |e| {
            return .{ .value = e };
        }
        return .undefined;
    }
};

const testing = @import("../../testing.zig");
test "Browser: HTML.ErrorEvent" {
    try testing.htmlRunner("html/error_event.html");
}
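The constructor above relies on a fully defaulted init struct so that a missing options argument collapses to defaults. A minimal sketch of the same pattern with made-up names (Init and describe are illustrative only):

const std = @import("std");

// Every field of the init struct has a default, so a null argument simply
// becomes the default-initialized struct via `orelse`.
const Init = struct {
    message: []const u8 = "",
    lineno: i32 = 0,
};

fn describe(opts_: ?Init) Init {
    const opts = opts_ orelse Init{};
    return opts;
}

test "defaulted options" {
    try std.testing.expectEqualStrings("", describe(null).message);
    try std.testing.expectEqual(@as(i32, 7), describe(.{ .lineno = 7 }).lineno);
}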
@@ -1,37 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const Allocator = std.mem.Allocator;

const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
const HTMLElement = @import("elements.zig").HTMLElement;

pub const HTMLFormElement = struct {
    pub const Self = parser.Form;
    pub const prototype = *HTMLElement;
    pub const subtype = .node;

    pub fn _submit(self: *parser.Form, page: *Page) !void {
        return page.submitForm(self, null);
    }

    pub fn _reset(self: *parser.Form) !void {
        try parser.formElementReset(self);
    }
};
@@ -1,28 +0,0 @@
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");

const parser = @import("../netsurf.zig");
const HTMLElement = @import("elements.zig").HTMLElement;

// https://html.spec.whatwg.org/multipage/iframe-embed-object.html#htmliframeelement
pub const HTMLIFrameElement = struct {
    pub const Self = parser.IFrame;
    pub const prototype = *HTMLElement;
    pub const subtype = .node;
};
@@ -1,92 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const Page = @import("../page.zig").Page;

const URL = @import("../url/url.zig").URL;

// https://html.spec.whatwg.org/multipage/nav-history-apis.html#the-location-interface
pub const Location = struct {
    url: ?URL = null,

    pub fn get_href(self: *Location, page: *Page) ![]const u8 {
        if (self.url) |*u| return u.get_href(page);
        return "";
    }

    pub fn get_protocol(self: *Location, page: *Page) ![]const u8 {
        if (self.url) |*u| return u.get_protocol(page);
        return "";
    }

    pub fn get_host(self: *Location, page: *Page) ![]const u8 {
        if (self.url) |*u| return u.get_host(page);
        return "";
    }

    pub fn get_hostname(self: *Location) []const u8 {
        if (self.url) |*u| return u.get_hostname();
        return "";
    }

    pub fn get_port(self: *Location, page: *Page) ![]const u8 {
        if (self.url) |*u| return u.get_port(page);
        return "";
    }

    pub fn get_pathname(self: *Location) []const u8 {
        if (self.url) |*u| return u.get_pathname();
        return "";
    }

    pub fn get_search(self: *Location, page: *Page) ![]const u8 {
        if (self.url) |*u| return u.get_search(page);
        return "";
    }

    pub fn get_hash(self: *Location, page: *Page) ![]const u8 {
        if (self.url) |*u| return u.get_hash(page);
        return "";
    }

    pub fn get_origin(self: *Location, page: *Page) ![]const u8 {
        if (self.url) |*u| return u.get_origin(page);
        return "";
    }

    pub fn _assign(_: *const Location, url: []const u8, page: *Page) !void {
        return page.navigateFromWebAPI(url, .{ .reason = .script });
    }

    pub fn _replace(_: *const Location, url: []const u8, page: *Page) !void {
        return page.navigateFromWebAPI(url, .{ .reason = .script });
    }

    pub fn _reload(_: *const Location, page: *Page) !void {
        return page.navigateFromWebAPI(page.url.raw, .{ .reason = .script });
    }

    pub fn _toString(self: *Location, page: *Page) ![]const u8 {
        return try self.get_href(page);
    }
};

const testing = @import("../../testing.zig");
test "Browser: HTML.Location" {
    try testing.htmlRunner("html/location.html");
}
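Every getter above simply delegates to the URL type, falling back to an empty string when no URL is set. For orientation, a rough sketch of the kind of pieces Location exposes (protocol, hash); the hand-rolled split below is illustrative only and is not how ../url/url.zig actually parses:

const std = @import("std");

fn protocol(href: []const u8) []const u8 {
    const i = std.mem.indexOf(u8, href, "//") orelse return "";
    return href[0..i]; // includes the trailing ':'
}

fn hash(href: []const u8) []const u8 {
    const i = std.mem.indexOfScalar(u8, href, '#') orelse return "";
    return href[i..];
}

test "location pieces" {
    const href = "https://example.com/path/page?q=1#top";
    try std.testing.expectEqualStrings("https:", protocol(href));
    try std.testing.expectEqualStrings("#top", hash(href));
}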
@@ -1,45 +0,0 @@
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const parser = @import("../netsurf.zig");
const Function = @import("../env.zig").Function;
const EventTarget = @import("../dom/event_target.zig").EventTarget;

// https://drafts.csswg.org/cssom-view/#the-mediaquerylist-interface
pub const MediaQueryList = struct {
    pub const prototype = *EventTarget;

    // Extend libdom event target for pure zig struct.
    // This is not safe as it relies on a structure layout that isn't guaranteed
    base: parser.EventTargetTBase = parser.EventTargetTBase{ .internal_target_type = .media_query_list },

    matches: bool,
    media: []const u8,

    pub fn get_matches(self: *const MediaQueryList) bool {
        return self.matches;
    }

    pub fn get_media(self: *const MediaQueryList) []const u8 {
        return self.media;
    }

    pub fn _addListener(_: *const MediaQueryList, _: Function) void {}

    pub fn _removeListener(_: *const MediaQueryList, _: Function) void {}
};
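The "base as a field" trick noted above (embedding an event-target base in a pure Zig struct and handing out a pointer to it) can also be done with @fieldParentPtr to recover the owner. A minimal sketch of the pattern; Base, Widget and fromBase are made-up names, only the shape mirrors MediaQueryList:

const std = @import("std");

const Base = struct { kind: u8 };

const Widget = struct {
    base: Base = .{ .kind = 1 },
    matches: bool,
};

fn fromBase(base: *Base) *Widget {
    // recover the enclosing struct from a pointer to its embedded base field
    return @fieldParentPtr("base", base);
}

test "recover owner from embedded base" {
    var w = Widget{ .matches = true };
    try std.testing.expect(fromBase(&w.base).matches);
}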
@@ -1,103 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const parser = @import("../netsurf.zig");
const EventTarget = @import("../dom/event_target.zig").EventTarget;

pub const Interfaces = .{
    Screen,
    ScreenOrientation,
};

// https://developer.mozilla.org/en-US/docs/Web/API/Screen
pub const Screen = struct {
    pub const prototype = *EventTarget;

    proto: parser.EventTargetTBase = .{ .internal_target_type = .screen },

    height: u32 = 1080,
    width: u32 = 1920,
    // https://developer.mozilla.org/en-US/docs/Web/API/Screen/colorDepth
    color_depth: u32 = 8,
    // https://developer.mozilla.org/en-US/docs/Web/API/Screen/pixelDepth
    pixel_depth: u32 = 8,
    orientation: ScreenOrientation = .{ .type = .landscape_primary },

    pub fn get_availHeight(self: *const Screen) u32 {
        return self.height;
    }

    pub fn get_availWidth(self: *const Screen) u32 {
        return self.width;
    }

    pub fn get_height(self: *const Screen) u32 {
        return self.height;
    }

    pub fn get_width(self: *const Screen) u32 {
        return self.width;
    }

    pub fn get_pixelDepth(self: *const Screen) u32 {
        return self.pixel_depth;
    }

    pub fn get_orientation(self: *const Screen) ScreenOrientation {
        return self.orientation;
    }
};

const ScreenOrientationType = enum {
    portrait_primary,
    portrait_secondary,
    landscape_primary,
    landscape_secondary,

    pub fn toString(self: ScreenOrientationType) []const u8 {
        return switch (self) {
            .portrait_primary => "portrait-primary",
            .portrait_secondary => "portrait-secondary",
            .landscape_primary => "landscape-primary",
            .landscape_secondary => "landscape-secondary",
        };
    }
};

pub const ScreenOrientation = struct {
    pub const prototype = *EventTarget;

    angle: u32 = 0,
    type: ScreenOrientationType,
    proto: parser.EventTargetTBase = .{ .internal_target_type = .screen_orientation },

    pub fn get_angle(self: *const ScreenOrientation) u32 {
        return self.angle;
    }

    pub fn get_type(self: *const ScreenOrientation) []const u8 {
        return self.type.toString();
    }
};

const testing = @import("../../testing.zig");
test "Browser: HTML.Screen" {
    try testing.htmlRunner("html/screen.html");
}
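ScreenOrientationType.toString is a hand-written switch rather than @tagName because the DOM strings use hyphens while Zig identifiers use underscores. A small sketch of the difference (the Orientation enum here is illustrative):

const std = @import("std");

const Orientation = enum { landscape_primary };

test "tag name vs DOM string" {
    const o: Orientation = .landscape_primary;
    try std.testing.expectEqualStrings("landscape_primary", @tagName(o));
    const dom = switch (o) {
        .landscape_primary => "landscape-primary",
    };
    try std.testing.expectEqualStrings("landscape-primary", dom);
}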
@@ -1,204 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");

const parser = @import("../netsurf.zig");
const collection = @import("../dom/html_collection.zig");

const Page = @import("../page.zig").Page;
const HTMLElement = @import("elements.zig").HTMLElement;

pub const Interfaces = .{
    HTMLSelectElement,
    HTMLOptionElement,
    HTMLOptionsCollection,
};

pub const HTMLSelectElement = struct {
    pub const Self = parser.Select;
    pub const prototype = *HTMLElement;
    pub const subtype = .node;

    pub fn get_length(select: *parser.Select) !u32 {
        return parser.selectGetLength(select);
    }

    pub fn get_form(select: *parser.Select) !?*parser.Form {
        return parser.selectGetForm(select);
    }

    pub fn get_name(select: *parser.Select) ![]const u8 {
        return parser.selectGetName(select);
    }
    pub fn set_name(select: *parser.Select, name: []const u8) !void {
        return parser.selectSetName(select, name);
    }

    pub fn get_disabled(select: *parser.Select) !bool {
        return parser.selectGetDisabled(select);
    }
    pub fn set_disabled(select: *parser.Select, disabled: bool) !void {
        return parser.selectSetDisabled(select, disabled);
    }

    pub fn get_multiple(select: *parser.Select) !bool {
        return parser.selectGetMultiple(select);
    }
    pub fn set_multiple(select: *parser.Select, multiple: bool) !void {
        return parser.selectSetMultiple(select, multiple);
    }

    pub fn get_selectedIndex(select: *parser.Select, page: *Page) !i32 {
        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(select)));
        const selected_index = try parser.selectGetSelectedIndex(select);

        // See the explicit_index_set field documentation
        if (!state.explicit_index_set) {
            if (selected_index == -1) {
                if (try parser.selectGetMultiple(select) == false) {
                    if (try get_length(select) > 0) {
                        return 0;
                    }
                }
            }
        }
        return selected_index;
    }

    // Libdom's dom_html_select_select_set_selected_index will crash if index
    // is out of range, and it doesn't properly unset options
    pub fn set_selectedIndex(select: *parser.Select, index: i32, page: *Page) !void {
        var state = try page.getOrCreateNodeState(@ptrCast(@alignCast(select)));
        state.explicit_index_set = true;

        const options = try parser.selectGetOptions(select);
        const len = try parser.optionCollectionGetLength(options);
        for (0..len) |i| {
            const option = try parser.optionCollectionItem(options, @intCast(i));
            try parser.optionSetSelected(option, false);
        }
        if (index >= 0 and index < try get_length(select)) {
            const option = try parser.optionCollectionItem(options, @intCast(index));
            try parser.optionSetSelected(option, true);
        }
    }

    pub fn get_options(select: *parser.Select) HTMLOptionsCollection {
        return .{
            .select = select,
            .proto = collection.HTMLCollectionChildren(@ptrCast(@alignCast(select)), .{
                .mutable = true,
                .include_root = false,
            }),
        };
    }
};

pub const HTMLOptionElement = struct {
    pub const Self = parser.Option;
    pub const prototype = *HTMLElement;
    pub const subtype = .node;

    pub fn get_value(self: *parser.Option) ![]const u8 {
        return parser.optionGetValue(self);
    }
    pub fn set_value(self: *parser.Option, value: []const u8) !void {
        return parser.optionSetValue(self, value);
    }

    pub fn get_label(self: *parser.Option) ![]const u8 {
        return parser.optionGetLabel(self);
    }
    pub fn set_label(self: *parser.Option, label: []const u8) !void {
        return parser.optionSetLabel(self, label);
    }

    pub fn get_selected(self: *parser.Option) !bool {
        return parser.optionGetSelected(self);
    }
    pub fn set_selected(self: *parser.Option, value: bool) !void {
        return parser.optionSetSelected(self, value);
    }

    pub fn get_disabled(self: *parser.Option) !bool {
        return parser.optionGetDisabled(self);
    }
    pub fn set_disabled(self: *parser.Option, value: bool) !void {
        return parser.optionSetDisabled(self, value);
    }

    pub fn get_text(self: *parser.Option) ![]const u8 {
        return parser.optionGetText(self);
    }

    pub fn get_form(self: *parser.Option) !?*parser.Form {
        return parser.optionGetForm(self);
    }
};

pub const HTMLOptionsCollection = struct {
    pub const prototype = *collection.HTMLCollection;

    proto: collection.HTMLCollection,
    select: *parser.Select,

    pub fn get_selectedIndex(self: *HTMLOptionsCollection, page: *Page) !i32 {
        return HTMLSelectElement.get_selectedIndex(self.select, page);
    }

    pub fn set_selectedIndex(self: *HTMLOptionsCollection, index: i32, page: *Page) !void {
        return HTMLSelectElement.set_selectedIndex(self.select, index, page);
    }

    const BeforeOpts = union(enum) {
        index: u32,
        option: *parser.Option,
    };
    pub fn _add(self: *HTMLOptionsCollection, option: *parser.Option, before_: ?BeforeOpts) !void {
        const Node = @import("../dom/node.zig").Node;
        const before = before_ orelse {
            return self.appendOption(option);
        };

        const insert_before: *parser.Node = switch (before) {
            .option => |o| @ptrCast(@alignCast(o)),
            .index => |i| (try self.proto.item(i)) orelse return self.appendOption(option),
        };
        return Node.before(insert_before, &.{
            .{ .node = @ptrCast(@alignCast(option)) },
        });
    }

    pub fn _remove(self: *HTMLOptionsCollection, index: u32) !void {
        const Node = @import("../dom/node.zig").Node;
        const option = (try self.proto.item(index)) orelse return;
        _ = try Node._removeChild(@ptrCast(@alignCast(self.select)), option);
    }

    fn appendOption(self: *HTMLOptionsCollection, option: *parser.Option) !void {
        const Node = @import("../dom/node.zig").Node;
        return Node.append(@ptrCast(@alignCast(self.select)), &.{
            .{ .node = @ptrCast(@alignCast(option)) },
        });
    }
};

const testing = @import("../../testing.zig");
test "Browser: HTML.Select" {
    try testing.htmlRunner("html/select.html");
}
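set_selectedIndex works around libdom by clearing every option first and only then selecting the requested index when it is in range, so an out-of-range index deselects everything instead of crashing. The same shape, sketched over a plain slice of selected flags instead of libdom options (setSelectedIndex here is an illustrative helper, not the project API):

const std = @import("std");

fn setSelectedIndex(selected: []bool, index: i32) void {
    for (selected) |*s| s.* = false; // unset everything first
    if (index < 0) return;
    const i: usize = @intCast(index);
    if (i < selected.len) selected[i] = true; // then set only a valid index
}

test "unset then set" {
    var flags = [_]bool{ false, true, false };
    setSelectedIndex(&flags, 2);
    try std.testing.expectEqual([_]bool{ false, false, true }, flags);
    setSelectedIndex(&flags, -1);
    try std.testing.expectEqual([_]bool{ false, false, false }, flags);
}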
@@ -1,36 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const Element = @import("../dom/element.zig").Element;

// Support for SVGElement is very limited; this is a dummy implementation.
// This is here only to be able to support `element instanceof SVGElement;` in JavaScript.
// https://developer.mozilla.org/en-US/docs/Web/API/SVGElement
pub const SVGElement = struct {
    // Currently the prototype chain is not implemented (will not be returned by toInterface())
    // For that we need parser.SvgElement and the derived types with tags in the v-table.
    pub const prototype = *Element;
    // While this is a Node, could consider not exposing the subtype until we have
    // a Self type to cast to.
    pub const subtype = .node;
};

const testing = @import("../../testing.zig");
test "Browser: HTML.SVGElement" {
    try testing.htmlRunner("html/svg.html");
}
@@ -1,457 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;

const Navigator = @import("navigator.zig").Navigator;
const History = @import("history.zig").History;
const Location = @import("location.zig").Location;
const Crypto = @import("../crypto/crypto.zig").Crypto;
const Console = @import("../console/console.zig").Console;
const EventTarget = @import("../dom/event_target.zig").EventTarget;
const MediaQueryList = @import("media_query_list.zig").MediaQueryList;
const Performance = @import("../dom/performance.zig").Performance;
const CSSStyleDeclaration = @import("../cssom/CSSStyleDeclaration.zig");
const Screen = @import("screen.zig").Screen;
const domcss = @import("../dom/css.zig");
const Css = @import("../css/css.zig").Css;

const Function = Env.Function;

const v8 = @import("v8");
const Request = @import("../fetch/Request.zig");
const fetchFn = @import("../fetch/fetch.zig").fetch;

const storage = @import("../storage/storage.zig");

// https://dom.spec.whatwg.org/#interface-window-extensions
// https://html.spec.whatwg.org/multipage/nav-history-apis.html#window
pub const Window = struct {
    pub const prototype = *EventTarget;

    // Extend libdom event target for pure zig struct.
    base: parser.EventTargetTBase = parser.EventTargetTBase{ .internal_target_type = .window },

    document: *parser.DocumentHTML,
    target: []const u8 = "",
    history: History = .{},
    location: Location = .{},
    storage_shelf: ?*storage.Shelf = null,

    // counter for having unique timer ids
    timer_id: u30 = 0,
    timers: std.AutoHashMapUnmanaged(u32, void) = .{},

    crypto: Crypto = .{},
    console: Console = .{},
    navigator: Navigator = .{},
    performance: Performance,
    screen: Screen = .{},
    css: Css = .{},

    pub fn create(target: ?[]const u8, navigator: ?Navigator) !Window {
        var fbs = std.io.fixedBufferStream("");
        const html_doc = try parser.documentHTMLParse(fbs.reader(), "utf-8");
        const doc = parser.documentHTMLToDocument(html_doc);
        try parser.documentSetDocumentURI(doc, "about:blank");

        return .{
            .document = html_doc,
            .target = target orelse "",
            .navigator = navigator orelse .{},
            .performance = Performance.init(),
        };
    }

    pub fn replaceLocation(self: *Window, loc: Location) !void {
        self.location = loc;
        try parser.documentHTMLSetLocation(Location, self.document, &self.location);
    }

    pub fn replaceDocument(self: *Window, doc: *parser.DocumentHTML) !void {
        self.performance.reset(); // When to reset see: https://developer.mozilla.org/en-US/docs/Web/API/Performance/timeOrigin
        self.document = doc;
        try parser.documentHTMLSetLocation(Location, doc, &self.location);
    }

    pub fn setStorageShelf(self: *Window, shelf: *storage.Shelf) void {
        self.storage_shelf = shelf;
    }

    pub fn _fetch(_: *Window, input: Request.RequestInput, options: ?Request.RequestInit, page: *Page) !Env.Promise {
        return fetchFn(input, options, page);
    }

    pub fn get_window(self: *Window) *Window {
        return self;
    }

    pub fn get_navigator(self: *Window) *Navigator {
        return &self.navigator;
    }

    pub fn get_location(self: *Window) *Location {
        return &self.location;
    }

    pub fn set_location(_: *const Window, url: []const u8, page: *Page) !void {
        return page.navigateFromWebAPI(url, .{ .reason = .script });
    }

    pub fn get_console(self: *Window) *Console {
        return &self.console;
    }

    pub fn get_crypto(self: *Window) *Crypto {
        return &self.crypto;
    }

    pub fn get_self(self: *Window) *Window {
        return self;
    }

    pub fn get_parent(self: *Window) *Window {
        return self;
    }

    // frames returns the window itself, but accessing it via a pseudo
    // array returns the Window object corresponding to the given frame or
    // iframe.
    // https://developer.mozilla.org/en-US/docs/Web/API/Window/frames
    pub fn get_frames(self: *Window) *Window {
        return self;
    }

    pub fn indexed_get(self: *Window, index: u32, has_value: *bool, page: *Page) !*Window {
        const frames = try domcss.querySelectorAll(
            page.call_arena,
            parser.documentHTMLToNode(self.document),
            "iframe",
        );

        if (index >= frames.nodes.items.len) {
            has_value.* = false;
            return undefined;
        }

        has_value.* = true;
        // TODO return the correct frame's window
        // frames.nodes.items[indexed]
        return error.TODO;
    }

    // Retrieve the number of frames/iframes from the DOM dynamically.
    pub fn get_length(self: *const Window, page: *Page) !u32 {
        const frames = try domcss.querySelectorAll(
            page.call_arena,
            parser.documentHTMLToNode(self.document),
            "iframe",
        );

        return frames.get_length();
    }

    pub fn get_top(self: *Window) *Window {
        return self;
    }

    pub fn get_document(self: *Window) ?*parser.DocumentHTML {
        return self.document;
    }

    pub fn get_history(self: *Window) *History {
        return &self.history;
    }

    // The interior height of the window in pixels, including the height of the horizontal scroll bar, if present.
    pub fn get_innerHeight(_: *Window, page: *Page) u32 {
        // We do not have scrollbars or padding so this is the same as Element.clientHeight
        return page.renderer.height();
    }

    // The interior width of the window in pixels. That includes the width of the vertical scroll bar, if one is present.
    pub fn get_innerWidth(_: *Window, page: *Page) u32 {
        // We do not have scrollbars or padding so this is the same as Element.clientWidth
        return page.renderer.width();
    }

    pub fn get_name(self: *Window) []const u8 {
        return self.target;
    }

    pub fn get_localStorage(self: *Window) !*storage.Bottle {
        if (self.storage_shelf == null) return parser.DOMError.NotSupported;
        return &self.storage_shelf.?.bucket.local;
    }

    pub fn get_sessionStorage(self: *Window) !*storage.Bottle {
        if (self.storage_shelf == null) return parser.DOMError.NotSupported;
        return &self.storage_shelf.?.bucket.session;
    }

    pub fn get_performance(self: *Window) *Performance {
        return &self.performance;
    }

    pub fn get_screen(self: *Window) *Screen {
        return &self.screen;
    }

    pub fn get_CSS(self: *Window) *Css {
        return &self.css;
    }

    pub fn _requestAnimationFrame(self: *Window, cbk: Function, page: *Page) !u32 {
        return self.createTimeout(cbk, 5, page, .{
            .animation_frame = true,
            .name = "animationFrame",
            .low_priority = true,
        });
    }

    pub fn _cancelAnimationFrame(self: *Window, id: u32) !void {
        _ = self.timers.remove(id);
    }

    pub fn _setTimeout(self: *Window, cbk: Function, delay: ?u32, params: []Env.JsObject, page: *Page) !u32 {
        return self.createTimeout(cbk, delay, page, .{ .args = params, .name = "setTimeout" });
    }

    pub fn _setInterval(self: *Window, cbk: Function, delay: ?u32, params: []Env.JsObject, page: *Page) !u32 {
        return self.createTimeout(cbk, delay, page, .{ .repeat = true, .args = params, .name = "setInterval" });
    }

    pub fn _clearTimeout(self: *Window, id: u32) !void {
        _ = self.timers.remove(id);
    }

    pub fn _clearInterval(self: *Window, id: u32) !void {
        _ = self.timers.remove(id);
    }

    pub fn _queueMicrotask(self: *Window, cbk: Function, page: *Page) !u32 {
        return self.createTimeout(cbk, 0, page, .{ .name = "queueMicrotask" });
    }

    pub fn _setImmediate(self: *Window, cbk: Function, page: *Page) !u32 {
        return self.createTimeout(cbk, 0, page, .{ .name = "setImmediate" });
    }

    pub fn _clearImmediate(self: *Window, id: u32) void {
        _ = self.timers.remove(id);
    }

    pub fn _matchMedia(_: *const Window, media: Env.String) !MediaQueryList {
        return .{
            .matches = false, // TODO?
            .media = media.string,
        };
    }

    pub fn _btoa(_: *const Window, value: []const u8, page: *Page) ![]const u8 {
        const Encoder = std.base64.standard.Encoder;
        const out = try page.call_arena.alloc(u8, Encoder.calcSize(value.len));
        return Encoder.encode(out, value);
    }

    pub fn _atob(_: *const Window, value: []const u8, page: *Page) ![]const u8 {
        const Decoder = std.base64.standard.Decoder;
        const size = Decoder.calcSizeForSlice(value) catch return error.InvalidCharacterError;

        const out = try page.call_arena.alloc(u8, size);
        Decoder.decode(out, value) catch return error.InvalidCharacterError;
        return out;
    }

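    // A minimal sketch of the std.base64 round-trip that _btoa/_atob rely on:
    // calcSize/encode on the way in, calcSizeForSlice/decode on the way out.
    // It uses the testing allocator instead of the page's call arena.
    test "base64 round-trip sketch" {
        const allocator = std.testing.allocator;
        const Encoder = std.base64.standard.Encoder;
        const Decoder = std.base64.standard.Decoder;

        const plain = "lightpanda";
        const encoded = try allocator.alloc(u8, Encoder.calcSize(plain.len));
        defer allocator.free(encoded);
        _ = Encoder.encode(encoded, plain);

        const decoded = try allocator.alloc(u8, try Decoder.calcSizeForSlice(encoded));
        defer allocator.free(decoded);
        try Decoder.decode(decoded, encoded);
        try std.testing.expectEqualStrings(plain, decoded);
    }
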
    const CreateTimeoutOpts = struct {
        name: []const u8,
        args: []Env.JsObject = &.{},
        repeat: bool = false,
        animation_frame: bool = false,
        low_priority: bool = false,
    };
    fn createTimeout(self: *Window, cbk: Function, delay_: ?u32, page: *Page, opts: CreateTimeoutOpts) !u32 {
        const delay = delay_ orelse 0;
        if (self.timers.count() > 512) {
            return error.TooManyTimeout;
        }
        const timer_id = self.timer_id +% 1;
        self.timer_id = timer_id;

        const arena = page.arena;

        const gop = try self.timers.getOrPut(arena, timer_id);
        if (gop.found_existing) {
            // this can only happen once the u30 timer_id counter has wrapped, i.e. after 2^30 timeouts.
            return error.TooManyTimeout;
        } else {
            gop.value_ptr.* = {};
        }
        errdefer _ = self.timers.remove(timer_id);

        const args = opts.args;
        var persisted_args: []Env.JsObject = &.{};
        if (args.len > 0) {
            persisted_args = try page.arena.alloc(Env.JsObject, args.len);
            for (args, persisted_args) |a, *ca| {
                ca.* = try a.persist();
            }
        }

        const callback = try arena.create(TimerCallback);
        callback.* = .{
            .cbk = cbk,
            .window = self,
            .timer_id = timer_id,
            .args = persisted_args,
            .animation_frame = opts.animation_frame,
            // setting a repeat time of 0 is illegal, doing + 1 is a simple way to avoid that
            .repeat = if (opts.repeat) delay + 1 else null,
        };

        try page.scheduler.add(callback, TimerCallback.run, delay, .{
            .name = opts.name,
            .low_priority = opts.low_priority,
        });

        return timer_id;
    }

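    // Sketch of the wrapping timer-id counter used by createTimeout above: +% is
    // wrapping addition, so after 2^30 ids the u30 counter rolls over to 0 and the
    // getOrPut collision check reports it via found_existing.
    test "timer id wraps with +%" {
        var id: u30 = std.math.maxInt(u30);
        id +%= 1;
        try std.testing.expectEqual(@as(u30, 0), id);
    }
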
    // TODO: getComputedStyle should return a read-only CSSStyleDeclaration.
    // We currently don't have a read-only one, so we return a new instance on
    // each call.
    pub fn _getComputedStyle(_: *const Window, element: *parser.Element, pseudo_element: ?[]const u8) !CSSStyleDeclaration {
        _ = element;
        _ = pseudo_element;
        return .empty;
    }

    const ScrollToOpts = union(enum) {
        x: i32,
        opts: Opts,

        const Opts = struct {
            top: i32,
            left: i32,
            behavior: []const u8,
        };
    };
    pub fn _scrollTo(self: *Window, opts: ScrollToOpts, y: ?u32) !void {
        _ = opts;
        _ = y;

        {
            const scroll_event = try parser.eventCreate();
            defer parser.eventDestroy(scroll_event);

            try parser.eventInit(scroll_event, "scroll", .{});
            _ = try parser.eventTargetDispatchEvent(
                parser.toEventTarget(Window, self),
                scroll_event,
            );
        }

        {
            const scroll_end = try parser.eventCreate();
            defer parser.eventDestroy(scroll_end);

            try parser.eventInit(scroll_end, "scrollend", .{});
            _ = try parser.eventTargetDispatchEvent(
                parser.toEventTarget(parser.DocumentHTML, self.document),
                scroll_end,
            );
        }
    }

    // libdom's document doesn't have a parent, which is correct, but
    // breaks the event bubbling that happens for many events from
    // document -> window.
    // We need to force dispatch this event on the window, with the
    // document target.
    // In theory, we should do this for a lot of events and might need
    // to come up with a good way to solve this more generically. But
    // this specific event, and maybe a few others in the near future,
    // are blockers.
    // Worth noting that NetSurf itself appears to do something similar:
    // https://github.com/netsurf-browser/netsurf/blob/a32e1a03e1c91ee9f0aa211937dbae7a96831149/content/handlers/html/html.c#L380
    pub fn dispatchForDocumentTarget(self: *Window, evt: *parser.Event) !void {
        // we assume that this evt has already been dispatched on the document
        // and thus the target has already been set to the document.
        return self.base.redispatchEvent(evt);
    }
};

const TimerCallback = struct {
    // the id of our timer (window.timers key)
    timer_id: u31,

    // if null, we'll remove the timer_id from the window.timers lookup on run
    repeat: ?u32,

    // The JavaScript callback to execute
    cbk: Function,

    animation_frame: bool = false,

    window: *Window,

    args: []Env.JsObject = &.{},

    fn run(ctx: *anyopaque) ?u32 {
        const self: *TimerCallback = @ptrCast(@alignCast(ctx));
        if (self.repeat != null) {
            if (self.window.timers.contains(self.timer_id) == false) {
                // it was cancelled
                return null;
            }
        } else if (self.window.timers.remove(self.timer_id) == false) {
            // it was cancelled
            return null;
        }

        var result: Function.Result = undefined;

        var call: anyerror!void = undefined;
        if (self.animation_frame) {
            call = self.cbk.tryCall(void, .{self.window.performance._now()}, &result);
        } else {
            call = self.cbk.tryCall(void, self.args, &result);
        }

        call catch {
            log.warn(.user_script, "callback error", .{
                .err = result.exception,
                .stack = result.stack,
                .source = "window timeout",
            });
        };

        return self.repeat;
    }
};

const testing = @import("../../testing.zig");
test "Browser: Window" {
    try testing.htmlRunner("window/window.html");
    try testing.htmlRunner("window/frames.html");
}
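Cancellation is purely bookkeeping: _clearTimeout/_clearInterval remove the id from window.timers, and TimerCallback.run bails out when the key is gone. The same bookkeeping, sketched with a bare AutoHashMapUnmanaged (names here are illustrative, not the project API):

const std = @import("std");

test "cancelled ids disappear from the lookup" {
    const allocator = std.testing.allocator;
    var timers: std.AutoHashMapUnmanaged(u32, void) = .{};
    defer timers.deinit(allocator);

    try timers.put(allocator, 1, {});
    try std.testing.expect(timers.contains(1));

    _ = timers.remove(1); // what _clearTimeout does
    try std.testing.expect(!timers.contains(1)); // what run() checks before firing
}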
@@ -1,226 +0,0 @@
|
||||
pub const Interfaces = .{
|
||||
U32Iterator,
|
||||
};
|
||||
|
||||
pub const U32Iterator = struct {
|
||||
length: u32,
|
||||
index: u32 = 0,
|
||||
|
||||
pub const Return = struct {
|
||||
value: u32,
|
||||
done: bool,
|
||||
};
|
||||
|
||||
pub fn _next(self: *U32Iterator) Return {
|
||||
const i = self.index;
|
||||
if (i >= self.length) {
|
||||
return .{
|
||||
.value = 0,
|
||||
.done = true,
|
||||
};
|
||||
}
|
||||
|
||||
self.index = i + 1;
|
||||
return .{
|
||||
.value = i,
|
||||
.done = false,
|
||||
};
|
||||
}
|
||||
|
||||
// Iterators should be iterable. There's a [JS] example on MDN that
|
||||
// suggests this is the correct approach:
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#the_iterator_protocol
|
||||
pub fn _symbol_iterator(self: *U32Iterator) *U32Iterator {
|
||||
return self;
|
||||
}
|
||||
};
|
||||
|
||||
// A wrapper around an iterator that emits an Iterable result
|
||||
// An iterable has a next() which emits a {done: bool, value: T} result
|
||||
pub fn Iterable(comptime T: type, comptime JsName: []const u8) type {
|
||||
// The inner iterator's return type.
|
||||
// Maybe an error union.
|
||||
// Definitely an optional
|
||||
const RawValue = @typeInfo(@TypeOf(T._next)).@"fn".return_type.?;
|
||||
const CanError = @typeInfo(RawValue) == .error_union;
|
||||
|
||||
const Value = blk: {
|
||||
// Unwrap the RawValue
|
||||
var V = RawValue;
|
||||
if (CanError) {
|
||||
V = @typeInfo(V).error_union.payload;
|
||||
}
|
||||
break :blk @typeInfo(V).optional.child;
|
||||
};
|
||||
|
||||
const Result = struct {
|
||||
done: bool,
|
||||
// todo, technically, we should return undefined when done = true
|
||||
// or even omit the value;
|
||||
value: ?Value,
|
||||
};
|
||||
|
||||
const ReturnType = if (CanError) T.Error!Result else Result;
|
||||
|
||||
return struct {
|
||||
// the inner value iterator
|
||||
inner: T,
|
||||
|
||||
// Generics don't generate clean names. We can't just take the resulting
// type name and use it as the JS class name. So we always ask for
// an explicit JS class name.
|
||||
pub const js_name = JsName;
|
||||
|
||||
const Self = @This();
|
||||
|
||||
pub fn init(inner: T) Self {
|
||||
return .{ .inner = inner };
|
||||
}
|
||||
|
||||
pub fn _next(self: *Self) ReturnType {
|
||||
const value = if (comptime CanError) try self.inner._next() else self.inner._next();
|
||||
return .{ .done = value == null, .value = value };
|
||||
}
|
||||
|
||||
pub fn _symbol_iterator(self: *Self) *Self {
|
||||
return self;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// A wrapper around an iterator that emits integer/index keyed entries.
|
||||
pub fn NumericEntries(comptime T: type, comptime JsName: []const u8) type {
|
||||
// The inner iterator's return type.
|
||||
// Maybe an error union.
|
||||
// Definitely an optional
|
||||
const RawValue = @typeInfo(@TypeOf(T._next)).@"fn".return_type.?;
|
||||
const CanError = @typeInfo(RawValue) == .error_union;
|
||||
|
||||
const Value = blk: {
|
||||
// Unwrap the RawValue
|
||||
var V = RawValue;
|
||||
if (CanError) {
|
||||
V = @typeInfo(V).error_union.payload;
|
||||
}
|
||||
break :blk @typeInfo(V).optional.child;
|
||||
};
|
||||
|
||||
const ReturnType = if (CanError) T.Error!?struct { u32, Value } else ?struct { u32, Value };
|
||||
|
||||
// Avoid ambiguity. We want to expose a NumericEntries(T).Iterable, so we
// need a declaration inside here for an "Iterable", but that will conflict
// with the above Iterable generic function we have.
|
||||
const BaseIterable = Iterable;
|
||||
|
||||
return struct {
|
||||
// the inner value iterator
|
||||
inner: T,
|
||||
index: u32,
|
||||
|
||||
const Self = @This();
|
||||
|
||||
// Generics don't generate clean names. We can't just take the resulting
// type name and use it as the JS class name. So we always ask for
// an explicit JS class name.
|
||||
pub const js_name = JsName;
|
||||
|
||||
// re-exposed for when/if we compose this type into an Iterable
|
||||
pub const Error = T.Error;
|
||||
|
||||
// This iterator as an iterable
|
||||
pub const Iterable = BaseIterable(Self, JsName ++ "Iterable");
|
||||
|
||||
pub fn init(inner: T) Self {
|
||||
return .{ .inner = inner, .index = 0 };
|
||||
}
|
||||
|
||||
pub fn _next(self: *Self) ReturnType {
|
||||
const value_ = if (comptime CanError) try self.inner._next() else self.inner._next();
|
||||
const value = value_ orelse return null;
|
||||
|
||||
const index = self.index;
|
||||
self.index = index + 1;
|
||||
return .{ index, value };
|
||||
}
|
||||
|
||||
// make the iterator, iterable
|
||||
pub fn _symbol_iterator(self: *Self) Self.Iterable {
|
||||
return Self.Iterable.init(self.*);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "U32Iterator" {
|
||||
{
|
||||
var it = U32Iterator{ .length = 0 };
|
||||
try testing.expectEqual(.{ .value = 0, .done = true }, it._next());
|
||||
try testing.expectEqual(.{ .value = 0, .done = true }, it._next());
|
||||
}
|
||||
|
||||
{
|
||||
var it = U32Iterator{ .length = 3 };
|
||||
try testing.expectEqual(.{ .value = 0, .done = false }, it._next());
|
||||
try testing.expectEqual(.{ .value = 1, .done = false }, it._next());
|
||||
try testing.expectEqual(.{ .value = 2, .done = false }, it._next());
|
||||
try testing.expectEqual(.{ .value = 0, .done = true }, it._next());
|
||||
try testing.expectEqual(.{ .value = 0, .done = true }, it._next());
|
||||
}
|
||||
}
|
||||
|
||||
test "NumericEntries" {
|
||||
const it = DummyIterator{};
|
||||
var entries = NumericEntries(DummyIterator, "DummyIterator").init(it);
|
||||
|
||||
const v1 = entries._next().?;
|
||||
try testing.expectEqual(0, v1.@"0");
|
||||
try testing.expectEqual("it's", v1.@"1");
|
||||
|
||||
const v2 = entries._next().?;
|
||||
try testing.expectEqual(1, v2.@"0");
|
||||
try testing.expectEqual("over", v2.@"1");
|
||||
|
||||
const v3 = entries._next().?;
|
||||
try testing.expectEqual(2, v3.@"0");
|
||||
try testing.expectEqual("9000!!", v3.@"1");
|
||||
|
||||
try testing.expectEqual(null, entries._next());
|
||||
try testing.expectEqual(null, entries._next());
|
||||
try testing.expectEqual(null, entries._next());
|
||||
}
|
||||
|
||||
test "Iterable" {
|
||||
const it = DummyIterator{};
|
||||
var entries = Iterable(DummyIterator, "DummyIterator").init(it);
|
||||
|
||||
const v1 = entries._next();
|
||||
try testing.expectEqual(false, v1.done);
|
||||
try testing.expectEqual("it's", v1.value.?);
|
||||
|
||||
const v2 = entries._next();
|
||||
try testing.expectEqual(false, v2.done);
|
||||
try testing.expectEqual("over", v2.value.?);
|
||||
|
||||
const v3 = entries._next();
|
||||
try testing.expectEqual(false, v3.done);
|
||||
try testing.expectEqual("9000!!", v3.value.?);
|
||||
|
||||
try testing.expectEqual(true, entries._next().done);
|
||||
try testing.expectEqual(true, entries._next().done);
|
||||
try testing.expectEqual(true, entries._next().done);
|
||||
}
|
||||
|
||||
const DummyIterator = struct {
|
||||
index: u32 = 0,
|
||||
|
||||
pub fn _next(self: *DummyIterator) ?[]const u8 {
|
||||
const index = self.index;
|
||||
self.index = index + 1;
|
||||
return switch (index) {
|
||||
0 => "it's",
|
||||
1 => "over",
|
||||
2 => "9000!!",
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -1,284 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
// Used by FormData and URLSearchParams.
//
// We store the values in an ArrayList rather than a
// StringArrayHashMap([]const u8) because of the way the iterators (i.e., keys(),
|
||||
// values() and entries()) work. The FormData can contain duplicate keys, and
|
||||
// each iteration yields 1 key=>value pair. So, given:
|
||||
//
|
||||
// let f = new FormData();
|
||||
// f.append('a', '1');
|
||||
// f.append('a', '2');
|
||||
//
|
||||
// Then we'd expect f.keys(), f.values() and f.entries() to yield 2 results:
|
||||
// ['a', '1']
|
||||
// ['a', '2']
|
||||
//
|
||||
// This is much easier to do with an ArrayList than a HashMap, especially given
|
||||
// that the FormData could be mutated while iterating.
|
||||
// The downside is that most of the normal operations are O(N).
|
||||
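// A small sketch of the duplicate-key behaviour described above: append()
// keeps both entries for the same key, get() returns the first match, and
// getAll() returns every value in insertion order.
test "duplicate keys are preserved" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    var list = List{};
    try list.append(arena, "a", "1");
    try list.append(arena, "a", "2");

    try std.testing.expectEqual(@as(usize, 2), list.count());
    try std.testing.expectEqualStrings("1", list.get("a").?);
    const all = try list.getAll(arena, "a");
    try std.testing.expectEqual(@as(usize, 2), all.len);
}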
pub const List = struct {
|
||||
entries: std.ArrayListUnmanaged(KeyValue) = .{},
|
||||
|
||||
pub fn init(entries: std.ArrayListUnmanaged(KeyValue)) List {
|
||||
return .{ .entries = entries };
|
||||
}
|
||||
|
||||
pub fn clone(self: *const List, arena: Allocator) !List {
|
||||
const entries = self.entries.items;
|
||||
|
||||
var c: std.ArrayListUnmanaged(KeyValue) = .{};
|
||||
try c.ensureTotalCapacity(arena, entries.len);
|
||||
for (entries) |kv| {
|
||||
c.appendAssumeCapacity(kv);
|
||||
}
|
||||
|
||||
return .{ .entries = c };
|
||||
}
|
||||
|
||||
pub fn fromOwnedSlice(entries: []KeyValue) List {
|
||||
return .{
|
||||
.entries = std.ArrayListUnmanaged(KeyValue).fromOwnedSlice(entries),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn count(self: *const List) usize {
|
||||
return self.entries.items.len;
|
||||
}
|
||||
|
||||
pub fn get(self: *const List, key: []const u8) ?[]const u8 {
|
||||
const result = self.find(key) orelse return null;
|
||||
return result.entry.value;
|
||||
}
|
||||
|
||||
pub fn getAll(self: *const List, arena: Allocator, key: []const u8) ![]const []const u8 {
|
||||
var arr: std.ArrayListUnmanaged([]const u8) = .empty;
|
||||
for (self.entries.items) |entry| {
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
try arr.append(arena, entry.value);
|
||||
}
|
||||
}
|
||||
return arr.items;
|
||||
}
|
||||
|
||||
pub fn has(self: *const List, key: []const u8) bool {
|
||||
return self.find(key) != null;
|
||||
}
|
||||
|
||||
pub fn set(self: *List, arena: Allocator, key: []const u8, value: []const u8) !void {
|
||||
self.delete(key);
|
||||
return self.append(arena, key, value);
|
||||
}
|
||||
|
||||
pub fn append(self: *List, arena: Allocator, key: []const u8, value: []const u8) !void {
|
||||
return self.appendOwned(arena, try arena.dupe(u8, key), try arena.dupe(u8, value));
|
||||
}
|
||||
|
||||
pub fn appendOwned(self: *List, arena: Allocator, key: []const u8, value: []const u8) !void {
|
||||
return self.entries.append(arena, .{
|
||||
.key = key,
|
||||
.value = value,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn appendOwnedAssumeCapacity(self: *List, key: []const u8, value: []const u8) void {
|
||||
self.entries.appendAssumeCapacity(.{
|
||||
.key = key,
|
||||
.value = value,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn delete(self: *List, key: []const u8) void {
|
||||
var i: usize = 0;
|
||||
while (i < self.entries.items.len) {
|
||||
const entry = self.entries.items[i];
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
_ = self.entries.swapRemove(i);
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deleteKeyValue(self: *List, key: []const u8, value: []const u8) void {
|
||||
var i: usize = 0;
|
||||
while (i < self.entries.items.len) {
|
||||
const entry = self.entries.items[i];
|
||||
if (std.mem.eql(u8, key, entry.key) and std.mem.eql(u8, value, entry.value)) {
|
||||
_ = self.entries.swapRemove(i);
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn keyIterator(self: *const List) KeyIterator {
|
||||
return .{ .entries = &self.entries };
|
||||
}
|
||||
|
||||
pub fn valueIterator(self: *const List) ValueIterator {
|
||||
return .{ .entries = &self.entries };
|
||||
}
|
||||
|
||||
pub fn entryIterator(self: *const List) EntryIterator {
|
||||
return .{ .entries = &self.entries };
|
||||
}
|
||||
|
||||
pub fn ensureTotalCapacity(self: *List, arena: Allocator, len: usize) !void {
|
||||
return self.entries.ensureTotalCapacity(arena, len);
|
||||
}
|
||||
|
||||
const FindResult = struct {
|
||||
index: usize,
|
||||
entry: KeyValue,
|
||||
};
|
||||
|
||||
fn find(self: *const List, key: []const u8) ?FindResult {
|
||||
for (self.entries.items, 0..) |entry, i| {
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
return .{ .index = i, .entry = entry };
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
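// A minimal usage sketch for the duplicate-key semantics described in the
// comment above (hypothetical test, not part of the original file; assumes
// only std and the declarations in this file).
test "List: duplicate keys (sketch)" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    var list = List{};
    try list.append(alloc, "a", "1");
    try list.append(alloc, "a", "2");

    // get() returns the first match, getAll() returns every match.
    try std.testing.expectEqualStrings("1", list.get("a").?);
    try std.testing.expectEqual(@as(usize, 2), (try list.getAll(alloc, "a")).len);

    // The entry iterator yields one key=>value pair per stored entry, so
    // duplicate keys show up once per appended value.
    var it = list.entryIterator();
    var n: usize = 0;
    while (it._next()) |_| n += 1;
    try std.testing.expectEqual(@as(usize, 2), n);
}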
|
||||
|
||||
pub const KeyValue = struct {
|
||||
key: []const u8,
|
||||
value: []const u8,
|
||||
};
|
||||
|
||||
pub const KeyIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(KeyValue),
|
||||
|
||||
pub fn _next(self: *KeyIterator) ?[]const u8 {
|
||||
const entries = self.entries.items;
|
||||
|
||||
const index = self.index;
|
||||
if (index == entries.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
return entries[index].key;
|
||||
}
|
||||
};
|
||||
|
||||
pub const ValueIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(KeyValue),
|
||||
|
||||
pub fn _next(self: *ValueIterator) ?[]const u8 {
|
||||
const entries = self.entries.items;
|
||||
|
||||
const index = self.index;
|
||||
if (index == entries.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
return entries[index].value;
|
||||
}
|
||||
};
|
||||
|
||||
pub const EntryIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(KeyValue),
|
||||
|
||||
pub fn _next(self: *EntryIterator) ?struct { []const u8, []const u8 } {
|
||||
const entries = self.entries.items;
|
||||
|
||||
const index = self.index;
|
||||
if (index == entries.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
const entry = entries[index];
|
||||
return .{ entry.key, entry.value };
|
||||
}
|
||||
};
|
||||
|
||||
const URLEncodeMode = enum {
|
||||
form,
|
||||
query,
|
||||
};
|
||||
|
||||
pub fn urlEncode(list: List, mode: URLEncodeMode, writer: anytype) !void {
|
||||
const entries = list.entries.items;
|
||||
if (entries.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try urlEncodeEntry(entries[0], mode, writer);
|
||||
for (entries[1..]) |entry| {
|
||||
try writer.writeByte('&');
|
||||
try urlEncodeEntry(entry, mode, writer);
|
||||
}
|
||||
}
|
||||
|
||||
fn urlEncodeEntry(entry: KeyValue, mode: URLEncodeMode, writer: anytype) !void {
|
||||
try urlEncodeValue(entry.key, mode, writer);
|
||||
|
||||
// For a form, an empty value is encoded as "spice=",
|
||||
// but for a query it is encoded as just "spice".
|
||||
if (mode == .query and entry.value.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try writer.writeByte('=');
|
||||
try urlEncodeValue(entry.value, mode, writer);
|
||||
}
|
||||
|
||||
fn urlEncodeValue(value: []const u8, mode: URLEncodeMode, writer: anytype) !void {
|
||||
if (!urlEncodeShouldEscape(value, mode)) {
|
||||
return writer.writeAll(value);
|
||||
}
|
||||
|
||||
for (value) |b| {
|
||||
if (urlEncodeUnreserved(b, mode)) {
|
||||
try writer.writeByte(b);
|
||||
} else if (b == ' ' and mode == .form) {
|
||||
// for form submission, space should be encoded as '+', not '%20'
|
||||
try writer.writeByte('+');
|
||||
} else {
|
||||
try writer.print("%{X:0>2}", .{b});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn urlEncodeShouldEscape(value: []const u8, mode: URLEncodeMode) bool {
|
||||
for (value) |b| {
|
||||
if (!urlEncodeUnreserved(b, mode)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
fn urlEncodeUnreserved(b: u8, mode: URLEncodeMode) bool {
|
||||
return switch (b) {
|
||||
'A'...'Z', 'a'...'z', '0'...'9', '-', '.', '_' => true,
|
||||
'~' => mode == .query,
|
||||
else => false,
|
||||
};
|
||||
}
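// A sketch of the form vs. query differences noted in the comments above
// (hypothetical test, not part of the original file; assumes the 0.14-era
// std.ArrayListUnmanaged(u8).writer(alloc) adapter is available): spaces
// become '+' only in form mode, and empty values keep their '=' only in
// form mode.
test "urlEncode: form vs query (sketch)" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    var list = List{};
    try list.append(alloc, "q", "hello world");
    try list.append(alloc, "spice", "");

    var buf: std.ArrayListUnmanaged(u8) = .empty;
    try urlEncode(list, .form, buf.writer(alloc));
    try std.testing.expectEqualStrings("q=hello+world&spice=", buf.items);

    buf.clearRetainingCapacity();
    try urlEncode(list, .query, buf.writer(alloc));
    try std.testing.expectEqualStrings("q=hello%20world&spice", buf.items);
}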
|
||||
97
src/browser/loader.zig
Normal file
@@ -0,0 +1,97 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Client = @import("../http/Client.zig");
|
||||
|
||||
const user_agent = @import("browser.zig").user_agent;
|
||||
|
||||
pub const Loader = struct {
|
||||
client: Client,
|
||||
// use 64KB for headers buffer size.
|
||||
server_header_buffer: [1024 * 64]u8 = undefined,
|
||||
|
||||
pub const Response = struct {
|
||||
alloc: std.mem.Allocator,
|
||||
req: *Client.Request,
|
||||
|
||||
pub fn deinit(self: *Response) void {
|
||||
self.req.deinit();
|
||||
self.alloc.destroy(self.req);
|
||||
}
|
||||
};
|
||||
|
||||
pub fn init(alloc: std.mem.Allocator) Loader {
|
||||
return Loader{
|
||||
.client = Client{
|
||||
.allocator = alloc,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Loader) void {
|
||||
self.client.deinit();
|
||||
}
|
||||
|
||||
// see
|
||||
// https://ziglang.org/documentation/master/std/#A;std:http.Client.fetch
|
||||
// for reference.
|
||||
// The caller is responsible for calling `deinit()` on the `Response`.
|
||||
pub fn get(self: *Loader, alloc: std.mem.Allocator, uri: std.Uri) !Response {
|
||||
var resp = Response{
|
||||
.alloc = alloc,
|
||||
.req = try alloc.create(Client.Request),
|
||||
};
|
||||
errdefer alloc.destroy(resp.req);
|
||||
|
||||
resp.req.* = try self.client.open(.GET, uri, .{
|
||||
.headers = .{
|
||||
.user_agent = .{ .override = user_agent },
|
||||
},
|
||||
.extra_headers = &.{
|
||||
.{ .name = "Accept", .value = "*/*" },
|
||||
.{ .name = "Accept-Language", .value = "en-US,en;q=0.5" },
|
||||
},
|
||||
.server_header_buffer = &self.server_header_buffer,
|
||||
});
|
||||
errdefer resp.req.deinit();
|
||||
|
||||
try resp.req.send();
|
||||
try resp.req.finish();
|
||||
try resp.req.wait();
|
||||
|
||||
return resp;
|
||||
}
|
||||
};
|
||||
|
||||
test "loader: get" {
|
||||
const alloc = std.testing.allocator;
|
||||
var loader = Loader.init(alloc);
|
||||
defer loader.deinit();
|
||||
|
||||
const uri = try std.Uri.parse("http://localhost:9582/loader");
|
||||
var result = try loader.get(alloc, uri);
|
||||
defer result.deinit();
|
||||
|
||||
try std.testing.expectEqual(.ok, result.req.response.status);
|
||||
|
||||
var res: [128]u8 = undefined;
|
||||
const size = try result.req.readAll(&res);
|
||||
try std.testing.expectEqual(6, size);
|
||||
try std.testing.expectEqualStrings("Hello!", res[0..6]);
|
||||
}
|
||||
@@ -1,110 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
// This file provides the glue between mimalloc heap allocation and libdom memory
|
||||
// management.
|
||||
// We replace libdom's default allocation functions with mimalloc heap
|
||||
// allocation so that all the memory used can be freed at once, like an arena.
|
||||
const std = @import("std");
|
||||
|
||||
const c = @cImport({
|
||||
@cInclude("mimalloc.h");
|
||||
});
|
||||
|
||||
var heap: ?*c.mi_heap_t = null;
|
||||
|
||||
pub fn create() void {
|
||||
std.debug.assert(heap == null);
|
||||
heap = c.mi_heap_new();
|
||||
std.debug.assert(heap != null);
|
||||
}
|
||||
|
||||
pub fn destroy() void {
|
||||
std.debug.assert(heap != null);
|
||||
c.mi_heap_destroy(heap.?);
|
||||
heap = null;
|
||||
}
|
||||
|
||||
pub fn getRSS() i64 {
|
||||
if (@import("builtin").mode != .Debug) {
|
||||
// just don't trust my implementation, plus a caller might not know
|
||||
// that this requires parsing some unstructured data
|
||||
@compileError("Only available in debug builds");
|
||||
}
|
||||
var buf: [1024 * 8]u8 = undefined;
|
||||
var fba = std.heap.FixedBufferAllocator.init(&buf);
|
||||
var writer = std.Io.Writer.Allocating.init(fba.allocator());
|
||||
|
||||
c.mi_stats_print_out(struct {
|
||||
fn print(msg: [*c]const u8, data: ?*anyopaque) callconv(.c) void {
|
||||
const w: *std.Io.Writer = @ptrCast(@alignCast(data.?));
|
||||
w.writeAll(std.mem.span(msg)) catch |err| {
|
||||
std.debug.print("Failed to write mimalloc data: {}\n", .{err});
|
||||
};
|
||||
}
|
||||
}.print, &writer.writer);
|
||||
|
||||
const data = writer.written();
|
||||
const index = std.mem.indexOf(u8, data, "rss: ") orelse return -1;
|
||||
const sep = std.mem.indexOfScalarPos(u8, data, index + 5, ' ') orelse return -2;
|
||||
const value = std.fmt.parseFloat(f64, data[index + 5 .. sep]) catch return -3;
|
||||
const unit = data[sep + 1 ..];
|
||||
if (std.mem.startsWith(u8, unit, "KiB,")) {
|
||||
return @as(i64, @intFromFloat(value)) * 1024;
|
||||
}
|
||||
|
||||
if (std.mem.startsWith(u8, unit, "MiB,")) {
|
||||
return @as(i64, @intFromFloat(value)) * 1024 * 1024;
|
||||
}
|
||||
|
||||
if (std.mem.startsWith(u8, unit, "GiB,")) {
|
||||
return @as(i64, @intFromFloat(value)) * 1024 * 1024 * 1024;
|
||||
}
|
||||
|
||||
return -4;
|
||||
}
|
||||
|
||||
pub export fn m_alloc(size: usize) callconv(.c) ?*anyopaque {
|
||||
std.debug.assert(heap != null);
|
||||
return c.mi_heap_malloc(heap.?, size);
|
||||
}
|
||||
|
||||
pub export fn re_alloc(ptr: ?*anyopaque, size: usize) callconv(.c) ?*anyopaque {
|
||||
std.debug.assert(heap != null);
|
||||
return c.mi_heap_realloc(heap.?, ptr, size);
|
||||
}
|
||||
|
||||
pub export fn c_alloc(nmemb: usize, size: usize) callconv(.c) ?*anyopaque {
|
||||
std.debug.assert(heap != null);
|
||||
return c.mi_heap_calloc(heap.?, nmemb, size);
|
||||
}
|
||||
|
||||
pub export fn str_dup(s: [*c]const u8) callconv(.c) [*c]u8 {
|
||||
std.debug.assert(heap != null);
|
||||
return c.mi_heap_strdup(heap.?, s);
|
||||
}
|
||||
|
||||
pub export fn strn_dup(s: [*c]const u8, size: usize) callconv(.c) [*c]u8 {
|
||||
std.debug.assert(heap != null);
|
||||
return c.mi_heap_strndup(heap.?, s, size);
|
||||
}
|
||||
|
||||
// No-op: use destroy() to free all the allocated memory at once.
|
||||
pub export fn f_ree(_: ?*anyopaque) callconv(.c) void {
|
||||
return;
|
||||
}
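// A lifecycle sketch of the arena-like usage described at the top of this
// file (hypothetical test, not part of the original file): every libdom
// allocation goes through the heap created here, f_ree is a no-op, and
// destroy() releases everything at once.
test "mimalloc glue: lifecycle (sketch)" {
    create();
    defer destroy();

    const ptr = m_alloc(64);
    try std.testing.expect(ptr != null);
    // No matching free call: the memory is reclaimed when destroy() tears
    // the whole heap down.
}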
|
||||
@@ -17,88 +17,46 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
pub const Mime = struct {
|
||||
content_type: ContentType,
|
||||
params: []const u8 = "",
|
||||
// IANA defines max. charset value length as 40.
|
||||
// We keep 41 for null-termination since the HTML parser expects it in this format.
|
||||
charset: [41]u8 = default_charset,
|
||||
|
||||
/// String "UTF-8" continued by null characters.
|
||||
pub const default_charset = .{ 'U', 'T', 'F', '-', '8' } ++ .{0} ** 36;
|
||||
|
||||
/// Mime with unknown Content-Type, empty params and empty charset.
|
||||
pub const unknown = Mime{ .content_type = .{ .unknown = {} } };
|
||||
charset: ?[]const u8 = null,
|
||||
arena: std.heap.ArenaAllocator,
|
||||
|
||||
pub const ContentTypeEnum = enum {
|
||||
text_xml,
|
||||
text_html,
|
||||
text_javascript,
|
||||
text_plain,
|
||||
text_css,
|
||||
application_json,
|
||||
unknown,
|
||||
other,
|
||||
};
|
||||
|
||||
pub const ContentType = union(ContentTypeEnum) {
|
||||
text_xml: void,
|
||||
text_html: void,
|
||||
text_javascript: void,
|
||||
text_plain: void,
|
||||
text_css: void,
|
||||
application_json: void,
|
||||
unknown: void,
|
||||
other: struct { type: []const u8, sub_type: []const u8 },
|
||||
};
|
||||
|
||||
/// Returns the null-terminated charset value.
|
||||
pub fn charsetString(mime: *const Mime) [:0]const u8 {
|
||||
return @ptrCast(&mime.charset);
|
||||
}
|
||||
|
||||
/// Removes quotes of value if quotes are given.
|
||||
///
|
||||
/// Currently we don't validate the charset.
|
||||
/// See section 2.3 Naming Requirements:
|
||||
/// https://datatracker.ietf.org/doc/rfc2978/
|
||||
fn parseCharset(value: []const u8) error{ CharsetTooBig, Invalid }![]const u8 {
|
||||
// Cannot be larger than 40.
|
||||
// https://datatracker.ietf.org/doc/rfc2978/
|
||||
if (value.len > 40) return error.CharsetTooBig;
|
||||
|
||||
// If the first char is a quote, look for a pair.
|
||||
if (value[0] == '"') {
|
||||
if (value.len < 3 or value[value.len - 1] != '"') {
|
||||
return error.Invalid;
|
||||
}
|
||||
|
||||
return value[1 .. value.len - 1];
|
||||
}
|
||||
|
||||
// No quotes.
|
||||
return value;
|
||||
}
|
||||
|
||||
pub fn parse(input: []u8) !Mime {
|
||||
pub fn parse(allocator: Allocator, input: []const u8) !Mime {
|
||||
if (input.len > 255) {
|
||||
return error.TooBig;
|
||||
}
|
||||
|
||||
// Zig's trim API always returns `[]const u8`,
|
||||
// even if the input type is `[]u8`. @constCast is safe here.
|
||||
var normalized = @constCast(std.mem.trim(u8, input, &std.ascii.whitespace));
|
||||
_ = std.ascii.lowerString(normalized, normalized);
|
||||
var arena = std.heap.ArenaAllocator.init(allocator);
|
||||
errdefer arena.deinit();
|
||||
|
||||
const content_type, const type_len = try parseContentType(normalized);
|
||||
if (type_len >= normalized.len) {
|
||||
return .{ .content_type = content_type };
|
||||
var trimmed = trim(input);
|
||||
|
||||
const content_type, const type_len = try parseContentType(trimmed);
|
||||
if (type_len >= trimmed.len) {
|
||||
return .{ .arena = arena, .content_type = content_type };
|
||||
}
|
||||
|
||||
const params = trimLeft(normalized[type_len..]);
|
||||
const params = trimLeft(trimmed[type_len..]);
|
||||
|
||||
var charset: [41]u8 = undefined;
|
||||
var charset: ?[]const u8 = null;
|
||||
|
||||
var it = std.mem.splitScalar(u8, params, ';');
|
||||
while (it.next()) |attr| {
|
||||
@@ -110,148 +68,86 @@ pub const Mime = struct {
|
||||
return error.Invalid;
|
||||
}
|
||||
|
||||
const attribute_name = std.meta.stringToEnum(enum {
|
||||
charset,
|
||||
}, name) orelse continue;
|
||||
|
||||
switch (attribute_name) {
|
||||
.charset => {
|
||||
if (value.len == 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
const attribute_value = try parseCharset(value);
|
||||
@memcpy(charset[0..attribute_value.len], attribute_value);
|
||||
// Null-terminate right after attribute value.
|
||||
charset[attribute_value.len] = 0;
|
||||
switch (name.len) {
|
||||
7 => if (isCaseEqual("charset", name)) {
|
||||
charset = try parseValue(arena.allocator(), value);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
return .{
|
||||
.arena = arena,
|
||||
.params = params,
|
||||
.charset = charset,
|
||||
.content_type = content_type,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn sniff(body: []const u8) ?Mime {
|
||||
// 0x0C is form feed
|
||||
const content = std.mem.trimLeft(u8, body, &.{ ' ', '\t', '\n', '\r', 0x0C });
|
||||
if (content.len == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (content[0] != '<') {
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xEF, 0xBB, 0xBF })) {
|
||||
// UTF-8 BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
}
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xFE, 0xFF })) {
|
||||
// UTF-16 big-endian BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
}
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xFF, 0xFE })) {
|
||||
// UTF-16 little-endian BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// The longest prefix we have is "<!DOCTYPE HTML ", 15 bytes. If we're
|
||||
// here, we already know content[0] == '<', so we can skip that. So 14
|
||||
// bytes.
|
||||
|
||||
// 14, not 15, because we don't need the leading '<'
|
||||
var buf: [14]u8 = undefined;
|
||||
|
||||
const stripped = content[1..];
|
||||
const prefix_len = @min(stripped.len, buf.len);
|
||||
const prefix = std.ascii.lowerString(&buf, stripped[0..prefix_len]);
|
||||
|
||||
// we already know it starts with a <
|
||||
const known_prefixes = [_]struct { []const u8, ContentType }{
|
||||
.{ "!doctype html", .{ .text_html = {} } },
|
||||
.{ "html", .{ .text_html = {} } },
|
||||
.{ "script", .{ .text_html = {} } },
|
||||
.{ "iframe", .{ .text_html = {} } },
|
||||
.{ "h1", .{ .text_html = {} } },
|
||||
.{ "div", .{ .text_html = {} } },
|
||||
.{ "font", .{ .text_html = {} } },
|
||||
.{ "table", .{ .text_html = {} } },
|
||||
.{ "a", .{ .text_html = {} } },
|
||||
.{ "style", .{ .text_html = {} } },
|
||||
.{ "title", .{ .text_html = {} } },
|
||||
.{ "b", .{ .text_html = {} } },
|
||||
.{ "body", .{ .text_html = {} } },
|
||||
.{ "br", .{ .text_html = {} } },
|
||||
.{ "p", .{ .text_html = {} } },
|
||||
.{ "!--", .{ .text_html = {} } },
|
||||
.{ "xml", .{ .text_xml = {} } },
|
||||
};
|
||||
inline for (known_prefixes) |kp| {
|
||||
const known_prefix = kp.@"0";
|
||||
if (std.mem.startsWith(u8, prefix, known_prefix) and prefix.len > known_prefix.len) {
|
||||
const next = prefix[known_prefix.len];
|
||||
// a "tag-terminating-byte"
|
||||
if (next == ' ' or next == '>') {
|
||||
return .{ .content_type = kp.@"1" };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
pub fn deinit(self: *Mime) void {
|
||||
self.arena.deinit();
|
||||
}
|
||||
|
||||
pub fn isHTML(self: *const Mime) bool {
|
||||
return self.content_type == .text_html;
|
||||
}
|
||||
|
||||
// we expect value to be lowercase
|
||||
fn parseContentType(value: []const u8) !struct { ContentType, usize } {
|
||||
const end = std.mem.indexOfScalarPos(u8, value, 0, ';') orelse value.len;
|
||||
const type_name = trimRight(value[0..end]);
|
||||
const attribute_start = end + 1;
|
||||
|
||||
if (std.meta.stringToEnum(enum {
|
||||
@"text/xml",
|
||||
@"text/html",
|
||||
@"text/css",
|
||||
@"text/plain",
|
||||
|
||||
@"text/javascript",
|
||||
@"application/javascript",
|
||||
@"application/x-javascript",
|
||||
|
||||
@"application/json",
|
||||
}, type_name)) |known_type| {
|
||||
const ct: ContentType = switch (known_type) {
|
||||
.@"text/xml" => .{ .text_xml = {} },
|
||||
.@"text/html" => .{ .text_html = {} },
|
||||
.@"text/javascript", .@"application/javascript", .@"application/x-javascript" => .{ .text_javascript = {} },
|
||||
.@"text/plain" => .{ .text_plain = {} },
|
||||
.@"text/css" => .{ .text_css = {} },
|
||||
.@"application/json" => .{ .application_json = {} },
|
||||
};
|
||||
return .{ ct, attribute_start };
|
||||
}
|
||||
|
||||
const separator = std.mem.indexOfScalarPos(u8, type_name, 0, '/') orelse return error.Invalid;
|
||||
const separator = std.mem.indexOfScalarPos(u8, value, 0, '/') orelse {
|
||||
return error.Invalid;
|
||||
};
|
||||
const end = std.mem.indexOfScalarPos(u8, value, separator, ';') orelse blk: {
|
||||
break :blk value.len;
|
||||
};
|
||||
|
||||
const main_type = value[0..separator];
|
||||
const sub_type = trimRight(value[separator + 1 .. end]);
|
||||
|
||||
if (main_type.len == 0 or validType(main_type) == false) {
|
||||
if (parseCommonContentType(main_type, sub_type)) |content_type| {
|
||||
return .{ content_type, end + 1 };
|
||||
}
|
||||
|
||||
if (main_type.len == 0) {
|
||||
return error.Invalid;
|
||||
}
|
||||
if (sub_type.len == 0 or validType(sub_type) == false) {
|
||||
if (validType(main_type) == false) {
|
||||
return error.Invalid;
|
||||
}
|
||||
|
||||
return .{ .{ .other = .{
|
||||
if (sub_type.len == 0) {
|
||||
return error.Invalid;
|
||||
}
|
||||
if (validType(sub_type) == false) {
|
||||
return error.Invalid;
|
||||
}
|
||||
|
||||
const content_type = ContentType{ .other = .{
|
||||
.type = main_type,
|
||||
.sub_type = sub_type,
|
||||
} }, attribute_start };
|
||||
} };
|
||||
|
||||
return .{ content_type, end + 1 };
|
||||
}
|
||||
|
||||
fn parseCommonContentType(main_type: []const u8, sub_type: []const u8) ?ContentType {
|
||||
switch (main_type.len) {
|
||||
4 => if (isCaseEqual("text", main_type)) {
|
||||
switch (sub_type.len) {
|
||||
3 => if (isCaseEqual("xml", sub_type)) {
|
||||
return .{ .text_xml = {} };
|
||||
},
|
||||
4 => if (isCaseEqual("html", sub_type)) {
|
||||
return .{ .text_html = {} };
|
||||
},
|
||||
5 => if (isCaseEqual("plain", sub_type)) {
|
||||
return .{ .text_plain = {} };
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
const T_SPECIAL = blk: {
|
||||
@@ -262,6 +158,56 @@ pub const Mime = struct {
|
||||
break :blk v;
|
||||
};
|
||||
|
||||
fn parseValue(allocator: Allocator, value: []const u8) ![]const u8 {
|
||||
if (value[0] != '"') {
|
||||
return value;
|
||||
}
|
||||
|
||||
// 1 to skip the opening quote
|
||||
var value_pos: usize = 1;
|
||||
var unescaped_len: usize = 0;
|
||||
const last = value.len - 1;
|
||||
|
||||
while (value_pos < value.len) {
|
||||
switch (value[value_pos]) {
|
||||
'"' => break,
|
||||
'\\' => {
|
||||
if (value_pos == last) {
|
||||
return error.Invalid;
|
||||
}
|
||||
const next = value[value_pos + 1];
|
||||
if (T_SPECIAL[next] == false) {
|
||||
return error.Invalid;
|
||||
}
|
||||
value_pos += 2;
|
||||
},
|
||||
else => value_pos += 1,
|
||||
}
|
||||
unescaped_len += 1;
|
||||
}
|
||||
|
||||
if (unescaped_len == 0) {
|
||||
return error.Invalid;
|
||||
}
|
||||
|
||||
value_pos = 1;
|
||||
const owned = try allocator.alloc(u8, unescaped_len);
|
||||
for (0..unescaped_len) |i| {
|
||||
switch (value[value_pos]) {
|
||||
'"' => break,
|
||||
'\\' => {
|
||||
owned[i] = value[value_pos + 1];
|
||||
value_pos += 2;
|
||||
},
|
||||
else => |c| {
|
||||
owned[i] = c;
|
||||
value_pos += 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
return owned;
|
||||
}
|
||||
|
||||
const VALID_CODEPOINTS = blk: {
|
||||
var v: [256]bool = undefined;
|
||||
for (0..256) |i| {
|
||||
@@ -282,6 +228,10 @@ pub const Mime = struct {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn trim(s: []const u8) []const u8 {
|
||||
return std.mem.trim(u8, s, &std.ascii.whitespace);
|
||||
}
|
||||
|
||||
fn trimLeft(s: []const u8) []const u8 {
|
||||
return std.mem.trimLeft(u8, s, &std.ascii.whitespace);
|
||||
}
|
||||
@@ -289,12 +239,28 @@ pub const Mime = struct {
|
||||
fn trimRight(s: []const u8) []const u8 {
|
||||
return std.mem.trimRight(u8, s, &std.ascii.whitespace);
|
||||
}
|
||||
|
||||
fn isCaseEqual(comptime target: anytype, value: []const u8) bool {
|
||||
// - 8 because we don't care about the sentinel
|
||||
const bit_len = @bitSizeOf(@TypeOf(target.*)) - 8;
|
||||
const byte_len = bit_len / 8;
|
||||
|
||||
const T = @Type(.{ .Int = .{
|
||||
.bits = bit_len,
|
||||
.signedness = .unsigned,
|
||||
} });
|
||||
|
||||
const bit_target: T = @bitCast(@as(*const [byte_len]u8, target).*);
|
||||
|
||||
if (@as(T, @bitCast(value[0..byte_len].*)) == bit_target) {
|
||||
return true;
|
||||
}
|
||||
return std.ascii.eqlIgnoreCase(value, target);
|
||||
}
|
||||
};
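// A minimal sketch of the fixed-size charset buffer described above
// (hypothetical test, not part of the original file; it targets the
// in-place `parse(input: []u8)` / `[41]u8` charset variant shown in this
// diff): the parsed value is null-terminated inside the buffer, so
// charsetString() can hand a sentinel-terminated slice to the HTML parser.
test "Mime: charset buffer (sketch)" {
    var input = "text/html; charset=utf-8".*;
    const mime = try Mime.parse(&input);
    try std.testing.expect(mime.isHTML());
    try std.testing.expectEqualStrings("utf-8", mime.charsetString()[0..5]);
}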
|
||||
|
||||
const testing = @import("../testing.zig");
|
||||
test "Mime: invalid" {
|
||||
defer testing.reset();
|
||||
|
||||
const testing = std.testing;
|
||||
test "Mime: invalid " {
|
||||
const invalids = [_][]const u8{
|
||||
"",
|
||||
"text",
|
||||
@@ -310,17 +276,15 @@ test "Mime: invalid" {
|
||||
"text/html; charset=\"\"",
|
||||
"text/html; charset=\"",
|
||||
"text/html; charset=\"\\",
|
||||
"text/html; charset=\"\\a\"", // invalid to escape non special characters
|
||||
};
|
||||
|
||||
for (invalids) |invalid| {
|
||||
const mutable_input = try testing.arena_allocator.dupe(u8, invalid);
|
||||
try testing.expectError(error.Invalid, Mime.parse(mutable_input));
|
||||
try testing.expectError(error.Invalid, Mime.parse(undefined, invalid));
|
||||
}
|
||||
}
|
||||
|
||||
test "Mime: parse common" {
|
||||
defer testing.reset();
|
||||
|
||||
try expect(.{ .content_type = .{ .text_xml = {} } }, "text/xml");
|
||||
try expect(.{ .content_type = .{ .text_html = {} } }, "text/html");
|
||||
try expect(.{ .content_type = .{ .text_plain = {} } }, "text/plain");
|
||||
@@ -340,35 +304,24 @@ test "Mime: parse common" {
|
||||
try expect(.{ .content_type = .{ .text_xml = {} } }, " TeXT/xml");
|
||||
try expect(.{ .content_type = .{ .text_html = {} } }, "teXt/HtML ;");
|
||||
try expect(.{ .content_type = .{ .text_plain = {} } }, "tExT/PlAiN;");
|
||||
|
||||
try expect(.{ .content_type = .{ .text_javascript = {} } }, "text/javascript");
|
||||
try expect(.{ .content_type = .{ .text_javascript = {} } }, "Application/JavaScript");
|
||||
try expect(.{ .content_type = .{ .text_javascript = {} } }, "application/x-javascript");
|
||||
|
||||
try expect(.{ .content_type = .{ .application_json = {} } }, "application/json");
|
||||
try expect(.{ .content_type = .{ .text_css = {} } }, "text/css");
|
||||
}
|
||||
|
||||
test "Mime: parse uncommon" {
|
||||
defer testing.reset();
|
||||
|
||||
const text_csv = Expectation{
|
||||
.content_type = .{ .other = .{ .type = "text", .sub_type = "csv" } },
|
||||
const text_javascript = Expectation{
|
||||
.content_type = .{ .other = .{ .type = "text", .sub_type = "javascript" } },
|
||||
};
|
||||
try expect(text_csv, "text/csv");
|
||||
try expect(text_csv, "text/csv;");
|
||||
try expect(text_csv, " text/csv\t ");
|
||||
try expect(text_csv, " text/csv\t ;");
|
||||
try expect(text_javascript, "text/javascript");
|
||||
try expect(text_javascript, "text/javascript;");
|
||||
try expect(text_javascript, " text/javascript\t ");
|
||||
try expect(text_javascript, " text/javascript\t ;");
|
||||
|
||||
try expect(
|
||||
.{ .content_type = .{ .other = .{ .type = "text", .sub_type = "csv" } } },
|
||||
"Text/CSV",
|
||||
.{ .content_type = .{ .other = .{ .type = "Text", .sub_type = "Javascript" } } },
|
||||
"Text/Javascript",
|
||||
);
|
||||
}
|
||||
|
||||
test "Mime: parse charset" {
|
||||
defer testing.reset();
|
||||
|
||||
try expect(.{
|
||||
.content_type = .{ .text_xml = {} },
|
||||
.charset = "utf-8",
|
||||
@@ -379,34 +332,20 @@ test "Mime: parse charset" {
|
||||
.content_type = .{ .text_xml = {} },
|
||||
.charset = "utf-8",
|
||||
.params = "charset=\"utf-8\"",
|
||||
}, "text/xml;charset=\"UTF-8\"");
|
||||
|
||||
try expect(.{
|
||||
.content_type = .{ .text_html = {} },
|
||||
.charset = "iso-8859-1",
|
||||
.params = "charset=\"iso-8859-1\"",
|
||||
}, "text/html; charset=\"iso-8859-1\"");
|
||||
|
||||
try expect(.{
|
||||
.content_type = .{ .text_html = {} },
|
||||
.charset = "iso-8859-1",
|
||||
.params = "charset=\"iso-8859-1\"",
|
||||
}, "text/html; charset=\"ISO-8859-1\"");
|
||||
}, "text/xml;charset=\"utf-8\"");
|
||||
|
||||
try expect(.{
|
||||
.content_type = .{ .text_xml = {} },
|
||||
.charset = "custom-non-standard-charset-value",
|
||||
.params = "charset=\"custom-non-standard-charset-value\"",
|
||||
}, "text/xml;charset=\"custom-non-standard-charset-value\"");
|
||||
.charset = "\\ \" ",
|
||||
.params = "charset=\"\\\\ \\\" \"",
|
||||
}, "text/xml;charset=\"\\\\ \\\" \" ");
|
||||
}
|
||||
|
||||
test "Mime: isHTML" {
|
||||
defer testing.reset();
|
||||
|
||||
const isHTML = struct {
|
||||
fn isHTML(expected: bool, input: []const u8) !void {
|
||||
const mutable_input = try testing.arena_allocator.dupe(u8, input);
|
||||
var mime = try Mime.parse(mutable_input);
|
||||
var mime = try Mime.parse(testing.allocator, input);
|
||||
defer mime.deinit();
|
||||
try testing.expectEqual(expected, mime.isHTML());
|
||||
}
|
||||
}.isHTML;
|
||||
@@ -418,71 +357,6 @@ test "Mime: isHTML" {
|
||||
try isHTML(false, "over/9000");
|
||||
}
|
||||
|
||||
test "Mime: sniff" {
|
||||
try testing.expectEqual(null, Mime.sniff(""));
|
||||
try testing.expectEqual(null, Mime.sniff("<htm"));
|
||||
try testing.expectEqual(null, Mime.sniff("<html!"));
|
||||
try testing.expectEqual(null, Mime.sniff("<a_"));
|
||||
try testing.expectEqual(null, Mime.sniff("<!doctype html"));
|
||||
try testing.expectEqual(null, Mime.sniff("<!doctype html>"));
|
||||
try testing.expectEqual(null, Mime.sniff("\n <!doctype html>"));
|
||||
try testing.expectEqual(null, Mime.sniff("\n \t <font/>"));
|
||||
|
||||
const expectHTML = struct {
|
||||
fn expect(input: []const u8) !void {
|
||||
try testing.expectEqual(.text_html, std.meta.activeTag(Mime.sniff(input).?.content_type));
|
||||
}
|
||||
}.expect;
|
||||
|
||||
try expectHTML("<!doctype html ");
|
||||
try expectHTML("\n \t <!DOCTYPE HTML ");
|
||||
|
||||
try expectHTML("<html ");
|
||||
try expectHTML("\n \t <HtmL> even more stufff");
|
||||
|
||||
try expectHTML("<script>");
|
||||
try expectHTML("\n \t <SCRIpt >alert(document.cookies)</script>");
|
||||
|
||||
try expectHTML("<iframe>");
|
||||
try expectHTML(" \t <ifRAME >");
|
||||
|
||||
try expectHTML("<h1>");
|
||||
try expectHTML(" <H1>");
|
||||
|
||||
try expectHTML("<div>");
|
||||
try expectHTML("\n\r\r <DiV>");
|
||||
|
||||
try expectHTML("<font>");
|
||||
try expectHTML(" <fonT>");
|
||||
|
||||
try expectHTML("<table>");
|
||||
try expectHTML("\t\t<TAblE>");
|
||||
|
||||
try expectHTML("<a>");
|
||||
try expectHTML("\n\n<A>");
|
||||
|
||||
try expectHTML("<style>");
|
||||
try expectHTML(" \n\t <STyLE>");
|
||||
|
||||
try expectHTML("<title>");
|
||||
try expectHTML(" \n\t <TITLE>");
|
||||
|
||||
try expectHTML("<b>");
|
||||
try expectHTML(" \n\t <B>");
|
||||
|
||||
try expectHTML("<body>");
|
||||
try expectHTML(" \n\t <BODY>");
|
||||
|
||||
try expectHTML("<br>");
|
||||
try expectHTML(" \n\t <BR>");
|
||||
|
||||
try expectHTML("<p>");
|
||||
try expectHTML(" \n\t <P>");
|
||||
|
||||
try expectHTML("<!-->");
|
||||
try expectHTML(" \n\t <!-->");
|
||||
}
|
||||
|
||||
const Expectation = struct {
|
||||
content_type: Mime.ContentType,
|
||||
params: []const u8 = "",
|
||||
@@ -490,9 +364,9 @@ const Expectation = struct {
|
||||
};
|
||||
|
||||
fn expect(expected: Expectation, input: []const u8) !void {
|
||||
const mutable_input = try testing.arena_allocator.dupe(u8, input);
|
||||
var actual = try Mime.parse(testing.allocator, input);
|
||||
defer actual.deinit();
|
||||
|
||||
const actual = try Mime.parse(mutable_input);
|
||||
try testing.expectEqual(
|
||||
std.meta.activeTag(expected.content_type),
|
||||
std.meta.activeTag(actual.content_type),
|
||||
@@ -501,19 +375,17 @@ fn expect(expected: Expectation, input: []const u8) !void {
|
||||
switch (expected.content_type) {
|
||||
.other => |e| {
|
||||
const a = actual.content_type.other;
|
||||
try testing.expectEqual(e.type, a.type);
|
||||
try testing.expectEqual(e.sub_type, a.sub_type);
|
||||
try testing.expectEqualStrings(e.type, a.type);
|
||||
try testing.expectEqualStrings(e.sub_type, a.sub_type);
|
||||
},
|
||||
else => {}, // already asserted above
|
||||
}
|
||||
|
||||
try testing.expectEqual(expected.params, actual.params);
|
||||
try testing.expectEqualStrings(expected.params, actual.params);
|
||||
|
||||
if (expected.charset) |ec| {
|
||||
// We remove the null characters for testing purposes here.
|
||||
try testing.expectEqual(ec, actual.charsetString()[0..ec.len]);
|
||||
try testing.expectEqualStrings(ec, actual.charset.?);
|
||||
} else {
|
||||
const m: Mime = .unknown;
|
||||
try testing.expectEqual(m.charsetString(), actual.charsetString());
|
||||
try testing.expectEqual(null, actual.charset);
|
||||
}
|
||||
}
|
||||
|
||||
1241
src/browser/page.zig
File diff suppressed because it is too large
@@ -1,99 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Env = @import("../env.zig").Env;
|
||||
|
||||
pub const Loader = struct {
|
||||
state: enum { empty, loading } = .empty,
|
||||
|
||||
done: struct {
|
||||
webcomponents: bool = false,
|
||||
} = .{},
|
||||
|
||||
fn load(self: *Loader, comptime name: []const u8, source: []const u8, js_context: *Env.JsContext) void {
|
||||
var try_catch: Env.TryCatch = undefined;
|
||||
try_catch.init(js_context);
|
||||
defer try_catch.deinit();
|
||||
|
||||
self.state = .loading;
|
||||
defer self.state = .empty;
|
||||
|
||||
log.debug(.js, "polyfill load", .{ .name = name });
|
||||
_ = js_context.exec(source, name) catch |err| {
|
||||
log.fatal(.app, "polyfill error", .{
|
||||
.name = name,
|
||||
.err = try_catch.err(js_context.call_arena) catch @errorName(err) orelse @errorName(err),
|
||||
});
|
||||
};
|
||||
|
||||
@field(self.done, name) = true;
|
||||
}
|
||||
|
||||
pub fn missing(self: *Loader, name: []const u8, js_context: *Env.JsContext) bool {
|
||||
// Avoid recursive calls during polyfill loading.
|
||||
if (self.state == .loading) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!self.done.webcomponents and isWebcomponents(name)) {
|
||||
const source = @import("webcomponents.zig").source;
|
||||
self.load("webcomponents", source, js_context);
|
||||
// We return false here: We want v8 to continue the calling chain
|
||||
// to finally find the polyfill we just inserted. If we want to
|
||||
// return false and stop the call chain, we would have to use the
|
||||
// `info.GetReturnValue.Set()` function, or `undefined` will be
|
||||
// returned immediately.
|
||||
return false;
|
||||
}
|
||||
|
||||
if (comptime builtin.mode == .Debug) {
|
||||
log.debug(.unknown_prop, "unknown global property", .{
|
||||
.info = "but the property can exist in pure JS",
|
||||
.property = name,
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
fn isWebcomponents(name: []const u8) bool {
|
||||
if (std.mem.eql(u8, name, "customElements")) return true;
|
||||
return false;
|
||||
}
|
||||
};
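// A small sketch of the lazy-load dispatch described above (hypothetical
// test, not part of the original file): only the customElements global
// triggers the webcomponents polyfill, and done.webcomponents guards
// against loading it twice.
test "polyfill Loader: isWebcomponents (sketch)" {
    try std.testing.expect(Loader.isWebcomponents("customElements"));
    try std.testing.expect(!Loader.isWebcomponents("fetch"));
}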
|
||||
|
||||
pub fn preload(allocator: Allocator, js_context: *Env.JsContext) !void {
|
||||
var try_catch: Env.TryCatch = undefined;
|
||||
try_catch.init(js_context);
|
||||
defer try_catch.deinit();
|
||||
|
||||
const name = "webcomponents-pre";
|
||||
const source = @import("webcomponents.zig").pre;
|
||||
_ = js_context.exec(source, name) catch |err| {
|
||||
if (try try_catch.err(allocator)) |msg| {
|
||||
defer allocator.free(msg);
|
||||
log.fatal(.app, "polyfill error", .{ .name = name, .err = msg });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
/**
|
||||
@license @nocompile
|
||||
Copyright (c) 2018 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
(function(){/*
|
||||
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found
|
||||
at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
|
||||
be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
|
||||
Google as part of the polymer project is also subject to an additional IP
|
||||
rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
'use strict';var n=window.Document.prototype.createElement,p=window.Document.prototype.createElementNS,aa=window.Document.prototype.importNode,ba=window.Document.prototype.prepend,ca=window.Document.prototype.append,da=window.DocumentFragment.prototype.prepend,ea=window.DocumentFragment.prototype.append,q=window.Node.prototype.cloneNode,r=window.Node.prototype.appendChild,t=window.Node.prototype.insertBefore,u=window.Node.prototype.removeChild,v=window.Node.prototype.replaceChild,w=Object.getOwnPropertyDescriptor(window.Node.prototype,
|
||||
"textContent"),y=window.Element.prototype.attachShadow,z=Object.getOwnPropertyDescriptor(window.Element.prototype,"innerHTML"),A=window.Element.prototype.getAttribute,B=window.Element.prototype.setAttribute,C=window.Element.prototype.removeAttribute,D=window.Element.prototype.toggleAttribute,E=window.Element.prototype.getAttributeNS,F=window.Element.prototype.setAttributeNS,G=window.Element.prototype.removeAttributeNS,H=window.Element.prototype.insertAdjacentElement,fa=window.Element.prototype.insertAdjacentHTML,
|
||||
ha=window.Element.prototype.prepend,ia=window.Element.prototype.append,ja=window.Element.prototype.before,ka=window.Element.prototype.after,la=window.Element.prototype.replaceWith,ma=window.Element.prototype.remove,na=window.HTMLElement,I=Object.getOwnPropertyDescriptor(window.HTMLElement.prototype,"innerHTML"),oa=window.HTMLElement.prototype.insertAdjacentElement,pa=window.HTMLElement.prototype.insertAdjacentHTML;var qa=new Set;"annotation-xml color-profile font-face font-face-src font-face-uri font-face-format font-face-name missing-glyph".split(" ").forEach(function(a){return qa.add(a)});function ra(a){var b=qa.has(a);a=/^[a-z][.0-9_a-z]*-[-.0-9_a-z]*$/.test(a);return!b&&a}var sa=document.contains?document.contains.bind(document):document.documentElement.contains.bind(document.documentElement);
|
||||
function J(a){var b=a.isConnected;if(void 0!==b)return b;if(sa(a))return!0;for(;a&&!(a.__CE_isImportDocument||a instanceof Document);)a=a.parentNode||(window.ShadowRoot&&a instanceof ShadowRoot?a.host:void 0);return!(!a||!(a.__CE_isImportDocument||a instanceof Document))}function K(a){var b=a.children;if(b)return Array.prototype.slice.call(b);b=[];for(a=a.firstChild;a;a=a.nextSibling)a.nodeType===Node.ELEMENT_NODE&&b.push(a);return b}
|
||||
function L(a,b){for(;b&&b!==a&&!b.nextSibling;)b=b.parentNode;return b&&b!==a?b.nextSibling:null}
|
||||
function M(a,b,d){for(var f=a;f;){if(f.nodeType===Node.ELEMENT_NODE){var c=f;b(c);var e=c.localName;if("link"===e&&"import"===c.getAttribute("rel")){f=c.import;void 0===d&&(d=new Set);if(f instanceof Node&&!d.has(f))for(d.add(f),f=f.firstChild;f;f=f.nextSibling)M(f,b,d);f=L(a,c);continue}else if("template"===e){f=L(a,c);continue}if(c=c.__CE_shadowRoot)for(c=c.firstChild;c;c=c.nextSibling)M(c,b,d)}f=f.firstChild?f.firstChild:L(a,f)}};function N(){var a=!(null===O||void 0===O||!O.noDocumentConstructionObserver),b=!(null===O||void 0===O||!O.shadyDomFastWalk);this.m=[];this.g=[];this.j=!1;this.shadyDomFastWalk=b;this.I=!a}function P(a,b,d,f){var c=window.ShadyDOM;if(a.shadyDomFastWalk&&c&&c.inUse){if(b.nodeType===Node.ELEMENT_NODE&&d(b),b.querySelectorAll)for(a=c.nativeMethods.querySelectorAll.call(b,"*"),b=0;b<a.length;b++)d(a[b])}else M(b,d,f)}function ta(a,b){a.j=!0;a.m.push(b)}function ua(a,b){a.j=!0;a.g.push(b)}
|
||||
function Q(a,b){a.j&&P(a,b,function(d){return R(a,d)})}function R(a,b){if(a.j&&!b.__CE_patched){b.__CE_patched=!0;for(var d=0;d<a.m.length;d++)a.m[d](b);for(d=0;d<a.g.length;d++)a.g[d](b)}}function S(a,b){var d=[];P(a,b,function(c){return d.push(c)});for(b=0;b<d.length;b++){var f=d[b];1===f.__CE_state?a.connectedCallback(f):T(a,f)}}function U(a,b){var d=[];P(a,b,function(c){return d.push(c)});for(b=0;b<d.length;b++){var f=d[b];1===f.__CE_state&&a.disconnectedCallback(f)}}
|
||||
function V(a,b,d){d=void 0===d?{}:d;var f=d.J,c=d.upgrade||function(g){return T(a,g)},e=[];P(a,b,function(g){a.j&&R(a,g);if("link"===g.localName&&"import"===g.getAttribute("rel")){var h=g.import;h instanceof Node&&(h.__CE_isImportDocument=!0,h.__CE_registry=document.__CE_registry);h&&"complete"===h.readyState?h.__CE_documentLoadHandled=!0:g.addEventListener("load",function(){var k=g.import;if(!k.__CE_documentLoadHandled){k.__CE_documentLoadHandled=!0;var l=new Set;f&&(f.forEach(function(m){return l.add(m)}),
|
||||
l.delete(k));V(a,k,{J:l,upgrade:c})}})}else e.push(g)},f);for(b=0;b<e.length;b++)c(e[b])}
|
||||
function T(a,b){try{var d=b.ownerDocument,f=d.__CE_registry;var c=f&&(d.defaultView||d.__CE_isImportDocument)?W(f,b.localName):void 0;if(c&&void 0===b.__CE_state){c.constructionStack.push(b);try{try{if(new c.constructorFunction!==b)throw Error("The custom element constructor did not produce the element being upgraded.");}finally{c.constructionStack.pop()}}catch(k){throw b.__CE_state=2,k;}b.__CE_state=1;b.__CE_definition=c;if(c.attributeChangedCallback&&b.hasAttributes()){var e=c.observedAttributes;
|
||||
for(c=0;c<e.length;c++){var g=e[c],h=b.getAttribute(g);null!==h&&a.attributeChangedCallback(b,g,null,h,null)}}J(b)&&a.connectedCallback(b)}}catch(k){X(k)}}N.prototype.connectedCallback=function(a){var b=a.__CE_definition;if(b.connectedCallback)try{b.connectedCallback.call(a)}catch(d){X(d)}};N.prototype.disconnectedCallback=function(a){var b=a.__CE_definition;if(b.disconnectedCallback)try{b.disconnectedCallback.call(a)}catch(d){X(d)}};
|
||||
N.prototype.attributeChangedCallback=function(a,b,d,f,c){var e=a.__CE_definition;if(e.attributeChangedCallback&&-1<e.observedAttributes.indexOf(b))try{e.attributeChangedCallback.call(a,b,d,f,c)}catch(g){X(g)}};
|
||||
function va(a,b,d,f){var c=b.__CE_registry;if(c&&(null===f||"http://www.w3.org/1999/xhtml"===f)&&(c=W(c,d)))try{var e=new c.constructorFunction;if(void 0===e.__CE_state||void 0===e.__CE_definition)throw Error("Failed to construct '"+d+"': The returned value was not constructed with the HTMLElement constructor.");if("http://www.w3.org/1999/xhtml"!==e.namespaceURI)throw Error("Failed to construct '"+d+"': The constructed element's namespace must be the HTML namespace.");if(e.hasAttributes())throw Error("Failed to construct '"+
|
||||
d+"': The constructed element must not have any attributes.");if(null!==e.firstChild)throw Error("Failed to construct '"+d+"': The constructed element must not have any children.");if(null!==e.parentNode)throw Error("Failed to construct '"+d+"': The constructed element must not have a parent node.");if(e.ownerDocument!==b)throw Error("Failed to construct '"+d+"': The constructed element's owner document is incorrect.");if(e.localName!==d)throw Error("Failed to construct '"+d+"': The constructed element's local name is incorrect.");
|
||||
return e}catch(g){return X(g),b=null===f?n.call(b,d):p.call(b,f,d),Object.setPrototypeOf(b,HTMLUnknownElement.prototype),b.__CE_state=2,b.__CE_definition=void 0,R(a,b),b}b=null===f?n.call(b,d):p.call(b,f,d);R(a,b);return b}
|
||||
function X(a){var b="",d="",f=0,c=0;a instanceof Error?(b=a.message,d=a.sourceURL||a.fileName||"",f=a.line||a.lineNumber||0,c=a.column||a.columnNumber||0):b="Uncaught "+String(a);var e=void 0;void 0===ErrorEvent.prototype.initErrorEvent?e=new ErrorEvent("error",{cancelable:!0,message:b,filename:d,lineno:f,colno:c,error:a}):(e=document.createEvent("ErrorEvent"),e.initErrorEvent("error",!1,!0,b,d,f),e.preventDefault=function(){Object.defineProperty(this,"defaultPrevented",{configurable:!0,get:function(){return!0}})});
|
||||
void 0===e.error&&Object.defineProperty(e,"error",{configurable:!0,enumerable:!0,get:function(){return a}});window.dispatchEvent(e);e.defaultPrevented||console.error(a)};function wa(){var a=this;this.g=void 0;this.F=new Promise(function(b){a.l=b})}wa.prototype.resolve=function(a){if(this.g)throw Error("Already resolved.");this.g=a;this.l(a)};function xa(a){var b=document;this.l=void 0;this.h=a;this.g=b;V(this.h,this.g);"loading"===this.g.readyState&&(this.l=new MutationObserver(this.G.bind(this)),this.l.observe(this.g,{childList:!0,subtree:!0}))}function ya(a){a.l&&a.l.disconnect()}xa.prototype.G=function(a){var b=this.g.readyState;"interactive"!==b&&"complete"!==b||ya(this);for(b=0;b<a.length;b++)for(var d=a[b].addedNodes,f=0;f<d.length;f++)V(this.h,d[f])};function Y(a){this.s=new Map;this.u=new Map;this.C=new Map;this.A=!1;this.B=new Map;this.o=function(b){return b()};this.i=!1;this.v=[];this.h=a;this.D=a.I?new xa(a):void 0}Y.prototype.H=function(a,b){var d=this;if(!(b instanceof Function))throw new TypeError("Custom element constructor getters must be functions.");za(this,a);this.s.set(a,b);this.v.push(a);this.i||(this.i=!0,this.o(function(){return Aa(d)}))};
|
||||
Y.prototype.define=function(a,b){var d=this;if(!(b instanceof Function))throw new TypeError("Custom element constructors must be functions.");za(this,a);Ba(this,a,b);this.v.push(a);this.i||(this.i=!0,this.o(function(){return Aa(d)}))};function za(a,b){if(!ra(b))throw new SyntaxError("The element name '"+b+"' is not valid.");if(W(a,b))throw Error("A custom element with name '"+(b+"' has already been defined."));if(a.A)throw Error("A custom element is already being defined.");}
|
||||
function Ba(a,b,d){a.A=!0;var f;try{var c=d.prototype;if(!(c instanceof Object))throw new TypeError("The custom element constructor's prototype is not an object.");var e=function(m){var x=c[m];if(void 0!==x&&!(x instanceof Function))throw Error("The '"+m+"' callback must be a function.");return x};var g=e("connectedCallback");var h=e("disconnectedCallback");var k=e("adoptedCallback");var l=(f=e("attributeChangedCallback"))&&d.observedAttributes||[]}catch(m){throw m;}finally{a.A=!1}d={localName:b,
|
||||
constructorFunction:d,connectedCallback:g,disconnectedCallback:h,adoptedCallback:k,attributeChangedCallback:f,observedAttributes:l,constructionStack:[]};a.u.set(b,d);a.C.set(d.constructorFunction,d);return d}Y.prototype.upgrade=function(a){V(this.h,a)};
|
||||
function Aa(a){if(!1!==a.i){a.i=!1;for(var b=[],d=a.v,f=new Map,c=0;c<d.length;c++)f.set(d[c],[]);V(a.h,document,{upgrade:function(k){if(void 0===k.__CE_state){var l=k.localName,m=f.get(l);m?m.push(k):a.u.has(l)&&b.push(k)}}});for(c=0;c<b.length;c++)T(a.h,b[c]);for(c=0;c<d.length;c++){for(var e=d[c],g=f.get(e),h=0;h<g.length;h++)T(a.h,g[h]);(e=a.B.get(e))&&e.resolve(void 0)}d.length=0}}Y.prototype.get=function(a){if(a=W(this,a))return a.constructorFunction};
|
||||
Y.prototype.whenDefined=function(a){if(!ra(a))return Promise.reject(new SyntaxError("'"+a+"' is not a valid custom element name."));var b=this.B.get(a);if(b)return b.F;b=new wa;this.B.set(a,b);var d=this.u.has(a)||this.s.has(a);a=-1===this.v.indexOf(a);d&&a&&b.resolve(void 0);return b.F};Y.prototype.polyfillWrapFlushCallback=function(a){this.D&&ya(this.D);var b=this.o;this.o=function(d){return a(function(){return b(d)})}};
|
||||
function W(a,b){var d=a.u.get(b);if(d)return d;if(d=a.s.get(b)){a.s.delete(b);try{return Ba(a,b,d())}catch(f){X(f)}}}Y.prototype.define=Y.prototype.define;Y.prototype.upgrade=Y.prototype.upgrade;Y.prototype.get=Y.prototype.get;Y.prototype.whenDefined=Y.prototype.whenDefined;Y.prototype.polyfillDefineLazy=Y.prototype.H;Y.prototype.polyfillWrapFlushCallback=Y.prototype.polyfillWrapFlushCallback;function Z(a,b,d){function f(c){return function(e){for(var g=[],h=0;h<arguments.length;++h)g[h]=arguments[h];h=[];for(var k=[],l=0;l<g.length;l++){var m=g[l];m instanceof Element&&J(m)&&k.push(m);if(m instanceof DocumentFragment)for(m=m.firstChild;m;m=m.nextSibling)h.push(m);else h.push(m)}c.apply(this,g);for(g=0;g<k.length;g++)U(a,k[g]);if(J(this))for(g=0;g<h.length;g++)k=h[g],k instanceof Element&&S(a,k)}}void 0!==d.prepend&&(b.prepend=f(d.prepend));void 0!==d.append&&(b.append=f(d.append))};function Ca(a){Document.prototype.createElement=function(b){return va(a,this,b,null)};Document.prototype.importNode=function(b,d){b=aa.call(this,b,!!d);this.__CE_registry?V(a,b):Q(a,b);return b};Document.prototype.createElementNS=function(b,d){return va(a,this,d,b)};Z(a,Document.prototype,{prepend:ba,append:ca})};function Da(a){function b(f){return function(c){for(var e=[],g=0;g<arguments.length;++g)e[g]=arguments[g];g=[];for(var h=[],k=0;k<e.length;k++){var l=e[k];l instanceof Element&&J(l)&&h.push(l);if(l instanceof DocumentFragment)for(l=l.firstChild;l;l=l.nextSibling)g.push(l);else g.push(l)}f.apply(this,e);for(e=0;e<h.length;e++)U(a,h[e]);if(J(this))for(e=0;e<g.length;e++)h=g[e],h instanceof Element&&S(a,h)}}var d=Element.prototype;void 0!==ja&&(d.before=b(ja));void 0!==ka&&(d.after=b(ka));void 0!==la&&
|
||||
(d.replaceWith=function(f){for(var c=[],e=0;e<arguments.length;++e)c[e]=arguments[e];e=[];for(var g=[],h=0;h<c.length;h++){var k=c[h];k instanceof Element&&J(k)&&g.push(k);if(k instanceof DocumentFragment)for(k=k.firstChild;k;k=k.nextSibling)e.push(k);else e.push(k)}h=J(this);la.apply(this,c);for(c=0;c<g.length;c++)U(a,g[c]);if(h)for(U(a,this),c=0;c<e.length;c++)g=e[c],g instanceof Element&&S(a,g)});void 0!==ma&&(d.remove=function(){var f=J(this);ma.call(this);f&&U(a,this)})};function Ea(a){function b(c,e){Object.defineProperty(c,"innerHTML",{enumerable:e.enumerable,configurable:!0,get:e.get,set:function(g){var h=this,k=void 0;J(this)&&(k=[],P(a,this,function(x){x!==h&&k.push(x)}));e.set.call(this,g);if(k)for(var l=0;l<k.length;l++){var m=k[l];1===m.__CE_state&&a.disconnectedCallback(m)}this.ownerDocument.__CE_registry?V(a,this):Q(a,this);return g}})}function d(c,e){c.insertAdjacentElement=function(g,h){var k=J(h);g=e.call(this,g,h);k&&U(a,h);J(g)&&S(a,h);return g}}function f(c,
|
||||
e){function g(h,k){for(var l=[];h!==k;h=h.nextSibling)l.push(h);for(k=0;k<l.length;k++)V(a,l[k])}c.insertAdjacentHTML=function(h,k){h=h.toLowerCase();if("beforebegin"===h){var l=this.previousSibling;e.call(this,h,k);g(l||this.parentNode.firstChild,this)}else if("afterbegin"===h)l=this.firstChild,e.call(this,h,k),g(this.firstChild,l);else if("beforeend"===h)l=this.lastChild,e.call(this,h,k),g(l||this.firstChild,null);else if("afterend"===h)l=this.nextSibling,e.call(this,h,k),g(this.nextSibling,l);
|
||||
else throw new SyntaxError("The value provided ("+String(h)+") is not one of 'beforebegin', 'afterbegin', 'beforeend', or 'afterend'.");}}y&&(Element.prototype.attachShadow=function(c){c=y.call(this,c);if(a.j&&!c.__CE_patched){c.__CE_patched=!0;for(var e=0;e<a.m.length;e++)a.m[e](c)}return this.__CE_shadowRoot=c});z&&z.get?b(Element.prototype,z):I&&I.get?b(HTMLElement.prototype,I):ua(a,function(c){b(c,{enumerable:!0,configurable:!0,get:function(){return q.call(this,!0).innerHTML},set:function(e){var g=
|
||||
"template"===this.localName,h=g?this.content:this,k=p.call(document,this.namespaceURI,this.localName);for(k.innerHTML=e;0<h.childNodes.length;)u.call(h,h.childNodes[0]);for(e=g?k.content:k;0<e.childNodes.length;)r.call(h,e.childNodes[0])}})});Element.prototype.setAttribute=function(c,e){if(1!==this.__CE_state)return B.call(this,c,e);var g=A.call(this,c);B.call(this,c,e);e=A.call(this,c);a.attributeChangedCallback(this,c,g,e,null)};Element.prototype.setAttributeNS=function(c,e,g){if(1!==this.__CE_state)return F.call(this,
|
||||
c,e,g);var h=E.call(this,c,e);F.call(this,c,e,g);g=E.call(this,c,e);a.attributeChangedCallback(this,e,h,g,c)};Element.prototype.removeAttribute=function(c){if(1!==this.__CE_state)return C.call(this,c);var e=A.call(this,c);C.call(this,c);null!==e&&a.attributeChangedCallback(this,c,e,null,null)};D&&(Element.prototype.toggleAttribute=function(c,e){if(1!==this.__CE_state)return D.call(this,c,e);var g=A.call(this,c),h=null!==g;e=D.call(this,c,e);h!==e&&a.attributeChangedCallback(this,c,g,e?"":null,null);
|
||||
return e});Element.prototype.removeAttributeNS=function(c,e){if(1!==this.__CE_state)return G.call(this,c,e);var g=E.call(this,c,e);G.call(this,c,e);var h=E.call(this,c,e);g!==h&&a.attributeChangedCallback(this,e,g,h,c)};oa?d(HTMLElement.prototype,oa):H&&d(Element.prototype,H);pa?f(HTMLElement.prototype,pa):fa&&f(Element.prototype,fa);Z(a,Element.prototype,{prepend:ha,append:ia});Da(a)};var Fa={};function Ga(a){function b(){var d=this.constructor;var f=document.__CE_registry.C.get(d);if(!f)throw Error("Failed to construct a custom element: The constructor was not registered with `customElements`.");var c=f.constructionStack;if(0===c.length)return c=n.call(document,f.localName),Object.setPrototypeOf(c,d.prototype),c.__CE_state=1,c.__CE_definition=f,R(a,c),c;var e=c.length-1,g=c[e];if(g===Fa)throw Error("Failed to construct '"+f.localName+"': This element was already constructed.");c[e]=Fa;
|
||||
Object.setPrototypeOf(g,d.prototype);R(a,g);return g}b.prototype=na.prototype;Object.defineProperty(HTMLElement.prototype,"constructor",{writable:!0,configurable:!0,enumerable:!1,value:b});window.HTMLElement=b};function Ha(a){function b(d,f){Object.defineProperty(d,"textContent",{enumerable:f.enumerable,configurable:!0,get:f.get,set:function(c){if(this.nodeType===Node.TEXT_NODE)f.set.call(this,c);else{var e=void 0;if(this.firstChild){var g=this.childNodes,h=g.length;if(0<h&&J(this)){e=Array(h);for(var k=0;k<h;k++)e[k]=g[k]}}f.set.call(this,c);if(e)for(c=0;c<e.length;c++)U(a,e[c])}}})}Node.prototype.insertBefore=function(d,f){if(d instanceof DocumentFragment){var c=K(d);d=t.call(this,d,f);if(J(this))for(f=
|
||||
0;f<c.length;f++)S(a,c[f]);return d}c=d instanceof Element&&J(d);f=t.call(this,d,f);c&&U(a,d);J(this)&&S(a,d);return f};Node.prototype.appendChild=function(d){if(d instanceof DocumentFragment){var f=K(d);d=r.call(this,d);if(J(this))for(var c=0;c<f.length;c++)S(a,f[c]);return d}f=d instanceof Element&&J(d);c=r.call(this,d);f&&U(a,d);J(this)&&S(a,d);return c};Node.prototype.cloneNode=function(d){d=q.call(this,!!d);this.ownerDocument.__CE_registry?V(a,d):Q(a,d);return d};Node.prototype.removeChild=function(d){var f=
|
||||
d instanceof Element&&J(d),c=u.call(this,d);f&&U(a,d);return c};Node.prototype.replaceChild=function(d,f){if(d instanceof DocumentFragment){var c=K(d);d=v.call(this,d,f);if(J(this))for(U(a,f),f=0;f<c.length;f++)S(a,c[f]);return d}c=d instanceof Element&&J(d);var e=v.call(this,d,f),g=J(this);g&&U(a,f);c&&U(a,d);g&&S(a,d);return e};w&&w.get?b(Node.prototype,w):ta(a,function(d){b(d,{enumerable:!0,configurable:!0,get:function(){for(var f=[],c=this.firstChild;c;c=c.nextSibling)c.nodeType!==Node.COMMENT_NODE&&
|
||||
f.push(c.textContent);return f.join("")},set:function(f){for(;this.firstChild;)u.call(this,this.firstChild);null!=f&&""!==f&&r.call(this,document.createTextNode(f))}})})};var O=window.customElements;function Ia(){var a=new N;Ga(a);Ca(a);Z(a,DocumentFragment.prototype,{prepend:da,append:ea});Ha(a);Ea(a);window.CustomElementRegistry=Y;a=new Y(a);document.__CE_registry=a;Object.defineProperty(window,"customElements",{configurable:!0,enumerable:!0,value:a})}O&&!O.forcePolyfill&&"function"==typeof O.define&&"function"==typeof O.get||Ia();window.__CE_installPolyfill=Ia;/*
|
||||
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
}).call(this);
@@ -1,42 +0,0 @@
// webcomponents.js code comes from
// https://github.com/webcomponents/polyfills/tree/master/packages/webcomponentsjs
//
// The original source code is available under a "BSD style license".
//
// This is the `webcomponents-ce.js` bundle
|
||||
pub const source = @embedFile("webcomponents.js");
|
||||
|
||||
// The main webcomponents.js is lazily loaded when window.customElements is
// called. But, if you look at the test below, you'll notice that we declare
// our custom element (LightPanda) before we call `customElements.define`. We
// _have_ to declare it before we can register it.
// That causes an issue, because the LightPanda class extends HTMLElement, which
// hasn't been monkeypatched by the polyfill yet. If you were to try it as-is
// you'd get an "Illegal Constructor", because that's what the Zig HTMLElement
// constructor does (and that's correct).
// However, once HTMLElement is monkeypatched, it'll work. One simple solution
// is to run the webcomponents.js polyfill proactively on each page, ensuring
// that HTMLElement is monkeypatched before any other JavaScript is run. But
// that adds _a lot_ of overhead.
// So instead of always running the [large and intrusive] webcomponents.js
// polyfill, we'll always run this little snippet. It wraps the HTMLElement
// constructor. When the LightPanda class is created, it'll extend our little
// wrapper. But, unlike the Zig default constructor which throws, our code
// calls the "real" constructor. That might seem like the same thing, but by the
// time our wrapper is called, the webcomponents.js polyfill will have been
// loaded and the "real" constructor will be the monkeypatched version.
// TL;DR: this creates a layer of indirection for the constructor, so that, when it's
// actually instantiated, the webcomponents.js polyfill will have been loaded.
pub const pre =
|
||||
\\ (() => {
|
||||
\\ const HE = window.HTMLElement;
|
||||
\\ const b = function() { return HE.prototype.constructor.call(this); }
|
||||
\\ b.prototype = HE.prototype;
|
||||
\\ window.HTMLElement = b;
|
||||
\\ })();
|
||||
;
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser: Polyfill.WebComponents" {
|
||||
try testing.htmlRunner("polyfill/webcomponents.html");
|
||||
}
@@ -1,116 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const parser = @import("netsurf.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
// We provide a very poor abstraction to the rest of the code. In theory, we can change
// the FlatRenderer to a different implementation, and it'll all just work.
|
||||
pub const Renderer = FlatRenderer;
|
||||
|
||||
// This "renderer" positions elements in a single row in an unspecified order.
|
||||
// The important thing is that elements have a consistent position/index within
|
||||
// that row, which can be turned into a rectangle.
|
||||
const FlatRenderer = struct {
|
||||
allocator: Allocator,
|
||||
|
||||
// key is the @intFromPtr of the element
|
||||
// value is the index position
|
||||
positions: std.AutoHashMapUnmanaged(u64, u32),
|
||||
|
||||
// given an index, get the element
|
||||
elements: std.ArrayListUnmanaged(u64),
|
||||
|
||||
const Element = @import("dom/element.zig").Element;
|
||||
|
||||
// we expect allocator to be an arena
|
||||
pub fn init(allocator: Allocator) FlatRenderer {
|
||||
return .{
|
||||
.elements = .{},
|
||||
.positions = .{},
|
||||
.allocator = allocator,
|
||||
};
|
||||
}
|
||||
|
||||
// The DOMRect is always relative to the viewport, not the document the element belongs to.
// Elements that are not part of the main document (either detached or in a shadow DOM) should not call this function.
|
||||
pub fn getRect(self: *FlatRenderer, e: *parser.Element) !Element.DOMRect {
|
||||
var elements = &self.elements;
|
||||
const gop = try self.positions.getOrPut(self.allocator, @intFromPtr(e));
|
||||
var x: u32 = gop.value_ptr.*;
|
||||
if (gop.found_existing == false) {
|
||||
x = @intCast(elements.items.len);
|
||||
try elements.append(self.allocator, @intFromPtr(e));
|
||||
gop.value_ptr.* = x;
|
||||
}
|
||||
|
||||
const _x: f64 = @floatFromInt(x);
|
||||
const y: f64 = 0.0;
|
||||
const w: f64 = 1.0;
|
||||
const h: f64 = 1.0;
|
||||
|
||||
return .{
|
||||
.x = _x,
|
||||
.y = y,
|
||||
.width = w,
|
||||
.height = h,
|
||||
.left = _x,
|
||||
.top = y,
|
||||
.right = _x + w,
|
||||
.bottom = y + h,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn boundingRect(self: *const FlatRenderer) Element.DOMRect {
|
||||
const x: f64 = 0.0;
|
||||
const y: f64 = 0.0;
|
||||
const w: f64 = @floatFromInt(self.width());
|
||||
const h: f64 = @floatFromInt(self.height());
|
||||
|
||||
return .{
|
||||
.x = x,
|
||||
.y = y,
|
||||
.width = w,
|
||||
.height = h,
|
||||
.left = x,
|
||||
.top = y,
|
||||
.right = x + w,
|
||||
.bottom = y + h,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn width(self: *const FlatRenderer) u32 {
|
||||
return @max(@as(u32, @intCast(self.elements.items.len)), 1); // At least 1 pixel even if empty
|
||||
}
|
||||
|
||||
pub fn height(_: *const FlatRenderer) u32 {
|
||||
return 1;
|
||||
}
|
||||
|
||||
pub fn getElementAtPosition(self: *const FlatRenderer, x: i32, y: i32) ?*parser.Element {
|
||||
if (y != 0 or x < 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const elements = self.elements.items;
|
||||
return if (x < elements.len) @ptrFromInt(elements[@intCast(x)]) else null;
|
||||
}
|
||||
};
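// Illustrative sketch (not from the original file): under the flat,
// single-row layout described above, element i simply occupies column i,
// one unit wide and one unit tall. The `rectForIndex` and `SketchRect`
// names are made up for this example; it relies on the file's existing
// `std` import.
const SketchRect = struct { x: f64, y: f64, width: f64, height: f64 };

fn rectForIndex(i: usize) SketchRect {
// The column index becomes the x coordinate; everything sits on row 0.
const x: f64 = @floatFromInt(i);
return .{ .x = x, .y = 0.0, .width = 1.0, .height = 1.0 };
}

test "flat layout sketch" {
const r = rectForIndex(2);
try std.testing.expectEqual(@as(f64, 2.0), r.x);
try std.testing.expectEqual(@as(f64, 0.0), r.y);
try std.testing.expectEqual(@as(f64, 1.0), r.width);
try std.testing.expectEqual(@as(f64, 1.0), r.height);
}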
@@ -1,181 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const Env = @import("env.zig").Env;
|
||||
const Page = @import("page.zig").Page;
|
||||
const Browser = @import("browser.zig").Browser;
|
||||
const NavigateOpts = @import("page.zig").NavigateOpts;
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const parser = @import("netsurf.zig");
|
||||
const storage = @import("storage/storage.zig");
|
||||
|
||||
// A Session is like a browser tab.
// It owns the JS env and the loader for all the pages of the session.
// You can create multiple pages in succession for a session, but you must
// deinit a page before creating another one.
|
||||
pub const Session = struct {
|
||||
browser: *Browser,
|
||||
|
||||
// Used to create our Inspector and in the BrowserContext.
|
||||
arena: Allocator,
|
||||
|
||||
// The page's arena is unsuitable for data that has to exist while
// navigating from one page to another. For example, if we're clicking
// on an HREF, the URL exists in the original page (where the click
// originated) but also has to exist in the new page.
// While we could use the Session's arena, this could accumulate a lot of
// memory if we do many navigation events. The `transfer_arena` is meant to
// bridge the gap: it exists long enough to store any data needed to end one
// page and start another.
|
||||
transfer_arena: Allocator,
|
||||
|
||||
executor: Env.ExecutionWorld,
|
||||
storage_shed: storage.Shed,
|
||||
cookie_jar: storage.CookieJar,
|
||||
|
||||
page: ?Page = null,
|
||||
|
||||
// If the current page wants to navigate to a new page
// (form submit, link click, top.location = xxx),
// the details are stored here so that, on the next call to session.wait,
// we can destroy the current page and start a new one.
|
||||
queued_navigation: ?QueuedNavigation,
|
||||
|
||||
pub fn init(self: *Session, browser: *Browser) !void {
|
||||
var executor = try browser.env.newExecutionWorld();
|
||||
errdefer executor.deinit();
|
||||
|
||||
const allocator = browser.app.allocator;
|
||||
self.* = .{
|
||||
.browser = browser,
|
||||
.executor = executor,
|
||||
.queued_navigation = null,
|
||||
.arena = browser.session_arena.allocator(),
|
||||
.storage_shed = storage.Shed.init(allocator),
|
||||
.cookie_jar = storage.CookieJar.init(allocator),
|
||||
.transfer_arena = browser.transfer_arena.allocator(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Session) void {
|
||||
if (self.page != null) {
|
||||
self.removePage();
|
||||
}
|
||||
self.cookie_jar.deinit();
|
||||
self.storage_shed.deinit();
|
||||
self.executor.deinit();
|
||||
}
|
||||
|
||||
// NOTE: the caller is not the owner of the returned value,
|
||||
// the pointer on Page is just returned as a convenience
|
||||
pub fn createPage(self: *Session) !*Page {
|
||||
std.debug.assert(self.page == null);
|
||||
|
||||
// Start netsurf memory arena.
|
||||
// We need to init this early as JS event handlers may be registered through Runtime.evaluate before the first html doc is loaded
|
||||
parser.init();
|
||||
|
||||
const page_arena = &self.browser.page_arena;
|
||||
_ = page_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });
|
||||
_ = self.browser.state_pool.reset(.{ .retain_with_limit = 4 * 1024 });
|
||||
|
||||
self.page = @as(Page, undefined);
|
||||
const page = &self.page.?;
|
||||
try Page.init(page, page_arena.allocator(), self);
|
||||
|
||||
log.debug(.browser, "create page", .{});
|
||||
// start JS env
|
||||
// Inform CDP the main page has been created such that additional context for other Worlds can be created as well
|
||||
self.browser.notification.dispatch(.page_created, page);
|
||||
|
||||
return page;
|
||||
}
|
||||
|
||||
pub fn removePage(self: *Session) void {
|
||||
// Inform CDP the page is going to be removed, allowing other worlds to remove themselves before the main one
|
||||
self.browser.notification.dispatch(.page_remove, .{});
|
||||
|
||||
std.debug.assert(self.page != null);
|
||||
|
||||
// RemoveJsContext() will execute the destructor of any type that
|
||||
// registered a destructor (e.g. XMLHttpRequest).
|
||||
// Should be called before we deinit the page, because these objects
|
||||
// could be referencing it.
|
||||
self.executor.removeJsContext();
|
||||
|
||||
self.page.?.deinit();
|
||||
self.page = null;
|
||||
|
||||
// clear netsurf memory arena.
|
||||
parser.deinit();
|
||||
|
||||
log.debug(.browser, "remove page", .{});
|
||||
}
|
||||
|
||||
pub fn currentPage(self: *Session) ?*Page {
|
||||
return &(self.page orelse return null);
|
||||
}
|
||||
|
||||
pub const WaitResult = enum {
|
||||
done,
|
||||
no_page,
|
||||
extra_socket,
|
||||
};
|
||||
|
||||
pub fn wait(self: *Session, wait_ms: i32) WaitResult {
|
||||
if (self.queued_navigation) |qn| {
|
||||
// This was already aborted on the page, but it would be pretty
|
||||
// bad if old requests went to the new page, so let's make double sure
|
||||
self.browser.http_client.abort();
|
||||
|
||||
// Page.navigateFromWebAPI terminated JS execution. If we don't resume
// it before doing a shutdown, we'll get an error.
|
||||
self.executor.resumeExecution();
|
||||
self.removePage();
|
||||
self.queued_navigation = null;
|
||||
|
||||
const page = self.createPage() catch |err| {
|
||||
log.err(.browser, "queued navigation page error", .{
|
||||
.err = err,
|
||||
.url = qn.url,
|
||||
});
|
||||
return .done;
|
||||
};
|
||||
|
||||
page.navigate(qn.url, qn.opts) catch |err| {
|
||||
log.err(.browser, "queued navigation error", .{ .err = err, .url = qn.url });
|
||||
return .done;
|
||||
};
|
||||
}
|
||||
|
||||
if (self.page) |*page| {
|
||||
return page.wait(wait_ms);
|
||||
}
|
||||
return .no_page;
|
||||
}
|
||||
};
|
||||
|
||||
const QueuedNavigation = struct {
|
||||
url: []const u8,
|
||||
opts: NavigateOpts,
|
||||
};
@@ -1,961 +0,0 @@
const std = @import("std");
|
||||
const Uri = std.Uri;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const DateTime = @import("../../datetime.zig").DateTime;
|
||||
const public_suffix_list = @import("../../data/public_suffix_list.zig").lookup;
|
||||
|
||||
pub const LookupOpts = struct {
|
||||
request_time: ?i64 = null,
|
||||
origin_uri: ?*const Uri = null,
|
||||
is_http: bool,
|
||||
is_navigation: bool = true,
|
||||
prefix: ?[]const u8 = null,
|
||||
};
|
||||
|
||||
pub const Jar = struct {
|
||||
allocator: Allocator,
|
||||
cookies: std.ArrayListUnmanaged(Cookie),
|
||||
|
||||
pub fn init(allocator: Allocator) Jar {
|
||||
return .{
|
||||
.cookies = .{},
|
||||
.allocator = allocator,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Jar) void {
|
||||
for (self.cookies.items) |c| {
|
||||
c.deinit();
|
||||
}
|
||||
self.cookies.deinit(self.allocator);
|
||||
}
|
||||
|
||||
pub fn clearRetainingCapacity(self: *Jar) void {
|
||||
for (self.cookies.items) |c| {
|
||||
c.deinit();
|
||||
}
|
||||
self.cookies.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
pub fn add(
|
||||
self: *Jar,
|
||||
cookie: Cookie,
|
||||
request_time: i64,
|
||||
) !void {
|
||||
const is_expired = isCookieExpired(&cookie, request_time);
|
||||
defer if (is_expired) {
|
||||
cookie.deinit();
|
||||
};
|
||||
|
||||
for (self.cookies.items, 0..) |*c, i| {
|
||||
if (areCookiesEqual(&cookie, c)) {
|
||||
c.deinit();
|
||||
if (is_expired) {
|
||||
_ = self.cookies.swapRemove(i);
|
||||
} else {
|
||||
self.cookies.items[i] = cookie;
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (!is_expired) {
|
||||
try self.cookies.append(self.allocator, cookie);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn removeExpired(self: *Jar, request_time: ?i64) void {
|
||||
if (self.cookies.items.len == 0) return;
|
||||
const time = request_time orelse std.time.timestamp();
|
||||
// iterate backwards so swapRemove is safe; note that index 0 is included
var i: usize = self.cookies.items.len;
while (i > 0) {
i -= 1;
const cookie = &self.cookies.items[i];
if (isCookieExpired(cookie, time)) {
self.cookies.swapRemove(i).deinit();
}
}
|
||||
}
|
||||
|
||||
pub fn forRequest(self: *Jar, target_uri: *const Uri, writer: anytype, opts: LookupOpts) !void {
|
||||
const target = PreparedUri{
|
||||
.host = (target_uri.host orelse return error.InvalidURI).percent_encoded,
|
||||
.path = target_uri.path.percent_encoded,
|
||||
.secure = std.mem.eql(u8, target_uri.scheme, "https"),
|
||||
};
|
||||
const same_site = try areSameSite(opts.origin_uri, target.host);
|
||||
|
||||
removeExpired(self, opts.request_time);
|
||||
|
||||
var first = true;
|
||||
for (self.cookies.items) |*cookie| {
|
||||
if (!cookie.appliesTo(&target, same_site, opts.is_navigation, opts.is_http)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// we have a match!
|
||||
if (first) {
|
||||
if (opts.prefix) |prefix| {
|
||||
try writer.writeAll(prefix);
|
||||
}
|
||||
first = false;
|
||||
} else {
|
||||
try writer.writeAll("; ");
|
||||
}
|
||||
try writeCookie(cookie, writer);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn populateFromResponse(self: *Jar, uri: *const Uri, set_cookie: []const u8) !void {
|
||||
const c = Cookie.parse(self.allocator, uri, set_cookie) catch |err| {
|
||||
log.warn(.web_api, "cookie parse failed", .{ .raw = set_cookie, .err = err });
|
||||
return;
|
||||
};
|
||||
|
||||
const now = std.time.timestamp();
|
||||
try self.add(c, now);
|
||||
}
|
||||
|
||||
fn writeCookie(cookie: *const Cookie, writer: anytype) !void {
|
||||
if (cookie.name.len > 0) {
|
||||
try writer.writeAll(cookie.name);
|
||||
try writer.writeByte('=');
|
||||
}
|
||||
if (cookie.value.len > 0) {
|
||||
try writer.writeAll(cookie.value);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
fn isCookieExpired(cookie: *const Cookie, now: i64) bool {
|
||||
const ce = cookie.expires orelse return false;
|
||||
return ce <= @as(f64, @floatFromInt(now));
|
||||
}
|
||||
|
||||
fn areCookiesEqual(a: *const Cookie, b: *const Cookie) bool {
|
||||
if (std.mem.eql(u8, a.name, b.name) == false) {
|
||||
return false;
|
||||
}
|
||||
if (std.mem.eql(u8, a.domain, b.domain) == false) {
|
||||
return false;
|
||||
}
|
||||
if (std.mem.eql(u8, a.path, b.path) == false) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
fn areSameSite(origin_uri_: ?*const std.Uri, target_host: []const u8) !bool {
|
||||
const origin_uri = origin_uri_ orelse return true;
|
||||
const origin_host = (origin_uri.host orelse return error.InvalidURI).percent_encoded;
|
||||
|
||||
// common case
|
||||
if (std.mem.eql(u8, target_host, origin_host)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return std.mem.eql(u8, findSecondLevelDomain(target_host), findSecondLevelDomain(origin_host));
|
||||
}
|
||||
|
||||
fn findSecondLevelDomain(host: []const u8) []const u8 {
|
||||
var i = std.mem.lastIndexOfScalar(u8, host, '.') orelse return host;
|
||||
while (true) {
|
||||
i = std.mem.lastIndexOfScalar(u8, host[0..i], '.') orelse return host;
|
||||
const strip = i + 1;
|
||||
if (public_suffix_list(host[strip..]) == false) {
|
||||
return host[strip..];
|
||||
}
|
||||
}
|
||||
}
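// Illustrative, self-contained sketch (not from the original file): the same
// right-to-left walk as findSecondLevelDomain above, but checked against a
// tiny hard-coded suffix set instead of the real public suffix list. The
// `toy` names are made up for this example; it relies on the file's existing
// `std` import.
fn isToySuffix(s: []const u8) bool {
return std.mem.eql(u8, s, "io") or std.mem.eql(u8, s, "uk") or std.mem.eql(u8, s, "gov.uk");
}

fn toyRegistrableDomain(host: []const u8) []const u8 {
var i = std.mem.lastIndexOfScalar(u8, host, '.') orelse return host;
while (true) {
// Move one label to the left while the remainder is still a public
// suffix; the first non-suffix remainder is the registrable domain.
i = std.mem.lastIndexOfScalar(u8, host[0..i], '.') orelse return host;
if (!isToySuffix(host[i + 1 ..])) {
return host[i + 1 ..];
}
}
}

test "toy registrable domain sketch" {
try std.testing.expectEqualStrings("lightpanda.io", toyRegistrableDomain("first.test.lightpanda.io"));
try std.testing.expectEqualStrings("stats.gov.uk", toyRegistrableDomain("www.stats.gov.uk"));
try std.testing.expectEqualStrings("www.gov.uk", toyRegistrableDomain("www.gov.uk"));
}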
|
||||
|
||||
pub const Cookie = struct {
|
||||
arena: ArenaAllocator,
|
||||
name: []const u8,
|
||||
value: []const u8,
|
||||
domain: []const u8,
|
||||
path: []const u8,
|
||||
expires: ?f64,
|
||||
secure: bool = false,
|
||||
http_only: bool = false,
|
||||
same_site: SameSite = .none,
|
||||
|
||||
const SameSite = enum {
|
||||
strict,
|
||||
lax,
|
||||
none,
|
||||
};
|
||||
|
||||
pub fn deinit(self: *const Cookie) void {
|
||||
self.arena.deinit();
|
||||
}
|
||||
|
||||
// There's https://datatracker.ietf.org/doc/html/rfc6265 but browsers are
// far less strict. I only found 2 cases where browsers will reject a cookie:
// - a byte in 0...31 or 127...255 anywhere in the cookie (the HTTP header
// parser might take care of this already)
// - any shenanigans with the domain attribute - it has to be the current
// domain or one of higher order, excluding the TLD.
// Anything else will turn into a cookie.
// Single value? That's a cookie with an empty name and a value.
// Keys or values with characters the RFC says aren't allowed? Allowed!
// (as long as the characters are 32...126)
// Invalid attributes? Ignored.
// Invalid attribute values? Ignored.
// Duplicate attributes? Use the last valid one.
// Value-less attributes with a value? The value is ignored.
|
||||
pub fn parse(allocator: Allocator, uri: *const std.Uri, str: []const u8) !Cookie {
|
||||
if (str.len == 0) {
|
||||
// this check is necessary, `std.mem.minMax` asserts len > 0
|
||||
return error.Empty;
|
||||
}
|
||||
{
|
||||
const min, const max = std.mem.minMax(u8, str);
|
||||
if (min < 32 or max > 126) {
|
||||
return error.InvalidByteSequence;
|
||||
}
|
||||
}
|
||||
|
||||
const cookie_name, const cookie_value, const rest = parseNameValue(str) catch {
|
||||
return error.InvalidNameValue;
|
||||
};
|
||||
|
||||
var scrap: [8]u8 = undefined;
|
||||
|
||||
var path: ?[]const u8 = null;
|
||||
var domain: ?[]const u8 = null;
|
||||
var secure: ?bool = null;
|
||||
var max_age: ?i64 = null;
|
||||
var http_only: ?bool = null;
|
||||
var expires: ?[]const u8 = null;
|
||||
var same_site: ?Cookie.SameSite = null;
|
||||
|
||||
var it = std.mem.splitScalar(u8, rest, ';');
|
||||
while (it.next()) |attribute| {
|
||||
const sep = std.mem.indexOfScalarPos(u8, attribute, 0, '=') orelse attribute.len;
|
||||
const key_string = trim(attribute[0..sep]);
|
||||
|
||||
if (key_string.len > 8) {
|
||||
// not valid, ignore
|
||||
continue;
|
||||
}
|
||||
|
||||
// Make sure no one changes our max length without also expanding the size of scrap
|
||||
std.debug.assert(key_string.len <= 8);
|
||||
|
||||
const key = std.meta.stringToEnum(enum {
|
||||
path,
|
||||
domain,
|
||||
secure,
|
||||
@"max-age",
|
||||
expires,
|
||||
httponly,
|
||||
samesite,
|
||||
}, std.ascii.lowerString(&scrap, key_string)) orelse continue;
|
||||
|
||||
const value = if (sep == attribute.len) "" else trim(attribute[sep + 1 ..]);
|
||||
switch (key) {
|
||||
.path => path = value,
|
||||
.domain => domain = value,
|
||||
.secure => secure = true,
|
||||
.@"max-age" => max_age = std.fmt.parseInt(i64, value, 10) catch continue,
|
||||
.expires => expires = value,
|
||||
.httponly => http_only = true,
|
||||
.samesite => {
|
||||
same_site = std.meta.stringToEnum(Cookie.SameSite, std.ascii.lowerString(&scrap, value)) orelse continue;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (same_site == .none and secure == null) {
|
||||
return error.InsecureSameSite;
|
||||
}
|
||||
|
||||
var arena = ArenaAllocator.init(allocator);
|
||||
errdefer arena.deinit();
|
||||
const aa = arena.allocator();
|
||||
const owned_name = try aa.dupe(u8, cookie_name);
|
||||
const owned_value = try aa.dupe(u8, cookie_value);
|
||||
const owned_path = try parsePath(aa, uri, path);
|
||||
const owned_domain = try parseDomain(aa, uri, domain);
|
||||
|
||||
var normalized_expires: ?f64 = null;
|
||||
if (max_age) |ma| {
|
||||
normalized_expires = @floatFromInt(std.time.timestamp() + ma);
|
||||
} else {
|
||||
// max age takes priority over expires
|
||||
if (expires) |expires_| {
|
||||
var exp_dt = DateTime.parse(expires_, .rfc822) catch null;
|
||||
if (exp_dt == null) {
|
||||
if ((expires_.len > 11 and expires_[7] == '-' and expires_[11] == '-')) {
|
||||
// Replace dashes and try again
|
||||
const output = try aa.dupe(u8, expires_);
|
||||
output[7] = ' ';
|
||||
output[11] = ' ';
|
||||
exp_dt = DateTime.parse(output, .rfc822) catch null;
|
||||
}
|
||||
}
|
||||
if (exp_dt) |dt| {
|
||||
normalized_expires = @floatFromInt(dt.unix(.seconds));
|
||||
} else {
|
||||
// Algolia, for example, will call document.setCookie with
|
||||
// an expired value which is literally 'Invalid Date'
|
||||
// (it's trying to do something like: `new Date() + undefined`).
|
||||
log.debug(.web_api, "cookie expires date", .{ .date = expires_ });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return .{
|
||||
.arena = arena,
|
||||
.name = owned_name,
|
||||
.value = owned_value,
|
||||
.path = owned_path,
|
||||
.same_site = same_site orelse .lax,
|
||||
.secure = secure orelse false,
|
||||
.http_only = http_only orelse false,
|
||||
.domain = owned_domain,
|
||||
.expires = normalized_expires,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn parsePath(arena: Allocator, uri: ?*const std.Uri, explicit_path: ?[]const u8) ![]const u8 {
|
||||
// path attribute value either begins with a '/' or we
|
||||
// ignore it and use the "default-path" algorithm
|
||||
if (explicit_path) |path| {
|
||||
if (path.len > 0 and path[0] == '/') {
|
||||
return try arena.dupe(u8, path);
|
||||
}
|
||||
}
|
||||
|
||||
// default-path
|
||||
const url_path = (uri orelse return "/").path;
|
||||
|
||||
const either = url_path.percent_encoded;
|
||||
if (either.len == 0 or (either.len == 1 and either[0] == '/')) {
|
||||
return "/";
|
||||
}
|
||||
|
||||
var owned_path: []const u8 = try percentEncode(arena, url_path, isPathChar);
|
||||
const last = std.mem.lastIndexOfScalar(u8, owned_path[1..], '/') orelse {
|
||||
return "/";
|
||||
};
|
||||
return try arena.dupe(u8, owned_path[0 .. last + 1]);
|
||||
}
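// Illustrative sketch (not from the original file): the "default-path" rule
// used by parsePath above when the Set-Cookie header carries no usable Path
// attribute: keep the request path up to, but not including, its last '/'.
// The `toyDefaultPath` name is made up for this example.
fn toyDefaultPath(url_path: []const u8) []const u8 {
if (url_path.len <= 1) return "/";
const last = std.mem.lastIndexOfScalar(u8, url_path[1..], '/') orelse return "/";
return url_path[0 .. last + 1];
}

test "toy default-path sketch" {
try std.testing.expectEqualStrings("/", toyDefaultPath("/abc"));
try std.testing.expectEqualStrings("/abc", toyDefaultPath("/abc/123"));
try std.testing.expectEqualStrings("/abc/123", toyDefaultPath("/abc/123/"));
}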
|
||||
|
||||
pub fn parseDomain(arena: Allocator, uri: ?*const std.Uri, explicit_domain: ?[]const u8) ![]const u8 {
|
||||
var encoded_host: ?[]const u8 = null;
|
||||
if (uri) |uri_| {
|
||||
const uri_host = uri_.host orelse return error.InvalidURI;
|
||||
const host = try percentEncode(arena, uri_host, isHostChar);
|
||||
_ = toLower(host);
|
||||
encoded_host = host;
|
||||
}
|
||||
|
||||
if (explicit_domain) |domain| {
|
||||
if (domain.len > 0) {
|
||||
const no_leading_dot = if (domain[0] == '.') domain[1..] else domain;
|
||||
|
||||
var aw = try std.Io.Writer.Allocating.initCapacity(arena, no_leading_dot.len + 1);
|
||||
try aw.writer.writeByte('.');
|
||||
try std.Uri.Component.percentEncode(&aw.writer, no_leading_dot, isHostChar);
|
||||
const owned_domain = toLower(aw.written());
|
||||
|
||||
if (std.mem.indexOfScalarPos(u8, owned_domain, 1, '.') == null and std.mem.eql(u8, "localhost", owned_domain[1..]) == false) {
|
||||
// can't set a cookie for a TLD
|
||||
return error.InvalidDomain;
|
||||
}
|
||||
if (encoded_host) |host| {
|
||||
if (std.mem.endsWith(u8, host, owned_domain[1..]) == false) {
|
||||
return error.InvalidDomain;
|
||||
}
|
||||
}
|
||||
|
||||
return owned_domain;
|
||||
}
|
||||
}
|
||||
|
||||
return encoded_host orelse return error.InvalidDomain; // default-domain
|
||||
}
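// Illustrative sketch (not from the original file): the core Domain-attribute
// check performed by parseDomain above. The attribute is normalized to a
// leading '.', a bare TLD is rejected (localhost excepted), and the value
// must be a suffix of the request host. An empty attribute falls back to the
// request host in the real code; the `toyDomainAccepted` name is made up here.
fn toyDomainAccepted(request_host: []const u8, domain_attr: []const u8) bool {
const bare = if (domain_attr.len > 0 and domain_attr[0] == '.') domain_attr[1..] else domain_attr;
if (bare.len == 0) return false;
if (std.mem.indexOfScalar(u8, bare, '.') == null and !std.mem.eql(u8, bare, "localhost")) {
// can't set a cookie for a TLD
return false;
}
return std.mem.endsWith(u8, request_host, bare);
}

test "toy domain attribute sketch" {
try std.testing.expect(toyDomainAccepted("dev.lightpanda.io", "lightpanda.io"));
try std.testing.expect(toyDomainAccepted("dev.lightpanda.io", ".lightpanda.io"));
try std.testing.expect(!toyDomainAccepted("lightpanda.io", "io"));
try std.testing.expect(!toyDomainAccepted("lightpanda.io", "other.lightpanda.io"));
}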
|
||||
|
||||
pub fn percentEncode(arena: Allocator, component: std.Uri.Component, comptime isValidChar: fn (u8) bool) ![]u8 {
|
||||
switch (component) {
|
||||
.raw => |str| {
|
||||
var aw = try std.Io.Writer.Allocating.initCapacity(arena, str.len);
|
||||
try std.Uri.Component.percentEncode(&aw.writer, str, isValidChar);
|
||||
return aw.written(); // @memory retains memory used before growing
|
||||
},
|
||||
.percent_encoded => |str| {
|
||||
return try arena.dupe(u8, str);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isHostChar(c: u8) bool {
|
||||
return switch (c) {
|
||||
'A'...'Z', 'a'...'z', '0'...'9', '-', '.', '_', '~' => true,
|
||||
'!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=' => true,
|
||||
':' => true,
|
||||
'[', ']' => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isPathChar(c: u8) bool {
|
||||
return switch (c) {
|
||||
'A'...'Z', 'a'...'z', '0'...'9', '-', '.', '_', '~' => true,
|
||||
'!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=' => true,
|
||||
'/', ':', '@' => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
fn parseNameValue(str: []const u8) !struct { []const u8, []const u8, []const u8 } {
|
||||
const key_value_end = std.mem.indexOfScalarPos(u8, str, 0, ';') orelse str.len;
|
||||
const rest = if (key_value_end == str.len) "" else str[key_value_end + 1 ..];
|
||||
|
||||
const sep = std.mem.indexOfScalarPos(u8, str[0..key_value_end], 0, '=') orelse {
|
||||
const value = trim(str[0..key_value_end]);
|
||||
if (value.len == 0) {
|
||||
return error.Empty;
|
||||
}
|
||||
return .{ "", value, rest };
|
||||
};
|
||||
|
||||
const name = trim(str[0..sep]);
|
||||
const value = trim(str[sep + 1 .. key_value_end]);
|
||||
return .{ name, value, rest };
|
||||
}
|
||||
|
||||
pub fn appliesTo(self: *const Cookie, url: *const PreparedUri, same_site: bool, is_navigation: bool, is_http: bool) bool {
|
||||
if (self.http_only and is_http == false) {
|
||||
// http-only cookies can't be accessed from JavaScript
|
||||
return false;
|
||||
}
|
||||
|
||||
if (url.secure == false and self.secure) {
|
||||
// secure cookies can only be sent over HTTPS
|
||||
return false;
|
||||
}
|
||||
|
||||
if (same_site == false) {
|
||||
// If we aren't on the "same site" (matching 2nd level domain
|
||||
// taking into account public suffix list), then the cookie
|
||||
// can only be sent if cookie.same_site == .none, or if
|
||||
// we're navigating to (as opposed to, say, loading an image)
|
||||
// and cookie.same_site == .lax
|
||||
switch (self.same_site) {
|
||||
.strict => return false,
|
||||
.lax => if (is_navigation == false) return false,
|
||||
.none => {},
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
if (self.domain[0] == '.') {
|
||||
// When a Set-Cookie header has a Domain attribute,
// then we will _always_ prefix it with a dot, extending its
|
||||
// availability to all subdomains (yes, setting the Domain
|
||||
// attributes EXPANDS the domains which the cookie will be
|
||||
// sent to, to always include all subdomains).
|
||||
if (std.mem.eql(u8, url.host, self.domain[1..]) == false and std.mem.endsWith(u8, url.host, self.domain) == false) {
|
||||
return false;
|
||||
}
|
||||
} else if (std.mem.eql(u8, url.host, self.domain) == false) {
|
||||
// When the Domain attribute isn't specified, the cookie
|
||||
// is only sent on an exact match.
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
if (self.path[self.path.len - 1] == '/') {
|
||||
// If our cookie path has a trailing slash, we can only match if
// the target path has it as a prefix. I.e., if our path is
// /doc/ we can only match /doc/*
|
||||
if (std.mem.startsWith(u8, url.path, self.path) == false) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
// Our cookie path is something like /hello
|
||||
if (std.mem.startsWith(u8, url.path, self.path) == false) {
|
||||
// The target path has to either be /hello (it isn't)
|
||||
return false;
|
||||
} else if (url.path.len < self.path.len or (url.path.len > self.path.len and url.path[self.path.len] != '/')) {
|
||||
// Or it has to be something like /hello/*, and it isn't.
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
};
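// Illustrative, self-contained sketch (not from the original file): the
// path-matching rule used by Cookie.appliesTo above, extracted so the two
// cases (cookie path with and without a trailing '/') are easy to see. The
// `toyPathMatches` name is made up for this example.
fn toyPathMatches(cookie_path: []const u8, url_path: []const u8) bool {
if (cookie_path[cookie_path.len - 1] == '/') {
// "/docs/" matches "/docs/" itself and anything below it.
return std.mem.startsWith(u8, url_path, cookie_path);
}
// "/docs" matches "/docs" exactly, or "/docs/<anything>".
if (!std.mem.startsWith(u8, url_path, cookie_path)) return false;
return url_path.len == cookie_path.len or url_path[cookie_path.len] == '/';
}

test "toy cookie path matching sketch" {
try std.testing.expect(toyPathMatches("/docs/", "/docs/more"));
try std.testing.expect(!toyPathMatches("/docs/", "/docs"));
try std.testing.expect(toyPathMatches("/about", "/about"));
try std.testing.expect(toyPathMatches("/about", "/about/team"));
try std.testing.expect(!toyPathMatches("/about", "/aboutus"));
}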
|
||||
|
||||
pub const PreparedUri = struct {
|
||||
host: []const u8, // Percent encoded, lower case
|
||||
path: []const u8, // Percent encoded
|
||||
secure: bool, // True if scheme is https
|
||||
};
|
||||
|
||||
fn trim(str: []const u8) []const u8 {
|
||||
return std.mem.trim(u8, str, &std.ascii.whitespace);
|
||||
}
|
||||
|
||||
fn trimLeft(str: []const u8) []const u8 {
|
||||
return std.mem.trimLeft(u8, str, &std.ascii.whitespace);
|
||||
}
|
||||
|
||||
fn trimRight(str: []const u8) []const u8 {
|
||||
return std.mem.trimRight(u8, str, &std.ascii.whitespace);
|
||||
}
|
||||
|
||||
pub fn toLower(str: []u8) []u8 {
|
||||
for (str, 0..) |c, i| {
|
||||
str[i] = std.ascii.toLower(c);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "cookie: findSecondLevelDomain" {
|
||||
const cases = [_]struct { []const u8, []const u8 }{
|
||||
.{ "", "" },
|
||||
.{ "com", "com" },
|
||||
.{ "lightpanda.io", "lightpanda.io" },
|
||||
.{ "lightpanda.io", "test.lightpanda.io" },
|
||||
.{ "lightpanda.io", "first.test.lightpanda.io" },
|
||||
.{ "www.gov.uk", "www.gov.uk" },
|
||||
.{ "stats.gov.uk", "www.stats.gov.uk" },
|
||||
.{ "api.gov.uk", "api.gov.uk" },
|
||||
.{ "dev.api.gov.uk", "dev.api.gov.uk" },
|
||||
.{ "dev.api.gov.uk", "1.dev.api.gov.uk" },
|
||||
};
|
||||
for (cases) |c| {
|
||||
try testing.expectEqual(c.@"0", findSecondLevelDomain(c.@"1"));
|
||||
}
|
||||
}
|
||||
|
||||
test "Jar: add" {
|
||||
const expectCookies = struct {
|
||||
fn expect(expected: []const struct { []const u8, []const u8 }, jar: Jar) !void {
|
||||
try testing.expectEqual(expected.len, jar.cookies.items.len);
|
||||
LOOP: for (expected) |e| {
|
||||
for (jar.cookies.items) |c| {
|
||||
if (std.mem.eql(u8, e.@"0", c.name) and std.mem.eql(u8, e.@"1", c.value)) {
|
||||
continue :LOOP;
|
||||
}
|
||||
}
|
||||
std.debug.print("Cookie ({s}={s}) not found", .{ e.@"0", e.@"1" });
|
||||
return error.CookieNotFound;
|
||||
}
|
||||
}
|
||||
}.expect;
|
||||
|
||||
const now = std.time.timestamp();
|
||||
|
||||
var jar = Jar.init(testing.allocator);
|
||||
defer jar.deinit();
|
||||
try expectCookies(&.{}, jar);
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "over=9000;Max-Age=0"), now);
|
||||
try expectCookies(&.{}, jar);
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "over=9000"), now);
|
||||
try expectCookies(&.{.{ "over", "9000" }}, jar);
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "over=9000!!"), now);
|
||||
try expectCookies(&.{.{ "over", "9000!!" }}, jar);
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "spice=flow"), now);
|
||||
try expectCookies(&.{ .{ "over", "9000!!" }, .{ "spice", "flow" } }, jar);
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "spice=flows;Path=/"), now);
|
||||
try expectCookies(&.{ .{ "over", "9000!!" }, .{ "spice", "flows" } }, jar);
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "over=9001;Path=/other"), now);
|
||||
try expectCookies(&.{ .{ "over", "9000!!" }, .{ "spice", "flows" }, .{ "over", "9001" } }, jar);
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "over=9002;Path=/;Domain=lightpanda.io"), now);
|
||||
try expectCookies(&.{ .{ "over", "9000!!" }, .{ "spice", "flows" }, .{ "over", "9001" }, .{ "over", "9002" } }, jar);
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "over=x;Path=/other;Max-Age=-200"), now);
|
||||
try expectCookies(&.{ .{ "over", "9000!!" }, .{ "spice", "flows" }, .{ "over", "9002" } }, jar);
|
||||
}
|
||||
|
||||
test "Jar: forRequest" {
|
||||
const expectCookies = struct {
|
||||
fn expect(expected: []const u8, jar: *Jar, target_uri: Uri, opts: LookupOpts) !void {
|
||||
var arr: std.ArrayListUnmanaged(u8) = .empty;
|
||||
defer arr.deinit(testing.allocator);
|
||||
try jar.forRequest(&target_uri, arr.writer(testing.allocator), opts);
|
||||
try testing.expectEqual(expected, arr.items);
|
||||
}
|
||||
}.expect;
|
||||
|
||||
const now = std.time.timestamp();
|
||||
|
||||
var jar = Jar.init(testing.allocator);
|
||||
defer jar.deinit();
|
||||
|
||||
const test_uri_2 = Uri.parse("http://test.lightpanda.io/") catch unreachable;
|
||||
|
||||
{
|
||||
// test with no cookies
|
||||
try expectCookies("", &jar, test_uri, .{ .is_http = true });
|
||||
}
|
||||
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "global1=1"), now);
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "global2=2;Max-Age=30;domain=lightpanda.io"), now);
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "path1=3;Path=/about"), now);
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "path2=4;Path=/docs/"), now);
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "secure=5;Secure"), now);
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "sitenone=6;SameSite=None;Path=/x/;Secure"), now);
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "sitelax=7;SameSite=Lax;Path=/x/"), now);
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri, "sitestrict=8;SameSite=Strict;Path=/x/"), now);
|
||||
try jar.add(try Cookie.parse(testing.allocator, &test_uri_2, "domain1=9;domain=test.lightpanda.io"), now);
|
||||
|
||||
// nothing fancy here
|
||||
try expectCookies("global1=1; global2=2", &jar, test_uri, .{ .is_http = true });
|
||||
try expectCookies("global1=1; global2=2", &jar, test_uri, .{ .origin_uri = &test_uri, .is_navigation = false, .is_http = true });
|
||||
|
||||
// We have a cookie where Domain=lightpanda.io
|
||||
// This should _not_ match xyxlightpanda.io
|
||||
try expectCookies("", &jar, try std.Uri.parse("http://anothersitelightpanda.io/"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// matching path without trailing /
|
||||
try expectCookies("global1=1; global2=2; path1=3", &jar, try std.Uri.parse("http://lightpanda.io/about"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// incomplete prefix path
|
||||
try expectCookies("global1=1; global2=2", &jar, try std.Uri.parse("http://lightpanda.io/abou"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// path doesn't match
|
||||
try expectCookies("global1=1; global2=2", &jar, try std.Uri.parse("http://lightpanda.io/aboutus"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// path doesn't match cookie directory
|
||||
try expectCookies("global1=1; global2=2", &jar, try std.Uri.parse("http://lightpanda.io/docs"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// exact directory match
|
||||
try expectCookies("global1=1; global2=2; path2=4", &jar, try std.Uri.parse("http://lightpanda.io/docs/"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// sub directory match
|
||||
try expectCookies("global1=1; global2=2; path2=4", &jar, try std.Uri.parse("http://lightpanda.io/docs/more"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// secure
|
||||
try expectCookies("global1=1; global2=2; secure=5", &jar, try std.Uri.parse("https://lightpanda.io/"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// navigational cross domain, secure
|
||||
try expectCookies("global1=1; global2=2; secure=5; sitenone=6; sitelax=7", &jar, try std.Uri.parse("https://lightpanda.io/x/"), .{
|
||||
.origin_uri = &(try std.Uri.parse("https://example.com/")),
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// navigational cross domain, insecure
|
||||
try expectCookies("global1=1; global2=2; sitelax=7", &jar, try std.Uri.parse("http://lightpanda.io/x/"), .{
|
||||
.origin_uri = &(try std.Uri.parse("https://example.com/")),
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// non-navigational cross domain, insecure
|
||||
try expectCookies("", &jar, try std.Uri.parse("http://lightpanda.io/x/"), .{
|
||||
.origin_uri = &(try std.Uri.parse("https://example.com/")),
|
||||
.is_http = true,
|
||||
.is_navigation = false,
|
||||
});
|
||||
|
||||
// non-navigational cross domain, secure
|
||||
try expectCookies("sitenone=6", &jar, try std.Uri.parse("https://lightpanda.io/x/"), .{
|
||||
.origin_uri = &(try std.Uri.parse("https://example.com/")),
|
||||
.is_http = true,
|
||||
.is_navigation = false,
|
||||
});
|
||||
|
||||
// non-navigational same origin
|
||||
try expectCookies("global1=1; global2=2; sitelax=7; sitestrict=8", &jar, try std.Uri.parse("http://lightpanda.io/x/"), .{
|
||||
.origin_uri = &(try std.Uri.parse("https://lightpanda.io/")),
|
||||
.is_http = true,
|
||||
.is_navigation = false,
|
||||
});
|
||||
|
||||
// exact domain match + suffix
|
||||
try expectCookies("global2=2; domain1=9", &jar, try std.Uri.parse("http://test.lightpanda.io/"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// domain suffix match + suffix
|
||||
try expectCookies("global2=2; domain1=9", &jar, try std.Uri.parse("http://1.test.lightpanda.io/"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
// non-matching domain
|
||||
try expectCookies("global2=2", &jar, try std.Uri.parse("http://other.lightpanda.io/"), .{
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
|
||||
const l = jar.cookies.items.len;
|
||||
try expectCookies("global1=1", &jar, test_uri, .{
|
||||
.request_time = now + 100,
|
||||
.origin_uri = &test_uri,
|
||||
.is_http = true,
|
||||
});
|
||||
try testing.expectEqual(l - 1, jar.cookies.items.len);
|
||||
|
||||
// If you add more cases after this point, note that the above test removes
|
||||
// the 'global2' cookie
|
||||
}
|
||||
|
||||
test "Cookie: parse key=value" {
|
||||
try expectError(error.Empty, null, "");
|
||||
try expectError(error.InvalidByteSequence, null, &.{ 'a', 30, '=', 'b' });
|
||||
try expectError(error.InvalidByteSequence, null, &.{ 'a', 127, '=', 'b' });
|
||||
try expectError(error.InvalidByteSequence, null, &.{ 'a', '=', 'b', 20 });
|
||||
try expectError(error.InvalidByteSequence, null, &.{ 'a', '=', 'b', 128 });
|
||||
|
||||
try expectAttribute(.{ .name = "", .value = "a" }, null, "a");
|
||||
try expectAttribute(.{ .name = "", .value = "a" }, null, "a;");
|
||||
try expectAttribute(.{ .name = "", .value = "a b" }, null, "a b");
|
||||
try expectAttribute(.{ .name = "a b", .value = "b" }, null, "a b=b");
|
||||
try expectAttribute(.{ .name = "a,", .value = "b" }, null, "a,=b");
|
||||
try expectAttribute(.{ .name = ":a>", .value = "b>><" }, null, ":a>=b>><");
|
||||
|
||||
try expectAttribute(.{ .name = "abc", .value = "" }, null, "abc=");
|
||||
try expectAttribute(.{ .name = "abc", .value = "" }, null, "abc=;");
|
||||
|
||||
try expectAttribute(.{ .name = "a", .value = "b" }, null, "a=b");
|
||||
try expectAttribute(.{ .name = "a", .value = "b" }, null, "a=b;");
|
||||
|
||||
try expectAttribute(.{ .name = "abc", .value = "fe f" }, null, "abc= fe f");
|
||||
try expectAttribute(.{ .name = "abc", .value = "fe f" }, null, "abc= fe f ");
|
||||
try expectAttribute(.{ .name = "abc", .value = "fe f" }, null, "abc= fe f;");
|
||||
try expectAttribute(.{ .name = "abc", .value = "fe f" }, null, "abc= fe f ;");
|
||||
try expectAttribute(.{ .name = "abc", .value = "\" fe f\"" }, null, "abc=\" fe f\"");
|
||||
try expectAttribute(.{ .name = "abc", .value = "\" fe f \"" }, null, "abc=\" fe f \"");
|
||||
try expectAttribute(.{ .name = "ab4344c", .value = "1ads23" }, null, " ab4344c=1ads23 ");
|
||||
|
||||
try expectAttribute(.{ .name = "ab4344c", .value = "1ads23" }, null, " ab4344c = 1ads23 ;");
|
||||
}
|
||||
|
||||
test "Cookie: parse path" {
|
||||
try expectAttribute(.{ .path = "/" }, "http://a/", "b");
|
||||
try expectAttribute(.{ .path = "/" }, "http://a/", "b;path");
|
||||
try expectAttribute(.{ .path = "/" }, "http://a/", "b;Path=");
|
||||
try expectAttribute(.{ .path = "/" }, "http://a/", "b;Path=;");
|
||||
try expectAttribute(.{ .path = "/" }, "http://a/", "b; Path=other");
|
||||
try expectAttribute(.{ .path = "/" }, "http://a/23", "b; path=other ");
|
||||
|
||||
try expectAttribute(.{ .path = "/" }, "http://a/abc", "b");
|
||||
try expectAttribute(.{ .path = "/abc" }, "http://a/abc/", "b");
|
||||
try expectAttribute(.{ .path = "/abc" }, "http://a/abc/123", "b");
|
||||
try expectAttribute(.{ .path = "/abc/123" }, "http://a/abc/123/", "b");
|
||||
|
||||
try expectAttribute(.{ .path = "/a" }, "http://a/", "b;Path=/a");
|
||||
try expectAttribute(.{ .path = "/aa" }, "http://a/", "b;path=/aa;");
|
||||
try expectAttribute(.{ .path = "/aabc/" }, "http://a/", "b; path= /aabc/ ;");
|
||||
|
||||
try expectAttribute(.{ .path = "/bbb/" }, "http://a/", "b; path=/a/; path=/bbb/");
|
||||
try expectAttribute(.{ .path = "/cc" }, "http://a/", "b; path=/a/; path=/bbb/; path = /cc");
|
||||
}
|
||||
|
||||
test "Cookie: parse secure" {
|
||||
try expectAttribute(.{ .secure = false }, null, "b");
|
||||
try expectAttribute(.{ .secure = false }, null, "b;secured");
|
||||
try expectAttribute(.{ .secure = false }, null, "b;security");
|
||||
try expectAttribute(.{ .secure = false }, null, "b;SecureX");
|
||||
try expectAttribute(.{ .secure = true }, null, "b; Secure");
|
||||
try expectAttribute(.{ .secure = true }, null, "b; Secure ");
|
||||
try expectAttribute(.{ .secure = true }, null, "b; Secure=on ");
|
||||
try expectAttribute(.{ .secure = true }, null, "b; Secure=Off ");
|
||||
try expectAttribute(.{ .secure = true }, null, "b; secure=Off ");
|
||||
try expectAttribute(.{ .secure = true }, null, "b; seCUre=Off ");
|
||||
}
|
||||
|
||||
test "Cookie: parse HttpOnly" {
|
||||
try expectAttribute(.{ .http_only = false }, null, "b");
|
||||
try expectAttribute(.{ .http_only = false }, null, "b;HttpOnly0");
|
||||
try expectAttribute(.{ .http_only = false }, null, "b;H ttpOnly");
|
||||
try expectAttribute(.{ .http_only = true }, null, "b; HttpOnly");
|
||||
try expectAttribute(.{ .http_only = true }, null, "b; Httponly ");
|
||||
try expectAttribute(.{ .http_only = true }, null, "b; Httponly=on ");
|
||||
try expectAttribute(.{ .http_only = true }, null, "b; httpOnly=Off ");
|
||||
try expectAttribute(.{ .http_only = true }, null, "b; httpOnly=Off ");
|
||||
try expectAttribute(.{ .http_only = true }, null, "b; HttpOnly=Off ");
|
||||
}
|
||||
|
||||
test "Cookie: parse SameSite" {
|
||||
try expectAttribute(.{ .same_site = .lax }, null, "b;samesite");
|
||||
try expectAttribute(.{ .same_site = .lax }, null, "b;samesite=lax");
|
||||
try expectAttribute(.{ .same_site = .lax }, null, "b; SameSite=Lax ");
|
||||
try expectAttribute(.{ .same_site = .lax }, null, "b; SameSite=Other ");
|
||||
try expectAttribute(.{ .same_site = .lax }, null, "b; SameSite=Nope ");
|
||||
|
||||
// SameSite=none is only valid when Secure is set. The whole cookie is
|
||||
// rejected otherwise
|
||||
try expectError(error.InsecureSameSite, null, "b;samesite=none");
|
||||
try expectError(error.InsecureSameSite, null, "b;SameSite=None");
|
||||
try expectAttribute(.{ .same_site = .none }, null, "b; samesite=none; secure ");
|
||||
try expectAttribute(.{ .same_site = .none }, null, "b; SameSite=None ; SECURE");
|
||||
try expectAttribute(.{ .same_site = .none }, null, "b;Secure; SameSite=None");
|
||||
try expectAttribute(.{ .same_site = .none }, null, "b; SameSite=None; Secure");
|
||||
|
||||
try expectAttribute(.{ .same_site = .strict }, null, "b; samesite=Strict ");
|
||||
try expectAttribute(.{ .same_site = .strict }, null, "b; SameSite= STRICT ");
|
||||
try expectAttribute(.{ .same_site = .strict }, null, "b; SameSITE=strict;");
|
||||
try expectAttribute(.{ .same_site = .strict }, null, "b; SameSite=Strict");
|
||||
|
||||
try expectAttribute(.{ .same_site = .strict }, null, "b; SameSite=None; SameSite=lax; SameSite=Strict");
|
||||
}
|
||||
|
||||
test "Cookie: parse max-age" {
|
||||
try expectAttribute(.{ .expires = null }, null, "b;max-age");
|
||||
try expectAttribute(.{ .expires = null }, null, "b;max-age=abc");
|
||||
try expectAttribute(.{ .expires = null }, null, "b;max-age=13.22");
|
||||
try expectAttribute(.{ .expires = null }, null, "b;max-age=13abc");
|
||||
|
||||
try expectAttribute(.{ .expires = std.time.timestamp() + 13 }, null, "b;max-age=13");
|
||||
try expectAttribute(.{ .expires = std.time.timestamp() + -22 }, null, "b;max-age=-22");
|
||||
try expectAttribute(.{ .expires = std.time.timestamp() + 4294967296 }, null, "b;max-age=4294967296");
|
||||
try expectAttribute(.{ .expires = std.time.timestamp() + -4294967296 }, null, "b;Max-Age= -4294967296");
|
||||
try expectAttribute(.{ .expires = std.time.timestamp() + 0 }, null, "b; Max-Age=0");
|
||||
try expectAttribute(.{ .expires = std.time.timestamp() + 500 }, null, "b; Max-Age = 500 ; Max-Age=invalid");
|
||||
try expectAttribute(.{ .expires = std.time.timestamp() + 1000 }, null, "b;max-age=600;max-age=0;max-age = 1000");
|
||||
}
|
||||
|
||||
test "Cookie: parse expires" {
|
||||
try expectAttribute(.{ .expires = null }, null, "b;expires=");
|
||||
try expectAttribute(.{ .expires = null }, null, "b;expires=abc");
|
||||
try expectAttribute(.{ .expires = null }, null, "b;expires=13.22");
|
||||
try expectAttribute(.{ .expires = null }, null, "b;expires=33");
|
||||
|
||||
try expectAttribute(.{ .expires = 1918798080 }, null, "b;expires=Wed, 21 Oct 2030 07:28:00 GMT");
|
||||
try expectAttribute(.{ .expires = 1784275395 }, null, "b;expires=Fri, 17-Jul-2026 08:03:15 GMT");
|
||||
// max-age has priority over expires
|
||||
try expectAttribute(.{ .expires = std.time.timestamp() + 10 }, null, "b;Max-Age=10; expires=Wed, 21 Oct 2030 07:28:00 GMT");
|
||||
}
|
||||
|
||||
test "Cookie: parse all" {
|
||||
try expectCookie(.{
|
||||
.name = "user-id",
|
||||
.value = "9000",
|
||||
.path = "/cms",
|
||||
.domain = "lightpanda.io",
|
||||
}, "https://lightpanda.io/cms/users", "user-id=9000");
|
||||
|
||||
try expectCookie(.{
|
||||
.name = "user-id",
|
||||
.value = "9000",
|
||||
.path = "/",
|
||||
.http_only = true,
|
||||
.secure = true,
|
||||
.domain = ".lightpanda.io",
|
||||
.expires = @floatFromInt(std.time.timestamp() + 30),
|
||||
}, "https://lightpanda.io/cms/users", "user-id=9000; HttpOnly; Max-Age=30; Secure; path=/; Domain=lightpanda.io");
|
||||
|
||||
try expectCookie(.{
|
||||
.name = "app_session",
|
||||
.value = "123",
|
||||
.path = "/",
|
||||
.http_only = true,
|
||||
.secure = false,
|
||||
.domain = ".localhost",
|
||||
.same_site = .lax,
|
||||
.expires = @floatFromInt(std.time.timestamp() + 7200),
|
||||
}, "http://localhost:8000/login", "app_session=123; Max-Age=7200; path=/; domain=localhost; httponly; samesite=lax");
|
||||
}
|
||||
|
||||
test "Cookie: parse domain" {
|
||||
try expectAttribute(.{ .domain = "lightpanda.io" }, "http://lightpanda.io/", "b");
|
||||
try expectAttribute(.{ .domain = "dev.lightpanda.io" }, "http://dev.lightpanda.io/", "b");
|
||||
try expectAttribute(.{ .domain = ".lightpanda.io" }, "http://lightpanda.io/", "b;domain=lightpanda.io");
|
||||
try expectAttribute(.{ .domain = ".lightpanda.io" }, "http://lightpanda.io/", "b;domain=.lightpanda.io");
|
||||
try expectAttribute(.{ .domain = ".dev.lightpanda.io" }, "http://dev.lightpanda.io/", "b;domain=dev.lightpanda.io");
|
||||
try expectAttribute(.{ .domain = ".lightpanda.io" }, "http://dev.lightpanda.io/", "b;domain=lightpanda.io");
|
||||
try expectAttribute(.{ .domain = ".lightpanda.io" }, "http://dev.lightpanda.io/", "b;domain=.lightpanda.io");
|
||||
try expectAttribute(.{ .domain = ".localhost" }, "http://localhost/", "b;domain=localhost");
|
||||
try expectAttribute(.{ .domain = ".localhost" }, "http://localhost/", "b;domain=.localhost");
|
||||
|
||||
try expectError(error.InvalidDomain, "http://lightpanda.io/", "b;domain=io");
|
||||
try expectError(error.InvalidDomain, "http://lightpanda.io/", "b;domain=.io");
|
||||
try expectError(error.InvalidDomain, "http://lightpanda.io/", "b;domain=other.lightpanda.io");
|
||||
try expectError(error.InvalidDomain, "http://lightpanda.io/", "b;domain=other.lightpanda.com");
|
||||
try expectError(error.InvalidDomain, "http://lightpanda.io/", "b;domain=other.example.com");
|
||||
}
|
||||
|
||||
const ExpectedCookie = struct {
|
||||
name: []const u8,
|
||||
value: []const u8,
|
||||
path: []const u8,
|
||||
domain: []const u8,
|
||||
expires: ?f64 = null,
|
||||
secure: bool = false,
|
||||
http_only: bool = false,
|
||||
same_site: Cookie.SameSite = .lax,
|
||||
};
|
||||
|
||||
fn expectCookie(expected: ExpectedCookie, url: []const u8, set_cookie: []const u8) !void {
|
||||
const uri = try Uri.parse(url);
|
||||
var cookie = try Cookie.parse(testing.allocator, &uri, set_cookie);
|
||||
defer cookie.deinit();
|
||||
|
||||
try testing.expectEqual(expected.name, cookie.name);
|
||||
try testing.expectEqual(expected.value, cookie.value);
|
||||
try testing.expectEqual(expected.secure, cookie.secure);
|
||||
try testing.expectEqual(expected.http_only, cookie.http_only);
|
||||
try testing.expectEqual(expected.same_site, cookie.same_site);
|
||||
try testing.expectEqual(expected.path, cookie.path);
|
||||
try testing.expectEqual(expected.domain, cookie.domain);
|
||||
|
||||
try testing.expectDelta(expected.expires, cookie.expires, 2.0);
|
||||
}
|
||||
|
||||
fn expectAttribute(expected: anytype, url: ?[]const u8, set_cookie: []const u8) !void {
|
||||
const uri = if (url) |u| try Uri.parse(u) else test_uri;
|
||||
var cookie = try Cookie.parse(testing.allocator, &uri, set_cookie);
|
||||
defer cookie.deinit();
|
||||
|
||||
inline for (@typeInfo(@TypeOf(expected)).@"struct".fields) |f| {
|
||||
if (comptime std.mem.eql(u8, f.name, "expires")) {
|
||||
switch (@typeInfo(@TypeOf(expected.expires))) {
|
||||
.int, .comptime_int => try testing.expectDelta(@as(f64, @floatFromInt(expected.expires)), cookie.expires, 1.0),
|
||||
else => try testing.expectDelta(expected.expires, cookie.expires, 1.0),
|
||||
}
|
||||
} else {
|
||||
try testing.expectEqual(@field(expected, f.name), @field(cookie, f.name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn expectError(expected: anyerror, url: ?[]const u8, set_cookie: []const u8) !void {
|
||||
const uri = if (url) |u| try Uri.parse(u) else test_uri;
|
||||
try testing.expectError(expected, Cookie.parse(testing.allocator, &uri, set_cookie));
|
||||
}
|
||||
|
||||
const test_uri = Uri.parse("http://lightpanda.io/") catch unreachable;
@@ -1,178 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
const Env = @import("../env.zig").Env;
|
||||
|
||||
const ReadableStream = @This();
|
||||
const ReadableStreamDefaultReader = @import("ReadableStreamDefaultReader.zig");
|
||||
const ReadableStreamDefaultController = @import("ReadableStreamDefaultController.zig");
|
||||
|
||||
const State = union(enum) {
|
||||
readable,
|
||||
closed: ?[]const u8,
|
||||
cancelled: ?[]const u8,
|
||||
errored: Env.JsObject,
|
||||
};
|
||||
|
||||
// This promise resolves when a stream is canceled.
|
||||
cancel_resolver: Env.PersistentPromiseResolver,
|
||||
closed_resolver: Env.PersistentPromiseResolver,
|
||||
reader_resolver: ?Env.PersistentPromiseResolver = null,
|
||||
|
||||
locked: bool = false,
|
||||
state: State = .readable,
|
||||
|
||||
cancel_fn: ?Env.Function = null,
|
||||
pull_fn: ?Env.Function = null,
|
||||
|
||||
strategy: QueueingStrategy,
|
||||
queue: std.ArrayListUnmanaged([]const u8) = .empty,
|
||||
|
||||
pub const ReadableStreamReadResult = struct {
|
||||
const ValueUnion =
|
||||
union(enum) { data: []const u8, empty: void };
|
||||
|
||||
value: ValueUnion,
|
||||
done: bool,
|
||||
|
||||
pub fn get_value(self: *const ReadableStreamReadResult) ValueUnion {
|
||||
return self.value;
|
||||
}
|
||||
|
||||
pub fn get_done(self: *const ReadableStreamReadResult) bool {
|
||||
return self.done;
|
||||
}
|
||||
};
|
||||
|
||||
const UnderlyingSource = struct {
|
||||
start: ?Env.Function = null,
|
||||
pull: ?Env.Function = null,
|
||||
cancel: ?Env.Function = null,
|
||||
type: ?[]const u8 = null,
|
||||
};
|
||||
|
||||
const QueueingStrategy = struct {
|
||||
size: ?Env.Function = null,
|
||||
high_water_mark: u32 = 1,
|
||||
};
|
||||
|
||||
pub fn constructor(underlying: ?UnderlyingSource, _strategy: ?QueueingStrategy, page: *Page) !*ReadableStream {
|
||||
const strategy: QueueingStrategy = _strategy orelse .{};
|
||||
|
||||
const cancel_resolver = try page.main_context.createPersistentPromiseResolver(.self);
|
||||
const closed_resolver = try page.main_context.createPersistentPromiseResolver(.self);
|
||||
|
||||
const stream = try page.arena.create(ReadableStream);
|
||||
stream.* = ReadableStream{
|
||||
.cancel_resolver = cancel_resolver,
|
||||
.closed_resolver = closed_resolver,
|
||||
.strategy = strategy,
|
||||
};
|
||||
|
||||
const controller = ReadableStreamDefaultController{ .stream = stream };
|
||||
|
||||
// call start
|
||||
if (underlying) |src| {
|
||||
if (src.start) |start| {
|
||||
try start.call(void, .{controller});
|
||||
}
|
||||
|
||||
if (src.cancel) |cancel| {
|
||||
stream.cancel_fn = cancel;
|
||||
}
|
||||
|
||||
if (src.pull) |pull| {
|
||||
stream.pull_fn = pull;
|
||||
try stream.pullIf();
|
||||
}
|
||||
}
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
pub fn destructor(self: *ReadableStream) void {
|
||||
self.cancel_resolver.deinit();
|
||||
self.closed_resolver.deinit();
|
||||
// reader resolver is scoped to the page lifetime and is cleaned up by it.
|
||||
}
|
||||
|
||||
pub fn get_locked(self: *const ReadableStream) bool {
|
||||
return self.locked;
|
||||
}
|
||||
|
||||
pub fn _cancel(self: *ReadableStream, reason: ?[]const u8, page: *Page) !Env.Promise {
|
||||
if (self.locked) {
|
||||
return error.TypeError;
|
||||
}
|
||||
|
||||
self.state = .{ .cancelled = if (reason) |r| try page.arena.dupe(u8, r) else null };
|
||||
|
||||
// Call cancel callback.
|
||||
if (self.cancel_fn) |cancel| {
|
||||
if (reason) |r| {
|
||||
try cancel.call(void, .{r});
|
||||
} else {
|
||||
try cancel.call(void, .{});
|
||||
}
|
||||
}
|
||||
|
||||
try self.cancel_resolver.resolve({});
|
||||
return self.cancel_resolver.promise();
|
||||
}
|
||||
|
||||
pub fn pullIf(self: *ReadableStream) !void {
|
||||
if (self.pull_fn) |pull_fn| {
|
||||
// Must be under the high water mark AND readable.
|
||||
if ((self.queue.items.len < self.strategy.high_water_mark) and self.state == .readable) {
|
||||
const controller = ReadableStreamDefaultController{ .stream = self };
|
||||
try pull_fn.call(void, .{controller});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const GetReaderOptions = struct {
|
||||
// Mode must equal 'byob' or be undefined. RangeError otherwise.
|
||||
mode: ?[]const u8 = null,
|
||||
};
|
||||
|
||||
pub fn _getReader(self: *ReadableStream, _options: ?GetReaderOptions) !ReadableStreamDefaultReader {
|
||||
if (self.locked) {
|
||||
return error.TypeError;
|
||||
}
|
||||
|
||||
// TODO: Determine if we need the ReadableStreamBYOBReader
|
||||
const options = _options orelse GetReaderOptions{};
|
||||
_ = options;
|
||||
|
||||
return ReadableStreamDefaultReader.constructor(self);
|
||||
}
|
||||
|
||||
// TODO: pipeThrough (requires TransformStream)
|
||||
|
||||
// TODO: pipeTo (requires WritableStream)
|
||||
|
||||
// TODO: tee
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "streams: ReadableStream" {
|
||||
try testing.htmlRunner("streams/readable_stream.html");
|
||||
}
|
||||
@@ -1,79 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const log = @import("../../log.zig");

const Page = @import("../page.zig").Page;
const Env = @import("../env.zig").Env;

const ReadableStream = @import("./ReadableStream.zig");
const ReadableStreamReadResult = @import("./ReadableStream.zig").ReadableStreamReadResult;

const ReadableStreamDefaultController = @This();

stream: *ReadableStream,

pub fn get_desiredSize(self: *const ReadableStreamDefaultController) i32 {
    // TODO: This may need tuning at some point if it becomes a performance issue.
    return @intCast(self.stream.queue.capacity - self.stream.queue.items.len);
}

pub fn _close(self: *ReadableStreamDefaultController, _reason: ?[]const u8, page: *Page) !void {
    const reason = if (_reason) |reason| try page.arena.dupe(u8, reason) else null;
    self.stream.state = .{ .closed = reason };

    // Resolve the Reader Promise
    if (self.stream.reader_resolver) |*rr| {
        try rr.resolve(ReadableStreamReadResult{ .value = .empty, .done = true });
        self.stream.reader_resolver = null;
    }

    // Resolve the Closed promise.
    try self.stream.closed_resolver.resolve({});

    // close only marks the stream as closed: nothing more can be enqueued, but
    // anything already in the queue can still be read. To discard queued data, use cancel.
}

pub fn _enqueue(self: *ReadableStreamDefaultController, chunk: []const u8, page: *Page) !void {
    const stream = self.stream;

    if (stream.state != .readable) {
        return error.TypeError;
    }

    const duped_chunk = try page.arena.dupe(u8, chunk);

    if (self.stream.reader_resolver) |*rr| {
        try rr.resolve(ReadableStreamReadResult{ .value = .{ .data = duped_chunk }, .done = false });
        self.stream.reader_resolver = null;
    }

    try self.stream.queue.append(page.arena, duped_chunk);
    try self.stream.pullIf();
}

pub fn _error(self: *ReadableStreamDefaultController, err: Env.JsObject) !void {
    self.stream.state = .{ .errored = err };

    if (self.stream.reader_resolver) |*rr| {
        try rr.reject(err);
        self.stream.reader_resolver = null;
    }
}
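
// Illustrative flow sketch (not part of the original file): when a page script calls
// getReader() and then read() before any data is available, _read() on the reader
// parks a reader_resolver on the stream. A later controller.enqueue(chunk) resolves
// that pending promise with the chunk, while chunks enqueued when no read is pending
// simply accumulate in stream.queue until the next read() call drains them.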
@@ -1,96 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const log = @import("../../log.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
const ReadableStream = @import("./ReadableStream.zig");
const ReadableStreamReadResult = @import("./ReadableStream.zig").ReadableStreamReadResult;

const ReadableStreamDefaultReader = @This();

stream: *ReadableStream,

pub fn constructor(stream: *ReadableStream) ReadableStreamDefaultReader {
    return .{ .stream = stream };
}

pub fn get_closed(self: *const ReadableStreamDefaultReader) Env.Promise {
    return self.stream.closed_resolver.promise();
}

pub fn _cancel(self: *ReadableStreamDefaultReader, reason: ?[]const u8, page: *Page) !Env.Promise {
    return try self.stream._cancel(reason, page);
}

pub fn _read(self: *const ReadableStreamDefaultReader, page: *Page) !Env.Promise {
    const stream = self.stream;

    switch (stream.state) {
        .readable => {
            if (stream.queue.items.len > 0) {
                const data = self.stream.queue.orderedRemove(0);
                const resolver = page.main_context.createPromiseResolver();

                try resolver.resolve(ReadableStreamReadResult{ .value = .{ .data = data }, .done = false });
                try self.stream.pullIf();
                return resolver.promise();
            } else {
                if (self.stream.reader_resolver) |rr| {
                    return rr.promise();
                } else {
                    const persistent_resolver = try page.main_context.createPersistentPromiseResolver(.page);
                    self.stream.reader_resolver = persistent_resolver;
                    return persistent_resolver.promise();
                }
            }
        },
        .closed => |_| {
            const resolver = page.main_context.createPromiseResolver();

            if (stream.queue.items.len > 0) {
                const data = self.stream.queue.orderedRemove(0);
                try resolver.resolve(ReadableStreamReadResult{ .value = .{ .data = data }, .done = false });
            } else {
                try resolver.resolve(ReadableStreamReadResult{ .value = .empty, .done = true });
            }

            return resolver.promise();
        },
        .cancelled => |_| {
            const resolver = page.main_context.createPromiseResolver();
            try resolver.resolve(ReadableStreamReadResult{ .value = .empty, .done = true });
            return resolver.promise();
        },
        .errored => |err| {
            const resolver = page.main_context.createPromiseResolver();
            try resolver.reject(err);
            return resolver.promise();
        },
    }
}

pub fn _releaseLock(self: *const ReadableStreamDefaultReader) !void {
    self.stream.locked = false;

    if (self.stream.reader_resolver) |rr| {
        try rr.reject("TypeError");
    }
}
@@ -1,24 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

pub const Interfaces = .{
    @import("ReadableStream.zig"),
    @import("ReadableStream.zig").ReadableStreamReadResult,
    @import("ReadableStreamDefaultReader.zig"),
    @import("ReadableStreamDefaultController.zig"),
};
@@ -1,509 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");
const Allocator = std.mem.Allocator;

const parser = @import("../netsurf.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
const FormData = @import("../xhr/form_data.zig").FormData;

const kv = @import("../key_value.zig");
const iterator = @import("../iterator/iterator.zig");

pub const Interfaces = .{
    URL,
    URLSearchParams,
    KeyIterable,
    ValueIterable,
    EntryIterable,
};

// https://url.spec.whatwg.org/#url
//
// TODO we could avoid many of these getter string allocations in two different
// ways:
//
// 1. We can eventually get the slice of scheme *with* the following char in
// the underlying string. But I don't know if it's possible and how to do that.
// I mean, if the rawuri contains `https://foo.bar`, uri.scheme is a slice
// containing only `https`. I want `https:` so, in theory, I don't need to
// allocate data, I should be able to retrieve the scheme + the following `:`
// from rawuri.
//
// 2. The other way would be to copy the `std.Uri` code to have a dedicated
// parser including the characters we want for the web API.
pub const URL = struct {
    uri: std.Uri,
    search_params: URLSearchParams,

    pub const empty = URL{
        .uri = .{ .scheme = "" },
        .search_params = .{},
    };

    const URLArg = union(enum) {
        url: *URL,
        element: *parser.ElementHTML,
        string: []const u8,

        fn toString(self: URLArg, arena: Allocator) !?[]const u8 {
            switch (self) {
                .string => |s| return s,
                .url => |url| return try url.toString(arena),
                .element => |e| return try parser.elementGetAttribute(@ptrCast(e), "href"),
            }
        }
    };

    pub fn constructor(url: URLArg, base: ?URLArg, page: *Page) !URL {
        const arena = page.arena;
        const url_str = try url.toString(arena) orelse return error.InvalidArgument;

        var raw: ?[]const u8 = null;
        if (base) |b| {
            if (try b.toString(arena)) |bb| {
                raw = try @import("../../url.zig").URL.stitch(arena, url_str, bb, .{});
            }
        }

        if (raw == null) {
            // if it was a URL, then it's already owned by the arena
            raw = if (url == .url) url_str else try arena.dupe(u8, url_str);
        }

        const uri = std.Uri.parse(raw.?) catch blk: {
            if (!std.mem.endsWith(u8, raw.?, "://")) {
                return error.TypeError;
            }
            // scheme only is valid!
            break :blk std.Uri{
                .scheme = raw.?[0 .. raw.?.len - 3],
                .host = .{ .percent_encoded = "" },
            };
        };

        return init(arena, uri);
    }

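    // Illustrative sketch (not part of the original source) of what the constructor
    // above accepts: new URL("page", "https://lightpanda.io/docs/") stitches the
    // relative part onto the base via URL.stitch, while a bare "https://" fails
    // std.Uri.parse, falls into the catch block, and yields a scheme-only URL with
    // an empty host.
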
    pub fn init(arena: Allocator, uri: std.Uri) !URL {
        return .{
            .uri = uri,
            .search_params = try URLSearchParams.init(
                arena,
                uriComponentNullStr(uri.query),
            ),
        };
    }

    pub fn get_origin(self: *URL, page: *Page) ![]const u8 {
        var aw = std.Io.Writer.Allocating.init(page.arena);
        try self.uri.writeToStream(&aw.writer, .{
            .scheme = true,
            .authentication = false,
            .authority = true,
            .path = false,
            .query = false,
            .fragment = false,
        });
        return aw.written();
    }

    // get_href returns the URL by writing all its components.
    pub fn get_href(self: *URL, page: *Page) ![]const u8 {
        return self.toString(page.arena);
    }

    pub fn _toString(self: *URL, page: *Page) ![]const u8 {
        return self.toString(page.arena);
    }

    // format the url with all its components.
    pub fn toString(self: *const URL, arena: Allocator) ![]const u8 {
        var aw = std.Io.Writer.Allocating.init(arena);
        try self.uri.writeToStream(&aw.writer, .{
            .scheme = true,
            .authentication = true,
            .authority = true,
            .path = uriComponentNullStr(self.uri.path).len > 0,
        });

        if (self.search_params.get_size() > 0) {
            try aw.writer.writeByte('?');
            try self.search_params.write(&aw.writer);
        }

        {
            const fragment = uriComponentNullStr(self.uri.fragment);
            if (fragment.len > 0) {
                try aw.writer.writeByte('#');
                try aw.writer.writeAll(fragment);
            }
        }

        return aw.written();
    }

    pub fn get_protocol(self: *URL, page: *Page) ![]const u8 {
        return try std.mem.concat(page.arena, u8, &[_][]const u8{ self.uri.scheme, ":" });
    }

    pub fn get_username(self: *URL) []const u8 {
        return uriComponentNullStr(self.uri.user);
    }

    pub fn get_password(self: *URL) []const u8 {
        return uriComponentNullStr(self.uri.password);
    }

    pub fn get_host(self: *URL, page: *Page) ![]const u8 {
        var aw = std.Io.Writer.Allocating.init(page.arena);
        try self.uri.writeToStream(&aw.writer, .{
            .scheme = false,
            .authentication = false,
            .authority = true,
            .path = false,
            .query = false,
            .fragment = false,
        });
        return aw.written();
    }

    pub fn get_hostname(self: *URL) []const u8 {
        return uriComponentNullStr(self.uri.host);
    }

    pub fn get_port(self: *URL, page: *Page) ![]const u8 {
        const arena = page.arena;
        if (self.uri.port == null) return try arena.dupe(u8, "");

        var aw = std.Io.Writer.Allocating.init(arena);
        try aw.writer.printInt(self.uri.port.?, 10, .lower, .{});
        return aw.written();
    }

    pub fn get_pathname(self: *URL) []const u8 {
        if (uriComponentStr(self.uri.path).len == 0) return "/";
        return uriComponentStr(self.uri.path);
    }

    pub fn get_search(self: *URL, page: *Page) ![]const u8 {
        const arena = page.arena;

        if (self.search_params.get_size() == 0) {
            return "";
        }

        var buf: std.ArrayListUnmanaged(u8) = .{};
        try buf.append(arena, '?');
        try self.search_params.encode(buf.writer(arena));
        return buf.items;
    }

    pub fn set_search(self: *URL, qs_: ?[]const u8, page: *Page) !void {
        self.search_params = .{};
        if (qs_) |qs| {
            self.search_params = try URLSearchParams.init(page.arena, qs);
        }
    }

    pub fn get_hash(self: *URL, page: *Page) ![]const u8 {
        const arena = page.arena;
        if (self.uri.fragment == null) return try arena.dupe(u8, "");

        return try std.mem.concat(arena, u8, &[_][]const u8{ "#", uriComponentNullStr(self.uri.fragment) });
    }

    pub fn get_searchParams(self: *URL) *URLSearchParams {
        return &self.search_params;
    }

    pub fn _toJSON(self: *URL, page: *Page) ![]const u8 {
        return self.get_href(page);
    }
};

// uriComponentNullStr converts an optional std.Uri.Component to string value.
// The string value can be undecoded.
fn uriComponentNullStr(c: ?std.Uri.Component) []const u8 {
    if (c == null) return "";

    return uriComponentStr(c.?);
}

fn uriComponentStr(c: std.Uri.Component) []const u8 {
    return switch (c) {
        .raw => |v| v,
        .percent_encoded => |v| v,
    };
}

// https://url.spec.whatwg.org/#interface-urlsearchparams
pub const URLSearchParams = struct {
    entries: kv.List = .{},

    const URLSearchParamsOpts = union(enum) {
        qs: []const u8,
        form_data: *const FormData,
        js_obj: Env.JsObject,
    };
    pub fn constructor(opts_: ?URLSearchParamsOpts, page: *Page) !URLSearchParams {
        const opts = opts_ orelse return .{ .entries = .{} };
        return switch (opts) {
            .qs => |qs| init(page.arena, qs),
            .form_data => |fd| .{ .entries = try fd.entries.clone(page.arena) },
            .js_obj => |js_obj| {
                const arena = page.arena;
                var it = js_obj.nameIterator();

                var entries: kv.List = .{};
                try entries.ensureTotalCapacity(arena, it.count);

                while (try it.next()) |js_name| {
                    const name = try js_name.toString(arena);
                    const js_val = try js_obj.get(name);
                    entries.appendOwnedAssumeCapacity(
                        name,
                        try js_val.toString(arena),
                    );
                }

                return .{ .entries = entries };
            },
        };
    }

    pub fn init(arena: Allocator, qs_: ?[]const u8) !URLSearchParams {
        return .{
            .entries = if (qs_) |qs| try parseQuery(arena, qs) else .{},
        };
    }

    pub fn get_size(self: *const URLSearchParams) u32 {
        return @intCast(self.entries.count());
    }

    pub fn _append(self: *URLSearchParams, name: []const u8, value: []const u8, page: *Page) !void {
        return self.entries.append(page.arena, name, value);
    }

    pub fn _set(self: *URLSearchParams, name: []const u8, value: []const u8, page: *Page) !void {
        return self.entries.set(page.arena, name, value);
    }

    pub fn _delete(self: *URLSearchParams, name: []const u8, value_: ?[]const u8) void {
        if (value_) |value| {
            return self.entries.deleteKeyValue(name, value);
        }
        return self.entries.delete(name);
    }

    pub fn _get(self: *const URLSearchParams, name: []const u8) ?[]const u8 {
        return self.entries.get(name);
    }

    pub fn _getAll(self: *const URLSearchParams, name: []const u8, page: *Page) ![]const []const u8 {
        return self.entries.getAll(page.call_arena, name);
    }

    pub fn _has(self: *const URLSearchParams, name: []const u8) bool {
        return self.entries.has(name);
    }

    pub fn _keys(self: *const URLSearchParams) KeyIterable {
        return .{ .inner = self.entries.keyIterator() };
    }

    pub fn _values(self: *const URLSearchParams) ValueIterable {
        return .{ .inner = self.entries.valueIterator() };
    }

    pub fn _entries(self: *const URLSearchParams) EntryIterable {
        return .{ .inner = self.entries.entryIterator() };
    }

    pub fn _symbol_iterator(self: *const URLSearchParams) EntryIterable {
        return self._entries();
    }

    pub fn _toString(self: *const URLSearchParams, page: *Page) ![]const u8 {
        var arr: std.ArrayListUnmanaged(u8) = .empty;
        try self.write(arr.writer(page.call_arena));
        return arr.items;
    }

    fn write(self: *const URLSearchParams, writer: anytype) !void {
        return kv.urlEncode(self.entries, .query, writer);
    }

    // TODO
    pub fn _sort(_: *URLSearchParams) void {}

    fn encode(self: *const URLSearchParams, writer: anytype) !void {
        return kv.urlEncode(self.entries, .query, writer);
    }
};

// Parse the given query.
fn parseQuery(arena: Allocator, s: []const u8) !kv.List {
    var list = kv.List{};

    const ln = s.len;
    if (ln == 0) {
        return list;
    }

    var query = if (s[0] == '?') s[1..] else s;
    while (query.len > 0) {
        const i = std.mem.indexOfScalarPos(u8, query, 0, '=') orelse query.len;
        const name = query[0..i];

        var value: ?[]const u8 = null;
        if (i < query.len) {
            query = query[i + 1 ..];
            const j = std.mem.indexOfScalarPos(u8, query, 0, '&') orelse query.len;
            value = query[0..j];

            query = if (j < query.len) query[j + 1 ..] else "";
        } else {
            query = "";
        }

        try list.appendOwned(
            arena,
            try unescape(arena, name),
            if (value) |v| try unescape(arena, v) else "",
        );
    }

    return list;
}

fn unescape(arena: Allocator, input: []const u8) ![]const u8 {
    const HEX_CHAR = comptime blk: {
        var all = std.mem.zeroes([256]bool);
        for ('a'..('f' + 1)) |b| all[b] = true;
        for ('A'..('F' + 1)) |b| all[b] = true;
        for ('0'..('9' + 1)) |b| all[b] = true;
        break :blk all;
    };

    const HEX_DECODE = comptime blk: {
        var all = std.mem.zeroes([256]u8);
        for ('a'..('z' + 1)) |b| all[b] = b - 'a' + 10;
        for ('A'..('Z' + 1)) |b| all[b] = b - 'A' + 10;
        for ('0'..('9' + 1)) |b| all[b] = b - '0';
        break :blk all;
    };

    var has_plus = false;
    var unescaped_len = input.len;

    {
        // Figure out if we have any spaces and what the final unescaped length
        // will be (which will let us know if we have anything to unescape in
        // the first place)
        var i: usize = 0;
        while (i < input.len) {
            const c = input[i];
            if (c == '%') {
                if (i + 2 >= input.len or !HEX_CHAR[input[i + 1]] or !HEX_CHAR[input[i + 2]]) {
                    return error.EscapeError;
                }
                i += 3;
                unescaped_len -= 2;
            } else if (c == '+') {
                has_plus = true;
                i += 1;
            } else {
                i += 1;
            }
        }
    }

    // no encoding, and no plus. nothing to unescape
    if (unescaped_len == input.len and has_plus == false) {
        // we always dupe, because we know our caller wants it always duped.
        return arena.dupe(u8, input);
    }

    var unescaped = try arena.alloc(u8, unescaped_len);
    errdefer arena.free(unescaped);

    var input_pos: usize = 0;
    for (0..unescaped_len) |unescaped_pos| {
        switch (input[input_pos]) {
            '+' => {
                unescaped[unescaped_pos] = ' ';
                input_pos += 1;
            },
            '%' => {
                const encoded = input[input_pos + 1 .. input_pos + 3];
                const encoded_as_uint = @as(u16, @bitCast(encoded[0..2].*));
                unescaped[unescaped_pos] = switch (encoded_as_uint) {
                    asUint(u16, "20") => ' ',
                    asUint(u16, "21") => '!',
                    asUint(u16, "22") => '"',
                    asUint(u16, "23") => '#',
                    asUint(u16, "24") => '$',
                    asUint(u16, "25") => '%',
                    asUint(u16, "26") => '&',
                    asUint(u16, "27") => '\'',
                    asUint(u16, "28") => '(',
                    asUint(u16, "29") => ')',
                    asUint(u16, "2A") => '*',
                    asUint(u16, "2B") => '+',
                    asUint(u16, "2C") => ',',
                    asUint(u16, "2F") => '/',
                    asUint(u16, "3A") => ':',
                    asUint(u16, "3B") => ';',
                    asUint(u16, "3D") => '=',
                    asUint(u16, "3F") => '?',
                    asUint(u16, "40") => '@',
                    asUint(u16, "5B") => '[',
                    asUint(u16, "5D") => ']',
                    else => HEX_DECODE[encoded[0]] << 4 | HEX_DECODE[encoded[1]],
                };
                input_pos += 3;
            },
            else => |c| {
                unescaped[unescaped_pos] = c;
                input_pos += 1;
            },
        }
    }
    return unescaped;
}

fn asUint(comptime T: type, comptime string: []const u8) T {
    return @bitCast(string[0..string.len].*);
}

const KeyIterable = iterator.Iterable(kv.KeyIterator, "URLSearchParamsKeyIterator");
const ValueIterable = iterator.Iterable(kv.ValueIterator, "URLSearchParamsValueIterator");
const EntryIterable = iterator.Iterable(kv.EntryIterator, "URLSearchParamsEntryIterator");

const testing = @import("../../testing.zig");
test "Browser: URL" {
    try testing.htmlRunner("url/url.html");
}

test "Browser: URLSearchParams" {
    try testing.htmlRunner("url/url_search_params.html");
}
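
// Hypothetical sketch, not part of the original file: a plain std.testing check of
// the parseQuery/unescape behaviour above. It assumes kv.List exposes count() and
// get() with the semantics implied by the URLSearchParams methods; the query string
// and expected values are illustrative only.
test "url: parseQuery sketch" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();

    const list = try parseQuery(arena.allocator(), "?a=1&msg=hello+world%21&flag");
    try std.testing.expectEqual(@as(usize, 3), list.count());
    try std.testing.expectEqualStrings("1", list.get("a").?);
    try std.testing.expectEqualStrings("hello world!", list.get("msg").?);
    try std.testing.expectEqualStrings("", list.get("flag").?);

    // asUint packs two ASCII bytes into a single u16 so the '%XX' switch above can
    // compare both hex digits in one branch.
    try std.testing.expectEqual(asUint(u16, "21"), @as(u16, @bitCast([2]u8{ '2', '1' })));
}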
Some files were not shown because too many files have changed in this diff.