mirror of
https://github.com/lightpanda-io/browser.git
synced 2025-10-28 22:53:28 +00:00
Initial work on integrating libcurl and making all http nonblocking
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -5,3 +5,4 @@ zig-out
|
||||
/vendor/libiconv/
|
||||
lightpanda.id
|
||||
/v8/
|
||||
/vendor/nghttp2/lib/includes/nghttp2/nghttp2ver.h
|
||||
|
||||
12
.gitmodules
vendored
12
.gitmodules
vendored
@@ -19,3 +19,15 @@
|
||||
[submodule "vendor/mimalloc"]
|
||||
path = vendor/mimalloc
|
||||
url = https://github.com/microsoft/mimalloc.git/
|
||||
[submodule "vendor/nghttp2"]
|
||||
path = vendor/nghttp2
|
||||
url = https://github.com/nghttp2/nghttp2.git
|
||||
[submodule "vendor/mbedtls"]
|
||||
path = vendor/mbedtls
|
||||
url = https://github.com/Mbed-TLS/mbedtls.git
|
||||
[submodule "vendor/zlib"]
|
||||
path = vendor/zlib
|
||||
url = https://github.com/madler/zlib.git
|
||||
[submodule "vendor/curl"]
|
||||
path = vendor/curl
|
||||
url = https://github.com/curl/curl.git
|
||||
|
||||
615
build.zig
615
build.zig
@@ -19,11 +19,13 @@
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const Build = std.Build;
|
||||
|
||||
/// Do not rename this constant. It is scanned by some scripts to determine
|
||||
/// which zig version to install.
|
||||
const recommended_zig_version = "0.14.1";
|
||||
|
||||
pub fn build(b: *std.Build) !void {
|
||||
pub fn build(b: *Build) !void {
|
||||
switch (comptime builtin.zig_version.order(std.SemanticVersion.parse(recommended_zig_version) catch unreachable)) {
|
||||
.eq => {},
|
||||
.lt => {
|
||||
@@ -138,29 +140,28 @@ pub fn build(b: *std.Build) !void {
|
||||
}
|
||||
}
|
||||
|
||||
fn common(b: *std.Build, opts: *std.Build.Step.Options, step: *std.Build.Step.Compile) !void {
|
||||
fn common(b: *Build, opts: *Build.Step.Options, step: *Build.Step.Compile) !void {
|
||||
const mod = step.root_module;
|
||||
const target = mod.resolved_target.?;
|
||||
const optimize = mod.optimize.?;
|
||||
const dep_opts = .{ .target = target, .optimize = optimize };
|
||||
|
||||
try moduleNetSurf(b, step, target);
|
||||
mod.addImport("tls", b.dependency("tls", dep_opts).module("tls"));
|
||||
mod.addImport("build_config", opts.createModule());
|
||||
mod.addImport("tigerbeetle-io", b.dependency("tigerbeetle_io", .{}).module("tigerbeetle_io"));
|
||||
|
||||
{
|
||||
// v8
|
||||
mod.link_libcpp = true;
|
||||
|
||||
const v8_opts = b.addOptions();
|
||||
v8_opts.addOption(bool, "inspector_subtype", false);
|
||||
|
||||
const v8_mod = b.dependency("v8", dep_opts).module("v8");
|
||||
v8_mod.addOptions("default_exports", v8_opts);
|
||||
mod.addImport("v8", v8_mod);
|
||||
}
|
||||
|
||||
mod.link_libcpp = true;
|
||||
|
||||
{
|
||||
const release_dir = if (mod.optimize.? == .Debug) "debug" else "release";
|
||||
const os = switch (target.result.os.tag) {
|
||||
.linux => "linux",
|
||||
@@ -181,21 +182,210 @@ fn common(b: *std.Build, opts: *std.Build.Step.Options, step: *std.Build.Step.Co
|
||||
);
|
||||
};
|
||||
mod.addObjectFile(mod.owner.path(lib_path));
|
||||
|
||||
switch (target.result.os.tag) {
|
||||
.macos => {
|
||||
// v8 has a dependency, abseil-cpp, which, on Mac, uses CoreFoundation
|
||||
mod.addSystemFrameworkPath(.{ .cwd_relative = "/System/Library/Frameworks" });
|
||||
mod.linkFramework("CoreFoundation", .{});
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
switch (target.result.os.tag) {
|
||||
.macos => {
|
||||
// v8 has a dependency, abseil-cpp, which, on Mac, uses CoreFoundation
|
||||
mod.addSystemFrameworkPath(.{ .cwd_relative = "/System/Library/Frameworks" });
|
||||
mod.linkFramework("CoreFoundation", .{});
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
{
|
||||
//curl
|
||||
{
|
||||
const is_linux = target.result.os.tag == .linux;
|
||||
if (is_linux) {
|
||||
mod.addCMacro("HAVE_LINUX_TCP_H", "1");
|
||||
mod.addCMacro("HAVE_MSG_NOSIGNAL", "1");
|
||||
mod.addCMacro("HAVE_GETHOSTBYNAME_R", "1");
|
||||
}
|
||||
mod.addCMacro("_FILE_OFFSET_BITS", "64");
|
||||
mod.addCMacro("BUILDING_LIBCURL", "1");
|
||||
mod.addCMacro("CURL_DISABLE_AWS", "1");
|
||||
mod.addCMacro("CURL_DISABLE_DICT", "1");
|
||||
mod.addCMacro("CURL_DISABLE_DOH", "1");
|
||||
mod.addCMacro("CURL_DISABLE_FILE", "1");
|
||||
mod.addCMacro("CURL_DISABLE_FTP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_GOPHER", "1");
|
||||
mod.addCMacro("CURL_DISABLE_KERBEROS", "1");
|
||||
mod.addCMacro("CURL_DISABLE_IMAP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_IPFS", "1");
|
||||
mod.addCMacro("CURL_DISABLE_LDAP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_LDAPS", "1");
|
||||
mod.addCMacro("CURL_DISABLE_MQTT", "1");
|
||||
mod.addCMacro("CURL_DISABLE_NTLM", "1");
|
||||
mod.addCMacro("CURL_DISABLE_PROGRESS_METER", "1");
|
||||
mod.addCMacro("CURL_DISABLE_POP3", "1");
|
||||
mod.addCMacro("CURL_DISABLE_RTSP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_SMB", "1");
|
||||
mod.addCMacro("CURL_DISABLE_SMTP", "1");
|
||||
mod.addCMacro("CURL_DISABLE_TELNET", "1");
|
||||
mod.addCMacro("CURL_DISABLE_TFTP", "1");
|
||||
mod.addCMacro("CURL_EXTERN_SYMBOL", "__attribute__ ((__visibility__ (\"default\"))");
|
||||
mod.addCMacro("CURL_OS", if (is_linux) "\"Linux\"" else "\"mac\"");
|
||||
mod.addCMacro("CURL_STATICLIB", "1");
|
||||
mod.addCMacro("ENABLE_IPV6", "1");
|
||||
mod.addCMacro("HAVE_ALARM", "1");
|
||||
mod.addCMacro("HAVE_ALLOCA_H", "1");
|
||||
mod.addCMacro("HAVE_ARPA_INET_H", "1");
|
||||
mod.addCMacro("HAVE_ARPA_TFTP_H", "1");
|
||||
mod.addCMacro("HAVE_ASSERT_H", "1");
|
||||
mod.addCMacro("HAVE_BASENAME", "1");
|
||||
mod.addCMacro("HAVE_BOOL_T", "1");
|
||||
mod.addCMacro("HAVE_BUILTIN_AVAILABLE", "1");
|
||||
mod.addCMacro("HAVE_CLOCK_GETTIME_MONOTONIC", "1");
|
||||
mod.addCMacro("HAVE_DLFCN_H", "1");
|
||||
mod.addCMacro("HAVE_ERRNO_H", "1");
|
||||
mod.addCMacro("HAVE_FCNTL", "1");
|
||||
mod.addCMacro("HAVE_FCNTL_H", "1");
|
||||
mod.addCMacro("HAVE_FCNTL_O_NONBLOCK", "1");
|
||||
mod.addCMacro("HAVE_FREEADDRINFO", "1");
|
||||
mod.addCMacro("HAVE_FSETXATTR", "1");
|
||||
mod.addCMacro("HAVE_FSETXATTR_5", "1");
|
||||
mod.addCMacro("HAVE_FTRUNCATE", "1");
|
||||
mod.addCMacro("HAVE_GETADDRINFO", "1");
|
||||
mod.addCMacro("HAVE_GETEUID", "1");
|
||||
mod.addCMacro("HAVE_GETHOSTBYNAME", "1");
|
||||
mod.addCMacro("HAVE_GETHOSTBYNAME_R_6", "1");
|
||||
mod.addCMacro("HAVE_GETHOSTNAME", "1");
|
||||
mod.addCMacro("HAVE_GETPEERNAME", "1");
|
||||
mod.addCMacro("HAVE_GETPPID", "1");
|
||||
mod.addCMacro("HAVE_GETPPID", "1");
|
||||
mod.addCMacro("HAVE_GETPROTOBYNAME", "1");
|
||||
mod.addCMacro("HAVE_GETPWUID", "1");
|
||||
mod.addCMacro("HAVE_GETPWUID_R", "1");
|
||||
mod.addCMacro("HAVE_GETRLIMIT", "1");
|
||||
mod.addCMacro("HAVE_GETSOCKNAME", "1");
|
||||
mod.addCMacro("HAVE_GETTIMEOFDAY", "1");
|
||||
mod.addCMacro("HAVE_GMTIME_R", "1");
|
||||
mod.addCMacro("HAVE_IDN2_H", "1");
|
||||
mod.addCMacro("HAVE_IF_NAMETOINDEX", "1");
|
||||
mod.addCMacro("HAVE_IFADDRS_H", "1");
|
||||
mod.addCMacro("HAVE_INET_ADDR", "1");
|
||||
mod.addCMacro("HAVE_INET_PTON", "1");
|
||||
mod.addCMacro("HAVE_INTTYPES_H", "1");
|
||||
mod.addCMacro("HAVE_IOCTL", "1");
|
||||
mod.addCMacro("HAVE_IOCTL_FIONBIO", "1");
|
||||
mod.addCMacro("HAVE_IOCTL_SIOCGIFADDR", "1");
|
||||
mod.addCMacro("HAVE_LDAP_URL_PARSE", "1");
|
||||
mod.addCMacro("HAVE_LIBGEN_H", "1");
|
||||
mod.addCMacro("HAVE_LIBZ", "1");
|
||||
mod.addCMacro("HAVE_LL", "1");
|
||||
mod.addCMacro("HAVE_LOCALE_H", "1");
|
||||
mod.addCMacro("HAVE_LOCALTIME_R", "1");
|
||||
mod.addCMacro("HAVE_LONGLONG", "1");
|
||||
mod.addCMacro("HAVE_MALLOC_H", "1");
|
||||
mod.addCMacro("HAVE_MEMORY_H", "1");
|
||||
mod.addCMacro("HAVE_NET_IF_H", "1");
|
||||
mod.addCMacro("HAVE_NETDB_H", "1");
|
||||
mod.addCMacro("HAVE_NETINET_IN_H", "1");
|
||||
mod.addCMacro("HAVE_NETINET_TCP_H", "1");
|
||||
mod.addCMacro("HAVE_PIPE", "1");
|
||||
mod.addCMacro("HAVE_POLL", "1");
|
||||
mod.addCMacro("HAVE_POLL_FINE", "1");
|
||||
mod.addCMacro("HAVE_POLL_H", "1");
|
||||
mod.addCMacro("HAVE_POSIX_STRERROR_R", "1");
|
||||
mod.addCMacro("HAVE_PTHREAD_H", "1");
|
||||
mod.addCMacro("HAVE_PWD_H", "1");
|
||||
mod.addCMacro("HAVE_RECV", "1");
|
||||
mod.addCMacro("HAVE_SA_FAMILY_T", "1");
|
||||
mod.addCMacro("HAVE_SELECT", "1");
|
||||
mod.addCMacro("HAVE_SEND", "1");
|
||||
mod.addCMacro("HAVE_SETJMP_H", "1");
|
||||
mod.addCMacro("HAVE_SETLOCALE", "1");
|
||||
mod.addCMacro("HAVE_SETRLIMIT", "1");
|
||||
mod.addCMacro("HAVE_SETSOCKOPT", "1");
|
||||
mod.addCMacro("HAVE_SIGACTION", "1");
|
||||
mod.addCMacro("HAVE_SIGINTERRUPT", "1");
|
||||
mod.addCMacro("HAVE_SIGNAL", "1");
|
||||
mod.addCMacro("HAVE_SIGNAL_H", "1");
|
||||
mod.addCMacro("HAVE_SIGSETJMP", "1");
|
||||
mod.addCMacro("HAVE_SOCKADDR_IN6_SIN6_SCOPE_ID", "1");
|
||||
mod.addCMacro("HAVE_SOCKET", "1");
|
||||
mod.addCMacro("HAVE_STDBOOL_H", "1");
|
||||
mod.addCMacro("HAVE_STDINT_H", "1");
|
||||
mod.addCMacro("HAVE_STDIO_H", "1");
|
||||
mod.addCMacro("HAVE_STDLIB_H", "1");
|
||||
mod.addCMacro("HAVE_STRCASECMP", "1");
|
||||
mod.addCMacro("HAVE_STRDUP", "1");
|
||||
mod.addCMacro("HAVE_STRERROR_R", "1");
|
||||
mod.addCMacro("HAVE_STRING_H", "1");
|
||||
mod.addCMacro("HAVE_STRINGS_H", "1");
|
||||
mod.addCMacro("HAVE_STRSTR", "1");
|
||||
mod.addCMacro("HAVE_STRTOK_R", "1");
|
||||
mod.addCMacro("HAVE_STRTOLL", "1");
|
||||
mod.addCMacro("HAVE_STRUCT_SOCKADDR_STORAGE", "1");
|
||||
mod.addCMacro("HAVE_STRUCT_TIMEVAL", "1");
|
||||
mod.addCMacro("HAVE_SYS_IOCTL_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_PARAM_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_POLL_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_RESOURCE_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_SELECT_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_SOCKET_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_STAT_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_TIME_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_TYPES_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_UIO_H", "1");
|
||||
mod.addCMacro("HAVE_SYS_UN_H", "1");
|
||||
mod.addCMacro("HAVE_TERMIO_H", "1");
|
||||
mod.addCMacro("HAVE_TERMIOS_H", "1");
|
||||
mod.addCMacro("HAVE_TIME_H", "1");
|
||||
mod.addCMacro("HAVE_UNAME", "1");
|
||||
mod.addCMacro("HAVE_UNISTD_H", "1");
|
||||
mod.addCMacro("HAVE_UTIME", "1");
|
||||
mod.addCMacro("HAVE_UTIME_H", "1");
|
||||
mod.addCMacro("HAVE_UTIMES", "1");
|
||||
mod.addCMacro("HAVE_VARIADIC_MACROS_C99", "1");
|
||||
mod.addCMacro("HAVE_VARIADIC_MACROS_GCC", "1");
|
||||
mod.addCMacro("HAVE_ZLIB_H", "1");
|
||||
mod.addCMacro("RANDOM_FILE", "\"/dev/urandom\"");
|
||||
mod.addCMacro("RECV_TYPE_ARG1", "int");
|
||||
mod.addCMacro("RECV_TYPE_ARG2", "void *");
|
||||
mod.addCMacro("RECV_TYPE_ARG3", "size_t");
|
||||
mod.addCMacro("RECV_TYPE_ARG4", "int");
|
||||
mod.addCMacro("RECV_TYPE_RETV", "ssize_t");
|
||||
mod.addCMacro("SEND_QUAL_ARG2", "const");
|
||||
mod.addCMacro("SEND_TYPE_ARG1", "int");
|
||||
mod.addCMacro("SEND_TYPE_ARG2", "void *");
|
||||
mod.addCMacro("SEND_TYPE_ARG3", "size_t");
|
||||
mod.addCMacro("SEND_TYPE_ARG4", "int");
|
||||
mod.addCMacro("SEND_TYPE_RETV", "ssize_t");
|
||||
mod.addCMacro("SIZEOF_CURL_OFF_T", "8");
|
||||
mod.addCMacro("SIZEOF_INT", "4");
|
||||
mod.addCMacro("SIZEOF_LONG", "8");
|
||||
mod.addCMacro("SIZEOF_OFF_T", "8");
|
||||
mod.addCMacro("SIZEOF_SHORT", "2");
|
||||
mod.addCMacro("SIZEOF_SIZE_T", "8");
|
||||
mod.addCMacro("SIZEOF_TIME_T", "8");
|
||||
mod.addCMacro("STDC_HEADERS", "1");
|
||||
mod.addCMacro("TIME_WITH_SYS_TIME", "1");
|
||||
mod.addCMacro("USE_NGHTTP2", "1");
|
||||
mod.addCMacro("USE_MBEDTLS", "1");
|
||||
mod.addCMacro("USE_THREADS_POSIX", "1");
|
||||
mod.addCMacro("USE_UNIX_SOCKETS", "1");
|
||||
}
|
||||
|
||||
mod.addImport("build_config", opts.createModule());
|
||||
try buildZlib(b, mod);
|
||||
try buildMbedtls(b, mod);
|
||||
try buildNghttp2(b, mod);
|
||||
try buildCurl(b, mod);
|
||||
|
||||
switch (target.result.os.tag) {
|
||||
.macos => {
|
||||
// needed for proxying on mac
|
||||
mod.addSystemFrameworkPath(.{ .cwd_relative = "/System/Library/Frameworks" });
|
||||
mod.linkFramework("CoreFoundation", .{});
|
||||
mod.linkFramework("SystemConfiguration", .{});
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn moduleNetSurf(b: *std.Build, step: *std.Build.Step.Compile, target: std.Build.ResolvedTarget) !void {
|
||||
fn moduleNetSurf(b: *Build, step: *Build.Step.Compile, target: std.Build.ResolvedTarget) !void {
|
||||
const os = target.result.os.tag;
|
||||
const arch = target.result.cpu.arch;
|
||||
|
||||
@@ -250,3 +440,396 @@ fn moduleNetSurf(b: *std.Build, step: *std.Build.Step.Compile, target: std.Build
|
||||
step.addIncludePath(b.path(ns ++ "/" ++ lib ++ "/src"));
|
||||
}
|
||||
}
|
||||
|
||||
fn buildZlib(b: *Build, m: *Build.Module) !void {
|
||||
const zlib = b.addLibrary(.{
|
||||
.name = "zlib",
|
||||
.root_module = m,
|
||||
});
|
||||
|
||||
const root = "vendor/zlib/";
|
||||
zlib.installHeader(b.path(root ++ "zlib.h"), "zlib.h");
|
||||
zlib.installHeader(b.path(root ++ "zconf.h"), "zconf.h");
|
||||
zlib.addCSourceFiles(.{
|
||||
.flags = &.{
|
||||
"-DHAVE_SYS_TYPES_H",
|
||||
"-DHAVE_STDINT_H",
|
||||
"-DHAVE_STDDEF_H",
|
||||
},
|
||||
.files = &.{
|
||||
root ++ "adler32.c",
|
||||
root ++ "compress.c",
|
||||
root ++ "crc32.c",
|
||||
root ++ "deflate.c",
|
||||
root ++ "gzclose.c",
|
||||
root ++ "gzlib.c",
|
||||
root ++ "gzread.c",
|
||||
root ++ "gzwrite.c",
|
||||
root ++ "inflate.c",
|
||||
root ++ "infback.c",
|
||||
root ++ "inftrees.c",
|
||||
root ++ "inffast.c",
|
||||
root ++ "trees.c",
|
||||
root ++ "uncompr.c",
|
||||
root ++ "zutil.c",
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn buildMbedtls(b: *Build, m: *Build.Module) !void {
|
||||
const mbedtls = b.addLibrary(.{
|
||||
.name = "mbedtls",
|
||||
.root_module = m,
|
||||
});
|
||||
|
||||
const root = "vendor/mbedtls/";
|
||||
mbedtls.addIncludePath(b.path(root ++ "include"));
|
||||
mbedtls.addIncludePath(b.path(root ++ "library"));
|
||||
|
||||
mbedtls.addCSourceFiles(.{
|
||||
.flags = &.{
|
||||
},
|
||||
.files = &.{
|
||||
root ++ "library/aes.c",
|
||||
root ++ "library/aesni.c",
|
||||
root ++ "library/aesce.c",
|
||||
root ++ "library/aria.c",
|
||||
root ++ "library/asn1parse.c",
|
||||
root ++ "library/asn1write.c",
|
||||
root ++ "library/base64.c",
|
||||
root ++ "library/bignum.c",
|
||||
root ++ "library/bignum_core.c",
|
||||
root ++ "library/bignum_mod.c",
|
||||
root ++ "library/bignum_mod_raw.c",
|
||||
root ++ "library/camellia.c",
|
||||
root ++ "library/ccm.c",
|
||||
root ++ "library/chacha20.c",
|
||||
root ++ "library/chachapoly.c",
|
||||
root ++ "library/cipher.c",
|
||||
root ++ "library/cipher_wrap.c",
|
||||
root ++ "library/constant_time.c",
|
||||
root ++ "library/cmac.c",
|
||||
root ++ "library/ctr_drbg.c",
|
||||
root ++ "library/des.c",
|
||||
root ++ "library/dhm.c",
|
||||
root ++ "library/ecdh.c",
|
||||
root ++ "library/ecdsa.c",
|
||||
root ++ "library/ecjpake.c",
|
||||
root ++ "library/ecp.c",
|
||||
root ++ "library/ecp_curves.c",
|
||||
root ++ "library/entropy.c",
|
||||
root ++ "library/entropy_poll.c",
|
||||
root ++ "library/error.c",
|
||||
root ++ "library/gcm.c",
|
||||
root ++ "library/hkdf.c",
|
||||
root ++ "library/hmac_drbg.c",
|
||||
root ++ "library/lmots.c",
|
||||
root ++ "library/lms.c",
|
||||
root ++ "library/md.c",
|
||||
root ++ "library/md5.c",
|
||||
root ++ "library/memory_buffer_alloc.c",
|
||||
root ++ "library/nist_kw.c",
|
||||
root ++ "library/oid.c",
|
||||
root ++ "library/padlock.c",
|
||||
root ++ "library/pem.c",
|
||||
root ++ "library/pk.c",
|
||||
root ++ "library/pk_ecc.c",
|
||||
root ++ "library/pk_wrap.c",
|
||||
root ++ "library/pkcs12.c",
|
||||
root ++ "library/pkcs5.c",
|
||||
root ++ "library/pkparse.c",
|
||||
root ++ "library/pkwrite.c",
|
||||
root ++ "library/platform.c",
|
||||
root ++ "library/platform_util.c",
|
||||
root ++ "library/poly1305.c",
|
||||
root ++ "library/psa_crypto.c",
|
||||
root ++ "library/psa_crypto_aead.c",
|
||||
root ++ "library/psa_crypto_cipher.c",
|
||||
root ++ "library/psa_crypto_client.c",
|
||||
root ++ "library/psa_crypto_ffdh.c",
|
||||
root ++ "library/psa_crypto_driver_wrappers_no_static.c",
|
||||
root ++ "library/psa_crypto_ecp.c",
|
||||
root ++ "library/psa_crypto_hash.c",
|
||||
root ++ "library/psa_crypto_mac.c",
|
||||
root ++ "library/psa_crypto_pake.c",
|
||||
root ++ "library/psa_crypto_rsa.c",
|
||||
root ++ "library/psa_crypto_se.c",
|
||||
root ++ "library/psa_crypto_slot_management.c",
|
||||
root ++ "library/psa_crypto_storage.c",
|
||||
root ++ "library/psa_its_file.c",
|
||||
root ++ "library/psa_util.c",
|
||||
root ++ "library/ripemd160.c",
|
||||
root ++ "library/rsa.c",
|
||||
root ++ "library/rsa_alt_helpers.c",
|
||||
root ++ "library/sha1.c",
|
||||
root ++ "library/sha3.c",
|
||||
root ++ "library/sha256.c",
|
||||
root ++ "library/sha512.c",
|
||||
root ++ "library/threading.c",
|
||||
root ++ "library/timing.c",
|
||||
root ++ "library/version.c",
|
||||
root ++ "library/version_features.c",
|
||||
root ++ "library/pkcs7.c",
|
||||
root ++ "library/x509.c",
|
||||
root ++ "library/x509_create.c",
|
||||
root ++ "library/x509_crl.c",
|
||||
root ++ "library/x509_crt.c",
|
||||
root ++ "library/x509_csr.c",
|
||||
root ++ "library/x509write.c",
|
||||
root ++ "library/x509write_crt.c",
|
||||
root ++ "library/x509write_csr.c",
|
||||
root ++ "library/debug.c",
|
||||
root ++ "library/mps_reader.c",
|
||||
root ++ "library/mps_trace.c",
|
||||
root ++ "library/net_sockets.c",
|
||||
root ++ "library/ssl_cache.c",
|
||||
root ++ "library/ssl_ciphersuites.c",
|
||||
root ++ "library/ssl_client.c",
|
||||
root ++ "library/ssl_cookie.c",
|
||||
root ++ "library/ssl_debug_helpers_generated.c",
|
||||
root ++ "library/ssl_msg.c",
|
||||
root ++ "library/ssl_ticket.c",
|
||||
root ++ "library/ssl_tls.c",
|
||||
root ++ "library/ssl_tls12_client.c",
|
||||
root ++ "library/ssl_tls12_server.c",
|
||||
root ++ "library/ssl_tls13_keys.c",
|
||||
root ++ "library/ssl_tls13_server.c",
|
||||
root ++ "library/ssl_tls13_client.c",
|
||||
root ++ "library/ssl_tls13_generic.c",
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn buildNghttp2(b: *Build, m: *Build.Module) !void {
|
||||
const nghttp2 = b.addLibrary(.{
|
||||
.name = "nghttp2",
|
||||
.root_module = m,
|
||||
});
|
||||
|
||||
const nghttp2_version_file = b.addWriteFile(
|
||||
"vendor/nghttp2/lib/includes/nghttp2/nghttp2ver.h",
|
||||
\\ #ifndef NGHTTP2VER_H
|
||||
\\ #define NGHTTP2VER_H
|
||||
\\ #define NGHTTP2_VERSION "1.66"
|
||||
\\ #define NGHTTP2_VERSION_NUM 0x014300
|
||||
\\ #endif /* NGHTTP2VER_H */
|
||||
);
|
||||
nghttp2.step.dependOn(&nghttp2_version_file.step);
|
||||
|
||||
const root = "vendor/nghttp2/";
|
||||
nghttp2.addIncludePath(b.path(root ++ "lib"));
|
||||
nghttp2.addIncludePath(b.path(root ++ "lib/includes"));
|
||||
nghttp2.addCSourceFiles(.{
|
||||
.flags = &.{
|
||||
"-DNGHTTP2_STATICLIB",
|
||||
"-DHAVE_NETINET_IN",
|
||||
"-DHAVE_TIME_H",
|
||||
},
|
||||
.files = &.{
|
||||
root ++ "lib/sfparse.c",
|
||||
root ++ "lib/nghttp2_alpn.c",
|
||||
root ++ "lib/nghttp2_buf.c",
|
||||
root ++ "lib/nghttp2_callbacks.c",
|
||||
root ++ "lib/nghttp2_debug.c",
|
||||
root ++ "lib/nghttp2_extpri.c",
|
||||
root ++ "lib/nghttp2_frame.c",
|
||||
root ++ "lib/nghttp2_hd.c",
|
||||
root ++ "lib/nghttp2_hd_huffman.c",
|
||||
root ++ "lib/nghttp2_hd_huffman_data.c",
|
||||
root ++ "lib/nghttp2_helper.c",
|
||||
root ++ "lib/nghttp2_http.c",
|
||||
root ++ "lib/nghttp2_map.c",
|
||||
root ++ "lib/nghttp2_mem.c",
|
||||
root ++ "lib/nghttp2_option.c",
|
||||
root ++ "lib/nghttp2_outbound_item.c",
|
||||
root ++ "lib/nghttp2_pq.c",
|
||||
root ++ "lib/nghttp2_priority_spec.c",
|
||||
root ++ "lib/nghttp2_queue.c",
|
||||
root ++ "lib/nghttp2_rcbuf.c",
|
||||
root ++ "lib/nghttp2_session.c",
|
||||
root ++ "lib/nghttp2_stream.c",
|
||||
root ++ "lib/nghttp2_submit.c",
|
||||
root ++ "lib/nghttp2_version.c",
|
||||
root ++ "lib/nghttp2_ratelim.c",
|
||||
root ++ "lib/nghttp2_time.c",
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn buildCurl(b: *Build, m: *Build.Module) !void {
|
||||
const curl = b.addLibrary(.{
|
||||
.name = "curl",
|
||||
.root_module = m,
|
||||
});
|
||||
|
||||
const root = "vendor/curl/";
|
||||
|
||||
curl.addIncludePath(b.path(root ++ "lib"));
|
||||
curl.addIncludePath(b.path(root ++ "include"));
|
||||
curl.addCSourceFiles(.{
|
||||
.flags = &.{
|
||||
},
|
||||
.files = &.{
|
||||
root ++ "lib/altsvc.c",
|
||||
root ++ "lib/amigaos.c",
|
||||
root ++ "lib/asyn-ares.c",
|
||||
root ++ "lib/asyn-base.c",
|
||||
root ++ "lib/asyn-thrdd.c",
|
||||
root ++ "lib/bufq.c",
|
||||
root ++ "lib/bufref.c",
|
||||
root ++ "lib/cf-h1-proxy.c",
|
||||
root ++ "lib/cf-h2-proxy.c",
|
||||
root ++ "lib/cf-haproxy.c",
|
||||
root ++ "lib/cf-https-connect.c",
|
||||
root ++ "lib/cf-socket.c",
|
||||
root ++ "lib/cfilters.c",
|
||||
root ++ "lib/conncache.c",
|
||||
root ++ "lib/connect.c",
|
||||
root ++ "lib/content_encoding.c",
|
||||
root ++ "lib/cookie.c",
|
||||
root ++ "lib/cshutdn.c",
|
||||
root ++ "lib/curl_addrinfo.c",
|
||||
root ++ "lib/curl_des.c",
|
||||
root ++ "lib/curl_endian.c",
|
||||
root ++ "lib/curl_fnmatch.c",
|
||||
root ++ "lib/curl_get_line.c",
|
||||
root ++ "lib/curl_gethostname.c",
|
||||
root ++ "lib/curl_gssapi.c",
|
||||
root ++ "lib/curl_memrchr.c",
|
||||
root ++ "lib/curl_ntlm_core.c",
|
||||
root ++ "lib/curl_range.c",
|
||||
root ++ "lib/curl_rtmp.c",
|
||||
root ++ "lib/curl_sasl.c",
|
||||
root ++ "lib/curl_sha512_256.c",
|
||||
root ++ "lib/curl_sspi.c",
|
||||
root ++ "lib/curl_threads.c",
|
||||
root ++ "lib/curl_trc.c",
|
||||
root ++ "lib/cw-out.c",
|
||||
root ++ "lib/cw-pause.c",
|
||||
root ++ "lib/dict.c",
|
||||
root ++ "lib/doh.c",
|
||||
root ++ "lib/dynhds.c",
|
||||
root ++ "lib/easy.c",
|
||||
root ++ "lib/easygetopt.c",
|
||||
root ++ "lib/easyoptions.c",
|
||||
root ++ "lib/escape.c",
|
||||
root ++ "lib/fake_addrinfo.c",
|
||||
root ++ "lib/file.c",
|
||||
root ++ "lib/fileinfo.c",
|
||||
root ++ "lib/fopen.c",
|
||||
root ++ "lib/formdata.c",
|
||||
root ++ "lib/ftp.c",
|
||||
root ++ "lib/ftplistparser.c",
|
||||
root ++ "lib/getenv.c",
|
||||
root ++ "lib/getinfo.c",
|
||||
root ++ "lib/gopher.c",
|
||||
root ++ "lib/hash.c",
|
||||
root ++ "lib/headers.c",
|
||||
root ++ "lib/hmac.c",
|
||||
root ++ "lib/hostip.c",
|
||||
root ++ "lib/hostip4.c",
|
||||
root ++ "lib/hostip6.c",
|
||||
root ++ "lib/hsts.c",
|
||||
root ++ "lib/http.c",
|
||||
root ++ "lib/http1.c",
|
||||
root ++ "lib/http2.c",
|
||||
root ++ "lib/http_aws_sigv4.c",
|
||||
root ++ "lib/http_chunks.c",
|
||||
root ++ "lib/http_digest.c",
|
||||
root ++ "lib/http_negotiate.c",
|
||||
root ++ "lib/http_ntlm.c",
|
||||
root ++ "lib/http_proxy.c",
|
||||
root ++ "lib/httpsrr.c",
|
||||
root ++ "lib/idn.c",
|
||||
root ++ "lib/if2ip.c",
|
||||
root ++ "lib/imap.c",
|
||||
root ++ "lib/krb5.c",
|
||||
root ++ "lib/ldap.c",
|
||||
root ++ "lib/llist.c",
|
||||
root ++ "lib/macos.c",
|
||||
root ++ "lib/md4.c",
|
||||
root ++ "lib/md5.c",
|
||||
root ++ "lib/memdebug.c",
|
||||
root ++ "lib/mime.c",
|
||||
root ++ "lib/mprintf.c",
|
||||
root ++ "lib/mqtt.c",
|
||||
root ++ "lib/multi.c",
|
||||
root ++ "lib/multi_ev.c",
|
||||
root ++ "lib/netrc.c",
|
||||
root ++ "lib/noproxy.c",
|
||||
root ++ "lib/openldap.c",
|
||||
root ++ "lib/parsedate.c",
|
||||
root ++ "lib/pingpong.c",
|
||||
root ++ "lib/pop3.c",
|
||||
root ++ "lib/progress.c",
|
||||
root ++ "lib/psl.c",
|
||||
root ++ "lib/rand.c",
|
||||
root ++ "lib/rename.c",
|
||||
root ++ "lib/request.c",
|
||||
root ++ "lib/rtsp.c",
|
||||
root ++ "lib/select.c",
|
||||
root ++ "lib/sendf.c",
|
||||
root ++ "lib/setopt.c",
|
||||
root ++ "lib/sha256.c",
|
||||
root ++ "lib/share.c",
|
||||
root ++ "lib/slist.c",
|
||||
root ++ "lib/smb.c",
|
||||
root ++ "lib/smtp.c",
|
||||
root ++ "lib/socketpair.c",
|
||||
root ++ "lib/socks.c",
|
||||
root ++ "lib/socks_gssapi.c",
|
||||
root ++ "lib/socks_sspi.c",
|
||||
root ++ "lib/speedcheck.c",
|
||||
root ++ "lib/splay.c",
|
||||
root ++ "lib/strcase.c",
|
||||
root ++ "lib/strdup.c",
|
||||
root ++ "lib/strequal.c",
|
||||
root ++ "lib/strerror.c",
|
||||
root ++ "lib/system_win32.c",
|
||||
root ++ "lib/telnet.c",
|
||||
root ++ "lib/tftp.c",
|
||||
root ++ "lib/transfer.c",
|
||||
root ++ "lib/uint-bset.c",
|
||||
root ++ "lib/uint-hash.c",
|
||||
root ++ "lib/uint-spbset.c",
|
||||
root ++ "lib/uint-table.c",
|
||||
root ++ "lib/url.c",
|
||||
root ++ "lib/urlapi.c",
|
||||
root ++ "lib/version.c",
|
||||
root ++ "lib/ws.c",
|
||||
root ++ "lib/curlx/base64.c",
|
||||
root ++ "lib/curlx/dynbuf.c",
|
||||
root ++ "lib/curlx/inet_ntop.c",
|
||||
root ++ "lib/curlx/nonblock.c",
|
||||
root ++ "lib/curlx/strparse.c",
|
||||
root ++ "lib/curlx/timediff.c",
|
||||
root ++ "lib/curlx/timeval.c",
|
||||
root ++ "lib/curlx/wait.c",
|
||||
root ++ "lib/curlx/warnless.c",
|
||||
root ++ "lib/vquic/curl_ngtcp2.c",
|
||||
root ++ "lib/vquic/curl_osslq.c",
|
||||
root ++ "lib/vquic/curl_quiche.c",
|
||||
root ++ "lib/vquic/vquic.c",
|
||||
root ++ "lib/vquic/vquic-tls.c",
|
||||
root ++ "lib/vauth/cleartext.c",
|
||||
root ++ "lib/vauth/cram.c",
|
||||
root ++ "lib/vauth/digest.c",
|
||||
root ++ "lib/vauth/digest_sspi.c",
|
||||
root ++ "lib/vauth/gsasl.c",
|
||||
root ++ "lib/vauth/krb5_gssapi.c",
|
||||
root ++ "lib/vauth/krb5_sspi.c",
|
||||
root ++ "lib/vauth/ntlm.c",
|
||||
root ++ "lib/vauth/ntlm_sspi.c",
|
||||
root ++ "lib/vauth/oauth2.c",
|
||||
root ++ "lib/vauth/spnego_gssapi.c",
|
||||
root ++ "lib/vauth/spnego_sspi.c",
|
||||
root ++ "lib/vauth/vauth.c",
|
||||
root ++ "lib/vtls/cipher_suite.c",
|
||||
root ++ "lib/vtls/mbedtls.c",
|
||||
root ++ "lib/vtls/mbedtls_threadlock.c",
|
||||
root ++ "lib/vtls/vtls.c",
|
||||
root ++ "lib/vtls/vtls_scache.c",
|
||||
root ++ "lib/vtls/x509asn1.c",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -4,10 +4,6 @@
|
||||
.version = "0.0.0",
|
||||
.fingerprint = 0xda130f3af836cea0,
|
||||
.dependencies = .{
|
||||
.tls = .{
|
||||
.url = "https://github.com/ianic/tls.zig/archive/55845f755d9e2e821458ea55693f85c737cd0c7a.tar.gz",
|
||||
.hash = "tls-0.1.0-ER2e0m43BQAshi8ixj1qf3w2u2lqKtXtkrxUJ4AGZDcl",
|
||||
},
|
||||
.tigerbeetle_io = .{
|
||||
.url = "https://github.com/lightpanda-io/tigerbeetle-io/archive/61d9652f1a957b7f4db723ea6aa0ce9635e840ce.tar.gz",
|
||||
.hash = "tigerbeetle_io-0.0.0-ViLgxpyRBAB5BMfIcj3KMXfbJzwARs9uSl8aRy2OXULd",
|
||||
|
||||
13
src/app.zig
13
src/app.zig
@@ -1,10 +1,11 @@
|
||||
const std = @import("std");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("log.zig");
|
||||
const Loop = @import("runtime/loop.zig").Loop;
|
||||
const http = @import("http/client.zig");
|
||||
const Platform = @import("runtime/js.zig").Platform;
|
||||
const http = @import("http/client.zig");
|
||||
|
||||
const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
|
||||
const Notification = @import("notification.zig").Notification;
|
||||
@@ -17,7 +18,7 @@ pub const App = struct {
|
||||
platform: ?*const Platform,
|
||||
allocator: Allocator,
|
||||
telemetry: Telemetry,
|
||||
http_client: http.Client,
|
||||
http_client: *http.Client,
|
||||
app_dir_path: ?[]const u8,
|
||||
notification: *Notification,
|
||||
|
||||
@@ -59,12 +60,8 @@ pub const App = struct {
|
||||
.platform = config.platform,
|
||||
.app_dir_path = app_dir_path,
|
||||
.notification = notification,
|
||||
.http_client = try http.Client.init(allocator, loop, .{
|
||||
.max_concurrent = 3,
|
||||
.http_proxy = config.http_proxy,
|
||||
.proxy_type = config.proxy_type,
|
||||
.proxy_auth = config.proxy_auth,
|
||||
.tls_verify_host = config.tls_verify_host,
|
||||
.http_client = try http.Client.init(allocator, .{
|
||||
.max_concurrent_transfers = 3,
|
||||
}),
|
||||
.config = config,
|
||||
};
|
||||
|
||||
543
src/browser/ScriptManager.zig
Normal file
543
src/browser/ScriptManager.zig
Normal file
@@ -0,0 +1,543 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const parser = @import("netsurf.zig");
|
||||
const http = @import("../http/client.zig");
|
||||
|
||||
const App = @import("../app.zig").App;
|
||||
const Env = @import("env.zig").Env;
|
||||
const Page = @import("page.zig").Page;
|
||||
const URL = @import("../url.zig").URL;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArrayListUnmanaged = std.ArrayListUnmanaged;
|
||||
|
||||
const ScriptManager = @This();
|
||||
|
||||
page: *Page,
|
||||
|
||||
// Only once this is true can deferred scripts be run
|
||||
static_scripts_done: bool,
|
||||
|
||||
// Normal scripts (non-deffered & non-async). These must be executed ni order
|
||||
scripts: OrderList,
|
||||
|
||||
// List of deferred scripts. These must be executed in order, but only once
|
||||
// dom_loaded == true,
|
||||
deferred: OrderList,
|
||||
|
||||
client: *http.Client,
|
||||
allocator: Allocator,
|
||||
buffer_pool: BufferPool,
|
||||
script_pool: std.heap.MemoryPool(PendingScript),
|
||||
|
||||
const OrderList = std.DoublyLinkedList(*PendingScript);
|
||||
|
||||
pub fn init(app: *App, page: *Page) ScriptManager {
|
||||
const allocator = app.allocator;
|
||||
return .{
|
||||
.page = page,
|
||||
.scripts = .{},
|
||||
.deferred = .{},
|
||||
.allocator = allocator,
|
||||
.client = app.http_client,
|
||||
.static_scripts_done = false,
|
||||
.buffer_pool = BufferPool.init(allocator, 5),
|
||||
.script_pool = std.heap.MemoryPool(PendingScript).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *ScriptManager) void {
|
||||
self.buffer_pool.deinit();
|
||||
self.script_pool.deinit();
|
||||
}
|
||||
|
||||
pub fn addFromElement(self: *ScriptManager, element: *parser.Element) !void {
|
||||
if (try parser.elementGetAttribute(element, "nomodule") != null) {
|
||||
// these scripts should only be loaded if we don't support modules
|
||||
// but since we do support modules, we can just skip them.
|
||||
return;
|
||||
}
|
||||
|
||||
// If a script tag gets dynamically created and added to the dom:
|
||||
// document.getElementsByTagName('head')[0].appendChild(script)
|
||||
// that script tag will immediately get executed by our scriptAddedCallback.
|
||||
// However, if the location where the script tag is inserted happens to be
|
||||
// below where processHTMLDoc curently is, then we'll re-run that same script
|
||||
// again in processHTMLDoc. This flag is used to let us know if a specific
|
||||
// <script> has already been processed.
|
||||
if (try parser.scriptGetProcessed(@ptrCast(element))) {
|
||||
return;
|
||||
}
|
||||
try parser.scriptSetProcessed(@ptrCast(element), true);
|
||||
|
||||
const kind: Script.Kind = blk: {
|
||||
const script_type = try parser.elementGetAttribute(element, "type") orelse break :blk .javascript;
|
||||
if (script_type.len == 0) {
|
||||
break :blk .javascript;
|
||||
}
|
||||
if (std.ascii.eqlIgnoreCase(script_type, "application/javascript")) {
|
||||
break :blk .javascript;
|
||||
}
|
||||
if (std.ascii.eqlIgnoreCase(script_type, "text/javascript")) {
|
||||
break :blk .javascript;
|
||||
}
|
||||
if (std.ascii.eqlIgnoreCase(script_type, "module")) {
|
||||
break :blk .module;
|
||||
}
|
||||
log.warn(.user_script, "unknown script type", .{ .type = script_type });
|
||||
return;
|
||||
};
|
||||
|
||||
var onload: ?Script.Callback = null;
|
||||
var onerror: ?Script.Callback = null;
|
||||
|
||||
const page = self.page;
|
||||
if (page.getNodeState(@ptrCast(element))) |se| {
|
||||
// if the script has a node state, then it was dynamically added and thus
|
||||
// the onload/onerror were saved in the state (if there are any)
|
||||
if (se.onload) |function| {
|
||||
onload = .{ .function = function };
|
||||
}
|
||||
if (se.onerror) |function| {
|
||||
onerror = .{ .function = function };
|
||||
}
|
||||
} else {
|
||||
// if the script has no node state, then it could still be dynamically
|
||||
// added (could have been dynamically added, but no attributes were set
|
||||
// which required a node state to be created) or it could be a inline
|
||||
// <script>.
|
||||
if (try parser.elementGetAttribute(element, "onload")) |string| {
|
||||
onload = .{ .string = string };
|
||||
}
|
||||
if (try parser.elementGetAttribute(element, "onerror")) |string| {
|
||||
onerror = .{ .string = string };
|
||||
}
|
||||
}
|
||||
|
||||
var source: Script.Source = undefined;
|
||||
var remote_url: ?[:0]const u8 = null;
|
||||
if (try parser.elementGetAttribute(element, "src")) |src| {
|
||||
remote_url = try URL.stitch(page.arena, src, page.url.raw, .{ .null_terminated = true });
|
||||
source = .{ .remote = .{} };
|
||||
} else {
|
||||
const inline_source = try parser.nodeTextContent(@ptrCast(element)) orelse return;
|
||||
source = .{ .@"inline" = inline_source };
|
||||
}
|
||||
|
||||
var script = Script{
|
||||
.kind = kind,
|
||||
.onload = onload,
|
||||
.onerror = onerror,
|
||||
.element = element,
|
||||
.source = source,
|
||||
.url = remote_url orelse page.url.raw,
|
||||
.is_defer = try parser.elementGetAttribute(element, "defer") != null,
|
||||
.is_async = try parser.elementGetAttribute(element, "async") != null,
|
||||
};
|
||||
|
||||
if (source == .@"inline" and self.scripts.first == null) {
|
||||
// inline script with no pending scripts, execute it immediately.
|
||||
return script.eval(page);
|
||||
}
|
||||
|
||||
const pending_script = try self.script_pool.create();
|
||||
errdefer self.script_pool.destroy(pending_script);
|
||||
pending_script.* = .{
|
||||
.script = script,
|
||||
.complete = false,
|
||||
.manager = self,
|
||||
.node = undefined,
|
||||
};
|
||||
|
||||
if (source == .@"inline") {
|
||||
// if we're here, it means that we have pending scripts (i.e. self.ordered
|
||||
// is not empty). Because the script is inline, it's complete/ready, but
|
||||
// we need to process them in order
|
||||
pending_script.complete = true;
|
||||
self.scripts.append(&pending_script.node);
|
||||
return;
|
||||
}
|
||||
|
||||
try self.client.request(.{
|
||||
.url = remote_url.?,
|
||||
.ctx = pending_script,
|
||||
.method = .GET,
|
||||
.start_callback = startCallback,
|
||||
.header_callback = headerCallback,
|
||||
.data_callback = dataCallback,
|
||||
.done_callback = doneCallback,
|
||||
.error_callback = errorCallback,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn staticScriptsDone(self: *ScriptManager) void {
|
||||
std.debug.assert(self.static_scripts_done == false);
|
||||
self.static_scripts_done = true;
|
||||
}
|
||||
|
||||
// try to evaluate completed scripts (in order). This is called whenever a script
|
||||
// is completed.
|
||||
fn evaluate(self: *ScriptManager) void {
|
||||
const page = self.page;
|
||||
|
||||
while (self.scripts.first) |n| {
|
||||
var pending_script = n.data;
|
||||
if (pending_script.complete == false) {
|
||||
return;
|
||||
}
|
||||
defer pending_script.deinit();
|
||||
pending_script.script.eval(page);
|
||||
}
|
||||
|
||||
if (self.static_scripts_done == false) {
|
||||
// We can only execute deferred scripts if
|
||||
// 1 - all the normal scripts are done
|
||||
// 2 - and we've loaded all the normal scripts
|
||||
// The last one isn't obvious, but it's possible for self.scripts to/
|
||||
// be empty not because we're done executing all the normal scripts
|
||||
// but because we're done executing some (or maybe none), but we're still
|
||||
// parsing the HTML.
|
||||
return;
|
||||
}
|
||||
|
||||
while (self.deferred.first) |n| {
|
||||
var pending_script = n.data;
|
||||
if (pending_script.complete == false) {
|
||||
return;
|
||||
}
|
||||
defer pending_script.deinit();
|
||||
pending_script.script.eval(page);
|
||||
}
|
||||
|
||||
page.documentIsLoaded();
|
||||
}
|
||||
|
||||
fn getList(self: *ScriptManager, script: *const Script) ?*OrderList {
|
||||
if (script.is_defer) {
|
||||
return &self.deferred;
|
||||
}
|
||||
|
||||
if (script.is_async) {
|
||||
// async don't need to execute in order.
|
||||
return null;
|
||||
}
|
||||
|
||||
return &self.scripts;
|
||||
}
|
||||
|
||||
fn startCallback(transfer: *http.Transfer) !void {
|
||||
const script: *PendingScript = @alignCast(@ptrCast(transfer.ctx));
|
||||
script.startCallback(transfer) catch |err| {
|
||||
log.err(.http, "SM.startCallback", .{ .err = err, .transfer = transfer });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
fn headerCallback(transfer: *http.Transfer) !void {
|
||||
const script: *PendingScript = @alignCast(@ptrCast(transfer.ctx));
|
||||
script.headerCallback(transfer) catch |err| {
|
||||
log.err(.http, "SM.headerCallback", .{ .err = err, .transfer = transfer });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
fn dataCallback(transfer: *http.Transfer, data: []const u8) !void {
|
||||
const script: *PendingScript = @alignCast(@ptrCast(transfer.ctx));
|
||||
script.dataCallback(data) catch |err| {
|
||||
log.err(.http, "SM.dataCallback", .{ .err = err, .transfer = transfer, .len = data.len });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
fn doneCallback(transfer: *http.Transfer) !void {
|
||||
const script: *PendingScript = @alignCast(@ptrCast(transfer.ctx));
|
||||
script.doneCallback(transfer);
|
||||
}
|
||||
|
||||
fn errorCallback(transfer: *http.Transfer, err: anyerror) void {
|
||||
const script: *PendingScript = @alignCast(@ptrCast(transfer.ctx));
|
||||
script.errorCallback(transfer, err);
|
||||
}
|
||||
|
||||
// A script which is pending execution.
|
||||
// It could be pending because:
|
||||
// (a) we're still downloading its content or
|
||||
// (b) this is a non-async script that has to be executed in order
|
||||
const PendingScript = struct {
|
||||
script: Script,
|
||||
complete: bool,
|
||||
node: OrderList.Node,
|
||||
manager: *ScriptManager,
|
||||
|
||||
fn deinit(self: *PendingScript) void {
|
||||
var manager = self.manager;
|
||||
if (self.script.source == .remote) {
|
||||
manager.buffer_pool.release(self.script.source.remote);
|
||||
}
|
||||
if (manager.getList(&self.script)) |list| {
|
||||
list.remove(&self.node);
|
||||
}
|
||||
}
|
||||
|
||||
fn startCallback(self: *PendingScript, transfer: *http.Transfer) !void {
|
||||
if (self.manager.getList(&self.script)) |list| {
|
||||
self.node.data = self;
|
||||
list.append(&self.node);
|
||||
}
|
||||
|
||||
// if the script is async, it isn't tracked in a list, because we can
|
||||
// execute it as soon as it's done loading.
|
||||
log.debug(.http, "script fetch start", .{ .req = transfer });
|
||||
}
|
||||
|
||||
fn headerCallback(self: *PendingScript, transfer: *http.Transfer) !void {
|
||||
const header = &transfer.response_header.?;
|
||||
if (header.status != 200) {
|
||||
return error.InvalidStatusCode;
|
||||
}
|
||||
|
||||
// @newhttp TODO: pre size based on content-length
|
||||
// @newhttp TODO: max-length enfocement
|
||||
self.script.source = .{ .remote = self.manager.buffer_pool.get() };
|
||||
|
||||
log.debug(.http, "script header", .{
|
||||
.req = transfer,
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
});
|
||||
}
|
||||
|
||||
fn dataCallback(self: *PendingScript, data: []const u8) !void {
|
||||
// @newhttp TODO: max-length enforcement
|
||||
try self.script.source.remote.appendSlice(self.manager.allocator, data);
|
||||
}
|
||||
|
||||
fn doneCallback(self: *PendingScript, transfer: *http.Transfer) void {
|
||||
log.debug(.http, "script fetch complete", .{ .req = transfer });
|
||||
if (self.script.is_async) {
|
||||
// async script can be evaluated immediately
|
||||
defer self.deinit();
|
||||
self.script.eval(self.manager.page);
|
||||
} else {
|
||||
self.complete = true;
|
||||
self.manager.evaluate();
|
||||
}
|
||||
}
|
||||
|
||||
fn errorCallback(self: *PendingScript, transfer: *http.Transfer, err: anyerror) void {
|
||||
log.warn(.http, "script fetch error", .{ .req = transfer, .err = err });
|
||||
self.deinit();
|
||||
}
|
||||
};
|
||||
|
||||
const Script = struct {
|
||||
kind: Kind,
|
||||
url: []const u8,
|
||||
is_async: bool,
|
||||
is_defer: bool,
|
||||
source: Source,
|
||||
onload: ?Callback,
|
||||
onerror: ?Callback,
|
||||
element: *parser.Element,
|
||||
|
||||
const Kind = enum {
|
||||
module,
|
||||
javascript,
|
||||
};
|
||||
|
||||
const Callback = union(enum) {
|
||||
string: []const u8,
|
||||
function: Env.Function,
|
||||
};
|
||||
|
||||
const Source = union(enum) {
|
||||
@"inline": []const u8,
|
||||
remote: std.ArrayListUnmanaged(u8),
|
||||
|
||||
fn content(self: Source) []const u8 {
|
||||
return switch (self) {
|
||||
.remote => |buf| buf.items,
|
||||
.@"inline" => |c| c,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
fn eval(self: *Script, page: *Page) void {
|
||||
|
||||
// inline scripts aren't cached. remote ones are.
|
||||
const cacheable = self.source == .remote;
|
||||
|
||||
const url = self.url;
|
||||
|
||||
log.debug(.browser, "executing script", .{
|
||||
.src = url,
|
||||
.kind = self.kind,
|
||||
.cacheable = cacheable,
|
||||
});
|
||||
|
||||
const js_context = page.main_context;
|
||||
var try_catch: Env.TryCatch = undefined;
|
||||
try_catch.init(js_context);
|
||||
defer try_catch.deinit();
|
||||
|
||||
const success = blk: {
|
||||
const content = self.source.content();
|
||||
switch (self.kind) {
|
||||
.javascript => _ = js_context.eval(content, url) catch break :blk false,
|
||||
.module => {
|
||||
// We don't care about waiting for the evaluation here.
|
||||
_ = js_context.module(content, url, cacheable) catch break :blk false;
|
||||
},
|
||||
}
|
||||
break :blk true;
|
||||
};
|
||||
|
||||
if (success) {
|
||||
self.executeCallback("onload", page);
|
||||
return;
|
||||
}
|
||||
|
||||
if (page.delayed_navigation) {
|
||||
// If we're navigating to another page, an error is expected
|
||||
// since we probably terminated the script forcefully.
|
||||
return;
|
||||
}
|
||||
|
||||
const msg = try_catch.err(page.arena) catch |err| @errorName(err) orelse "unknown";
|
||||
log.warn(.user_script, "eval script", .{
|
||||
.url = url,
|
||||
.err = msg,
|
||||
.cacheable = cacheable,
|
||||
});
|
||||
|
||||
self.executeCallback("onerror", page);
|
||||
}
|
||||
|
||||
fn executeCallback(self: *const Script, comptime typ: []const u8, page: *Page) void {
|
||||
const callback = @field(self, typ) orelse return;
|
||||
|
||||
switch (callback) {
|
||||
.string => |str| {
|
||||
var try_catch: Env.TryCatch = undefined;
|
||||
try_catch.init(page.main_context);
|
||||
defer try_catch.deinit();
|
||||
|
||||
_ = page.main_context.exec(str, typ) catch |err| {
|
||||
const msg = try_catch.err(page.arena) catch @errorName(err) orelse "unknown";
|
||||
log.warn(.user_script, "script callback", .{
|
||||
.url = self.url,
|
||||
.err = msg,
|
||||
.type = typ,
|
||||
.@"inline" = true,
|
||||
});
|
||||
};
|
||||
},
|
||||
.function => |f| {
|
||||
const Event = @import("events/event.zig").Event;
|
||||
const loadevt = parser.eventCreate() catch |err| {
|
||||
log.err(.browser, "SM event creation", .{ .err = err });
|
||||
return;
|
||||
};
|
||||
defer parser.eventDestroy(loadevt);
|
||||
|
||||
var result: Env.Function.Result = undefined;
|
||||
const iface = Event.toInterface(loadevt) catch |err| {
|
||||
log.err(.browser, "SM event interface", .{ .err = err });
|
||||
return;
|
||||
};
|
||||
f.tryCall(void, .{iface}, &result) catch {
|
||||
log.warn(.user_script, "script callback", .{
|
||||
.url = self.url,
|
||||
.type = typ,
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.@"inline" = false,
|
||||
});
|
||||
};
|
||||
},
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const BufferPool = struct {
|
||||
free: List = .{},
|
||||
available: usize,
|
||||
allocator: Allocator,
|
||||
max_concurrent_transfers: u8,
|
||||
node_pool: std.heap.MemoryPool(List.Node),
|
||||
|
||||
const List = std.DoublyLinkedList(std.ArrayListUnmanaged(u8));
|
||||
|
||||
fn init(allocator: Allocator, max_concurrent_transfers: u8) BufferPool {
|
||||
return .{
|
||||
.free = .{},
|
||||
.available = 0,
|
||||
.allocator = allocator,
|
||||
.max_concurrent_transfers = max_concurrent_transfers,
|
||||
.node_pool = std.heap.MemoryPool(List.Node).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
fn deinit(self: *BufferPool) void {
|
||||
const allocator = self.allocator;
|
||||
|
||||
var node = self.free.first;
|
||||
while (node) |n| {
|
||||
node = n.next;
|
||||
n.data.deinit(allocator);
|
||||
}
|
||||
self.node_pool.deinit();
|
||||
}
|
||||
|
||||
fn get(self: *BufferPool) ArrayListUnmanaged(u8) {
|
||||
const node = self.free.popFirst() orelse {
|
||||
// return a new buffer
|
||||
return .{};
|
||||
};
|
||||
|
||||
defer self.node_pool.destroy(node);
|
||||
return node.data;
|
||||
}
|
||||
|
||||
fn release(self: *BufferPool, buffer: ArrayListUnmanaged(u8)) void {
|
||||
// @newhttp TODO: discard buffers that are larger than some configured max?
|
||||
|
||||
// create mutable copy
|
||||
var b = buffer;
|
||||
|
||||
if (self.available == self.max_concurrent_transfers) {
|
||||
b.deinit(self.allocator);
|
||||
}
|
||||
|
||||
const node = self.node_pool.create() catch |err| {
|
||||
b.deinit(self.allocator);
|
||||
log.err(.http, "SM BufferPool release", .{ .err = err });
|
||||
return;
|
||||
};
|
||||
|
||||
b.clearRetainingCapacity();
|
||||
node.data = b;
|
||||
self.available += 1;
|
||||
self.free.append(node);
|
||||
}
|
||||
};
|
||||
@@ -28,7 +28,6 @@ const Session = @import("session.zig").Session;
|
||||
const Notification = @import("../notification.zig").Notification;
|
||||
|
||||
const log = @import("../log.zig");
|
||||
|
||||
const http = @import("../http/client.zig");
|
||||
|
||||
// Browser is an instance of the browser.
|
||||
@@ -61,7 +60,7 @@ pub const Browser = struct {
|
||||
.session = null,
|
||||
.allocator = allocator,
|
||||
.notification = notification,
|
||||
.http_client = &app.http_client,
|
||||
.http_client = app.http_client,
|
||||
.page_arena = ArenaAllocator.init(allocator),
|
||||
.session_arena = ArenaAllocator.init(allocator),
|
||||
.transfer_arena = ArenaAllocator.init(allocator),
|
||||
|
||||
@@ -22,11 +22,11 @@ const Allocator = std.mem.Allocator;
|
||||
pub const Mime = struct {
|
||||
content_type: ContentType,
|
||||
params: []const u8 = "",
|
||||
charset: ?[]const u8 = null,
|
||||
charset: ?[:0]const u8 = null,
|
||||
|
||||
pub const unknown = Mime{
|
||||
.params = "",
|
||||
.charset = "",
|
||||
.charset = null,
|
||||
.content_type = .{ .unknown = {} },
|
||||
};
|
||||
|
||||
@@ -52,7 +52,7 @@ pub const Mime = struct {
|
||||
other: struct { type: []const u8, sub_type: []const u8 },
|
||||
};
|
||||
|
||||
pub fn parse(arena: Allocator, input: []u8) !Mime {
|
||||
pub fn parse(input: []u8) !Mime {
|
||||
if (input.len > 255) {
|
||||
return error.TooBig;
|
||||
}
|
||||
@@ -69,7 +69,7 @@ pub const Mime = struct {
|
||||
|
||||
const params = trimLeft(normalized[type_len..]);
|
||||
|
||||
var charset: ?[]const u8 = null;
|
||||
var charset: ?[:0]const u8 = null;
|
||||
|
||||
var it = std.mem.splitScalar(u8, params, ';');
|
||||
while (it.next()) |attr| {
|
||||
@@ -86,7 +86,24 @@ pub const Mime = struct {
|
||||
}, name) orelse continue;
|
||||
|
||||
switch (attribute_name) {
|
||||
.charset => charset = try parseAttributeValue(arena, value),
|
||||
.charset => {
|
||||
// We used to have a proper value parser, but we currently
|
||||
// only care about the charset attribute, plus only about
|
||||
// the UTF-8 value. It's a lot easier to do it this way,
|
||||
// and it doesn't require an allocation to (a) unescape the
|
||||
// value or (b) ensure the correct lifetime.
|
||||
if (value.len == 0) {
|
||||
break;
|
||||
}
|
||||
var attribute_value = value;
|
||||
if (value[0] == '"' and value[value.len - 1] == '"') {
|
||||
attribute_value = value[1 .. value.len - 1];
|
||||
}
|
||||
|
||||
if (std.ascii.eqlIgnoreCase(attribute_value, "utf-8")) {
|
||||
charset = "UTF-8";
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -224,58 +241,6 @@ pub const Mime = struct {
|
||||
break :blk v;
|
||||
};
|
||||
|
||||
fn parseAttributeValue(arena: Allocator, value: []const u8) ![]const u8 {
|
||||
if (value[0] != '"') {
|
||||
// almost certainly referenced from an http.Request which has its
|
||||
// own lifetime.
|
||||
return arena.dupe(u8, value);
|
||||
}
|
||||
|
||||
// 1 to skip the opening quote
|
||||
var value_pos: usize = 1;
|
||||
var unescaped_len: usize = 0;
|
||||
const last = value.len - 1;
|
||||
|
||||
while (value_pos < value.len) {
|
||||
switch (value[value_pos]) {
|
||||
'"' => break,
|
||||
'\\' => {
|
||||
if (value_pos == last) {
|
||||
return error.Invalid;
|
||||
}
|
||||
const next = value[value_pos + 1];
|
||||
if (T_SPECIAL[next] == false) {
|
||||
return error.Invalid;
|
||||
}
|
||||
value_pos += 2;
|
||||
},
|
||||
else => value_pos += 1,
|
||||
}
|
||||
unescaped_len += 1;
|
||||
}
|
||||
|
||||
if (unescaped_len == 0) {
|
||||
return error.Invalid;
|
||||
}
|
||||
|
||||
value_pos = 1;
|
||||
const owned = try arena.alloc(u8, unescaped_len);
|
||||
for (0..unescaped_len) |i| {
|
||||
switch (value[value_pos]) {
|
||||
'"' => break,
|
||||
'\\' => {
|
||||
owned[i] = value[value_pos + 1];
|
||||
value_pos += 2;
|
||||
},
|
||||
else => |c| {
|
||||
owned[i] = c;
|
||||
value_pos += 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
return owned;
|
||||
}
|
||||
|
||||
const VALID_CODEPOINTS = blk: {
|
||||
var v: [256]bool = undefined;
|
||||
for (0..256) |i| {
|
||||
@@ -329,7 +294,7 @@ test "Mime: invalid " {
|
||||
|
||||
for (invalids) |invalid| {
|
||||
const mutable_input = try testing.arena_allocator.dupe(u8, invalid);
|
||||
try testing.expectError(error.Invalid, Mime.parse(undefined, mutable_input));
|
||||
try testing.expectError(error.Invalid, Mime.parse(mutable_input));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -409,7 +374,7 @@ test "Mime: isHTML" {
|
||||
const isHTML = struct {
|
||||
fn isHTML(expected: bool, input: []const u8) !void {
|
||||
const mutable_input = try testing.arena_allocator.dupe(u8, input);
|
||||
var mime = try Mime.parse(testing.arena_allocator, mutable_input);
|
||||
var mime = try Mime.parse(mutable_input);
|
||||
try testing.expectEqual(expected, mime.isHTML());
|
||||
}
|
||||
}.isHTML;
|
||||
@@ -495,7 +460,7 @@ const Expectation = struct {
|
||||
fn expect(expected: Expectation, input: []const u8) !void {
|
||||
const mutable_input = try testing.arena_allocator.dupe(u8, input);
|
||||
|
||||
const actual = try Mime.parse(testing.arena_allocator, mutable_input);
|
||||
const actual = try Mime.parse(mutable_input);
|
||||
try testing.expectEqual(
|
||||
std.meta.activeTag(expected.content_type),
|
||||
std.meta.activeTag(actual.content_type),
|
||||
|
||||
@@ -2362,6 +2362,31 @@ fn parserErr(err: HubbubErr) ParserError!void {
|
||||
};
|
||||
}
|
||||
|
||||
pub const Parser = struct {
|
||||
html_doc: *DocumentHTML,
|
||||
parser: *c.dom_hubbub_parser,
|
||||
|
||||
pub fn init(encoding: ?[:0]const u8) !Parser {
|
||||
var params = parseParams(encoding);
|
||||
var doc: ?*c.dom_document = undefined;
|
||||
var parser: ?*c.dom_hubbub_parser = undefined;
|
||||
|
||||
try parserErr(c.dom_hubbub_parser_create(¶ms, &parser, &doc));
|
||||
return .{
|
||||
.parser = parser.?,
|
||||
.html_doc = @ptrCast(doc.?),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Parser) void {
|
||||
c.dom_hubbub_parser_destroy(self.parser);
|
||||
}
|
||||
|
||||
pub fn process(self: *Parser, data: []const u8) !void {
|
||||
try parserErr(c.dom_hubbub_parser_parse_chunk(self.parser, data.ptr, data.len));
|
||||
}
|
||||
};
|
||||
|
||||
// documentHTMLParseFromStr parses the given HTML string.
|
||||
// The caller is responsible for closing the document.
|
||||
pub fn documentHTMLParseFromStr(str: []const u8) !*DocumentHTML {
|
||||
@@ -2370,18 +2395,10 @@ pub fn documentHTMLParseFromStr(str: []const u8) !*DocumentHTML {
|
||||
}
|
||||
|
||||
pub fn documentHTMLParse(reader: anytype, enc: ?[:0]const u8) !*DocumentHTML {
|
||||
var parser: ?*c.dom_hubbub_parser = undefined;
|
||||
var doc: ?*c.dom_document = undefined;
|
||||
var err: c.hubbub_error = undefined;
|
||||
var params = parseParams(enc);
|
||||
|
||||
err = c.dom_hubbub_parser_create(¶ms, &parser, &doc);
|
||||
try parserErr(err);
|
||||
defer c.dom_hubbub_parser_destroy(parser);
|
||||
|
||||
try parseData(parser.?, reader);
|
||||
|
||||
return @as(*DocumentHTML, @ptrCast(doc.?));
|
||||
var parser = try Parser.init(enc);
|
||||
defer parser.deinit();
|
||||
try parseData(parser.parser, reader);
|
||||
return parser.html_doc;
|
||||
}
|
||||
|
||||
pub fn documentParseFragmentFromStr(self: *Document, str: []const u8) !*DocumentFragment {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -128,6 +128,7 @@ pub const Session = struct {
|
||||
// window.setTimeout and running microtasks should be ignored
|
||||
self.browser.app.loop.reset();
|
||||
|
||||
self.page.?.deinit();
|
||||
self.page = null;
|
||||
|
||||
// clear netsurf memory arena.
|
||||
|
||||
@@ -4,7 +4,6 @@ const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const http = @import("../../http/client.zig");
|
||||
const DateTime = @import("../../datetime.zig").DateTime;
|
||||
const public_suffix_list = @import("../../data/public_suffix_list.zig").lookup;
|
||||
|
||||
@@ -104,17 +103,18 @@ pub const Jar = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn populateFromResponse(self: *Jar, uri: *const Uri, header: *const http.ResponseHeader) !void {
|
||||
const now = std.time.timestamp();
|
||||
var it = header.iterate("set-cookie");
|
||||
while (it.next()) |set_cookie| {
|
||||
const c = Cookie.parse(self.allocator, uri, set_cookie) catch |err| {
|
||||
log.warn(.web_api, "cookie parse failed", .{ .raw = set_cookie, .err = err });
|
||||
continue;
|
||||
};
|
||||
try self.add(c, now);
|
||||
}
|
||||
}
|
||||
// @newhttp
|
||||
// pub fn populateFromResponse(self: *Jar, uri: *const Uri, header: *const http.ResponseHeader) !void {
|
||||
// const now = std.time.timestamp();
|
||||
// var it = header.iterate("set-cookie");
|
||||
// while (it.next()) |set_cookie| {
|
||||
// const c = Cookie.parse(self.allocator, uri, set_cookie) catch |err| {
|
||||
// log.warn(.web_api, "cookie parse failed", .{ .raw = set_cookie, .err = err });
|
||||
// continue;
|
||||
// };
|
||||
// try self.add(c, now);
|
||||
// }
|
||||
// }
|
||||
|
||||
fn writeCookie(cookie: *const Cookie, writer: anytype) !void {
|
||||
if (cookie.name.len > 0) {
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const DOMError = @import("../netsurf.zig").DOMError;
|
||||
@@ -28,8 +29,8 @@ const log = @import("../../log.zig");
|
||||
const URL = @import("../../url.zig").URL;
|
||||
const Mime = @import("../mime.zig").Mime;
|
||||
const parser = @import("../netsurf.zig");
|
||||
const http = @import("../../http/client.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const http = @import("../../http/client.zig");
|
||||
const Loop = @import("../../runtime/loop.zig").Loop;
|
||||
const CookieJar = @import("../storage/storage.zig").CookieJar;
|
||||
|
||||
@@ -83,7 +84,7 @@ pub const XMLHttpRequest = struct {
|
||||
arena: Allocator,
|
||||
request: ?*http.Request = null,
|
||||
|
||||
method: http.Request.Method,
|
||||
method: http.Method,
|
||||
state: State,
|
||||
url: ?URL = null,
|
||||
origin_url: *const URL,
|
||||
@@ -264,10 +265,11 @@ pub const XMLHttpRequest = struct {
|
||||
}
|
||||
|
||||
pub fn destructor(self: *XMLHttpRequest) void {
|
||||
if (self.request) |req| {
|
||||
req.abort();
|
||||
self.request = null;
|
||||
}
|
||||
// @newhttp
|
||||
// if (self.request) |req| {
|
||||
// req.abort();
|
||||
self.request = null;
|
||||
// }
|
||||
}
|
||||
|
||||
pub fn reset(self: *XMLHttpRequest) void {
|
||||
@@ -414,7 +416,7 @@ pub const XMLHttpRequest = struct {
|
||||
}
|
||||
|
||||
const methods = [_]struct {
|
||||
tag: http.Request.Method,
|
||||
tag: http.Method,
|
||||
name: []const u8,
|
||||
}{
|
||||
.{ .tag = .DELETE, .name = "DELETE" },
|
||||
@@ -424,20 +426,12 @@ pub const XMLHttpRequest = struct {
|
||||
.{ .tag = .POST, .name = "POST" },
|
||||
.{ .tag = .PUT, .name = "PUT" },
|
||||
};
|
||||
const methods_forbidden = [_][]const u8{ "CONNECT", "TRACE", "TRACK" };
|
||||
|
||||
pub fn validMethod(m: []const u8) DOMError!http.Request.Method {
|
||||
pub fn validMethod(m: []const u8) DOMError!http.Method {
|
||||
for (methods) |method| {
|
||||
if (std.ascii.eqlIgnoreCase(method.name, m)) {
|
||||
return method.tag;
|
||||
}
|
||||
}
|
||||
// If method is a forbidden method, then throw a "SecurityError" DOMException.
|
||||
for (methods_forbidden) |method| {
|
||||
if (std.ascii.eqlIgnoreCase(method, m)) {
|
||||
return DOMError.Security;
|
||||
}
|
||||
}
|
||||
|
||||
// If method is not a method, then throw a "SyntaxError" DOMException.
|
||||
return DOMError.Syntax;
|
||||
@@ -461,13 +455,15 @@ pub const XMLHttpRequest = struct {
|
||||
self.request_body = try self.arena.dupe(u8, b);
|
||||
}
|
||||
|
||||
try page.request_factory.initAsync(
|
||||
page.arena,
|
||||
self.method,
|
||||
&self.url.?.uri,
|
||||
self,
|
||||
onHttpRequestReady,
|
||||
);
|
||||
// @newhttp
|
||||
_ = page;
|
||||
// try page.request_factory.initAsync(
|
||||
// page.arena,
|
||||
// self.method,
|
||||
// &self.url.?.uri,
|
||||
// self,
|
||||
// onHttpRequestReady,
|
||||
// );
|
||||
}
|
||||
|
||||
fn onHttpRequestReady(ctx: *anyopaque, request: *http.Request) !void {
|
||||
|
||||
@@ -236,11 +236,12 @@ pub fn httpRequestStart(arena: Allocator, bc: anytype, request: *const Notificat
|
||||
const page = bc.session.currentPage() orelse unreachable;
|
||||
|
||||
// Modify request with extra CDP headers
|
||||
try request.headers.ensureTotalCapacity(request.arena, request.headers.items.len + cdp.extra_headers.items.len);
|
||||
for (cdp.extra_headers.items) |extra| {
|
||||
const new = putAssumeCapacity(request.headers, extra);
|
||||
if (!new) log.debug(.cdp, "request header overwritten", .{ .name = extra.name });
|
||||
}
|
||||
// @newhttp
|
||||
// try request.headers.ensureTotalCapacity(request.arena, request.headers.items.len + cdp.extra_headers.items.len);
|
||||
// for (cdp.extra_headers.items) |extra| {
|
||||
// const new = putAssumeCapacity(request.headers, extra);
|
||||
// if (!new) log.debug(.cdp, "request header overwritten", .{ .name = extra.name });
|
||||
// }
|
||||
|
||||
const document_url = try urlToString(arena, &page.url.uri, .{
|
||||
.scheme = true,
|
||||
@@ -262,11 +263,12 @@ pub fn httpRequestStart(arena: Allocator, bc: anytype, request: *const Notificat
|
||||
.fragment = true,
|
||||
});
|
||||
|
||||
var headers: std.StringArrayHashMapUnmanaged([]const u8) = .empty;
|
||||
try headers.ensureTotalCapacity(arena, request.headers.items.len);
|
||||
for (request.headers.items) |header| {
|
||||
headers.putAssumeCapacity(header.name, header.value);
|
||||
}
|
||||
// @newhttp
|
||||
const headers: std.StringArrayHashMapUnmanaged([]const u8) = .empty;
|
||||
// try headers.ensureTotalCapacity(arena, request.headers.items.len);
|
||||
// for (request.headers.items) |header| {
|
||||
// headers.putAssumeCapacity(header.name, header.value);
|
||||
// }
|
||||
|
||||
// We're missing a bunch of fields, but, for now, this seems like enough
|
||||
try cdp.sendEvent("Network.requestWillBeSent", .{
|
||||
@@ -303,11 +305,12 @@ pub fn httpRequestComplete(arena: Allocator, bc: anytype, request: *const Notifi
|
||||
.query = true,
|
||||
});
|
||||
|
||||
var headers: std.StringArrayHashMapUnmanaged([]const u8) = .empty;
|
||||
try headers.ensureTotalCapacity(arena, request.headers.len);
|
||||
for (request.headers) |header| {
|
||||
headers.putAssumeCapacity(header.name, header.value);
|
||||
}
|
||||
// @newhttp
|
||||
const headers: std.StringArrayHashMapUnmanaged([]const u8) = .empty;
|
||||
// try headers.ensureTotalCapacity(arena, request.headers.len);
|
||||
// for (request.headers) |header| {
|
||||
// headers.putAssumeCapacity(header.name, header.value);
|
||||
// }
|
||||
|
||||
// We're missing a bunch of fields, but, for now, this seems like enough
|
||||
try cdp.sendEvent("Network.responseReceived", .{
|
||||
|
||||
@@ -148,12 +148,10 @@ fn navigate(cmd: anytype) !void {
|
||||
return error.SessionIdNotLoaded;
|
||||
}
|
||||
|
||||
const url = try URL.parse(params.url, "https");
|
||||
|
||||
var page = bc.session.currentPage() orelse return error.PageNotLoaded;
|
||||
bc.loader_id = bc.cdp.loader_id_gen.next();
|
||||
|
||||
try page.navigate(url, .{
|
||||
try page.navigate(params.url, .{
|
||||
.reason = .address_bar,
|
||||
.cdp_id = cmd.input.id,
|
||||
});
|
||||
|
||||
@@ -8,7 +8,8 @@ pub fn lookup(value: []const u8) bool {
|
||||
const public_suffix_list = std.StaticStringMap(void).initComptime(entries);
|
||||
|
||||
const entries: []const struct { []const u8, void } =
|
||||
if (builtin.is_test) &.{
|
||||
// @newhttp
|
||||
if (builtin.is_test or true) &.{
|
||||
.{ "api.gov.uk", {} },
|
||||
.{ "gov.uk", {} },
|
||||
} else &.{
|
||||
|
||||
93
src/http/ca_certs.zig
Normal file
93
src/http/ca_certs.zig
Normal file
@@ -0,0 +1,93 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const c = @import("client.zig").c;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
// TODO: on BSD / Linux, we could just read the PEM file directly.
|
||||
// This whole rescan + decode is really just needed for MacOS. On Linux
|
||||
// bundle.rescan does find the .pem file(s) which could be in a few different
|
||||
// places, so it's still useful, just not efficient.
|
||||
// TODO: on BSD / Linux, we could just read the PEM file directly.
// This whole rescan + decode is really just needed for MacOS. On Linux
// bundle.rescan does find the .pem file(s) which could be in a few different
// places, so it's still useful, just not efficient.
pub fn load(allocator: Allocator, arena: Allocator) !c.curl_blob {
    // Collect the system root certificates (DER-encoded) into a bundle.
    var bundle: std.crypto.Certificate.Bundle = .{};
    try bundle.rescan(allocator);
    defer bundle.deinit(allocator);

    const der_bytes = bundle.bytes.items;
    const base64 = std.base64.standard.Encoder;

    // Estimate the PEM output size up-front so the backing buffer should not
    // need to grow; the returned blob points directly into the arena-owned
    // buffer, so the caller's lifetime is tied to `arena`.
    const encoded_size = base64.calcSize(der_bytes.len);
    const estimated_size = encoded_size +
        (bundle.map.count() * 75) + // start / end per certificate + extra, just in case
        (encoded_size / 64) // newline per 64 characters
    ;

    var pem: std.ArrayListUnmanaged(u8) = .empty;
    try pem.ensureTotalCapacity(arena, estimated_size);
    var writer = pem.writer(arena);

    // Re-encode each certificate in the bundle as a PEM block.
    var cert_it = bundle.map.valueIterator();
    while (cert_it.next()) |start| {
        const element = try std.crypto.Certificate.der.Element.parse(der_bytes, start.*);

        try writer.writeAll("-----BEGIN CERTIFICATE-----\n");
        // Fresh LineWriter per certificate: column tracking restarts at 0.
        var line_writer = LineWriter{ .inner = writer };
        try base64.encodeWriter(&line_writer, der_bytes[start.*..element.slice.end]);
        try writer.writeAll("\n-----END CERTIFICATE-----\n");
    }

    // Final encoding should not be larger than our initial size estimate
    std.debug.assert(estimated_size > pem.items.len);

    return .{
        .len = pem.items.len,
        .data = pem.items.ptr,
        .flags = 0,
    };
}
|
||||
|
||||
// Wraps lines @ 64 columns.
//
// Adapter handed to std.base64's encodeWriter: it forwards everything to the
// underlying ArrayList writer, inserting a '\n' after every 64th output
// column. `col` carries the current column across writeAll calls.
const LineWriter = struct {
    col: usize = 0,
    inner: std.ArrayListUnmanaged(u8).Writer,

    pub fn writeAll(self: *LineWriter, data: []const u8) !void {
        var writer = self.inner;

        var col = self.col;
        var remain = data;

        // First, top up the current (possibly partially written) line.
        const available = 64 - col;
        if (remain.len > available) {
            try writer.writeAll(remain[0..available]);
            try writer.writeByte('\n');
            remain = remain[available..];
            col = 0;
        }

        // Then emit complete 64-character lines.
        while (remain.len > 64) {
            try writer.writeAll(remain[0..64]);
            try writer.writeByte('\n');
            // Fix: the original did `remain = data[len..]`, re-slicing the
            // full input instead of advancing, so this loop never made
            // progress and re-wrote the same bytes forever.
            remain = remain[64..];
        }

        // Whatever is left is shorter than a full line; it becomes the new
        // partial line.
        try writer.writeAll(remain);
        self.col = col + remain.len;
    }
};
|
||||
4351
src/http/client.zig
4351
src/http/client.zig
File diff suppressed because it is too large
Load Diff
238
src/http/errors.zig
Normal file
238
src/http/errors.zig
Normal file
@@ -0,0 +1,238 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const c = @import("client.zig").c;
|
||||
|
||||
/// Zig error set mirroring libcurl's CURLcode failure values (see
/// `fromCode` for the mapping). `Unknown` is the catch-all for any code
/// this set does not name.
pub const Error = error{
    UnsupportedProtocol,
    FailedInit,
    UrlMalformat,
    NotBuiltIn,
    CouldntResolveProxy,
    CouldntResolveHost,
    CouldntConnect,
    WeirdServerReply,
    RemoteAccessDenied,
    FtpAcceptFailed,
    FtpWeirdPassReply,
    FtpAcceptTimeout,
    FtpWeirdPasvReply,
    FtpWeird227Format,
    FtpCantGetHost,
    Http2,
    FtpCouldntSetType,
    PartialFile,
    FtpCouldntRetrFile,
    QuoteError,
    HttpReturnedError,
    WriteError,
    UploadFailed,
    ReadError,
    OutOfMemory,
    OperationTimedout,
    FtpPortFailed,
    FtpCouldntUseRest,
    RangeError,
    SslConnectError,
    BadDownloadResume,
    FileCouldntReadFile,
    LdapCannotBind,
    LdapSearchFailed,
    AbortedByCallback,
    BadFunctionArgument,
    InterfaceFailed,
    TooManyRedirects,
    UnknownOption,
    SetoptOptionSyntax,
    GotNothing,
    SslEngineNotfound,
    SslEngineSetfailed,
    SendError,
    RecvError,
    SslCertproblem,
    SslCipher,
    PeerFailedVerification,
    BadContentEncoding,
    FilesizeExceeded,
    UseSslFailed,
    SendFailRewind,
    SslEngineInitfailed,
    LoginDenied,
    TftpNotfound,
    TftpPerm,
    RemoteDiskFull,
    TftpIllegal,
    TftpUnknownid,
    RemoteFileExists,
    TftpNosuchuser,
    SslCacertBadfile,
    RemoteFileNotFound,
    Ssh,
    SslShutdownFailed,
    Again,
    SslCrlBadfile,
    SslIssuerError,
    FtpPretFailed,
    RtspCseqError,
    RtspSessionError,
    FtpBadFileList,
    ChunkFailed,
    NoConnectionAvailable,
    SslPinnedpubkeynotmatch,
    SslInvalidcertstatus,
    Http2Stream,
    RecursiveApiCall,
    AuthError,
    Http3,
    QuicConnectError,
    Proxy,
    SslClientcert,
    UnrecoverablePoll,
    TooLarge,
    Unknown,
};
|
||||
|
||||
/// Translates a libcurl easy-interface result (`CURLcode`) into our `Error`
/// set. Must only be called for failures: `code` is asserted != CURLE_OK.
/// Codes without an explicit mapping collapse to `Error.Unknown`.
pub fn fromCode(code: c.CURLcode) Error {
    std.debug.assert(code != c.CURLE_OK);

    return switch (code) {
        c.CURLE_UNSUPPORTED_PROTOCOL => Error.UnsupportedProtocol,
        c.CURLE_FAILED_INIT => Error.FailedInit,
        c.CURLE_URL_MALFORMAT => Error.UrlMalformat,
        c.CURLE_NOT_BUILT_IN => Error.NotBuiltIn,
        c.CURLE_COULDNT_RESOLVE_PROXY => Error.CouldntResolveProxy,
        c.CURLE_COULDNT_RESOLVE_HOST => Error.CouldntResolveHost,
        c.CURLE_COULDNT_CONNECT => Error.CouldntConnect,
        c.CURLE_WEIRD_SERVER_REPLY => Error.WeirdServerReply,
        c.CURLE_REMOTE_ACCESS_DENIED => Error.RemoteAccessDenied,
        c.CURLE_FTP_ACCEPT_FAILED => Error.FtpAcceptFailed,
        c.CURLE_FTP_WEIRD_PASS_REPLY => Error.FtpWeirdPassReply,
        c.CURLE_FTP_ACCEPT_TIMEOUT => Error.FtpAcceptTimeout,
        c.CURLE_FTP_WEIRD_PASV_REPLY => Error.FtpWeirdPasvReply,
        c.CURLE_FTP_WEIRD_227_FORMAT => Error.FtpWeird227Format,
        c.CURLE_FTP_CANT_GET_HOST => Error.FtpCantGetHost,
        c.CURLE_HTTP2 => Error.Http2,
        c.CURLE_FTP_COULDNT_SET_TYPE => Error.FtpCouldntSetType,
        c.CURLE_PARTIAL_FILE => Error.PartialFile,
        c.CURLE_FTP_COULDNT_RETR_FILE => Error.FtpCouldntRetrFile,
        c.CURLE_QUOTE_ERROR => Error.QuoteError,
        c.CURLE_HTTP_RETURNED_ERROR => Error.HttpReturnedError,
        c.CURLE_WRITE_ERROR => Error.WriteError,
        c.CURLE_UPLOAD_FAILED => Error.UploadFailed,
        c.CURLE_READ_ERROR => Error.ReadError,
        c.CURLE_OUT_OF_MEMORY => Error.OutOfMemory,
        c.CURLE_OPERATION_TIMEDOUT => Error.OperationTimedout,
        c.CURLE_FTP_PORT_FAILED => Error.FtpPortFailed,
        c.CURLE_FTP_COULDNT_USE_REST => Error.FtpCouldntUseRest,
        c.CURLE_RANGE_ERROR => Error.RangeError,
        c.CURLE_SSL_CONNECT_ERROR => Error.SslConnectError,
        c.CURLE_BAD_DOWNLOAD_RESUME => Error.BadDownloadResume,
        c.CURLE_FILE_COULDNT_READ_FILE => Error.FileCouldntReadFile,
        c.CURLE_LDAP_CANNOT_BIND => Error.LdapCannotBind,
        c.CURLE_LDAP_SEARCH_FAILED => Error.LdapSearchFailed,
        c.CURLE_ABORTED_BY_CALLBACK => Error.AbortedByCallback,
        c.CURLE_BAD_FUNCTION_ARGUMENT => Error.BadFunctionArgument,
        c.CURLE_INTERFACE_FAILED => Error.InterfaceFailed,
        c.CURLE_TOO_MANY_REDIRECTS => Error.TooManyRedirects,
        c.CURLE_UNKNOWN_OPTION => Error.UnknownOption,
        c.CURLE_SETOPT_OPTION_SYNTAX => Error.SetoptOptionSyntax,
        c.CURLE_GOT_NOTHING => Error.GotNothing,
        c.CURLE_SSL_ENGINE_NOTFOUND => Error.SslEngineNotfound,
        c.CURLE_SSL_ENGINE_SETFAILED => Error.SslEngineSetfailed,
        c.CURLE_SEND_ERROR => Error.SendError,
        c.CURLE_RECV_ERROR => Error.RecvError,
        c.CURLE_SSL_CERTPROBLEM => Error.SslCertproblem,
        c.CURLE_SSL_CIPHER => Error.SslCipher,
        c.CURLE_PEER_FAILED_VERIFICATION => Error.PeerFailedVerification,
        c.CURLE_BAD_CONTENT_ENCODING => Error.BadContentEncoding,
        c.CURLE_FILESIZE_EXCEEDED => Error.FilesizeExceeded,
        c.CURLE_USE_SSL_FAILED => Error.UseSslFailed,
        c.CURLE_SEND_FAIL_REWIND => Error.SendFailRewind,
        c.CURLE_SSL_ENGINE_INITFAILED => Error.SslEngineInitfailed,
        c.CURLE_LOGIN_DENIED => Error.LoginDenied,
        c.CURLE_TFTP_NOTFOUND => Error.TftpNotfound,
        c.CURLE_TFTP_PERM => Error.TftpPerm,
        c.CURLE_REMOTE_DISK_FULL => Error.RemoteDiskFull,
        c.CURLE_TFTP_ILLEGAL => Error.TftpIllegal,
        c.CURLE_TFTP_UNKNOWNID => Error.TftpUnknownid,
        c.CURLE_REMOTE_FILE_EXISTS => Error.RemoteFileExists,
        c.CURLE_TFTP_NOSUCHUSER => Error.TftpNosuchuser,
        c.CURLE_SSL_CACERT_BADFILE => Error.SslCacertBadfile,
        c.CURLE_REMOTE_FILE_NOT_FOUND => Error.RemoteFileNotFound,
        c.CURLE_SSH => Error.Ssh,
        c.CURLE_SSL_SHUTDOWN_FAILED => Error.SslShutdownFailed,
        c.CURLE_AGAIN => Error.Again,
        c.CURLE_SSL_CRL_BADFILE => Error.SslCrlBadfile,
        c.CURLE_SSL_ISSUER_ERROR => Error.SslIssuerError,
        c.CURLE_FTP_PRET_FAILED => Error.FtpPretFailed,
        c.CURLE_RTSP_CSEQ_ERROR => Error.RtspCseqError,
        c.CURLE_RTSP_SESSION_ERROR => Error.RtspSessionError,
        c.CURLE_FTP_BAD_FILE_LIST => Error.FtpBadFileList,
        c.CURLE_CHUNK_FAILED => Error.ChunkFailed,
        c.CURLE_NO_CONNECTION_AVAILABLE => Error.NoConnectionAvailable,
        c.CURLE_SSL_PINNEDPUBKEYNOTMATCH => Error.SslPinnedpubkeynotmatch,
        c.CURLE_SSL_INVALIDCERTSTATUS => Error.SslInvalidcertstatus,
        c.CURLE_HTTP2_STREAM => Error.Http2Stream,
        c.CURLE_RECURSIVE_API_CALL => Error.RecursiveApiCall,
        c.CURLE_AUTH_ERROR => Error.AuthError,
        c.CURLE_HTTP3 => Error.Http3,
        c.CURLE_QUIC_CONNECT_ERROR => Error.QuicConnectError,
        c.CURLE_PROXY => Error.Proxy,
        c.CURLE_SSL_CLIENTCERT => Error.SslClientcert,
        c.CURLE_UNRECOVERABLE_POLL => Error.UnrecoverablePoll,
        c.CURLE_TOO_LARGE => Error.TooLarge,
        else => Error.Unknown,
    };
}
||||
|
||||
/// Error set mirroring libcurl's multi-interface results (`CURLMcode`);
/// see `fromMCode` for the mapping. `Unknown` covers any unmapped code.
pub const Multi = error{
    BadHandle,
    BadEasyHandle,
    OutOfMemory,
    InternalError,
    BadSocket,
    UnknownOption,
    AddedAlready,
    RecursiveApiCall,
    WakeupFailure,
    BadFunctionArgument,
    AbortedByCallback,
    UnrecoverablePoll,
    Unknown,
};
|
||||
|
||||
/// Translates a libcurl multi-interface result (`CURLMcode`) into the
/// `Multi` error set. Must only be called for failures: `code` is asserted
/// != CURLM_OK. Unmapped codes collapse to `Multi.Unknown`.
pub fn fromMCode(code: c.CURLMcode) Multi {
    std.debug.assert(code != c.CURLM_OK);

    return switch (code) {
        c.CURLM_BAD_HANDLE => Multi.BadHandle,
        c.CURLM_BAD_EASY_HANDLE => Multi.BadEasyHandle,
        c.CURLM_OUT_OF_MEMORY => Multi.OutOfMemory,
        c.CURLM_INTERNAL_ERROR => Multi.InternalError,
        c.CURLM_BAD_SOCKET => Multi.BadSocket,
        c.CURLM_UNKNOWN_OPTION => Multi.UnknownOption,
        c.CURLM_ADDED_ALREADY => Multi.AddedAlready,
        c.CURLM_RECURSIVE_API_CALL => Multi.RecursiveApiCall,
        c.CURLM_WAKEUP_FAILURE => Multi.WakeupFailure,
        c.CURLM_BAD_FUNCTION_ARGUMENT => Multi.BadFunctionArgument,
        c.CURLM_ABORTED_BY_CALLBACK => Multi.AbortedByCallback,
        c.CURLM_UNRECOVERABLE_POLL => Multi.UnrecoverablePoll,
        else => Multi.Unknown,
    };
}
|
||||
89
src/main.zig
89
src/main.zig
@@ -22,8 +22,8 @@ const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("log.zig");
|
||||
const server = @import("server.zig");
|
||||
const App = @import("app.zig").App;
|
||||
const http = @import("http/client.zig");
|
||||
const App = @import("app.zig").App;
|
||||
const Platform = @import("runtime/js.zig").Platform;
|
||||
const Browser = @import("browser/browser.zig").Browser;
|
||||
|
||||
@@ -107,8 +107,8 @@ fn run(alloc: Allocator) !void {
|
||||
};
|
||||
},
|
||||
.fetch => |opts| {
|
||||
log.debug(.app, "startup", .{ .mode = "fetch", .dump = opts.dump, .url = opts.url });
|
||||
const url = try @import("url.zig").URL.parse(opts.url, null);
|
||||
const url = opts.url;
|
||||
log.debug(.app, "startup", .{ .mode = "fetch", .dump = opts.dump, .url = url });
|
||||
|
||||
// browser
|
||||
var browser = try Browser.init(app);
|
||||
@@ -130,7 +130,7 @@ fn run(alloc: Allocator) !void {
|
||||
},
|
||||
};
|
||||
|
||||
try page.wait(std.time.ns_per_s * 3);
|
||||
try page.wait(5); // 5 seconds
|
||||
|
||||
// dump
|
||||
if (opts.dump) {
|
||||
@@ -633,7 +633,7 @@ test "tests:beforeAll" {
|
||||
log.opts.level = .err;
|
||||
log.opts.format = .logfmt;
|
||||
|
||||
test_wg.startMany(3);
|
||||
test_wg.startMany(2);
|
||||
const platform = try Platform.init();
|
||||
|
||||
{
|
||||
@@ -642,12 +642,6 @@ test "tests:beforeAll" {
|
||||
thread.detach();
|
||||
}
|
||||
|
||||
{
|
||||
const address = try std.net.Address.parseIp("127.0.0.1", 9581);
|
||||
const thread = try std.Thread.spawn(.{}, serveHTTPS, .{address});
|
||||
thread.detach();
|
||||
}
|
||||
|
||||
{
|
||||
const address = try std.net.Address.parseIp("127.0.0.1", 9583);
|
||||
const thread = try std.Thread.spawn(.{}, serveCDP, .{ address, &platform });
|
||||
@@ -759,79 +753,6 @@ fn serveHTTP(address: std.net.Address) !void {
|
||||
}
|
||||
}
|
||||
|
||||
// This is a lot of work for testing TLS, but the TLS (async) code is complicated
|
||||
// This "server" is written specifically to test the client. It assumes the client
|
||||
// isn't a jerk.
|
||||
fn serveHTTPS(address: std.net.Address) !void {
|
||||
const tls = @import("tls");
|
||||
|
||||
var listener = try address.listen(.{ .reuse_address = true });
|
||||
defer listener.deinit();
|
||||
|
||||
test_wg.finish();
|
||||
|
||||
var seed: u64 = undefined;
|
||||
std.posix.getrandom(std.mem.asBytes(&seed)) catch unreachable;
|
||||
var r = std.Random.DefaultPrng.init(seed);
|
||||
const rand = r.random();
|
||||
|
||||
var read_buffer: [1024]u8 = undefined;
|
||||
while (true) {
|
||||
const stream = blk: {
|
||||
const conn = try listener.accept();
|
||||
break :blk conn.stream;
|
||||
};
|
||||
defer stream.close();
|
||||
|
||||
var conn = try tls.server(stream, .{ .auth = null });
|
||||
defer conn.close() catch {};
|
||||
|
||||
var pos: usize = 0;
|
||||
while (true) {
|
||||
const n = try conn.read(read_buffer[pos..]);
|
||||
if (n == 0) {
|
||||
break;
|
||||
}
|
||||
pos += n;
|
||||
const header_end = std.mem.indexOf(u8, read_buffer[0..pos], "\r\n\r\n") orelse {
|
||||
continue;
|
||||
};
|
||||
var it = std.mem.splitScalar(u8, read_buffer[0..header_end], ' ');
|
||||
_ = it.next() orelse unreachable; // method
|
||||
const path = it.next() orelse unreachable;
|
||||
|
||||
var fragment = false;
|
||||
var response: []const u8 = undefined;
|
||||
if (std.mem.eql(u8, path, "/http_client/simple")) {
|
||||
fragment = true;
|
||||
response = "HTTP/1.1 200 \r\nContent-Length: 0\r\nConnection: Close\r\n\r\n";
|
||||
} else if (std.mem.eql(u8, path, "/http_client/body")) {
|
||||
fragment = true;
|
||||
response = "HTTP/1.1 201 CREATED\r\nContent-Length: 20\r\nConnection: Close\r\n Another : HEaDer \r\n\r\n1234567890abcdefhijk";
|
||||
} else if (std.mem.eql(u8, path, "/http_client/redirect/insecure")) {
|
||||
fragment = true;
|
||||
response = "HTTP/1.1 307 GOTO\r\nLocation: http://127.0.0.1:9582/http_client/redirect\r\nConnection: Close\r\n\r\n";
|
||||
} else if (std.mem.eql(u8, path, "/xhr")) {
|
||||
response = "HTTP/1.1 200 OK\r\nContent-Type: text/html; charset=utf-8\r\nContent-Length: 100\r\nConnection: Close\r\n\r\n" ++ ("1234567890" ** 10);
|
||||
} else if (std.mem.eql(u8, path, "/xhr/json")) {
|
||||
response = "HTTP/1.1 200 OK\r\nContent-Type: application/json\r\nContent-Length: 18\r\nConnection: Close\r\n\r\n{\"over\":\"9000!!!\"}";
|
||||
} else {
|
||||
// should not have an unknown path
|
||||
unreachable;
|
||||
}
|
||||
|
||||
var unsent = response;
|
||||
while (unsent.len > 0) {
|
||||
const to_send = if (fragment) rand.intRangeAtMost(usize, 1, unsent.len) else unsent.len;
|
||||
const sent = try conn.write(unsent[0..to_send]);
|
||||
unsent = unsent[sent..];
|
||||
std.time.sleep(std.time.ns_per_us * 5);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn serveCDP(address: std.net.Address, platform: *const Platform) !void {
|
||||
var gpa: std.heap.GeneralPurposeAllocator(.{}) = .init;
|
||||
var app = try App.init(gpa.allocator(), .{
|
||||
|
||||
@@ -2,8 +2,8 @@ const std = @import("std");
|
||||
|
||||
const log = @import("log.zig");
|
||||
const URL = @import("url.zig").URL;
|
||||
const http = @import("http/client.zig");
|
||||
const page = @import("browser/page.zig");
|
||||
const http_client = @import("http/client.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -94,8 +94,7 @@ pub const Notification = struct {
|
||||
arena: Allocator,
|
||||
id: usize,
|
||||
url: *const std.Uri,
|
||||
method: http_client.Request.Method,
|
||||
headers: *std.ArrayListUnmanaged(std.http.Header),
|
||||
method: http.Method,
|
||||
has_body: bool,
|
||||
};
|
||||
|
||||
@@ -109,7 +108,6 @@ pub const Notification = struct {
|
||||
id: usize,
|
||||
url: *const std.Uri,
|
||||
status: u16,
|
||||
headers: []http_client.Header,
|
||||
};
|
||||
|
||||
pub fn init(allocator: Allocator, parent: ?*Notification) !*Notification {
|
||||
|
||||
@@ -121,6 +121,10 @@ pub const Loop = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hasPendingTimeout(self: *Self) bool {
|
||||
return self.pending_timeout_count > 0;
|
||||
}
|
||||
|
||||
// JS callbacks APIs
|
||||
// -----------------
|
||||
|
||||
|
||||
@@ -7,8 +7,8 @@ const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const App = @import("../app.zig").App;
|
||||
const http = @import("../http/client.zig");
|
||||
const telemetry = @import("telemetry.zig");
|
||||
const HttpClient = @import("../http/client.zig").Client;
|
||||
|
||||
const URL = "https://telemetry.lightpanda.io";
|
||||
const MAX_BATCH_SIZE = 20;
|
||||
@@ -21,7 +21,7 @@ pub const LightPanda = struct {
|
||||
allocator: Allocator,
|
||||
mutex: std.Thread.Mutex,
|
||||
cond: Thread.Condition,
|
||||
client: *HttpClient,
|
||||
client: *http.Client,
|
||||
node_pool: std.heap.MemoryPool(List.Node),
|
||||
|
||||
const List = std.DoublyLinkedList(LightPandaEvent);
|
||||
|
||||
@@ -406,8 +406,8 @@ pub const JsRunner = struct {
|
||||
.url = try page.url.toWebApi(page.arena),
|
||||
});
|
||||
|
||||
var html = std.io.fixedBufferStream(opts.html);
|
||||
try page.loadHTMLDoc(html.reader(), "UTF-8");
|
||||
const html_doc = try parser.documentHTMLParseFromStr(opts.html);
|
||||
try page.setDocument(html_doc);
|
||||
|
||||
return .{
|
||||
.app = app,
|
||||
|
||||
153
src/url.zig
153
src/url.zig
@@ -87,6 +87,7 @@ pub const URL = struct {
|
||||
|
||||
const StitchOpts = struct {
|
||||
alloc: AllocWhen = .always,
|
||||
null_terminated: bool = false,
|
||||
|
||||
const AllocWhen = enum {
|
||||
always,
|
||||
@@ -102,9 +103,13 @@ pub const URL = struct {
|
||||
allocator: Allocator,
|
||||
path: []const u8,
|
||||
base: []const u8,
|
||||
opts: StitchOpts,
|
||||
) ![]const u8 {
|
||||
comptime opts: StitchOpts,
|
||||
) !StitchReturn(opts) {
|
||||
if (base.len == 0 or isComleteHTTPUrl(path)) {
|
||||
if (comptime opts.null_terminated) {
|
||||
return allocator.dupeZ(u8, path);
|
||||
}
|
||||
|
||||
if (opts.alloc == .always) {
|
||||
return allocator.dupe(u8, path);
|
||||
}
|
||||
@@ -112,6 +117,10 @@ pub const URL = struct {
|
||||
}
|
||||
|
||||
if (path.len == 0) {
|
||||
if (comptime opts.null_terminated) {
|
||||
return allocator.dupeZ(u8, base);
|
||||
}
|
||||
|
||||
if (opts.alloc == .always) {
|
||||
return allocator.dupe(u8, base);
|
||||
}
|
||||
@@ -126,6 +135,9 @@ pub const URL = struct {
|
||||
|
||||
if (path[0] == '/') {
|
||||
const pos = std.mem.indexOfScalarPos(u8, base, protocol_end, '/') orelse base.len;
|
||||
if (comptime opts.null_terminated) {
|
||||
return std.fmt.allocPrintZ(allocator, "{s}{s}", .{ base[0..pos], path });
|
||||
}
|
||||
return std.fmt.allocPrint(allocator, "{s}{s}", .{ base[0..pos], path });
|
||||
}
|
||||
|
||||
@@ -134,17 +146,22 @@ pub const URL = struct {
|
||||
normalized_base = base[0 .. pos + protocol_end];
|
||||
}
|
||||
|
||||
var out = try std.fmt.allocPrint(allocator, "{s}/{s}", .{
|
||||
// that extra spacelets us handle opts.null_terminated. If we end up
|
||||
// not trimming anything, it ensures that we have 1 extra byte to store
|
||||
// our null terminator.
|
||||
var out = try std.fmt.allocPrint(allocator, "{s}/{s}" ++ if (comptime opts.null_terminated) " " else "", .{
|
||||
normalized_base,
|
||||
path,
|
||||
});
|
||||
|
||||
const end = if (comptime opts.null_terminated) out.len - 1 else out.len;
|
||||
|
||||
// Strip out ./ and ../. This is done in-place, because doing so can
|
||||
// only ever make `out` smaller. After this, `out` cannot be freed by
|
||||
// an allocator, which is ok, because we expect allocator to be an arena.
|
||||
var in_i: usize = 0;
|
||||
var out_i: usize = 0;
|
||||
while (in_i < out.len) {
|
||||
while (in_i < end) {
|
||||
if (std.mem.startsWith(u8, out[in_i..], "./")) {
|
||||
in_i += 2;
|
||||
continue;
|
||||
@@ -165,9 +182,19 @@ pub const URL = struct {
|
||||
in_i += 1;
|
||||
out_i += 1;
|
||||
}
|
||||
|
||||
if (comptime opts.null_terminated) {
|
||||
// we always have an extra space
|
||||
out[out_i] = 0;
|
||||
return out[0..out_i :0];
|
||||
}
|
||||
return out[0..out_i];
|
||||
}
|
||||
|
||||
fn StitchReturn(comptime opts: StitchOpts) type {
|
||||
return if (opts.null_terminated) [:0]const u8 else []const u8;
|
||||
}
|
||||
|
||||
pub fn concatQueryString(arena: Allocator, url: []const u8, query_string: []const u8) ![]const u8 {
|
||||
std.debug.assert(url.len != 0);
|
||||
|
||||
@@ -362,6 +389,124 @@ test "URL: stitch" {
|
||||
);
|
||||
}
|
||||
|
||||
// Table-driven check that stitch's null_terminated mode produces the same
// joined/normalized URLs as the plain mode, plus the trailing sentinel.
test "URL: stitch null terminated" {
    defer testing.reset();

    const Case = struct {
        base: []const u8,
        path: []const u8,
        expected: []const u8,
    };

    const cases = [_]Case{
        // relative path replaces the last base segment
        .{
            .base = "https://lightpanda.io/xyz/abc/123",
            .path = "something.js",
            .expected = "https://lightpanda.io/xyz/abc/something.js",
        },
        // absolute path resets to the host root
        .{
            .base = "https://lightpanda.io/xyz/abc/123",
            .path = "/something.js",
            .expected = "https://lightpanda.io/something.js",
        },
        .{
            .base = "https://lightpanda.io/",
            .path = "something.js",
            .expected = "https://lightpanda.io/something.js",
        },
        .{
            .base = "https://lightpanda.io/",
            .path = "/something.js",
            .expected = "https://lightpanda.io/something.js",
        },
        // base without any path component
        .{
            .base = "https://lightpanda.io",
            .path = "something.js",
            .expected = "https://lightpanda.io/something.js",
        },
        .{
            .base = "https://lightpanda.io",
            .path = "abc/something.js",
            .expected = "https://lightpanda.io/abc/something.js",
        },
        .{
            .base = "https://lightpanda.io/nested",
            .path = "abc/something.js",
            .expected = "https://lightpanda.io/abc/something.js",
        },
        .{
            .base = "https://lightpanda.io/nested/",
            .path = "abc/something.js",
            .expected = "https://lightpanda.io/nested/abc/something.js",
        },
        .{
            .base = "https://lightpanda.io/nested/",
            .path = "/abc/something.js",
            .expected = "https://lightpanda.io/abc/something.js",
        },
        // a complete URL as path wins over the base
        .{
            .base = "https://lightpanda.io/nested/",
            .path = "http://www.github.com/lightpanda-io/",
            .expected = "http://www.github.com/lightpanda-io/",
        },
        // empty path yields the base unchanged
        .{
            .base = "https://lightpanda.io/nested/",
            .path = "",
            .expected = "https://lightpanda.io/nested/",
        },
        // ./ and ../ segments are normalized away
        .{
            .base = "https://lightpanda.io/abc/aaa",
            .path = "./hello/./world",
            .expected = "https://lightpanda.io/abc/hello/world",
        },
        .{
            .base = "https://lightpanda.io/abc/aaa/",
            .path = "../hello",
            .expected = "https://lightpanda.io/abc/hello",
        },
        .{
            .base = "https://lightpanda.io/abc/aaa",
            .path = "../hello",
            .expected = "https://lightpanda.io/hello",
        },
        .{
            .base = "https://lightpanda.io/abc/aaa/",
            .path = "./.././.././hello",
            .expected = "https://lightpanda.io/hello",
        },
        // scheme-less bases behave like plain paths
        .{
            .base = "some/page",
            .path = "hello",
            .expected = "some/hello",
        },
        .{
            .base = "some/page/",
            .path = "hello",
            .expected = "some/page/hello",
        },

        .{
            .base = "some/page/other",
            .path = ".././hello",
            .expected = "some/hello",
        },
    };

    for (cases) |tc| {
        const actual = try stitch(testing.arena_allocator, tc.path, tc.base, .{ .null_terminated = true });
        try testing.expectString(tc.expected, actual);
    }

    // climbing above the host root is rejected
    try testing.expectError(
        error.InvalidURL,
        stitch(testing.arena_allocator, "../hello", "https://lightpanda.io/", .{ .null_terminated = true }),
    );
    try testing.expectError(
        error.InvalidURL,
        stitch(testing.arena_allocator, "../hello", "http://lightpanda.io/", .{ .null_terminated = true }),
    );
}
|
||||
|
||||
test "URL: concatQueryString" {
|
||||
defer testing.reset();
|
||||
const arena = testing.arena_allocator;
|
||||
|
||||
1
vendor/curl
vendored
Submodule
1
vendor/curl
vendored
Submodule
Submodule vendor/curl added at 6845533e24
1
vendor/mbedtls
vendored
Submodule
1
vendor/mbedtls
vendored
Submodule
Submodule vendor/mbedtls added at c765c831e5
1
vendor/nghttp2
vendored
Submodule
1
vendor/nghttp2
vendored
Submodule
Submodule vendor/nghttp2 added at ac22e0efe3
1
vendor/zlib
vendored
Submodule
1
vendor/zlib
vendored
Submodule
Submodule vendor/zlib added at 51b7f2abda
Reference in New Issue
Block a user