From 63f62aec6a6928ef3689c69f45802445839d5ba6 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Wed, 25 Mar 2026 21:56:21 -0700 Subject: [PATCH] feat: improve Node.js conformance and runtime parity --- .agent/contracts/node-bridge.md | 209 + .agent/contracts/node-stdlib.md | 19 +- CLAUDE.md | 20 +- docs/api-reference.mdx | 2 +- docs/docs.json | 14 +- docs/features/child-processes.mdx | 8 +- docs/features/filesystem.mdx | 6 +- docs/features/module-loading.mdx | 6 +- docs/features/networking.mdx | 32 +- docs/features/output-capture.mdx | 6 +- docs/features/permissions.mdx | 6 +- docs/features/resource-limits.mdx | 6 +- docs/features/typescript.mdx | 12 +- docs/features/virtual-filesystem.mdx | 213 +- docs/nodejs-compatibility.mdx | 5 +- docs/nodejs-conformance-report.mdx | 268 +- docs/posix-compatibility.md | 8 +- docs/posix-conformance-report.mdx | 7 +- docs/{features => }/process-isolation.mdx | 1 - docs/quickstart.mdx | 52 +- docs/runtimes/node.mdx | 2 +- docs/use-cases/dev-servers.mdx | 56 +- examples/ai-agent-type-check/docs-gen.json | 14 - examples/ai-agent-type-check/package.json | 3 +- .../scripts/verify-docs.mjs | 73 + examples/code-mode/docs-gen.json | 7 - examples/code-mode/package.json | 3 +- examples/code-mode/scripts/verify-docs.mjs | 17 + examples/features/docs-gen.json | 55 - examples/features/package.json | 3 +- examples/features/scripts/verify-docs.mjs | 68 + examples/features/scripts/verify-e2e.mjs | 70 +- examples/features/src/networking.ts | 22 +- examples/features/src/typescript.ts | 2 +- examples/features/src/virtual-filesystem.ts | 166 - examples/hono-dev-server/docs-gen.json | 6 - examples/hono-dev-server/package.json | 3 +- .../hono-dev-server/scripts/verify-docs.mjs | 41 + examples/hono-dev-server/src/index.ts | 54 +- examples/kitchen-sink/README.md | 10 - examples/kitchen-sink/docs-gen.json | 30 - examples/kitchen-sink/scripts/verify-e2e.mjs | 98 - examples/kitchen-sink/src/create-runtime.ts | 12 - examples/kitchen-sink/src/esm-modules.ts 
| 19 - .../src/execute-capture-output.ts | 21 - examples/kitchen-sink/src/filesystem.ts | 28 - examples/kitchen-sink/src/network-access.ts | 26 - examples/kitchen-sink/src/run-get-exports.ts | 18 - examples/plugin-system/docs-gen.json | 6 - examples/plugin-system/package.json | 3 +- .../plugin-system/scripts/verify-docs.mjs | 41 + examples/quickstart/README.md | 10 + .../{kitchen-sink => quickstart}/package.json | 7 +- examples/quickstart/scripts/verify-docs.mjs | 79 + examples/quickstart/src/fetch.ts | 24 + examples/quickstart/src/filesystem.ts | 25 + examples/quickstart/src/http-server-hono.ts | 50 + examples/quickstart/src/logging.ts | 19 + examples/quickstart/src/run-command.ts | 22 + examples/quickstart/src/simple.ts | 18 + examples/quickstart/src/typescript.ts | 53 + .../tsconfig.json | 0 native/v8-runtime/src/session.rs | 4 +- native/v8-runtime/src/snapshot.rs | 3 +- native/v8-runtime/src/stream.rs | 2 + .../src/inject/require-setup.ts | 392 +- .../core/src/generated/isolate-runtime.ts | 2 +- packages/core/src/kernel/kernel.ts | 1 + packages/core/src/kernel/proc-layer.ts | 54 +- packages/core/src/kernel/socket-table.ts | 46 +- packages/core/src/shared/bridge-contract.ts | 140 +- packages/core/src/shared/global-exposure.ts | 135 + packages/nodejs/src/bridge-contract.ts | 140 +- packages/nodejs/src/bridge-handlers.ts | 2031 +++- packages/nodejs/src/bridge/active-handles.ts | 42 +- packages/nodejs/src/bridge/fs.ts | 2419 ++++- packages/nodejs/src/bridge/module.ts | 2 + packages/nodejs/src/bridge/network.ts | 8980 +++++++++++++++-- packages/nodejs/src/bridge/polyfills.ts | 77 +- packages/nodejs/src/bridge/process.ts | 50 +- .../nodejs/src/bridge/whatwg-url-ambient.d.ts | 4 + packages/nodejs/src/bridge/whatwg-url.ts | 897 ++ packages/nodejs/src/builtin-modules.ts | 2 + .../nodejs/src/default-network-adapter.ts | 43 +- packages/nodejs/src/execution-driver.ts | 114 +- packages/playground/backend/server.ts | 4 - packages/playground/frontend/index.html | 9 - 
packages/playground/scripts/build-worker.ts | 18 +- packages/secure-exec/tests/e2e-docker.test.ts | 312 +- .../tests/kernel/e2e-project-matrix.test.ts | 499 +- .../tests/node-conformance/common/fixtures.js | 17 +- .../tests/node-conformance/common/index.js | 10 + .../tests/node-conformance/common/tick.js | 15 + .../tests/node-conformance/common/wpt.js | 28 + .../node-conformance/conformance-report.json | 90 +- .../tests/node-conformance/expectations.json | 1588 +-- .../tests/node-conformance/runner.test.ts | 26 + .../secure-exec/tests/project-matrix.test.ts | 550 +- .../tests/project-matrix/shared.ts | 548 - .../bun.lock | 2 +- .../fixture.json | 0 .../package.json | 2 +- .../src/index.js | 0 .../tests/runtime-driver/node/index.test.ts | 2488 ++++- .../tests/test-suite/node/crypto.ts | 96 +- .../tests/test-suite/node/polyfills.ts | 63 - packages/typescript/src/index.ts | 2 +- pnpm-lock.yaml | 59 +- pnpm-workspace.yaml | 1 - scripts/docs-gen/package.json | 18 - scripts/docs-gen/src/index.ts | 268 - scripts/docs-gen/tsconfig.json | 12 - scripts/generate-node-conformance-report.ts | 13 +- scripts/generate-posix-report.ts | 1 + scripts/ralph/prd.json | 206 +- scripts/ralph/progress.txt | 304 + 116 files changed, 20682 insertions(+), 4331 deletions(-) rename docs/{features => }/process-isolation.mdx (99%) delete mode 100644 examples/ai-agent-type-check/docs-gen.json create mode 100644 examples/ai-agent-type-check/scripts/verify-docs.mjs delete mode 100644 examples/code-mode/docs-gen.json create mode 100644 examples/code-mode/scripts/verify-docs.mjs delete mode 100644 examples/features/docs-gen.json create mode 100644 examples/features/scripts/verify-docs.mjs delete mode 100644 examples/features/src/virtual-filesystem.ts delete mode 100644 examples/hono-dev-server/docs-gen.json create mode 100644 examples/hono-dev-server/scripts/verify-docs.mjs delete mode 100644 examples/kitchen-sink/README.md delete mode 100644 examples/kitchen-sink/docs-gen.json delete mode 100644 
examples/kitchen-sink/scripts/verify-e2e.mjs delete mode 100644 examples/kitchen-sink/src/create-runtime.ts delete mode 100644 examples/kitchen-sink/src/esm-modules.ts delete mode 100644 examples/kitchen-sink/src/execute-capture-output.ts delete mode 100644 examples/kitchen-sink/src/filesystem.ts delete mode 100644 examples/kitchen-sink/src/network-access.ts delete mode 100644 examples/kitchen-sink/src/run-get-exports.ts delete mode 100644 examples/plugin-system/docs-gen.json create mode 100644 examples/plugin-system/scripts/verify-docs.mjs create mode 100644 examples/quickstart/README.md rename examples/{kitchen-sink => quickstart}/package.json (58%) create mode 100644 examples/quickstart/scripts/verify-docs.mjs create mode 100644 examples/quickstart/src/fetch.ts create mode 100644 examples/quickstart/src/filesystem.ts create mode 100644 examples/quickstart/src/http-server-hono.ts create mode 100644 examples/quickstart/src/logging.ts create mode 100644 examples/quickstart/src/run-command.ts create mode 100644 examples/quickstart/src/simple.ts create mode 100644 examples/quickstart/src/typescript.ts rename examples/{kitchen-sink => quickstart}/tsconfig.json (100%) create mode 100644 packages/nodejs/src/bridge/whatwg-url-ambient.d.ts create mode 100644 packages/nodejs/src/bridge/whatwg-url.ts create mode 100644 packages/secure-exec/tests/node-conformance/common/tick.js create mode 100644 packages/secure-exec/tests/node-conformance/common/wpt.js delete mode 100644 packages/secure-exec/tests/project-matrix/shared.ts rename packages/secure-exec/tests/projects/{bun-package-manager-layout-pass => bun-layout-pass}/bun.lock (83%) rename packages/secure-exec/tests/projects/{bun-package-manager-layout-pass => bun-layout-pass}/fixture.json (100%) rename packages/secure-exec/tests/projects/{bun-package-manager-layout-pass => bun-layout-pass}/package.json (59%) rename packages/secure-exec/tests/projects/{bun-package-manager-layout-pass => bun-layout-pass}/src/index.js (100%) 
delete mode 100644 scripts/docs-gen/package.json delete mode 100755 scripts/docs-gen/src/index.ts delete mode 100644 scripts/docs-gen/tsconfig.json diff --git a/.agent/contracts/node-bridge.md b/.agent/contracts/node-bridge.md index 469924fa..c347eaef 100644 --- a/.agent/contracts/node-bridge.md +++ b/.agent/contracts/node-bridge.md @@ -79,6 +79,22 @@ This hardening policy MUST NOT force Node stdlib globals to non-writable/non-con - **WHEN** bridge setup exposes a Node stdlib global surface (for example `process`, timers, `Buffer`, `URL`, `fetch`, or `console`) - **THEN** the bridge MUST preserve Node-compatible behavior and MUST NOT require non-writable/non-configurable descriptors for that stdlib global due to this policy alone +### Requirement: WHATWG URL Bridge Preserves Node Validation And Scalar-Value Semantics +Bridge-provided `URL` and `URLSearchParams` globals SHALL preserve the Node-observable validation, coercion, and inspection behavior that vendored conformance tests assert. 
+ +#### Scenario: WHATWG URL validation preserves Node ERR_* metadata +- **WHEN** sandboxed code calls `new URL()`, `new URL("bad")`, detached `URLSearchParams` methods, or malformed `URLSearchParams` tuple constructors +- **THEN** the bridge MUST throw Node-compatible `TypeError` instances with the expected `ERR_MISSING_ARGS`, `ERR_INVALID_URL`, `ERR_INVALID_THIS`, `ERR_ARG_NOT_ITERABLE`, and `ERR_INVALID_TUPLE` codes + +#### Scenario: WHATWG URL string inputs use scalar-value normalization +- **WHEN** sandboxed code passes strings with surrogate pairs or lone surrogates into `URL` / `URLSearchParams` constructors or setters +- **THEN** the bridge MUST apply string-hint coercion followed by USV-string normalization before handing values to the underlying implementation +- **AND** valid surrogate pairs MUST encode as UTF-8 scalar values while lone surrogates become U+FFFD + +#### Scenario: WHATWG URL custom inspect hooks stay reachable through util.inspect +- **WHEN** sandboxed code calls `util.inspect(urlLike)` for bridged `URL`, `URLSearchParams`, or iterator instances, including negative-depth and nested-object cases +- **THEN** the bridge/runtime polyfill layer MUST continue to invoke the custom inspect hooks instead of falling back to plain `{}` output + ### Requirement: Cryptographic Randomness Bridge Uses Host CSPRNG Bridge-provided randomness for global `crypto` APIs MUST delegate to host `node:crypto` primitives and MUST NOT use isolate-local pseudo-random fallbacks such as `Math.random()`. 
@@ -154,6 +170,21 @@ Bridge-exposed filesystem metadata calls (`exists`, `stat`, and typed directory - **WHEN** sandboxed code calls bridge `readdir` with typed entry expectations - **THEN** bridge handling MUST return entry type information without a repeated `readDir` probe for each entry +### Requirement: AbortSignal Polyfills Preserve Frozen-Options Cancellation Semantics +Bridge/runtime `AbortController` and `AbortSignal` polyfills SHALL preserve Node-compatible cancellation behavior even when test helpers freeze the options bag and nested signal object. + +#### Scenario: Sandboxed code aborts after freezing an options bag +- **WHEN** sandboxed code passes `{ signal }` through a deep-freeze helper such as the vendored conformance `common.mustNotMutateObjectDeep()` and later calls `controller.abort(reason)` +- **THEN** the abort operation MUST still succeed +- **AND** fs and network APIs observing that signal MUST surface a Node-compatible `AbortError` instead of throwing from signal state mutation + +### Requirement: Standalone NodeRuntime FS Bridge Exposes Proc Hostname Parity +The standalone NodeRuntime filesystem bridge SHALL expose a readable `/proc/sys/kernel/hostname` pseudo-file so vendored Linux fs paths behave consistently outside the kernel-mounted proc layer. 
+ +#### Scenario: Sandboxed standalone runtime reads proc hostname +- **WHEN** sandboxed code in a standalone `NodeRuntime` calls `fs.readFile('/proc/sys/kernel/hostname')`, `fs.readFileSync('/proc/sys/kernel/hostname')`, or opens that path through `fs.promises.open()` +- **THEN** the bridge MUST return a non-empty hostname payload instead of `ENOENT` + ### Requirement: Bridge Boundary Contracts SHALL Be Defined In A Canonical Shared Type Module Bridge global keys and host/isolate boundary type contracts SHALL be defined in canonical shared type modules — bridge-contract types in `packages/nodejs/src/bridge-contract.ts` and global-exposure helpers in `packages/core/src/shared/global-exposure.ts` — and reused across host runtime setup and bridge modules. @@ -218,9 +249,187 @@ #### Scenario: Keepalive sockets are reused or discarded - **WHEN** sandboxed code enables `keepAlive` and reuses pooled HTTP connections - **THEN** the bridge MUST mark reused requests via `request.reusedSocket` - **AND** destroyed or remotely closed sockets MUST be removed from the pool instead of being reassigned to queued requests #### Scenario: Total socket limits are configured - **WHEN** sandboxed code constructs an `http.Agent` with `maxSockets`, `maxFreeSockets`, or `maxTotalSockets` - **THEN** invalid argument types and ranges MUST throw Node-compatible `ERR_INVALID_ARG_TYPE` / `ERR_OUT_OF_RANGE` errors - **AND** queued requests across origins MUST respect both per-origin and total socket limits + +### Requirement: Dgram Socket Option Bridge Preserves Node Validation And Bind-Time Semantics +Bridge-provided `dgram.Socket` option helpers SHALL preserve Node-compatible validation order, not-running errors, and deferred application of constructor buffer-size options.
+ +#### Scenario: Unbound dgram socket exposes Node-style socket-option errors +- **WHEN** sandboxed code calls `socket.setBroadcast()`, `socket.setTTL()`, `socket.setMulticastTTL()`, or `socket.setMulticastLoopback()` before `bind()` +- **THEN** the bridge MUST throw the corresponding Node-style `Error` with the syscall name and `EBADF` +- **AND** unbound `get*BufferSize()` / `set*BufferSize()` calls MUST throw `ERR_SOCKET_BUFFER_SIZE` with `EBADF` + +#### Scenario: Constructor buffer-size options do not hide unbound error paths +- **WHEN** sandboxed code creates `dgram.createSocket({ recvBufferSize, sendBufferSize })` +- **THEN** the bridge MUST cache those requested sizes until the socket is actually bound +- **AND** it MUST NOT eagerly apply them in a way that makes unbound buffer-size getters/setters succeed + +#### Scenario: Source-specific membership validates argument types before address semantics +- **WHEN** sandboxed code calls `addSourceSpecificMembership()` or `dropSourceSpecificMembership()` with non-string `sourceAddress` or `groupAddress` +- **THEN** the bridge MUST throw Node-compatible `ERR_INVALID_ARG_TYPE` for the offending argument before running multicast/unicast address validation + +### Requirement: HTTP Server Bridge Preserves CONNECT Upgrade And Informational Semantics +Bridge-provided `http.Server` behavior SHALL preserve Node.js event sequencing for `CONNECT`, `upgrade`, and informational `1xx` responses.
+ +#### Scenario: Sandboxed loopback server receives CONNECT or upgrade traffic +- **WHEN** sandboxed code listens with `http.createServer()` and registers `server.on('connect', ...)` or `server.on('upgrade', ...)` +- **THEN** localhost `CONNECT` and `Connection: Upgrade` requests MUST dispatch those server events instead of being collapsed into a normal `'request'` handler +- **AND** the bridged socket/head arguments MUST remain writable/readable so tunnel and upgrade protocols can continue over the same connection + +#### Scenario: Sandboxed server emits informational responses before the final response +- **WHEN** sandboxed code sends `100`, `102`, or `103` responses via `writeHead()`, `writeContinue()`, `writeProcessing()`, or raw header writes +- **THEN** sandboxed HTTP clients MUST receive matching `'information'` events before the final `'response'` +- **AND** the bridged informational message MUST preserve status code, status text, headers, and raw header casing needed by Node conformance assertions + +### Requirement: TLS Bridge Uses Host TLS Semantics For Both External And Loopback Sockets +Bridge-provided `tls` APIs SHALL terminate TLS with host `node:tls` primitives, including sandbox loopback sockets that are paired in-kernel.
+ +#### Scenario: Sandbox upgrades a client or accepted server socket to TLS +- **WHEN** sandboxed code calls `tls.connect(...)` or `tls.createServer(...)` and the bridged socket is upgraded to TLS +- **THEN** the bridge MUST use host `node:tls` handshakes, certificate validation, and cipher reporting +- **AND** loopback socket pairs MUST use a host-side in-memory duplex transport instead of bypassing the kernel connection model + +#### Scenario: Sandbox reads TLS authorization or cipher metadata +- **WHEN** sandboxed code inspects `tls.TLSSocket.authorized`, `authorizationError`, `getCipher()`, or `tls.getCiphers()` +- **THEN** the bridge MUST surface host `node:tls` results rather than placeholder values + +#### Scenario: Loopback TLS servers resolve SNI contexts and ALPN/session metadata +- **WHEN** sandboxed code uses `tls.Server(...)` or `tls.createServer(...)` with `server.addContext(...)`, `SNICallback`, `ALPNProtocols`, or `ALPNCallback`, and a sandboxed client connects with `servername`, `session`, or `ALPNProtocols` +- **THEN** the server-side bridge MUST resolve the client hello metadata before starting the host TLS handshake +- **AND** `tls.TLSSocket` methods such as `getSession()`, `isSessionReused()`, `getPeerCertificate()`, `getCertificate()`, and `getProtocol()` MUST reflect the underlying host `node:tls` socket state + +### Requirement: HTTP2 Bridge Preserves Basic Session And Stream Lifecycle +Bridge-provided `http2` APIs SHALL preserve the basic client/server session and stream lifecycle needed for sandbox request/response flows. 
+ +#### Scenario: Sandboxed code establishes plaintext or TLS HTTP2 sessions +- **WHEN** sandboxed code calls `http2.createServer(...)`, `http2.createSecureServer(...)`, or `http2.connect(...)` +- **THEN** the bridge MUST surface Node-compatible `'listening'`, `'connect'`, `'connection'`, and `'secureConnection'` events +- **AND** `server.address()`, `session.encrypted`, `session.alpnProtocol`, `session.originSet`, and the internal `kSocket` metadata MUST reflect the host-backed session state + +#### Scenario: Sandboxed code responds through HTTP2 stream events +- **WHEN** a bridged HTTP2 server receives a request and emits `'stream'` +- **THEN** the stream callback MUST receive a writable server stream plus pseudo-header metadata +- **AND** `stream.respond(...)`, `stream.write(...)`, and `stream.end(...)` MUST drive the corresponding host HTTP2 response headers/body/close lifecycle +- **AND** the paired client stream MUST emit `'response'`, `'data'`, `'end'`, and `'close'` with Node-compatible ordering for basic request/response flows + +#### Scenario: Sandboxed code uses HTTP2 push, settings negotiation, or GOAWAY lifecycle +- **WHEN** sandboxed code calls `stream.pushStream(...)`, `session.settings(...)`, `server.updateSettings(...)`, `session.goaway(...)`, or inspects `session.localSettings`, `session.remoteSettings`, and `pendingSettingsAck` +- **THEN** the bridge MUST delegate push-stream creation, settings exchange, and GOAWAY delivery to the host `node:http2` session +- **AND** pushed client streams MUST emit the session `'stream'` event plus pushed-stream `'push'` headers before body delivery +- **AND** nested push attempts and HEAD push write-after-end behavior MUST surface Node-compatible `ERR_HTTP2_NESTED_PUSH` and `ERR_STREAM_WRITE_AFTER_END` errors +- **AND** session/server settings objects exposed in the sandbox MUST track the last host-acknowledged values with stable object identity until the next settings update + +#### Scenario: Sandboxed code 
inspects HTTP2 flow-control state or pauses inbound streams +- **WHEN** sandboxed code calls `session.setLocalWindowSize(...)`, inspects `session.state`, or pauses/resumes a bridged server stream while request body frames are in flight +- **THEN** the bridge MUST delegate the window-size change to the host `node:http2` session +- **AND** sandbox-visible `session.state` fields such as `effectiveLocalWindowSize`, `localWindowSize`, and `remoteWindowSize` MUST reflect the host session state after the update +- **AND** server-stream `'error'`, `'close'`, `'drain'`, `'data'`, and `'end'` events MUST preserve the host flow-control and RST lifecycle closely enough for Node's vendored flow-control tests + +#### Scenario: Secure HTTP2 servers allow HTTP1 compatibility fallback +- **WHEN** sandboxed code creates `http2.createSecureServer({ allowHTTP1: true }, listener)` and an HTTP/1.1 client connects to that port +- **THEN** the host-backed server MUST negotiate the HTTP/1.1 fallback instead of hanging the connection +- **AND** the sandbox `'request'` listener MUST receive compatibility request/response objects that can complete the HTTP/1.1 exchange + +### Requirement: HTTP ClientRequest Bridge Preserves Abort Destroy And Timeout Lifecycle Semantics +Bridge-provided `http.ClientRequest` behavior SHALL preserve the observable abort, destroy, timeout, and abort-signal lifecycle that Node.js tests inspect. 
+ +#### Scenario: Sandboxed code aborts or destroys an HTTP request +- **WHEN** sandboxed code calls `req.abort()` or `req.destroy()` on an `http.ClientRequest` +- **THEN** the request MUST expose Node-compatible `aborted` / `destroyed` state +- **AND** the request MUST emit `'abort'` at most once for `req.abort()` +- **AND** the request MUST emit `'close'` when teardown completes +- **AND** loopback server-side request objects MUST observe matching `'aborted'` / `ECONNRESET` behavior + +#### Scenario: Sandboxed code configures request timeouts or abort signals +- **WHEN** sandboxed code passes `timeout` or `signal` in `http.request()` options, or calls `req.setTimeout(...)` +- **THEN** invalid timeout values MUST throw Node-compatible argument errors +- **AND** the request/socket timeout callbacks MUST be attached with Node-compatible listener reuse +- **AND** `AbortSignal` cancellation MUST destroy the request with an `AbortError` carrying `code === 'ABORT_ERR'` + +### Requirement: Net Bridge Preserves Socket Timeout Validation, Listen Validation, And Server Bookkeeping +Bridge-provided `net.Socket` and `net.Server` behavior SHALL preserve the timeout validation, listen-address timing, and server bookkeeping that Node.js tests inspect. 
+ +#### Scenario: Sandboxed code configures socket timeouts +- **WHEN** sandboxed code calls `socket.setTimeout(timeout, callback)` on a `net.Socket` +- **THEN** invalid timeout values MUST throw Node-compatible `ERR_INVALID_ARG_TYPE` / `ERR_OUT_OF_RANGE` errors +- **AND** invalid callbacks MUST throw `ERR_INVALID_ARG_TYPE` +- **AND** refed sockets MUST emit `'timeout'` after idle periods while unrefed socket timeout timers MUST NOT keep the runtime alive + +#### Scenario: Sandboxed code reads server.address() immediately after listen() +- **WHEN** sandboxed code calls `server.listen(...)` and synchronously reads `server.address()` before the `'listening'` callback runs +- **THEN** the bridge MUST already expose the bound address and assigned port, including `port: 0` ephemeral bindings + +#### Scenario: Sandboxed code passes invalid listen() arguments +- **WHEN** sandboxed code calls `server.listen(...)` with invalid booleans, malformed option objects, or out-of-range ports +- **THEN** the bridge MUST throw Node-compatible `ERR_INVALID_ARG_VALUE` / `ERR_SOCKET_BAD_PORT` errors +- **AND** accepted numeric strings such as `'0'` MUST still bind successfully like Node + +#### Scenario: Sandboxed code inspects server connection bookkeeping +- **WHEN** sandboxed code uses `server.getConnections(...)`, assigns `server.maxConnections`, or inspects `socket.server` +- **THEN** accepted sockets MUST increment/decrement the observable connection count with Node-compatible callback timing +- **AND** `server.getConnections(...)` MUST return the server instance +- **AND** sockets rejected because `maxConnections` is reached MUST emit a `'drop'` event carrying local and remote address metadata + +#### Scenario: Sandboxed code listens on or connects to Unix path sockets +- **WHEN** sandboxed code calls `server.listen(path)`, `server.listen({ path, readableAll, writableAll })`, `net.connect(path)`, or `net.connect({ path })` +- **THEN** the bridge MUST route those sockets through 
the kernel `AF_UNIX` path instead of TCP port validation +- **AND** `server.address()` MUST return the bound path string for Unix listeners +- **AND** `readableAll` / `writableAll` listener options MUST be reflected in the created socket file mode bits + +#### Scenario: Sandboxed code validates IP address helpers +- **WHEN** sandboxed code calls `net.isIP(...)`, `net.isIPv4(...)`, or `net.isIPv6(...)` +- **THEN** the bridge MUST match Node-compatible IPv4 / IPv6 validation for plain strings, zoned IPv6 literals, embedded IPv4 IPv6 forms, and string-coercible objects + +### Requirement: Dgram Bridge Preserves Basic UDP Socket Lifecycle And Message Delivery +Bridge-provided `dgram.Socket` behavior SHALL preserve the basic bind, send, receive, close, and address semantics that Node.js tests inspect. + +#### Scenario: Sandboxed code creates and binds UDP sockets +- **WHEN** sandboxed code calls `dgram.createSocket('udp4' | 'udp6')` and then `socket.bind(...)` +- **THEN** the bridge MUST return a reusable `Socket` instance +- **AND** invalid socket types MUST throw Node-compatible `ERR_SOCKET_BAD_TYPE` +- **AND** successful binds MUST emit `'listening'` and make `socket.address()` report the bound family/address/port + +#### Scenario: Sandboxed code sends or receives UDP datagrams +- **WHEN** sandboxed code calls `socket.send(...)` between sandbox UDP sockets or to its own bound port +- **THEN** the bridge MUST preserve datagram message boundaries and callback byte counts +- **AND** unbound sender sockets MUST implicitly bind before sending like Node +- **AND** `'message'` listeners MUST receive a `Buffer` plus `rinfo` metadata carrying `address`, `family`, `port`, and `size` + +#### Scenario: Sandboxed code closes or unrefs a UDP socket +- **WHEN** sandboxed code calls `socket.close()` or `socket.unref()` on a bridged UDP socket +- **THEN** the bridge MUST stop polling for incoming datagrams, release kernel socket ownership, and emit `'close'` with Node-compatible 
timing + +### Requirement: Raw Loopback HTTP Bridge Preserves Pipelining And Transfer Framing +Bridge-provided loopback `net.connect()` traffic sent to sandbox `http.createServer()` listeners SHALL preserve the HTTP/1.1 framing and sequencing that Node.js raw-socket tests inspect. + +#### Scenario: Sandboxed raw client pipelines multiple loopback HTTP requests +- **WHEN** sandboxed code opens a loopback `net.Socket` to a sandbox `http.Server` and writes multiple HTTP/1.1 requests on the same connection +- **THEN** the bridge MUST parse and dispatch each complete request sequentially from the shared byte stream +- **AND** leading blank lines before the next request line MUST be ignored like Node's parser +- **AND** already-buffered requests MUST still dispatch even if an earlier request destroys the socket or response + +#### Scenario: Sandboxed raw client uses chunked or invalid transfer framing +- **WHEN** loopback raw HTTP traffic uses `Transfer-Encoding: chunked` or malformed transfer-encoding/chunk framing +- **THEN** valid chunked bodies MUST be de-chunked before the request listener sees them +- **AND** malformed transfer-encoding values or invalid chunk extensions MUST receive a raw `400 Bad Request` response with `Connection: close` +- **AND** `204`/`304` responses with an explicit `Transfer-Encoding: chunked` header MUST close the connection without emitting a terminating chunk body + +### Requirement: HTTP Header Validation And Duplicate Header Semantics Match Node +Bridge-provided `http` behavior SHALL preserve Node.js header token validation, path validation, and duplicate header normalization for the public `http` surface and `_http_common`. 
+ +#### Scenario: Sandboxed code validates methods, paths, and header tokens +- **WHEN** sandboxed code calls `http.request()`, `http.validateHeaderName()`, `http.validateHeaderValue()`, or `_http_common` validators +- **THEN** invalid HTTP methods and header names MUST throw `ERR_INVALID_HTTP_TOKEN` +- **AND** invalid request paths MUST throw `ERR_UNESCAPED_CHARACTERS` +- **AND** invalid header values MUST throw `ERR_HTTP_INVALID_HEADER_VALUE` or `ERR_INVALID_CHAR` with Node-compatible messages + +#### Scenario: Sandboxed code sends or receives duplicate headers +- **WHEN** sandboxed code sets duplicate headers such as repeated `set-cookie` +- **THEN** `IncomingMessage.headers['set-cookie']` MUST remain an array of cookie values +- **AND** non-cookie duplicate headers MUST be normalized to the Node-compatible comma-joined string form +- **AND** request-side header inspection (`getHeaderNames()`, `getRawHeaderNames()`) MUST preserve Node-compatible casing and ordering diff --git a/.agent/contracts/node-stdlib.md b/.agent/contracts/node-stdlib.md index 38971e61..86f493db 100644 --- a/.agent/contracts/node-stdlib.md +++ b/.agent/contracts/node-stdlib.md @@ -96,16 +96,33 @@ Modules classified as Unsupported (Tier 5) SHALL throw immediately when required - **THEN** the call MUST throw an error indicating the module is not supported in sandbox ### Requirement: fs Missing API Classification -The following `fs` APIs SHALL be classified as Deferred with deterministic error behavior: `watch`, `watchFile`. The APIs `chmod`, `chown`, `link`, `symlink`, `readlink`, `truncate`, `utimes`, `access`, and `realpath` SHALL be documented as implemented (Bridge tier), delegating to the VFS with permission checks. +The following `fs` watcher APIs SHALL be classified as Deferred with deterministic error behavior: `watch`, `watchFile`, and `fs/promises.watch`. 
The sandbox VFS/kernel has no inotify/kqueue/FSEvents-equivalent primitive, so these APIs MUST fail fast instead of hanging while waiting for events that can never arrive. The APIs `chmod`, `chown`, `link`, `symlink`, `readlink`, `truncate`, `utimes`, `access`, and `realpath` SHALL be documented as implemented (Bridge tier), delegating to the VFS with permission checks. #### Scenario: Calling a deferred fs API - **WHEN** sandboxed code calls `fs.watch()` - **THEN** the call MUST throw `"fs.watch is not supported in sandbox — use polling"` +#### Scenario: Calling deferred watcher APIs through fs/promises +- **WHEN** sandboxed code iterates `require("fs/promises").watch(...)` +- **THEN** the iterator MUST reject with `"fs.promises.watch is not supported in sandbox — use polling"` +- **AND** it MUST preserve Node-compatible `ERR_INVALID_ARG_TYPE`, `ERR_INVALID_ARG_VALUE`, and `AbortError` validation behavior before the deferred unsupported error path + #### Scenario: Calling an implemented fs API previously listed as missing - **WHEN** sandboxed code calls `fs.access("/some/path", callback)` - **THEN** the call MUST execute normally via the fs bridge without error +### Requirement: fs Validation Paths Preserve Node ERR_* Shapes +Bridge-provided `fs` APIs SHALL throw Node-compatible validation errors before asynchronous dispatch when the argument contract is violated. 
+ +#### Scenario: Callback-style fs API is missing or given a non-function callback +- **WHEN** sandboxed code calls callback-style APIs such as `fs.open()`, `fs.close()`, `fs.exists()`, `fs.stat()`, or `fs.mkdtemp()` without a valid callback +- **THEN** the bridge MUST throw `ERR_INVALID_ARG_TYPE` synchronously instead of returning a Promise or reporting the validation failure through the callback + +#### Scenario: fs validation rejects invalid encodings and numeric option types +- **WHEN** sandboxed code passes an invalid encoding to `fs.readFile*()`, `fs.readdir*()`, `fs.readlink*()`, `fs.writeFile*()`, `fs.appendFile*()`, `fs.realpath*()`, `fs.mkdtemp*()`, `fs.ReadStream()`, `fs.WriteStream()`, or `fs.watch()` +- **THEN** the bridge MUST throw `ERR_INVALID_ARG_VALUE` +- **AND** invalid numeric `start` / `end` stream options or fd/path argument types MUST throw `ERR_INVALID_ARG_TYPE` or `ERR_OUT_OF_RANGE` with Node-compatible names + ### Requirement: child_process.fork Is Permanently Unsupported `child_process.fork()` SHALL be classified as Unsupported and MUST throw a deterministic error explaining that IPC across the isolate boundary is not supported. 
diff --git a/CLAUDE.md b/CLAUDE.md index 19c9d0fc..a39fc04f 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -32,6 +32,7 @@ ### Node.js Conformance Test Integrity - conformance tests live in `packages/secure-exec/tests/node-conformance/` — they are vendored upstream Node.js v22.14.0 test/parallel/ tests run through the sandbox +- vendored Node conformance helper shims live in `packages/secure-exec/tests/node-conformance/common/`; if a WPT-derived vendored test fails on a missing `../common/*` helper, add the minimal harness/shim there instead of rewriting the vendored test file - `docs-internal/nodejs-compat-roadmap.md` tracks every non-passing test with its fix category and resolution - when implementing bridge/polyfill features where both sides go through our code (e.g., loopback HTTP server + client), prevent overfitting: - **wire-level snapshot tests**: capture raw protocol bytes and compare against known-good captures from real Node.js @@ -42,9 +43,13 @@ - **host-side assertion verification**: periodically run assert-heavy conformance tests through host Node.js to verify the assert polyfill isn't masking failures - never inflate conformance numbers — if a test self-skips (exits 0 without testing anything), mark it `vacuous-skip` in expectations.json, not as a real pass - every entry in `expectations.json` must have a specific, verifiable reason — no vague "fails in sandbox" reasons +- when rerunning a single expected-fail conformance file through `runner.test.ts`, a green Vitest result only means the expectation still matches; only the explicit `now passes! 
Remove its expectation` failure proves the vendored test itself now passes and the entry is stale +- before deleting explicit `pass` overrides behind a negated glob, rerun the exact promoted vendored files through a direct `createTestNodeRuntime()` harness or another no-expectation path; broad module cleanup can still hide stale passes - after changing expectations.json or adding/removing test files, regenerate both the JSON report and docs page: `pnpm tsx scripts/generate-node-conformance-report.ts` - the script produces `packages/secure-exec/tests/node-conformance/conformance-report.json` (machine-readable) and `docs/nodejs-conformance-report.mdx` (docs page) — commit both - to run the actual conformance suite: `pnpm vitest run packages/secure-exec/tests/node-conformance/runner.test.ts` +- raw `net.connect()` traffic to sandbox `http.createServer()` is implemented entirely in `packages/nodejs/src/bridge/network.ts`; when fixing loopback HTTP behavior, re-run the vendored pipeline/transfer files (`test-http-get-pipeline-problem.js`, `test-http-pipeline-requests-connection-leak.js`, `test-http-transfer-encoding-*.js`, `test-http-chunked-304.js`) because they all exercise the same parser/serializer path +- For callback-style `fs` bridge methods, do Node-style argument validation before entering the callback/error-delivery wrapper; otherwise invalid args that should throw synchronously get converted into callback errors or Promise returns and vendored fs validation coverage goes red ## Tooling @@ -111,6 +116,18 @@ - read `docs-internal/arch/overview.md` for the component map (NodeRuntime, RuntimeDriver, NodeDriver, NodeExecutionDriver, ModuleAccessFileSystem, Permissions) - keep it up to date when adding, removing, or significantly changing components +- keep host bootstrap polyfills in `packages/nodejs/src/execution-driver.ts` aligned with isolate bootstrap polyfills in `packages/core/isolate-runtime/src/inject/require-setup.ts`; drift in shared globals like 
`AbortController` causes sandbox-only behavior gaps that source-level tests can miss +- vendored fs abort tests deep-freeze option bags via `common.mustNotMutateObjectDeep()`, so sandbox `AbortSignal` state must live outside writable instance properties; freezing `{ signal }` must not break later `controller.abort()` +- vendored `common.mustNotMutateObjectDeep()` helpers must skip populated typed-array/DataView instances; `Object.freeze(new Uint8Array([1]))` throws before the runtime under test executes, which turns option-bag immutability coverage into a harness failure +- when adding bridge globals that the sandbox calls with `.apply(..., { result: { promise: true } })`, register them in the native V8 async bridge list in `native/v8-runtime/src/session.rs`; otherwise the `_loadPolyfill` shim can turn a supposed async wait into a synchronous deadlock +- bridged `net.Server.listen()` must make `server.address()` readable immediately after `listen()` returns, even before the `'listening'` callback, because vendored Node tests read ephemeral ports synchronously +- bridged Unix path sockets (`server.listen(path)`, `net.connect(path)`) must route through kernel `AF_UNIX`, not TCP validation, and `readableAll` / `writableAll` listener options must update the VFS socket-file mode bits that `fs.statSync()` observes +- bridged `net.Socket.setTimeout()` must match Node validation codes (`ERR_INVALID_ARG_TYPE`, `ERR_OUT_OF_RANGE`) and any timeout timer created for an unrefed socket must also be unrefed so it cannot keep the runtime alive by itself +- bridged `dgram.Socket` loopback semantics depend on both layers: the isolate bridge must implicitly bind unbound sender sockets before `send()`, and the kernel UDP path must rewrite wildcard local addresses (`0.0.0.0` / `::`) to concrete loopback source addresses so `rinfo.address` matches Node on self-send/echo tests +- bridged `dgram.Socket` buffer-size options must be cached until `bind()` completes; Node expects unbound 
`get*BufferSize()` / `set*BufferSize()` calls to throw `ERR_SOCKET_BUFFER_SIZE` with `EBADF`, so eager pre-bind application hides the real error path +- bridged `http2` server streams must start paused on the host and only resume when sandbox code opts into flow (`req.on('data')`, `req.resume()`, or `stream.resume()`); otherwise the host consumes DATA frames too early, sends WINDOW_UPDATE unexpectedly, and hides paused flow-control / pipeline regressions +- bridge exports that userland constructs with `new` must be assigned as constructable function properties, not object-literal method shorthands; shorthand methods like `createReadStream() {}` are not constructable and vendored fs coverage calls `new fs.createReadStream(...)` +- `/proc/sys/kernel/hostname` conformance hits both kernel-backed and standalone NodeRuntime paths; a procfs fix that only lands in the kernel layer still leaves `createTestNodeRuntime()` fs/FileHandle coverage red ## Virtual Kernel Architecture @@ -201,9 +218,6 @@ Follow the style in `packages/secure-exec/src/index.ts`. 
- all public-facing docs (quickstart, guides, API reference, landing page, README) must focus on the **Node.js runtime** as the primary and default experience — do not lead with WasmVM, kernel internals, or multi-runtime concepts - code examples in docs should use the `NodeRuntime` API (`runtime.run()`, `runtime.exec()`) as the default path; the kernel API (`createKernel`, `kernel.spawn()`) is for advanced multi-process use cases and should be presented as secondary -- keep documentation pages and their runnable example sources in sync: `docs/quickstart.mdx` must match `examples/kitchen-sink/src/`, and `docs/features/*.mdx` must match `examples/features/src/` -- when updating a doc snippet, update the corresponding example file and the docs/example verification scripts in the same change -- when converting runnable example code into documentation snippets, use public package imports like `from "secure-exec"` and `from "@secure-exec/typescript"` instead of repo-local source paths - WasmVM and Python docs are experimental docs and must stay grouped under the `Experimental` section in `docs/docs.json` - docs pages that must stay current with API changes: - `docs/quickstart.mdx` — update when core setup flow changes diff --git a/docs/api-reference.mdx b/docs/api-reference.mdx index 264f042e..24ffda11 100644 --- a/docs/api-reference.mdx +++ b/docs/api-reference.mdx @@ -136,7 +136,7 @@ createTypeScriptTools(options: TypeScriptToolsOptions) | `runtimeDriverFactory` | `NodeRuntimeDriverFactory` | Creates the compiler sandbox runtime. | | `memoryLimit` | `number` | Compiler sandbox isolate memory cap in MB. Default `512`. | | `cpuTimeLimitMs` | `number` | Compiler sandbox CPU time budget in ms. | -| `compilerSpecifier` | `string` | Module specifier used to load the TypeScript compiler. Default `"typescript"`. | +| `compilerSpecifier` | `string` | Module specifier used to load the TypeScript compiler. Default `"/root/node_modules/typescript/lib/typescript.js"`. 
| **Methods** diff --git a/docs/docs.json b/docs/docs.json index e4c66b32..c312613b 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -60,24 +60,20 @@ "features/typescript", "features/permissions", "features/filesystem", + "features/virtual-filesystem", "features/networking", "features/module-loading", "features/output-capture", "features/resource-limits", "features/child-processes", - "features/virtual-filesystem", - { - "group": "Advanced", - "pages": [ - "features/process-isolation" - ] - } + "process-isolation" ] }, { "group": "Reference", "pages": [ "api-reference", + "nodejs-compatibility", "benchmarks", { "group": "Comparison", @@ -89,10 +85,6 @@ { "group": "Advanced", "pages": [ - "nodejs-compatibility", - "nodejs-conformance-report", - "posix-compatibility", - "posix-conformance-report", "cost-evaluation", "architecture", "security-model" diff --git a/docs/features/child-processes.mdx b/docs/features/child-processes.mdx index d93bac13..810472cf 100644 --- a/docs/features/child-processes.mdx +++ b/docs/features/child-processes.mdx @@ -12,16 +12,14 @@ Sandboxed code can spawn child processes through the `CommandExecutor` interface ## Runnable example -Source file: `examples/features/src/child-processes.ts` - ```ts import { NodeRuntime, allowAllChildProcess, createNodeDriver, createNodeRuntimeDriverFactory, -} from "secure-exec"; -import type { CommandExecutor } from "secure-exec"; +} from "../../../packages/secure-exec/src/index.ts"; +import type { CommandExecutor } from "../../../packages/secure-exec/src/types.ts"; import { spawn } from "node:child_process"; const commandExecutor: CommandExecutor = { @@ -100,6 +98,8 @@ try { } ``` +Source: [examples/features/src/child-processes.ts](https://github.com/rivet-dev/secure-exec/blob/main/examples/features/src/child-processes.ts) + ## Permission gating Restrict which commands sandboxed code can spawn: diff --git a/docs/features/filesystem.mdx b/docs/features/filesystem.mdx index 88bff587..f4236f2b 100644 --- 
a/docs/features/filesystem.mdx +++ b/docs/features/filesystem.mdx @@ -12,8 +12,6 @@ secure-exec supports three filesystem backends. The system driver controls which ## Runnable example -Source file: `examples/features/src/filesystem.ts` - ```ts import { NodeRuntime, @@ -21,7 +19,7 @@ import { createInMemoryFileSystem, createNodeDriver, createNodeRuntimeDriverFactory, -} from "secure-exec"; +} from "../../../packages/secure-exec/src/index.ts"; const filesystem = createInMemoryFileSystem(); const runtime = new NodeRuntime({ @@ -57,6 +55,8 @@ try { } ``` +Source: [examples/features/src/filesystem.ts](https://github.com/rivet-dev/secure-exec/blob/main/examples/features/src/filesystem.ts) + ## OPFS (browser) Persistent filesystem using the Origin Private File System API. This is the default for `createBrowserDriver()`. diff --git a/docs/features/module-loading.mdx b/docs/features/module-loading.mdx index ab3f6359..dd471a6d 100644 --- a/docs/features/module-loading.mdx +++ b/docs/features/module-loading.mdx @@ -13,8 +13,6 @@ Sandboxed code can `require()` and `import` modules through secure-exec's module ## Runnable example -Source file: `examples/features/src/module-loading.ts` - ```ts import path from "node:path"; import { fileURLToPath } from "node:url"; @@ -23,7 +21,7 @@ import { allowAllFs, createNodeDriver, createNodeRuntimeDriverFactory, -} from "secure-exec"; +} from "../../../packages/secure-exec/src/index.ts"; const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../.."); @@ -60,6 +58,8 @@ try { } ``` +Source: [examples/features/src/module-loading.ts](https://github.com/rivet-dev/secure-exec/blob/main/examples/features/src/module-loading.ts) + ## node_modules overlay Node runtime executions expose a read-only dependency overlay at `/app/node_modules`, sourced from `/node_modules` on the host (default `cwd` is `process.cwd()`). 
diff --git a/docs/features/networking.mdx b/docs/features/networking.mdx index d5873e67..58377006 100644 --- a/docs/features/networking.mdx +++ b/docs/features/networking.mdx @@ -12,8 +12,6 @@ Network access is deny-by-default. Enable it by setting `useDefaultNetwork: true ## Runnable example -Source file: `examples/features/src/networking.ts` - ```ts import * as http from "node:http"; import { @@ -22,7 +20,7 @@ import { createDefaultNetworkAdapter, createNodeDriver, createNodeRuntimeDriverFactory, -} from "secure-exec"; +} from "../../../packages/secure-exec/src/index.ts"; const logs: string[] = []; const server = http.createServer((_req, res) => { @@ -53,19 +51,23 @@ const runtime = new NodeRuntime({ try { const result = await runtime.exec( ` - const response = await fetch("http://127.0.0.1:${address.port}/"); - const body = await response.text(); - - if (!response.ok || response.status !== 200 || body !== "network-ok") { - throw new Error( - "unexpected response: " + response.status + " " + body, - ); - } - - console.log(JSON.stringify({ status: response.status, body })); + (async () => { + const response = await fetch("http://127.0.0.1:${address.port}/"); + const body = await response.text(); + + if (!response.ok || response.status !== 200 || body !== "network-ok") { + throw new Error( + "unexpected response: " + response.status + " " + body, + ); + } + + console.log(JSON.stringify({ status: response.status, body })); + })().catch((error) => { + console.error(error instanceof Error ? 
error.message : String(error)); + process.exitCode = 1; + }); `, { - filePath: "/entry.mjs", onStdio: (event) => { logs.push(`[${event.channel}] ${event.message}`); }, @@ -105,6 +107,8 @@ try { } ``` +Source: [examples/features/src/networking.ts](https://github.com/rivet-dev/secure-exec/blob/main/examples/features/src/networking.ts) + ## Quick setup diff --git a/docs/features/output-capture.mdx b/docs/features/output-capture.mdx index 4c9702f9..b59c7e1d 100644 --- a/docs/features/output-capture.mdx +++ b/docs/features/output-capture.mdx @@ -12,14 +12,12 @@ Console output from sandboxed code is **not buffered** into result fields. `exec ## Runnable example -Source file: `examples/features/src/output-capture.ts` - ```ts import { NodeRuntime, createNodeDriver, createNodeRuntimeDriverFactory, -} from "secure-exec"; +} from "../../../packages/secure-exec/src/index.ts"; const events: string[] = []; @@ -66,6 +64,8 @@ try { } ``` +Source: [examples/features/src/output-capture.ts](https://github.com/rivet-dev/secure-exec/blob/main/examples/features/src/output-capture.ts) + ## Default hook Set a runtime-level hook that applies to all executions: diff --git a/docs/features/permissions.mdx b/docs/features/permissions.mdx index 1f215c96..b78df8d9 100644 --- a/docs/features/permissions.mdx +++ b/docs/features/permissions.mdx @@ -12,15 +12,13 @@ All host capabilities are **deny-by-default**. 
Sandboxed code cannot access the ## Runnable example -Source file: `examples/features/src/permissions.ts` - ```ts import { NodeRuntime, createInMemoryFileSystem, createNodeDriver, createNodeRuntimeDriverFactory, -} from "secure-exec"; +} from "../../../packages/secure-exec/src/index.ts"; const filesystem = createInMemoryFileSystem(); await filesystem.writeFile("/secret.txt", "top secret"); @@ -71,6 +69,8 @@ console.log( ); ``` +Source: [examples/features/src/permissions.ts](https://github.com/rivet-dev/secure-exec/blob/main/examples/features/src/permissions.ts) + ## Permission helpers Quick presets for common configurations: diff --git a/docs/features/resource-limits.mdx b/docs/features/resource-limits.mdx index b6b7dfe9..911d5cbf 100644 --- a/docs/features/resource-limits.mdx +++ b/docs/features/resource-limits.mdx @@ -12,14 +12,12 @@ Resource limits prevent sandboxed code from running forever or exhausting host m ## Runnable example -Source file: `examples/features/src/resource-limits.ts` - ```ts import { NodeRuntime, createNodeDriver, createNodeRuntimeDriverFactory, -} from "secure-exec"; +} from "../../../packages/secure-exec/src/index.ts"; const runtime = new NodeRuntime({ systemDriver: createNodeDriver(), @@ -54,6 +52,8 @@ try { } ``` +Source: [examples/features/src/resource-limits.ts](https://github.com/rivet-dev/secure-exec/blob/main/examples/features/src/resource-limits.ts) + ## CPU time limit Set a CPU time budget in milliseconds. When exceeded, the execution exits with code `124`. 
diff --git a/docs/features/typescript.mdx b/docs/features/typescript.mdx index f64fe3ed..d0211446 100644 --- a/docs/features/typescript.mdx +++ b/docs/features/typescript.mdx @@ -12,16 +12,14 @@ The `@secure-exec/typescript` companion package runs the TypeScript compiler ins ## Runnable example -Source file: `examples/features/src/typescript.ts` - ```ts import { NodeRuntime, allowAllFs, createNodeDriver, createNodeRuntimeDriverFactory, -} from "secure-exec"; -import { createTypeScriptTools } from "@secure-exec/typescript"; +} from "../../../packages/secure-exec/src/index.ts"; +import { createTypeScriptTools } from "../../../packages/typescript/src/index.ts"; const sourceText = ` export const message: string = "hello from typescript"; @@ -44,7 +42,7 @@ const runtime = new NodeRuntime({ const ts = createTypeScriptTools({ systemDriver: compilerSystemDriver, runtimeDriverFactory, - compilerSpecifier: "typescript", + compilerSpecifier: "/root/node_modules/typescript/lib/typescript.js", }); try { @@ -93,6 +91,8 @@ try { } ``` +Source: [examples/features/src/typescript.ts](https://github.com/rivet-dev/secure-exec/blob/main/examples/features/src/typescript.ts) + ## Install ```bash @@ -119,7 +119,7 @@ const ts = createTypeScriptTools({ | `runtimeDriverFactory` | `NodeRuntimeDriverFactory` | required | Creates the compiler sandbox | | `memoryLimit` | `number` | `512` | Compiler isolate memory cap in MB | | `cpuTimeLimitMs` | `number` | | Compiler CPU time budget in ms | -| `compilerSpecifier` | `string` | `"typescript"` | Module specifier for the TypeScript compiler | +| `compilerSpecifier` | `string` | `"/root/node_modules/typescript/lib/typescript.js"` | Module specifier for the TypeScript compiler | ## Type-check a source string diff --git a/docs/features/virtual-filesystem.mdx b/docs/features/virtual-filesystem.mdx index 80539228..5e59a26a 100644 --- a/docs/features/virtual-filesystem.mdx +++ b/docs/features/virtual-filesystem.mdx @@ -1,21 +1,48 @@ --- -title: Virtual 
Filesystem +title: Custom Virtual Filesystem description: Implement your own VirtualFileSystem to control how sandboxed code reads and writes files. icon: "hard-drive" --- - - Runnable example for a custom virtual filesystem. - - You can create a custom `VirtualFileSystem` to back the sandbox with any storage layer — a database, S3, a zip archive, or anything else. Sandboxed code uses `fs`, `require`, and other Node APIs as normal, and your implementation handles the actual I/O. -## Runnable example +## The interface + +Your class must implement `VirtualFileSystem` from `secure-exec`: + +```ts +import type { VirtualFileSystem, VirtualStat, VirtualDirEntry } from "secure-exec"; + +class MyFileSystem implements VirtualFileSystem { + async readFile(path: string): Promise { /* ... */ } + async readTextFile(path: string): Promise { /* ... */ } + async writeFile(path: string, content: string | Uint8Array): Promise { /* ... */ } + async readDir(path: string): Promise { /* ... */ } + async readDirWithTypes(path: string): Promise { /* ... */ } + async createDir(path: string): Promise { /* ... */ } + async mkdir(path: string): Promise { /* ... */ } + async exists(path: string): Promise { /* ... */ } + async stat(path: string): Promise { /* ... */ } + async removeFile(path: string): Promise { /* ... */ } + async removeDir(path: string): Promise { /* ... */ } + async rename(oldPath: string, newPath: string): Promise { /* ... */ } + async symlink(target: string, linkPath: string): Promise { /* ... */ } + async readlink(path: string): Promise { /* ... */ } + async lstat(path: string): Promise { /* ... */ } + async link(oldPath: string, newPath: string): Promise { /* ... */ } + async chmod(path: string, mode: number): Promise { /* ... */ } + async chown(path: string, uid: number, gid: number): Promise { /* ... */ } + async utimes(path: string, atime: number, mtime: number): Promise { /* ... */ } + async truncate(path: string, length: number): Promise { /* ... 
*/ } +} +``` + +## Example: read-only Map filesystem -Source file: `examples/features/src/virtual-filesystem.ts` +A minimal filesystem backed by a `Map`. Useful when you have a fixed set of files (e.g. loaded from a database) and want to make them available to sandboxed code. ```ts -import type { DirEntry, StatInfo, VirtualFileSystem } from "secure-exec"; +import type { VirtualFileSystem, VirtualStat, VirtualDirEntry } from "secure-exec"; import { NodeRuntime, allowAllFs, @@ -42,13 +69,39 @@ class ReadOnlyMapFS implements VirtualFileSystem { return content; } + async exists(path: string) { + return this.files.has(path) || this.#isDir(path); + } + + async stat(path: string): Promise { + const now = Date.now(); + if (this.files.has(path)) { + return { + mode: 0o444, + size: new TextEncoder().encode(this.files.get(path)!).byteLength, + isDirectory: false, + atimeMs: now, mtimeMs: now, ctimeMs: now, birthtimeMs: now, + }; + } + if (this.#isDir(path)) { + return { + mode: 0o555, + size: 0, + isDirectory: true, + atimeMs: now, mtimeMs: now, ctimeMs: now, birthtimeMs: now, + }; + } + throw new Error(`ENOENT: ${path}`); + } + + async lstat(path: string) { return this.stat(path); } + async readDir(path: string) { const prefix = path === "/" ? "/" : path + "/"; const entries = new Set(); for (const key of this.files.keys()) { - if (!key.startsWith(prefix)) continue; - const rest = key.slice(prefix.length); - if (rest.length > 0) { + if (key.startsWith(prefix)) { + const rest = key.slice(prefix.length); entries.add(rest.split("/")[0]); } } @@ -56,77 +109,29 @@ class ReadOnlyMapFS implements VirtualFileSystem { return [...entries]; } - async readDirWithTypes(path: string): Promise { + async readDirWithTypes(path: string): Promise { const names = await this.readDir(path); const prefix = path === "/" ? 
"/" : path + "/"; return names.map((name) => ({ name, isDirectory: this.#isDir(prefix + name), - isSymbolicLink: false, })); } + // Write operations throw — this filesystem is read-only async writeFile() { throw new Error("EROFS: read-only filesystem"); } async createDir() { throw new Error("EROFS: read-only filesystem"); } async mkdir() { throw new Error("EROFS: read-only filesystem"); } - - async exists(path: string) { - return this.files.has(path) || this.#isDir(path); - } - - async stat(path: string): Promise { - const now = Date.now(); - if (this.files.has(path)) { - return { - mode: 0o444, - size: new TextEncoder().encode(this.files.get(path) ?? "").byteLength, - isDirectory: false, - isSymbolicLink: false, - atimeMs: now, - mtimeMs: now, - ctimeMs: now, - birthtimeMs: now, - ino: 1, - nlink: 1, - uid: 0, - gid: 0, - }; - } - if (this.#isDir(path)) { - return { - mode: 0o555, - size: 0, - isDirectory: true, - isSymbolicLink: false, - atimeMs: now, - mtimeMs: now, - ctimeMs: now, - birthtimeMs: now, - ino: 1, - nlink: 1, - uid: 0, - gid: 0, - }; - } - throw new Error(`ENOENT: ${path}`); - } - async removeFile() { throw new Error("EROFS: read-only filesystem"); } async removeDir() { throw new Error("EROFS: read-only filesystem"); } async rename() { throw new Error("EROFS: read-only filesystem"); } - async realpath(path: string) { return path; } async symlink() { throw new Error("EROFS: read-only filesystem"); } - async readlink(_path: string): Promise { throw new Error("ENOSYS: no symlinks"); } - async lstat(path: string) { return this.stat(path); } + async readlink() { throw new Error("ENOSYS: no symlinks"); } async link() { throw new Error("EROFS: read-only filesystem"); } async chmod() { throw new Error("EROFS: read-only filesystem"); } async chown() { throw new Error("EROFS: read-only filesystem"); } async utimes() { throw new Error("EROFS: read-only filesystem"); } async truncate() { throw new Error("EROFS: read-only filesystem"); } - async pread(path: 
string, offset: number, length: number) { - const bytes = await this.readFile(path); - return bytes.slice(offset, offset + length); - } #isDir(path: string) { const prefix = path === "/" ? "/" : path + "/"; @@ -136,84 +141,34 @@ class ReadOnlyMapFS implements VirtualFileSystem { return false; } } +``` + +### Using it -const filesystem = new ReadOnlyMapFS({ - "/config.json": JSON.stringify({ greeting: "hello from custom vfs" }), +```ts +const fs = new ReadOnlyMapFS({ + "/config.json": JSON.stringify({ greeting: "hello" }), + "/src/index.js": ` + const config = JSON.parse(require("fs").readFileSync("/config.json", "utf8")); + console.log(config.greeting); + `, }); -const events: string[] = []; const runtime = new NodeRuntime({ systemDriver: createNodeDriver({ - filesystem, + filesystem: fs, permissions: { ...allowAllFs }, }), runtimeDriverFactory: createNodeRuntimeDriverFactory(), }); -try { - const result = await runtime.exec( - ` - const fs = require("node:fs"); - const config = JSON.parse(fs.readFileSync("/config.json", "utf8")); - console.log(config.greeting); - `, - { - onStdio: (event) => { - if (event.channel === "stdout") { - events.push(event.message); - } - }, - }, - ); - - const message = events.at(-1); - if (result.code !== 0 || message !== "hello from custom vfs") { - throw new Error(`Unexpected runtime result: ${JSON.stringify({ result, events })}`); - } +const result = await runtime.exec(` + const config = JSON.parse(require("fs").readFileSync("/config.json", "utf8")); + console.log(config.greeting); +`); - console.log( - JSON.stringify({ - ok: true, - message, - summary: "sandbox read config data from a custom read-only virtual filesystem", - }), - ); -} finally { - runtime.dispose(); -} -``` - -## The interface - -Your class must implement `VirtualFileSystem` from `secure-exec-core`: - -```ts -import type { DirEntry, StatInfo, VirtualFileSystem } from "secure-exec"; - -class MyFileSystem implements VirtualFileSystem { - async readFile(path: string): 
Promise { /* ... */ } - async readTextFile(path: string): Promise { /* ... */ } - async writeFile(path: string, content: string | Uint8Array): Promise { /* ... */ } - async readDir(path: string): Promise { /* ... */ } - async readDirWithTypes(path: string): Promise { /* ... */ } - async createDir(path: string): Promise { /* ... */ } - async mkdir(path: string): Promise { /* ... */ } - async exists(path: string): Promise { /* ... */ } - async stat(path: string): Promise { /* ... */ } - async removeFile(path: string): Promise { /* ... */ } - async removeDir(path: string): Promise { /* ... */ } - async rename(oldPath: string, newPath: string): Promise { /* ... */ } - async realpath(path: string): Promise { /* ... */ } - async symlink(target: string, linkPath: string): Promise { /* ... */ } - async readlink(path: string): Promise { /* ... */ } - async lstat(path: string): Promise { /* ... */ } - async link(oldPath: string, newPath: string): Promise { /* ... */ } - async chmod(path: string, mode: number): Promise { /* ... */ } - async chown(path: string, uid: number, gid: number): Promise { /* ... */ } - async utimes(path: string, atime: number, mtime: number): Promise { /* ... */ } - async truncate(path: string, length: number): Promise { /* ... */ } - async pread(path: string, offset: number, length: number): Promise { /* ... */ } -} +// Output captured via onStdio callback — see Output Capture docs +runtime.dispose(); ``` ## More examples diff --git a/docs/nodejs-compatibility.mdx b/docs/nodejs-compatibility.mdx index e1e42a19..c35fc4d2 100644 --- a/docs/nodejs-compatibility.mdx +++ b/docs/nodejs-compatibility.mdx @@ -1,6 +1,7 @@ --- title: Node.js Compatibility description: Target Node.js version and standard-library compatibility matrix for secure-exec. +icon: "list-check" --- ## Target Node Version @@ -59,7 +60,7 @@ Unsupported modules use: `" is not supported in sandbox"`. 
| Module | Tier | Status | | --- | --- | --- | -| `fs` | 1 (Bridge) + 4 (Deferred APIs) | Implemented: `readFile`, `writeFile`, `appendFile`, `open`, `read`, `write`, `close`, `readdir`, `mkdir`, `rmdir`, `rm`, `unlink`, `stat`, `lstat`, `rename`, `copyFile`, `exists`, `createReadStream`, `createWriteStream`, `writev`, `access`, `realpath`, `chmod`, `chown`, `link`, `symlink`, `readlink`, `truncate`, `utimes`, `cp`, `mkdtemp`, `opendir`, `glob`, `statfs`, `readv`, `fdatasync`, `fsync`. Metadata-sensitive operations (`stat`, `exists`, `readdir` with `withFileTypes`) use metadata-native driver paths instead of content probing. `rename` delegates to driver semantics (atomic where supported; explicit limitation errors where not). Deferred: `watch`, `watchFile`. | +| `fs` | 1 (Bridge) + 4 (Deferred APIs) | Implemented: `readFile`, `writeFile`, `appendFile`, `open`, `read`, `write`, `close`, `readdir`, `mkdir`, `rmdir`, `rm`, `unlink`, `stat`, `lstat`, `rename`, `copyFile`, `exists`, `createReadStream`, `createWriteStream`, `writev`, `access`, `realpath`, `chmod`, `chown`, `link`, `symlink`, `readlink`, `truncate`, `utimes`, `cp`, `mkdtemp`, `opendir`, `glob`, `statfs`, `readv`, `fdatasync`, `fsync`. Metadata-sensitive operations (`stat`, `exists`, `readdir` with `withFileTypes`) use metadata-native driver paths instead of content probing. `rename` delegates to driver semantics (atomic where supported; explicit limitation errors where not). Deferred watcher APIs: `watch`, `watchFile`, `fs.promises.watch`. | | `process` | 1 (Bridge) | Env access (permission-gated), cwd/chdir, exit semantics, timers, stdio, eventing, and basic usage/system metadata APIs. | | `os` | 1 (Bridge) | Platform/arch/version, user/system info, and `os.constants`. | | `child_process` | 1 (Bridge) + 5 (`fork`) | Implemented: `spawn`, `spawnSync`, `exec`, `execSync`, `execFile`, `execFileSync`; `fork` is intentionally unsupported. 
| @@ -131,7 +132,7 @@ Some Node.js features cannot be supported in secure-exec due to fundamental arch | Behavior | Reason | | --- | --- | | Real OS signals (`SIGTERM`, `SIGUSR1`, etc.) | The sandbox is not an OS process — it's a V8 isolate within a host process. There are no real POSIX signals to deliver. `process.on('SIGINT')` may be emulated in the future. | -| Real file system watchers (`fs.watch`) | The VFS (virtual file system) has no inotify/kqueue equivalent. `fs.watch()` returns a stub that never emits events. Writes to VFS do not trigger watcher notifications. | +| Real file system watchers (`fs.watch`, `fs.watchFile`, `fs.promises.watch`) | The VFS (virtual file system) has no inotify/kqueue/FSEvents-equivalent primitive. These APIs fail fast with deterministic unsupported errors instead of hanging while waiting for events that the sandbox cannot produce. | | Multi-context execution | The sandbox runs one V8 context per isolate. Features requiring context isolation (ShadowRealm, `vm.createContext`) cannot work. | | QUIC protocol | Experimental in Node.js, depends on `tls` + `net` + OpenSSL QUIC support. Not planned. | diff --git a/docs/nodejs-conformance-report.mdx b/docs/nodejs-conformance-report.mdx index 52f53767..0002db26 100644 --- a/docs/nodejs-conformance-report.mdx +++ b/docs/nodejs-conformance-report.mdx @@ -1,6 +1,7 @@ --- title: Node.js Conformance Report description: Node.js v22 test/parallel/ conformance results for the secure-exec sandbox. +icon: "chart-bar" --- {/* AUTO-GENERATED — do not edit. 
Run: pnpm tsx scripts/generate-node-conformance-report.ts */} @@ -12,24 +13,24 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | Node.js version | 22.14.0 | | Source | v22.14.0 (test/parallel/) | | Total tests | 3532 | -| Passing (genuine) | 754 (21.3%) | -| Passing (vacuous self-skip) | 33 | -| Passing (total) | 787 (22.3%) | -| Expected fail | 2674 | -| Skip | 71 | +| Passing (genuine) | 1082 (30.6%) | +| Passing (vacuous self-skip) | 50 | +| Passing (total) | 1132 (32.0%) | +| Expected fail | 2288 | +| Skip | 112 | | Last updated | 2026-03-26 | ## Failure Categories | Category | Tests | | --- | --- | -| implementation-gap | 1372 | -| unsupported-module | 738 | -| requires-v8-flags | 239 | +| implementation-gap | 940 | +| unsupported-module | 755 | +| requires-v8-flags | 247 | | requires-exec-path | 200 | -| unsupported-api | 123 | -| test-infra | 68 | -| vacuous-skip | 33 | +| unsupported-api | 155 | +| test-infra | 98 | +| vacuous-skip | 50 | | native-addon | 3 | | security-constraint | 2 | @@ -60,7 +61,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | c | 1 | 0 | 1 | 0 | 0.0% | | child | 107 | 4 (2 vacuous) | 103 | 0 | 3.7% | | cli | 14 | 0 | 14 | 0 | 0.0% | -| client | 1 | 0 | 1 | 0 | 0.0% | +| client | 1 | 1 | 0 | 0 | 100.0% | | cluster | 83 | 3 | 80 | 0 | 3.6% | | code | 1 | 0 | 1 | 0 | 0.0% | | common | 5 | 0 | 5 | 0 | 0.0% | @@ -70,7 +71,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | constants | 1 | 0 | 1 | 0 | 0.0% | | corepack | 1 | 0 | 1 | 0 | 0.0% | | coverage | 1 | 0 | 1 | 0 | 0.0% | -| crypto | 99 | 56 (12 vacuous) | 43 | 0 | 56.6% | +| crypto | 99 | 57 (12 vacuous) | 42 | 0 | 57.6% | | cwd | 3 | 0 | 3 | 0 | 0.0% | | data | 1 | 0 | 1 | 0 | 0.0% | | datetime | 1 | 0 | 1 | 0 | 0.0% | @@ -78,7 +79,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | debugger | 25 | 0 | 25 | 0 | 0.0% | | delayed | 1 | 1 | 0 | 
0 | 100.0% | | destroy | 1 | 1 | 0 | 0 | 100.0% | -| dgram | 76 | 3 | 73 | 0 | 3.9% | +| dgram | 76 | 49 | 27 | 0 | 64.5% | | diagnostic | 2 | 0 | 2 | 0 | 0.0% | | diagnostics | 32 | 1 | 31 | 0 | 3.1% | | directory | 1 | 1 | 0 | 0 | 100.0% | @@ -107,25 +108,25 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | experimental | 1 | 0 | 1 | 0 | 0.0% | | fetch | 1 | 0 | 1 | 0 | 0.0% | | file | 8 | 3 | 5 | 0 | 37.5% | -| filehandle | 2 | 2 | 0 | 0 | 100.0% | +| filehandle | 2 | 0 | 2 | 0 | 0.0% | | finalization | 1 | 1 | 0 | 0 | 100.0% | | find | 1 | 0 | 1 | 0 | 0.0% | | fixed | 1 | 0 | 1 | 0 | 0.0% | | force | 2 | 0 | 2 | 0 | 0.0% | | freelist | 1 | 0 | 1 | 0 | 0.0% | | freeze | 1 | 0 | 1 | 0 | 0.0% | -| fs | 232 | 69 (8 vacuous) | 129 | 34 | 34.8% | +| fs | 232 | 104 (8 vacuous) | 114 | 14 | 47.7% | | gc | 3 | 0 | 3 | 0 | 0.0% | | global | 11 | 3 | 8 | 0 | 27.3% | -| h2 | 1 | 0 | 1 | 0 | 0.0% | +| h2 | 1 | 1 | 0 | 0 | 100.0% | | h2leak | 1 | 0 | 1 | 0 | 0.0% | | handle | 2 | 1 | 1 | 0 | 50.0% | | heap | 11 | 0 | 11 | 0 | 0.0% | | heapdump | 1 | 1 | 0 | 0 | 100.0% | | heapsnapshot | 2 | 0 | 2 | 0 | 0.0% | -| http | 377 | 243 (1 vacuous) | 133 | 1 | 64.6% | -| http2 | 256 | 4 | 252 | 0 | 1.6% | -| https | 62 | 4 | 58 | 0 | 6.5% | +| http | 377 | 275 (1 vacuous) | 101 | 1 | 73.1% | +| http2 | 256 | 18 | 238 | 0 | 7.0% | +| https | 62 | 5 (3 vacuous) | 0 | 57 | 100.0% | | icu | 5 | 0 | 5 | 0 | 0.0% | | inspect | 4 | 0 | 4 | 0 | 0.0% | | inspector | 61 | 0 | 61 | 0 | 0.0% | @@ -146,7 +147,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | mime | 2 | 0 | 2 | 0 | 0.0% | | module | 30 | 5 (2 vacuous) | 24 | 1 | 17.2% | | navigator | 1 | 0 | 1 | 0 | 0.0% | -| net | 149 | 8 | 141 | 0 | 5.4% | +| net | 149 | 98 | 50 | 1 | 66.2% | | next | 9 | 5 | 2 | 2 | 71.4% | | no | 2 | 1 | 1 | 0 | 50.0% | | node | 1 | 0 | 1 | 0 | 0.0% | @@ -203,7 +204,7 @@ description: Node.js v22 test/parallel/ conformance results for the 
secure-exec | stdio | 5 | 2 | 3 | 0 | 40.0% | | stdout | 7 | 1 | 5 | 1 | 16.7% | | strace | 1 | 1 (1 vacuous) | 0 | 0 | 100.0% | -| stream | 169 | 78 | 85 | 6 | 47.9% | +| stream | 169 | 79 | 84 | 6 | 48.5% | | stream2 | 25 | 15 | 4 | 6 | 78.9% | | stream3 | 4 | 3 | 0 | 1 | 100.0% | | streams | 1 | 0 | 1 | 0 | 0.0% | @@ -215,7 +216,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | tcp | 3 | 0 | 3 | 0 | 0.0% | | tick | 2 | 1 (1 vacuous) | 1 | 0 | 50.0% | | timers | 56 | 26 | 21 | 9 | 55.3% | -| tls | 192 | 19 | 173 | 0 | 9.9% | +| tls | 192 | 120 (14 vacuous) | 69 | 3 | 63.5% | | tojson | 1 | 0 | 1 | 0 | 0.0% | | trace | 35 | 3 | 32 | 0 | 8.6% | | tracing | 1 | 0 | 1 | 0 | 0.0% | @@ -239,30 +240,27 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | webstorage | 1 | 0 | 1 | 0 | 0.0% | | webstream | 4 | 0 | 4 | 0 | 0.0% | | webstreams | 5 | 0 | 5 | 0 | 0.0% | -| whatwg | 60 | 1 | 59 | 0 | 1.7% | +| whatwg | 60 | 25 | 35 | 0 | 41.7% | | windows | 2 | 1 (1 vacuous) | 1 | 0 | 50.0% | | worker | 133 | 11 | 122 | 0 | 8.3% | | wrap | 4 | 0 | 4 | 0 | 0.0% | | x509 | 1 | 0 | 1 | 0 | 0.0% | | zlib | 53 | 17 | 33 | 3 | 34.0% | -| **Total** | **3532** | **787** | **2674** | **71** | **22.7%** | +| **Total** | **3532** | **1132** | **2288** | **112** | **33.1%** | ## Expectations Detail -### implementation-gap (691 entries) +### implementation-gap (658 entries) **Glob patterns:** - `test-v8-*.js` — v8 module exposed as empty stub — no real v8 APIs (serialize, deserialize, getHeapStatistics, promiseHooks, etc.) 
are implemented -- `test-dgram-*.js` — dgram module bridged via kernel UDP — most tests fail on API gaps (bind, send, multicast, cluster) -- `test-net-*.js` — net module bridged via kernel TCP — most tests fail on API gaps (socket options, pipe, cluster, FD handling) -- `test-tls-*.js` — tls module bridged via kernel — most tests fail on missing TLS fixture files or crypto API gaps -- `test-https-*.js` — https depends on tls — most tests fail on missing TLS fixture files or crypto API gaps -- `test-http2-*.js` — http2 module bridged via kernel — most tests fail on API gaps, missing fixtures, or protocol handling +- `test-http2-!(allow-http1|client-request-options-errors|client-setLocalWindowSize|error-order|goaway-delayed-request|goaway-opaquedata|misbehaving-flow-control|misbehaving-flow-control-paused|request-response-proto|respond-file-filehandle|server-push-stream|server-push-stream-errors-args|server-push-stream-head|server-setLocalWindowSize|session-settings|status-code-invalid|update-settings|window-size).js` — outside the landed allowHTTP1, push/settings, request-response, and window-size slices, the remaining http2 suite still fails on compatibility wrappers, secure-session bootstrap, multiplexing/teardown, and file-response helper gaps +- `test-https-*.js` — https conformance still hangs on most TLS-backed client/server lifecycle and certificate-handling paths, so the remaining broad slice is skipped pending exact inventory -*685 individual tests — see expectations.json for full list.* +*655 individual tests — see expectations.json for full list.* -### unsupported-module (191 entries) +### unsupported-module (208 entries) **Glob patterns:** @@ -278,7 +276,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec - `test-debugger-*.js` — debugger protocol requires inspector which is Tier 5 (Unsupported) - `test-quic-*.js` — QUIC protocol depends on tls which is Tier 4 (Deferred) - +
197 individual tests | Test | Reason | | --- | --- | @@ -332,7 +330,6 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-fs-mkdir.js` | requires worker_threads module which is Tier 4 (Deferred) | | `test-fs-whatwg-url.js` | requires worker_threads module which is Tier 4 (Deferred) | | `test-fs-write-file-sync.js` | requires worker_threads module which is Tier 4 (Deferred) | -| `test-h2-large-header-cause-client-to-hangup.js` | requires http2 module — createServer/createSecureServer unsupported | | `test-http-agent-reuse-drained-socket-only.js` | requires net module which is Tier 4 (Deferred) | | `test-http-autoselectfamily.js` | requires dns module — DNS resolution not available in sandbox | | `test-http-client-error-rawbytes.js` | requires net module which is Tier 4 (Deferred) | @@ -348,9 +345,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-http-multi-line-headers.js` | requires net module which is Tier 4 (Deferred) | | `test-http-no-content-length.js` | requires net module which is Tier 4 (Deferred) | | `test-http-perf_hooks.js` | requires perf_hooks module which is Tier 4 (Deferred) | -| `test-http-pipeline-requests-connection-leak.js` | requires net module which is Tier 4 (Deferred) | | `test-http-request-agent.js` | requires https module — depends on tls which is Tier 4 (Deferred) | -| `test-http-response-no-headers.js` | requires net module which is Tier 4 (Deferred) | | `test-http-response-splitting.js` | requires net module which is Tier 4 (Deferred) | | `test-http-response-status-message.js` | requires net module which is Tier 4 (Deferred) | | `test-http-server-headers-timeout-delayed-headers.js` | requires net module which is Tier 4 (Deferred) | @@ -425,7 +420,6 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-socket-writes-before-passed-to-tls-socket.js` | requires net module which is Tier 4 (Deferred) | | 
`test-stdio-pipe-redirect.js` | requires worker_threads module which is Tier 4 (Deferred) | | `test-stream-base-typechecking.js` | requires net module which is Tier 4 (Deferred) | -| `test-stream-pipeline-http2.js` | requires http2 module — createServer/createSecureServer unsupported | | `test-stream-pipeline.js` | requires net module which is Tier 4 (Deferred) | | `test-stream-preprocess.js` | requires readline module which is Tier 4 (Deferred) | | `test-stream-writable-samecb-singletick.js` | async_hooks module is a deferred stub — AsyncLocalStorage, AsyncResource, createHook exported but not functional | @@ -444,8 +438,6 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-fetch-mock.js` | requires node:test module which is not available in sandbox | | `test-fs-operations-with-surrogate-pairs.js` | requires node:test module which is not available in sandbox | | `test-fs-readdir-recursive.js` | requires node:test module which is not available in sandbox | -| `test-http-common.js` | Cannot find module '_http_common' — Node.js internal module _http_common not exposed in sandbox | -| `test-http-invalidheaderfield2.js` | Cannot find module '_http_common' — Node.js internal module _http_common not exposed in sandbox | | `test-http-parser.js` | Cannot find module '_http_common' — Node.js internal module _http_common (and HTTPParser) not exposed in sandbox | | `test-npm-version.js` | Cannot find module '/deps/npm/package.json' — npm is not bundled in the sandbox runtime | | `test-outgoing-message-pipe.js` | Cannot find module '_http_outgoing' — Node.js internal module _http_outgoing not exposed in sandbox | @@ -462,10 +454,33 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-assert-fail-deprecation.js` | requires 'test' module (node:test) which is not available in sandbox | | `test-buffer-resizable.js` | requires 'test' module (node:test) which is not available in sandbox | | 
`test-stream-consumers.js` | stream/consumers submodule not available in stream polyfill | - - - -### unsupported-api (78 entries) +| `test-fs-promises-file-handle-read-worker.js` | worker_threads.Worker is not supported in sandbox | +| `test-dgram-bind-socket-close-before-cluster-reply.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-dgram-cluster-bind-error.js` | the fixture depends on cluster-managed dgram handle sharing, which remains unsupported in the sandbox | +| `test-dgram-cluster-close-during-bind.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-dgram-cluster-close-in-listening.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-dgram-unref-in-cluster.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-net-listen-exclusive-random-ports.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-net-listen-handle-in-cluster-1.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-net-listen-handle-in-cluster-2.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-net-listen-twice.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-net-server-close-before-ipc-response.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-net-server-drop-connections-in-cluster.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-net-socket-constructor.js` | the fixture depends on the cluster module, which remains unsupported in the sandbox | +| `test-http2-server-push-stream-errors.js` | the vendored internal/test/binding and internal/http2/util modules are not exposed in the sandbox, so the internal 
nghttp2 error-path fixture aborts before exercising pushStream parity | +| `test-tls-canonical-ip.js` | Cannot find module 'internal/test/binding' | +| `test-tls-client-allow-partial-trust-chain.js` | Cannot find module 'test' | +| `test-tls-clientcertengine-unsupported.js` | Cannot find module 'internal/test/binding' | +| `test-tls-close-notify.js` | Cannot find module 'internal/test/binding' | +| `test-tls-keyengine-unsupported.js` | Cannot find module 'internal/test/binding' | +| `test-tls-reinitialize-listeners.js` | Cannot find module 'internal/net' | +| `test-tls-translate-peer-certificate.js` | Cannot find module '_tls_common' | +| `test-tls-wrap-no-abort.js` | Cannot find module 'internal/test/binding' | +| `test-tls-wrap-timeout.js` | Cannot find module 'internal/timers' | + +
+ +### unsupported-api (110 entries) **Glob patterns:** @@ -473,7 +488,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec - `test-shadow-*.js` — ShadowRealm is experimental and not supported in sandbox - `test-compile-*.js` — V8 compile cache/code cache features not available in sandbox - +
107 individual tests | Test | Reason | | --- | --- | @@ -486,26 +501,26 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-child-process-send-after-close.js` | uses child_process.fork — IPC across isolate boundary not supported | | `test-child-process-send-keep-open.js` | uses child_process.fork — IPC across isolate boundary not supported | | `test-child-process-send-type-error.js` | uses child_process.fork — IPC across isolate boundary not supported | -| `test-fs-options-immutable.js` | hangs — fs.watch() with frozen options waits for events that never arrive (VFS has no inotify) | -| `test-fs-promises-watch.js` | hangs — fs.promises.watch() waits forever for filesystem events (VFS has no watcher) | -| `test-fs-watch-file-enoent-after-deletion.js` | hangs — fs.watchFile() waits for stat changes that never arrive (VFS has no inotify) | -| `test-fs-watch-recursive-add-file-to-existing-subfolder.js` | hangs — fs.watch(\{recursive\}) waits for filesystem events that never arrive (VFS has no inotify) | -| `test-fs-watch-recursive-add-file-to-new-folder.js` | hangs — fs.watch(\{recursive\}) waits for filesystem events that never arrive (VFS has no inotify) | -| `test-fs-watch-recursive-add-file.js` | hangs — fs.watch(\{recursive\}) waits for filesystem events that never arrive (VFS has no inotify) | -| `test-fs-watch-recursive-assert-leaks.js` | hangs — fs.watch(\{recursive\}) waits for filesystem events that never arrive (VFS has no inotify) | -| `test-fs-watch-recursive-delete.js` | hangs — fs.watch(\{recursive\}) waits for filesystem events that never arrive (VFS has no inotify) | -| `test-fs-watch-recursive-linux-parallel-remove.js` | hangs — fs.watch(\{recursive\}) waits for filesystem events that never arrive (VFS has no inotify) | -| `test-fs-watch-recursive-sync-write.js` | hangs — fs.watch() with recursive option waits forever for events | -| `test-fs-watch-recursive-update-file.js` | hangs — fs.watch(\{recursive\}) waits 
for filesystem events that never arrive (VFS has no inotify) | -| `test-fs-watch-stop-async.js` | uses fs.watch/watchFile — inotify not available in VFS | -| `test-fs-watch-stop-sync.js` | uses fs.watch/watchFile — inotify not available in VFS | -| `test-fs-watch.js` | hangs — fs.watch() waits for filesystem events that never arrive (VFS has no inotify) | +| `test-fs-options-immutable.js` | fails fast — fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-promises-watch.js` | fails fast — fs.promises.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-encoding.js` | fails fast — fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-file-enoent-after-deletion.js` | fails fast — fs.watchFile is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-add-file-to-existing-subfolder.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-add-file-to-new-folder.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-add-file.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-assert-leaks.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-delete.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-linux-parallel-remove.js` | fails fast — 
recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-sync-write.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-update-file.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-stop-async.js` | fails fast — fs.watchFile is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-stop-sync.js` | fails fast — fs.watchFile is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch.js` | fails fast — fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watchfile.js` | fails fast — fs.watchFile is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | | `test-process-external-stdio-close.js` | uses child_process.fork — IPC across isolate boundary not supported | | `test-events-uncaught-exception-stack.js` | sandbox does not route synchronous throws from EventEmitter.emit('error') to process 'uncaughtException' handler | -| `test-fs-promises-file-handle-writeFile.js` | Readable.from is not available in the browser — stream.Readable.from() factory not implemented in sandbox stream polyfill | | `test-fs-promises-writefile.js` | Readable.from is not available in the browser — stream.Readable.from() factory not implemented; used by writeFile() Readable/iterable overload | | `test-http-addrequest-localaddress.js` | TypeError: agent.addRequest is not a function — http.Agent.addRequest() internal method not implemented in http polyfill | -| `test-http-header-validators.js` | TypeError: Cannot read properties of undefined (reading 'constructor') — 
validateHeaderName/validateHeaderValue not exported from http polyfill module | | `test-http-import-websocket.js` | ReferenceError: WebSocket is not defined — WebSocket global not available in sandbox; undici WebSocket not polyfilled as a global | | `test-http-incoming-matchKnownFields.js` | TypeError: incomingMessage._addHeaderLine is not a function — http.IncomingMessage._addHeaderLine() internal method not implemented in http polyfill | | `test-http-outgoing-destroy.js` | Error: The _implicitHeader() method is not implemented — http.OutgoingMessage._implicitHeader() not implemented; required by write() after destroy() path | @@ -543,21 +558,53 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-util-types-exists.js` | require('util/types') subpath import not supported by sandbox module system | | `test-websocket.js` | WebSocket global is not defined in sandbox — Node.js 22 added WebSocket as a global but the sandbox does not expose it | | `test-webstream-readable-from.js` | ReadableStream.from() static method not implemented in sandbox WebStreams polyfill — added in Node.js 20 and not available globally in sandbox | -| `test-webstreams-clone-unref.js` | structuredClone(\{ transfer: [stream] \}) for ReadableStream/WritableStream not supported in sandbox — transferable stream structured clone not implemented | +| `test-webstreams-clone-unref.js` | structuredClone(\{ transfer: [stream] \}) for ReadableStream/WritableStream not supported in sandbox — transferable stream structured clone not implemented | | `test-zlib-brotli-16GB.js` | getDefaultHighWaterMark() not exported from readable-stream v3 polyfill — test also relies on native zlib BrotliDecompress buffering behavior with _readableState internals | +| `test-fs-watch-recursive-add-file-with-url.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-add-folder.js` | 
fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-promise.js` | fails fast — fs.promises.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-symlink.js` | fails fast — recursive fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-recursive-watch-file.js` | fails fast — fs.watchFile is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | | `test-buffer-constructor-outside-node-modules.js` | ReferenceError: document is not defined — test uses browser DOM API not available in sandbox | | `test-child-process-fork.js` | child_process.fork is not supported in sandbox | -| `test-fs-promises-file-handle-read-worker.js` | fs.promises.open (FileHandle API) not implemented | -| `test-fs-watch-close-when-destroyed.js` | fs.watch not supported in sandbox | -| `test-fs-watch-ref-unref.js` | fs.watch not supported in sandbox | -| `test-fs-watchfile-ref-unref.js` | fs.watchFile not supported in sandbox | -| `test-fs-write-stream-file-handle-2.js` | fs.promises.open (FileHandle API) not implemented | - - - -### requires-v8-flags (239 entries) - -*239 individual tests — see expectations.json for full list.* +| `test-fs-watch-close-when-destroyed.js` | fails fast — fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watch-ref-unref.js` | fails fast — fs.watch is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-fs-watchfile-ref-unref.js` | fails fast — fs.watchFile is deferred because the sandbox VFS/kernel has no inotify/kqueue/FSEvents-style watcher primitive | +| `test-dgram-blocklist.js` | net.BlockList is not implemented in the sandbox net bridge, so 
dgram sendBlockList and receiveBlockList coverage aborts immediately | +| `test-dgram-send-cb-quelches-error.js` | dns.setServers() is not implemented in the sandbox dns module, so the DNS failure callback-vs-error-emission fixture cannot run | +| `test-dgram-send-queue-info.js` | dgram.Socket getSendQueueSize() and getSendQueueCount() are not implemented in the bridge | +| `test-net-autoselectfamily-attempt-timeout-cli-option.js` | net.getDefaultAutoSelectFamilyAttemptTimeout() is still missing, so the CLI autoSelectFamily timeout test aborts before exercising connection behavior | +| `test-net-autoselectfamily-attempt-timeout-default-value.js` | net.getDefaultAutoSelectFamilyAttemptTimeout() is still missing, so the default timeout helper test aborts before exercising connection behavior | +| `test-net-blocklist.js` | net.BlockList is not implemented in the sandbox net bridge | +| `test-net-child-process-connect-reset.js` | the child-process spawn path used by the reset fixture still returns ENOSYS in the sandbox | +| `test-net-connect-reset-before-connected.js` | net.Socket.resetAndDestroy() is still missing from the sandbox socket implementation | +| `test-net-connect-reset.js` | net.Socket.resetAndDestroy() is still missing from the sandbox socket implementation | +| `test-net-deprecated-setsimultaneousaccepts.js` | the deprecated net._setSimultaneousAccepts() helper is not implemented in the sandbox bridge | +| `test-net-perf_hooks.js` | perf_hooks.PerformanceObserver remains unsupported in the sandbox, so the net perf observer fixture aborts early | +| `test-net-server-blocklist.js` | net.BlockList is not implemented in the sandbox net bridge | +| `test-net-server-simultaneous-accepts-produce-warning-once.js` | the deprecated net._setSimultaneousAccepts() helper is not implemented in the sandbox bridge | +| `test-net-write-arguments.js` | the legacy net.Stream constructor surface is still missing, so write-argument validation aborts before the vendored 
assertions run | +| `test-tls-addca.js` | TypeError: contextWithCert.context.addCACert is not a function | +| `test-tls-check-server-identity.js` | TypeError: tls.checkServerIdentity is not a function | +| `test-tls-cipher-list.js` | ENOSYS: function not implemented, spawn | +| `test-tls-cli-min-max-conflict.js` | ENOSYS: function not implemented, spawn | +| `test-tls-env-extra-ca-no-crypto.js` | child_process.fork is not supported in sandbox | +| `test-tls-error-servername.js` | TypeError: duplexPair is not a function or its return value is not iterable | +| `test-tls-generic-stream.js` | TypeError: duplexPair is not a function or its return value is not iterable | +| `test-tls-handshake-exception.js` | AssertionError2: ENOSYS: function not implemented, spawn | +| `test-tls-handshake-nohang.js` | TypeError: tls.createSecurePair is not a function | +| `test-tls-legacy-deprecated.js` | TypeError: tls.createSecurePair is not a function | +| `test-tls-securepair-fiftharg.js` | TypeError: tls.createSecurePair is not a function | +| `test-tls-securepair-leak.js` | TypeError: createSecurePair is not a function | +| `test-tls-server-setoptions-clientcertengine.js` | TypeError: server.setOptions is not a function | +| `test-tls-socket-snicallback-without-server.js` | TypeError: duplexPair is not a function or its return value is not iterable | +| `test-tls-transport-destroy-after-own-gc.js` | TypeError: duplexPair is not a function or its return value is not iterable | + +
+ +### requires-v8-flags (247 entries) + +*247 individual tests — see expectations.json for full list.* ### requires-exec-path (173 entries) @@ -565,7 +612,7 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec - `test-permission-*.js` — spawns child Node.js process via process.execPath — sandbox does not provide a real node binary - +
172 individual tests | Test | Reason | | --- | --- | @@ -742,30 +789,31 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-webstorage.js` | spawns child Node.js process via process.execPath — sandbox does not provide a real node binary | | `test-windows-failed-heap-allocation.js` | spawns child Node.js process via process.execPath — sandbox does not provide a real node binary | - +
### security-constraint (2 entries) - +
2 individual tests | Test | Reason | | --- | --- | | `test-crypto-pbkdf2.js` | SharedArrayBuffer is intentionally removed by sandbox hardening, so the vendored TypedArray coverage loop aborts before the remaining pbkdf2 assertions run | | `test-process-binding-internalbinding-allowlist.js` | process.binding is not supported in sandbox (security constraint) | - +
-### test-infra (22 entries) +### test-infra (52 entries) **Glob patterns:** - `test-runner-*.js` — Node.js test runner infrastructure — not runtime behavior - `test-eslint-*.js` — ESLint integration tests — Node.js CI tooling, not runtime - +
50 individual tests | Test | Reason | | --- | --- | +| `test-whatwg-url-canparse.js` | depends on internal/test/binding for a debug-only fast-API counter assertion; URL.canParse() behavior itself now passes in the sandbox | | `test-benchmark-cli.js` | Cannot find module '../../benchmark/_cli.js' — benchmark CLI helper not vendored in conformance test tree | | `test-http-client-req-error-dont-double-fire.js` | Cannot find module '../common/internet' — internet connectivity helper not vendored in conformance test tree | | `test-inspect-async-hook-setup-at-inspect.js` | TypeError: common.skipIfInspectorDisabled is not a function — skipIfInspectorDisabled() helper not implemented in conformance common shim; test requires V8 inspector | @@ -786,12 +834,41 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-vm-parse-abort-on-uncaught-exception.js` | passes in sandbox — overrides glob pattern | | `test-worker-messaging-errors-handler.js` | passes in sandbox — overrides glob pattern | | `test-worker-messaging-errors-invalid.js` | passes in sandbox — overrides glob pattern | - - +| `test-dgram-reuseport.js` | the vendored ../common/udp helper is missing from the conformance VFS, so the reusePort fixture cannot run | +| `test-net-autoselectfamily-commandline-option.js` | the vendored ../common/dns shim is missing from the conformance VFS, so the autoSelectFamily DNS fixture cannot run | +| `test-net-autoselectfamily-default.js` | the vendored ../common/dns shim is missing from the conformance VFS, so the autoSelectFamily DNS fixture cannot run | +| `test-net-autoselectfamily-ipv4first.js` | the vendored ../common/dns shim is missing from the conformance VFS, so the autoSelectFamily DNS fixture cannot run | +| `test-net-autoselectfamily.js` | the vendored ../common/dns shim is missing from the conformance VFS, so the autoSelectFamily DNS fixture cannot run | +| `test-net-better-error-messages-port-hostname.js` | the vendored 
../common/internet helper is missing from the conformance VFS, so the port/hostname error-message fixture cannot run | +| `test-net-connect-immediate-finish.js` | the vendored ../common/internet helper is missing from the conformance VFS, so the external-connect fixture cannot run | +| `test-net-connect-memleak.js` | the vendored ../common/gc helper is missing from the conformance VFS, so the GC-sensitive connect leak fixture cannot run | +| `test-net-end-close.js` | the vendored end/close fixture depends on internal/test/binding, which is absent in the sandbox | +| `test-net-normalize-args.js` | the vendored internal/net helper module is missing from the conformance VFS, so argument-normalization coverage cannot run | +| `test-net-persistent-nodelay.js` | the persistent nodelay fixture depends on internal/test/binding, which is absent in the sandbox | +| `test-net-persistent-ref-unref.js` | the persistent ref/unref fixture depends on internal/test/binding, which is absent in the sandbox | +| `test-net-reuseport.js` | the vendored ../common/net helper is missing from the conformance VFS, so the reusePort fixture cannot run | +| `test-tls-cli-max-version-1.3.js` | Cannot find module './test-tls-min-max-version.js' | +| `test-tls-cli-min-version-1.2.js` | Cannot find module './test-tls-min-max-version.js' | +| `test-tls-client-reject-12.js` | Cannot find module './test-tls-client-reject.js' | +| `test-tls-client-resume-12.js` | Cannot find module './test-tls-client-resume.js' | +| `test-tls-connect-memleak.js` | Cannot find module '../common/gc' | +| `test-tls-destroy-stream-12.js` | Cannot find module './test-tls-destroy-stream.js' | +| `test-tls-enable-keylog-cli.js` | SyntaxError: Illegal return statement | +| `test-tls-enable-trace-cli.js` | SyntaxError: Illegal return statement | +| `test-tls-enable-trace.js` | SyntaxError: Illegal return statement | +| `test-tls-env-bad-extra-ca.js` | SyntaxError: Illegal return statement | +| `test-tls-env-extra-ca.js` | 
SyntaxError: Illegal return statement | +| `test-tls-net-socket-keepalive-12.js` | Cannot find module './test-tls-net-socket-keepalive.js' | +| `test-tls-ticket-12.js` | Cannot find module './test-tls-ticket.js' | +| `test-tls-ticket-cluster.js` | SyntaxError: Illegal return statement | +| `test-tls-wrap-econnreset-pipe.js` | SyntaxError: Illegal return statement | +| `test-tls-write-error.js` | Cannot find module '../common/tls' | + +
### native-addon (3 entries) - +
3 individual tests | Test | Reason | | --- | --- | @@ -799,11 +876,11 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-internal-process-binding.js` | uses process.binding() or native addons — not available in sandbox | | `test-process-binding-util.js` | uses process.binding() or native addons — not available in sandbox | - +
-### vacuous-skip (33 entries) +### vacuous-skip (50 entries) - +
50 individual tests | Test | Reason | | --- | --- | @@ -840,5 +917,22 @@ description: Node.js v22 test/parallel/ conformance results for the secure-exec | `test-child-process-stdio-overlapped.js` | vacuous pass — test self-skips because required overlapped-checker binary not found in sandbox | | `test-fs-utimes-y2K38.js` | vacuous pass — test self-skips because child_process.spawnSync(touch) fails in sandbox | | `test-tick-processor-arguments.js` | vacuous pass — test self-skips because common.enoughTestMem is undefined in sandbox shim | - - +| `test-tls-alert-handling.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-alert.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-client-renegotiation-limit.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-connect-address-family.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-destroy-whilst-write.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-dhe.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-ecdh-auto.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-ecdh-multiple.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-ecdh.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-ocsp-callback.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-psk-server.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-securepair-server.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-server-verify.js` | 
vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-tls-session-cache.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-https-client-renegotiation-limit.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-https-connect-address-family.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | +| `test-https-foafssl.js` | vacuous pass — test self-skips via common.skip() because common.hasCrypto is false | + +
diff --git a/docs/posix-compatibility.md b/docs/posix-compatibility.md index ebe1a9a1..0565831c 100644 --- a/docs/posix-compatibility.md +++ b/docs/posix-compatibility.md @@ -1,14 +1,12 @@ ---- -title: POSIX Compatibility ---- +# POSIX Compatibility > **This is a living document.** Update it when kernel, WasmVM, Node bridge, or Python bridge behavior changes for any POSIX-relevant feature. -> **Looking for automated test results?** See the [POSIX Conformance Report](posix-conformance-report) for os-test suite results with per-suite pass rates and exclusion details. +> **Looking for automated test results?** See the [POSIX Conformance Report](posix-conformance-report.mdx) for os-test suite results with per-suite pass rates and exclusion details. This document tracks how closely the secure-exec kernel, runtimes, and bridges conform to POSIX and Linux behavior. The goal is full POSIX compliance 1:1 — every syscall, signal, and shell behavior should match a real Linux system unless an architectural constraint makes it impossible. -For command-level support (ls, grep, awk, etc.), see [WasmVM Supported Commands](wasmvm/supported-commands.md). For Node.js API compatibility (fs, http, crypto modules), see [Node.js Compatibility](nodejs-compatibility). For Python API compatibility, see [Python Compatibility](python-compatibility). +For command-level support (ls, grep, awk, etc.), see [WasmVM Supported Commands](wasmvm/supported-commands.md). For Node.js API compatibility (fs, http, crypto modules), see [Node.js Compatibility](nodejs-compatibility.mdx). For Python API compatibility, see [Python Compatibility](python-compatibility.mdx). --- diff --git a/docs/posix-conformance-report.mdx b/docs/posix-conformance-report.mdx index 07ca828a..ed708103 100644 --- a/docs/posix-conformance-report.mdx +++ b/docs/posix-conformance-report.mdx @@ -1,6 +1,7 @@ --- title: POSIX Conformance Report description: os-test POSIX.1-2024 conformance results for WasmVM. 
+icon: "chart-bar" --- {/* AUTO-GENERATED — do not edit. Run scripts/generate-posix-report.ts */} @@ -14,7 +15,7 @@ description: os-test POSIX.1-2024 conformance results for WasmVM. | Passing | 3347 (99.9%) | | Expected fail | 3 | | Skip | 0 | -| Native verified | undefined of 3347 passing tests verified against native output (NaN%) | +| Native parity | 98.4% | | Last updated | 2026-03-23 | ## Per-Suite Results @@ -46,5 +47,5 @@ WASI Preview 1 lacks the required syscall. | Test | Reason | Issue | | --- | --- | --- | -| `basic/sys_statvfs/fstatvfs` | fstatvfs() not part of WASI — no filesystem statistics interface | [#48](https://github.com/rivet-dev/secure-exec/issues/48) | -| `basic/sys_statvfs/statvfs` | statvfs() not part of WASI — no filesystem statistics interface | [#48](https://github.com/rivet-dev/secure-exec/issues/48) | +| `basic/sys_statvfs/fstatvfs` | fstatvfs() not part of WASI — no filesystem statistics interface | [#34](https://github.com/rivet-dev/secure-exec/issues/34) | +| `basic/sys_statvfs/statvfs` | statvfs() not part of WASI — no filesystem statistics interface | [#34](https://github.com/rivet-dev/secure-exec/issues/34) | diff --git a/docs/features/process-isolation.mdx b/docs/process-isolation.mdx similarity index 99% rename from docs/features/process-isolation.mdx rename to docs/process-isolation.mdx index 8739ed19..1704d766 100644 --- a/docs/features/process-isolation.mdx +++ b/docs/process-isolation.mdx @@ -1,7 +1,6 @@ --- title: Process Isolation description: Configure V8 process topology to control crash blast radius and resource partitioning. -icon: castle --- Process isolation depends on `@secure-exec/v8`, which is experimental. APIs and behavior may change without notice. 
diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index e745c7c4..41c8ed75 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -28,9 +28,7 @@ icon: "rocket" A `NodeRuntime` executes JavaScript in an isolated V8 sandbox with its own virtual filesystem, module system, and permissions. - Source: `examples/kitchen-sink/src/create-runtime.ts` - - ```ts Create Runtime + ```ts import { NodeRuntime, createNodeDriver, @@ -41,8 +39,6 @@ icon: "rocket" systemDriver: createNodeDriver(), runtimeDriverFactory: createNodeRuntimeDriverFactory(), }); - - runtime.dispose(); ``` @@ -50,8 +46,6 @@ icon: "rocket" Use `runtime.run()` to execute JavaScript and get back exported values. Use `runtime.exec()` for scripts that produce console output. - Source: `examples/kitchen-sink/src/run-get-exports.ts` - ```ts Run & Get Exports import { NodeRuntime, @@ -73,8 +67,6 @@ icon: "rocket" runtime.dispose(); ``` - Source: `examples/kitchen-sink/src/execute-capture-output.ts` - ```ts Execute & Capture Output import { NodeRuntime, @@ -99,8 +91,6 @@ icon: "rocket" runtime.dispose(); ``` - Source: `examples/kitchen-sink/src/filesystem.ts` - ```ts Filesystem import { NodeRuntime, @@ -115,7 +105,7 @@ icon: "rocket" const runtime = new NodeRuntime({ systemDriver: createNodeDriver({ filesystem, - permissions: { ...allowAllFs }, + permissions: { fs: allowAllFs }, }), runtimeDriverFactory: createNodeRuntimeDriverFactory(), }); @@ -132,8 +122,6 @@ icon: "rocket" runtime.dispose(); ``` - Source: `examples/kitchen-sink/src/network-access.ts` - ```ts Network Access import { NodeRuntime, @@ -144,8 +132,7 @@ icon: "rocket" const runtime = new NodeRuntime({ systemDriver: createNodeDriver({ - useDefaultNetwork: true, - permissions: { ...allowAllNetwork }, + permissions: { network: allowAllNetwork }, }), runtimeDriverFactory: createNodeRuntimeDriverFactory(), onStdio: (event) => { @@ -154,17 +141,13 @@ icon: "rocket" }); await runtime.exec(` - const response = await fetch("http://example.com"); + 
const response = await fetch("https://example.com"); console.log(response.status); // 200 - `, { - filePath: "/entry.mjs", // enables top-level await - }); + `); runtime.dispose(); ``` - Source: `examples/kitchen-sink/src/esm-modules.ts` - ```ts ESM Modules import { NodeRuntime, @@ -191,6 +174,31 @@ icon: "rocket" +## Kernel API + +For multi-process workloads (shell commands, child processes, inter-process communication), use the kernel API. This requires both a Node.js runtime and a shell runtime (WasmVM). + +```ts +import { + createKernel, + createInMemoryFileSystem, + createNodeRuntime, +} from "secure-exec"; + +const kernel = createKernel({ + filesystem: createInMemoryFileSystem(), +}); +await kernel.mount(createNodeRuntime()); + +// spawn() runs a command directly (no shell needed) +const proc = kernel.spawn("node", ["-e", "console.log('hello')"], { + onStdout: (data) => process.stdout.write(data), +}); +await proc.wait(); + +await kernel.dispose(); +``` + ## Next steps diff --git a/docs/runtimes/node.mdx b/docs/runtimes/node.mdx index 7c48a34f..599228ea 100644 --- a/docs/runtimes/node.mdx +++ b/docs/runtimes/node.mdx @@ -63,7 +63,7 @@ const runtime = new NodeRuntime({ These exports are also available from `@secure-exec/nodejs`. -By default, all runtimes share a single V8 child process. You can pass a dedicated `V8Runtime` handle via `createNodeRuntimeDriverFactory({ v8Runtime })` to control crash blast radius and resource partitioning. See [Process Isolation](/features/process-isolation) for topology options and trade-offs. +By default, all runtimes share a single V8 child process. You can pass a dedicated `V8Runtime` handle via `createNodeRuntimeDriverFactory({ v8Runtime })` to control crash blast radius and resource partitioning. See [Process Isolation](/process-isolation) for topology options and trade-offs. 
## exec vs run diff --git a/docs/use-cases/dev-servers.mdx b/docs/use-cases/dev-servers.mdx index 80c496e7..0bc1614b 100644 --- a/docs/use-cases/dev-servers.mdx +++ b/docs/use-cases/dev-servers.mdx @@ -14,16 +14,10 @@ Let users run their own dev servers inside a sandboxed isolate. Secure Exec can Start a user-provided Hono server inside the isolate, wait for its health endpoint, fetch a response from the host, then terminate. -Source file: `examples/hono-dev-server/src/index.ts` - ```ts Hono Dev Server import { createServer } from "node:net"; -import path from "node:path"; -import { createRequire } from "node:module"; -import { fileURLToPath } from "node:url"; import { NodeRuntime, - allowAllFs, allowAllNetwork, createNodeDriver, createNodeRuntimeDriverFactory, @@ -32,16 +26,11 @@ import { const host = "127.0.0.1"; const port = await findOpenPort(); const logs: string[] = []; -const require = createRequire(import.meta.url); -const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../.."); -const honoEntry = toSandboxModulePath(require.resolve("hono")); -const honoNodeServerEntry = toSandboxModulePath(require.resolve("@hono/node-server")); const runtime = new NodeRuntime({ systemDriver: createNodeDriver({ - moduleAccess: { cwd: repoRoot }, useDefaultNetwork: true, - permissions: { ...allowAllFs, ...allowAllNetwork }, + permissions: { network: allowAllNetwork }, }), runtimeDriverFactory: createNodeRuntimeDriverFactory(), memoryLimit: 128, @@ -49,22 +38,26 @@ const runtime = new NodeRuntime({ }); const execPromise = runtime.exec(` - globalThis.global = globalThis; - const { Hono } = require("${honoEntry}"); - const { serve } = require("${honoNodeServerEntry}"); - - const app = new Hono(); - app.get("/", (c) => c.text("hello from sandboxed hono")); - app.get("/health", (c) => c.json({ ok: true })); - - serve({ - fetch: app.fetch, - port: ${port}, - hostname: "${host}", - }); + (async () => { + const { Hono } = require("hono"); + const { serve } = 
require("@hono/node-server"); + + const app = new Hono(); + app.get("/", (c) => c.text("hello from sandboxed hono")); + app.get("/health", (c) => c.json({ ok: true })); + + serve({ + fetch: app.fetch, + port: ${port}, + hostname: "${host}", + }); - console.log("server:listening:${port}"); - setInterval(() => {}, 1 << 30); + console.log("server:listening:${port}"); + await new Promise(() => {}); + })().catch((error) => { + console.error(error); + process.exitCode = 1; + }); `, { onStdio: (event) => logs.push(`[${event.channel}] ${event.message}`), }); @@ -83,15 +76,6 @@ try { await execPromise.catch(() => undefined); } -function toSandboxModulePath(hostPath: string): string { - const hostNodeModulesRoot = path.join(repoRoot, "node_modules"); - const relativePath = path.relative(hostNodeModulesRoot, hostPath); - if (relativePath.startsWith("..")) { - throw new Error(`Expected module inside ${hostNodeModulesRoot}: ${hostPath}`); - } - return path.posix.join("/root/node_modules", relativePath.split(path.sep).join("/")); -} - async function findOpenPort(): Promise { return new Promise((resolve, reject) => { const server = createServer(); diff --git a/examples/ai-agent-type-check/docs-gen.json b/examples/ai-agent-type-check/docs-gen.json deleted file mode 100644 index 3a47dd50..00000000 --- a/examples/ai-agent-type-check/docs-gen.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "kind": "titledBlocks", - "docsPath": "../../docs/use-cases/ai-agent-code-exec.mdx", - "entries": [ - { - "title": "JavaScript Execution", - "examplePath": "../ai-sdk/src/index.ts" - }, - { - "title": "Type-Checked Execution", - "examplePath": "src/index.ts" - } - ] -} diff --git a/examples/ai-agent-type-check/package.json b/examples/ai-agent-type-check/package.json index aeba7ff4..48a4a1c1 100644 --- a/examples/ai-agent-type-check/package.json +++ b/examples/ai-agent-type-check/package.json @@ -5,7 +5,7 @@ "scripts": { "check-types": "tsc --noEmit -p tsconfig.json", "dev": "tsx src/index.ts", - 
"verify-docs": "docs-gen verify --config docs-gen.json" + "verify-docs": "node scripts/verify-docs.mjs" }, "dependencies": { "@ai-sdk/anthropic": "^3.0.58", @@ -15,7 +15,6 @@ "zod": "^3.24.0" }, "devDependencies": { - "@secure-exec/docs-gen": "workspace:*", "@types/node": "^22.10.2", "tsx": "^4.19.2", "typescript": "^5.7.2" diff --git a/examples/ai-agent-type-check/scripts/verify-docs.mjs b/examples/ai-agent-type-check/scripts/verify-docs.mjs new file mode 100644 index 00000000..9308067b --- /dev/null +++ b/examples/ai-agent-type-check/scripts/verify-docs.mjs @@ -0,0 +1,73 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.resolve(__dirname, "../../.."); +const docsPath = path.join(repoRoot, "docs/use-cases/ai-agent-code-exec.mdx"); + +const expectedFiles = new Map([ + ["JavaScript Execution", path.join(repoRoot, "examples/ai-sdk/src/index.ts")], + ["Type-Checked Execution", path.join(repoRoot, "examples/ai-agent-type-check/src/index.ts")], +]); + +function normalizeTitle(title) { + return title.trim().replace(/^"|"$/g, ""); +} + +function normalizeCode(source) { + const normalized = source.replace(/\r\n/g, "\n").replace(/^\n+|\n+$/g, ""); + const lines = normalized.split("\n"); + const nonEmptyLines = lines.filter((line) => line.trim().length > 0); + const minIndent = nonEmptyLines.reduce((indent, line) => { + const lineIndent = line.match(/^ */)?.[0].length ?? 
0; + return Math.min(indent, lineIndent); + }, Number.POSITIVE_INFINITY); + + if (!Number.isFinite(minIndent) || minIndent === 0) { + return normalized; + } + + return lines.map((line) => line.slice(minIndent)).join("\n"); +} + +const docsSource = await readFile(docsPath, "utf8"); +const blockPattern = /^\s*```ts(?:\s+([^\n]+))?\n([\s\S]*?)^\s*```/gm; +const docBlocks = new Map(); + +for (const match of docsSource.matchAll(blockPattern)) { + const rawTitle = match[1]; + if (!rawTitle) { + continue; + } + + const title = normalizeTitle(rawTitle); + if (!expectedFiles.has(title)) { + continue; + } + + docBlocks.set(title, normalizeCode(match[2] ?? "")); +} + +const mismatches = []; + +for (const [title, filePath] of expectedFiles) { + const fileSource = normalizeCode(await readFile(filePath, "utf8")); + const docSource = docBlocks.get(title); + + if (!docSource) { + mismatches.push(`Missing docs snippet for ${title}`); + continue; + } + + if (docSource !== fileSource) { + mismatches.push(`Snippet mismatch for ${title}`); + } +} + +if (mismatches.length > 0) { + console.error(mismatches.join("\n")); + process.exit(1); +} + +console.log("AI agent docs match example sources."); diff --git a/examples/code-mode/docs-gen.json b/examples/code-mode/docs-gen.json deleted file mode 100644 index ad40a872..00000000 --- a/examples/code-mode/docs-gen.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "kind": "contains", - "docsPath": "../../docs/use-cases/code-mode.mdx", - "required": [ - "examples/code-mode" - ] -} diff --git a/examples/code-mode/package.json b/examples/code-mode/package.json index 5128aabb..a4249597 100644 --- a/examples/code-mode/package.json +++ b/examples/code-mode/package.json @@ -5,7 +5,7 @@ "scripts": { "check-types": "tsc --noEmit -p tsconfig.json", "dev": "tsx src/index.ts", - "verify-docs": "docs-gen verify --config docs-gen.json" + "verify-docs": "node scripts/verify-docs.mjs" }, "dependencies": { "@ai-sdk/anthropic": "^3.0.58", @@ -14,7 +14,6 @@ "zod": "^3.24.0" 
}, "devDependencies": { - "@secure-exec/docs-gen": "workspace:*", "@types/node": "^22.10.2", "tsx": "^4.19.2", "typescript": "^5.7.2" diff --git a/examples/code-mode/scripts/verify-docs.mjs b/examples/code-mode/scripts/verify-docs.mjs new file mode 100644 index 00000000..04b066f4 --- /dev/null +++ b/examples/code-mode/scripts/verify-docs.mjs @@ -0,0 +1,17 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.resolve(__dirname, "../../.."); +const docsPath = path.join(repoRoot, "docs/use-cases/code-mode.mdx"); + +const docsSource = await readFile(docsPath, "utf8"); + +// Verify the docs page links to the example +if (!docsSource.includes("examples/code-mode")) { + console.error("Code Mode docs missing link to example"); + process.exit(1); +} + +console.log("Code Mode docs verified."); diff --git a/examples/features/docs-gen.json b/examples/features/docs-gen.json deleted file mode 100644 index c9b851a2..00000000 --- a/examples/features/docs-gen.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "kind": "multiFirstTsBlock", - "entries": [ - { - "docsPath": "../../docs/features/child-processes.mdx", - "examplePath": "src/child-processes.ts" - }, - { - "docsPath": "../../docs/features/filesystem.mdx", - "examplePath": "src/filesystem.ts" - }, - { - "docsPath": "../../docs/features/module-loading.mdx", - "examplePath": "src/module-loading.ts" - }, - { - "docsPath": "../../docs/features/networking.mdx", - "examplePath": "src/networking.ts" - }, - { - "docsPath": "../../docs/features/output-capture.mdx", - "examplePath": "src/output-capture.ts" - }, - { - "docsPath": "../../docs/features/permissions.mdx", - "examplePath": "src/permissions.ts" - }, - { - "docsPath": "../../docs/features/resource-limits.mdx", - "examplePath": "src/resource-limits.ts" - }, - { - "docsPath": "../../docs/features/typescript.mdx", - "examplePath": 
"src/typescript.ts" - }, - { - "docsPath": "../../docs/features/virtual-filesystem.mdx", - "examplePath": "src/virtual-filesystem.ts" - } - ], - "importReplacements": [ - { - "from": "\"../../../packages/secure-exec/src/index.ts\"", - "to": "\"secure-exec\"" - }, - { - "from": "\"../../../packages/secure-exec/src/types.ts\"", - "to": "\"secure-exec\"" - }, - { - "from": "\"../../../packages/typescript/src/index.ts\"", - "to": "\"@secure-exec/typescript\"" - } - ] -} diff --git a/examples/features/package.json b/examples/features/package.json index d68e7636..5544a6f6 100644 --- a/examples/features/package.json +++ b/examples/features/package.json @@ -4,7 +4,7 @@ "type": "module", "scripts": { "check-types": "tsc --noEmit -p tsconfig.json", - "verify-docs": "docs-gen verify --config docs-gen.json", + "verify-docs": "node scripts/verify-docs.mjs", "verify-e2e": "node scripts/verify-e2e.mjs", "test": "pnpm run verify-docs && pnpm run verify-e2e" }, @@ -13,7 +13,6 @@ "secure-exec": "workspace:*" }, "devDependencies": { - "@secure-exec/docs-gen": "workspace:*", "@types/node": "^22.10.2", "typescript": "^5.7.2" } diff --git a/examples/features/scripts/verify-docs.mjs b/examples/features/scripts/verify-docs.mjs new file mode 100644 index 00000000..985cf1a0 --- /dev/null +++ b/examples/features/scripts/verify-docs.mjs @@ -0,0 +1,68 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.resolve(__dirname, "../../.."); +const examplesRoot = path.resolve(__dirname, ".."); + +const docToExample = new Map([ + ["docs/features/child-processes.mdx", "src/child-processes.ts"], + ["docs/features/filesystem.mdx", "src/filesystem.ts"], + ["docs/features/module-loading.mdx", "src/module-loading.ts"], + ["docs/features/networking.mdx", "src/networking.ts"], + ["docs/features/output-capture.mdx", "src/output-capture.ts"], + 
["docs/features/permissions.mdx", "src/permissions.ts"], + ["docs/features/resource-limits.mdx", "src/resource-limits.ts"], + ["docs/features/typescript.mdx", "src/typescript.ts"], +]); + +function normalizeCode(source) { + const normalized = source.replace(/\r\n/g, "\n").replace(/^\n+|\n+$/g, ""); + const lines = normalized.split("\n"); + const nonEmptyLines = lines.filter((line) => line.trim().length > 0); + const minIndent = nonEmptyLines.reduce((indent, line) => { + const lineIndent = line.match(/^ */)?.[0].length ?? 0; + return Math.min(indent, lineIndent); + }, Number.POSITIVE_INFINITY); + + if (!Number.isFinite(minIndent) || minIndent === 0) { + return normalized; + } + + return lines.map((line) => line.slice(minIndent)).join("\n"); +} + +function getFirstTsBlock(source) { + const match = source.match(/^\s*```ts(?: [^\n]+)?\n([\s\S]*?)^\s*```/m); + if (!match?.[1]) { + return null; + } + + return normalizeCode(match[1]); +} + +const mismatches = []; + +for (const [docPath, examplePath] of docToExample) { + const docsSource = await readFile(path.join(repoRoot, docPath), "utf8"); + const exampleSource = await readFile(path.join(examplesRoot, examplePath), "utf8"); + const docBlock = getFirstTsBlock(docsSource); + const normalizedExample = normalizeCode(exampleSource); + + if (!docBlock) { + mismatches.push(`Missing TypeScript example in ${docPath}`); + continue; + } + + if (docBlock !== normalizedExample) { + mismatches.push(`Snippet mismatch: ${docPath}`); + } +} + +if (mismatches.length > 0) { + console.error(mismatches.join("\n")); + process.exit(1); +} + +console.log("Feature docs match example sources."); diff --git a/examples/features/scripts/verify-e2e.mjs b/examples/features/scripts/verify-e2e.mjs index 1f068755..a73b387a 100644 --- a/examples/features/scripts/verify-e2e.mjs +++ b/examples/features/scripts/verify-e2e.mjs @@ -14,7 +14,6 @@ const featureFiles = [ "src/permissions.ts", "src/resource-limits.ts", "src/typescript.ts", - 
"src/virtual-filesystem.ts", ]; function runExample(relativePath) { @@ -27,61 +26,16 @@ function runExample(relativePath) { let stdout = ""; let stderr = ""; - let settled = false; - const timeout = setTimeout(() => { - if (settled) return; - settled = true; - child.kill("SIGKILL"); - reject(new Error(`${relativePath} timed out\nstdout:\n${stdout}\nstderr:\n${stderr}`)); - }, 30_000); - - function tryGetPayload() { - const jsonLine = stdout - .trim() - .split("\n") - .map((line) => line.trim()) - .filter(Boolean) - .at(-1); - - if (!jsonLine) { - return null; - } - - try { - return JSON.parse(jsonLine); - } catch { - return null; - } - } child.stdout.on("data", (chunk) => { stdout += chunk.toString(); - - const payload = tryGetPayload(); - if (!settled && payload?.ok) { - settled = true; - clearTimeout(timeout); - child.kill("SIGKILL"); - resolve(payload); - } }); child.stderr.on("data", (chunk) => { stderr += chunk.toString(); }); - child.on("error", (error) => { - if (settled) return; - settled = true; - clearTimeout(timeout); - reject(error); - }); + child.on("error", reject); child.on("close", (code) => { - clearTimeout(timeout); - if (settled) { - return; - } - - settled = true; if (code !== 0) { reject( new Error( @@ -91,12 +45,30 @@ function runExample(relativePath) { return; } - const payload = tryGetPayload(); - if (!payload) { + const jsonLine = stdout + .trim() + .split("\n") + .map((line) => line.trim()) + .filter(Boolean) + .at(-1); + + if (!jsonLine) { reject(new Error(`${relativePath} produced no JSON result`)); return; } + let payload; + try { + payload = JSON.parse(jsonLine); + } catch (error) { + reject( + new Error( + `${relativePath} produced invalid JSON\nstdout:\n${stdout}\nstderr:\n${stderr}\n${error}`, + ), + ); + return; + } + if (!payload?.ok) { reject( new Error( diff --git a/examples/features/src/networking.ts b/examples/features/src/networking.ts index 3c162ef5..51fa5f3d 100644 --- a/examples/features/src/networking.ts +++ 
b/examples/features/src/networking.ts @@ -36,19 +36,23 @@ const runtime = new NodeRuntime({ try { const result = await runtime.exec( ` - const response = await fetch("http://127.0.0.1:${address.port}/"); - const body = await response.text(); + (async () => { + const response = await fetch("http://127.0.0.1:${address.port}/"); + const body = await response.text(); - if (!response.ok || response.status !== 200 || body !== "network-ok") { - throw new Error( - "unexpected response: " + response.status + " " + body, - ); - } + if (!response.ok || response.status !== 200 || body !== "network-ok") { + throw new Error( + "unexpected response: " + response.status + " " + body, + ); + } - console.log(JSON.stringify({ status: response.status, body })); + console.log(JSON.stringify({ status: response.status, body })); + })().catch((error) => { + console.error(error instanceof Error ? error.message : String(error)); + process.exitCode = 1; + }); `, { - filePath: "/entry.mjs", onStdio: (event) => { logs.push(`[${event.channel}] ${event.message}`); }, diff --git a/examples/features/src/typescript.ts b/examples/features/src/typescript.ts index a88be0af..ee73cfc6 100644 --- a/examples/features/src/typescript.ts +++ b/examples/features/src/typescript.ts @@ -27,7 +27,7 @@ const runtime = new NodeRuntime({ const ts = createTypeScriptTools({ systemDriver: compilerSystemDriver, runtimeDriverFactory, - compilerSpecifier: "typescript", + compilerSpecifier: "/root/node_modules/typescript/lib/typescript.js", }); try { diff --git a/examples/features/src/virtual-filesystem.ts b/examples/features/src/virtual-filesystem.ts deleted file mode 100644 index 8216034f..00000000 --- a/examples/features/src/virtual-filesystem.ts +++ /dev/null @@ -1,166 +0,0 @@ -import type { DirEntry, StatInfo, VirtualFileSystem } from "secure-exec"; -import { - NodeRuntime, - allowAllFs, - createNodeDriver, - createNodeRuntimeDriverFactory, -} from "secure-exec"; - -class ReadOnlyMapFS implements VirtualFileSystem { - 
private files: Map; - - constructor(files: Record) { - this.files = new Map(Object.entries(files)); - } - - async readFile(path: string) { - const content = this.files.get(path); - if (content === undefined) throw new Error(`ENOENT: ${path}`); - return new TextEncoder().encode(content); - } - - async readTextFile(path: string) { - const content = this.files.get(path); - if (content === undefined) throw new Error(`ENOENT: ${path}`); - return content; - } - - async readDir(path: string) { - const prefix = path === "/" ? "/" : path + "/"; - const entries = new Set(); - for (const key of this.files.keys()) { - if (!key.startsWith(prefix)) continue; - const rest = key.slice(prefix.length); - if (rest.length > 0) { - entries.add(rest.split("/")[0]); - } - } - if (entries.size === 0) throw new Error(`ENOENT: ${path}`); - return [...entries]; - } - - async readDirWithTypes(path: string): Promise { - const names = await this.readDir(path); - const prefix = path === "/" ? "/" : path + "/"; - return names.map((name) => ({ - name, - isDirectory: this.#isDir(prefix + name), - isSymbolicLink: false, - })); - } - - async writeFile() { throw new Error("EROFS: read-only filesystem"); } - async createDir() { throw new Error("EROFS: read-only filesystem"); } - async mkdir() { throw new Error("EROFS: read-only filesystem"); } - - async exists(path: string) { - return this.files.has(path) || this.#isDir(path); - } - - async stat(path: string): Promise { - const now = Date.now(); - if (this.files.has(path)) { - return { - mode: 0o444, - size: new TextEncoder().encode(this.files.get(path) ?? 
"").byteLength, - isDirectory: false, - isSymbolicLink: false, - atimeMs: now, - mtimeMs: now, - ctimeMs: now, - birthtimeMs: now, - ino: 1, - nlink: 1, - uid: 0, - gid: 0, - }; - } - if (this.#isDir(path)) { - return { - mode: 0o555, - size: 0, - isDirectory: true, - isSymbolicLink: false, - atimeMs: now, - mtimeMs: now, - ctimeMs: now, - birthtimeMs: now, - ino: 1, - nlink: 1, - uid: 0, - gid: 0, - }; - } - throw new Error(`ENOENT: ${path}`); - } - - async removeFile() { throw new Error("EROFS: read-only filesystem"); } - async removeDir() { throw new Error("EROFS: read-only filesystem"); } - async rename() { throw new Error("EROFS: read-only filesystem"); } - async realpath(path: string) { return path; } - async symlink() { throw new Error("EROFS: read-only filesystem"); } - async readlink(_path: string): Promise { throw new Error("ENOSYS: no symlinks"); } - async lstat(path: string) { return this.stat(path); } - async link() { throw new Error("EROFS: read-only filesystem"); } - async chmod() { throw new Error("EROFS: read-only filesystem"); } - async chown() { throw new Error("EROFS: read-only filesystem"); } - async utimes() { throw new Error("EROFS: read-only filesystem"); } - async truncate() { throw new Error("EROFS: read-only filesystem"); } - async pread(path: string, offset: number, length: number) { - const bytes = await this.readFile(path); - return bytes.slice(offset, offset + length); - } - - #isDir(path: string) { - const prefix = path === "/" ? 
"/" : path + "/"; - for (const key of this.files.keys()) { - if (key.startsWith(prefix)) return true; - } - return false; - } -} - -const filesystem = new ReadOnlyMapFS({ - "/config.json": JSON.stringify({ greeting: "hello from custom vfs" }), -}); -const events: string[] = []; - -const runtime = new NodeRuntime({ - systemDriver: createNodeDriver({ - filesystem, - permissions: { ...allowAllFs }, - }), - runtimeDriverFactory: createNodeRuntimeDriverFactory(), -}); - -try { - const result = await runtime.exec( - ` - const fs = require("node:fs"); - const config = JSON.parse(fs.readFileSync("/config.json", "utf8")); - console.log(config.greeting); - `, - { - onStdio: (event) => { - if (event.channel === "stdout") { - events.push(event.message); - } - }, - }, - ); - - const message = events.at(-1); - if (result.code !== 0 || message !== "hello from custom vfs") { - throw new Error(`Unexpected runtime result: ${JSON.stringify({ result, events })}`); - } - - console.log( - JSON.stringify({ - ok: true, - message, - summary: "sandbox read config data from a custom read-only virtual filesystem", - }), - ); -} finally { - runtime.dispose(); -} diff --git a/examples/hono-dev-server/docs-gen.json b/examples/hono-dev-server/docs-gen.json deleted file mode 100644 index b6ca5138..00000000 --- a/examples/hono-dev-server/docs-gen.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "kind": "namedTsBlock", - "docsPath": "../../docs/use-cases/dev-servers.mdx", - "title": "Hono Dev Server", - "examplePath": "src/index.ts" -} diff --git a/examples/hono-dev-server/package.json b/examples/hono-dev-server/package.json index 82916045..03c6917d 100644 --- a/examples/hono-dev-server/package.json +++ b/examples/hono-dev-server/package.json @@ -5,7 +5,7 @@ "scripts": { "check-types": "tsc --noEmit -p tsconfig.json", "dev": "tsx src/index.ts", - "verify-docs": "docs-gen verify --config docs-gen.json" + "verify-docs": "node scripts/verify-docs.mjs" }, "dependencies": { "@hono/node-server": "^1.19.6", @@ 
-13,7 +13,6 @@ "secure-exec": "workspace:*" }, "devDependencies": { - "@secure-exec/docs-gen": "workspace:*", "@types/node": "^22.10.2", "tsx": "^4.19.2", "typescript": "^5.7.2" diff --git a/examples/hono-dev-server/scripts/verify-docs.mjs b/examples/hono-dev-server/scripts/verify-docs.mjs new file mode 100644 index 00000000..8c8e4b37 --- /dev/null +++ b/examples/hono-dev-server/scripts/verify-docs.mjs @@ -0,0 +1,41 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.resolve(__dirname, "../../.."); +const docsPath = path.join(repoRoot, "docs/use-cases/dev-servers.mdx"); +const examplePath = path.join(repoRoot, "examples/hono-dev-server/src/index.ts"); + +function normalizeCode(source) { + const normalized = source.replace(/\r\n/g, "\n").replace(/^\n+|\n+$/g, ""); + const lines = normalized.split("\n"); + const nonEmptyLines = lines.filter((line) => line.trim().length > 0); + const minIndent = nonEmptyLines.reduce((indent, line) => { + const lineIndent = line.match(/^ */)?.[0].length ?? 0; + return Math.min(indent, lineIndent); + }, Number.POSITIVE_INFINITY); + + if (!Number.isFinite(minIndent) || minIndent === 0) { + return normalized; + } + + return lines.map((line) => line.slice(minIndent)).join("\n"); +} + +const docsSource = await readFile(docsPath, "utf8"); +const match = docsSource.match(/^\s*```ts Hono Dev Server\n([\s\S]*?)^\s*```/m); +if (!match) { + console.error("Missing docs snippet for Hono Dev Server"); + process.exit(1); +} + +const docSource = normalizeCode(match[1] ?? 
""); +const fileSource = normalizeCode(await readFile(examplePath, "utf8")); + +if (docSource !== fileSource) { + console.error("Snippet mismatch for Hono Dev Server"); + process.exit(1); +} + +console.log("Dev server docs match example source."); diff --git a/examples/hono-dev-server/src/index.ts b/examples/hono-dev-server/src/index.ts index cb0950a2..b96c232d 100644 --- a/examples/hono-dev-server/src/index.ts +++ b/examples/hono-dev-server/src/index.ts @@ -1,10 +1,6 @@ import { createServer } from "node:net"; -import path from "node:path"; -import { createRequire } from "node:module"; -import { fileURLToPath } from "node:url"; import { NodeRuntime, - allowAllFs, allowAllNetwork, createNodeDriver, createNodeRuntimeDriverFactory, @@ -13,39 +9,38 @@ import { const host = "127.0.0.1"; const port = await findOpenPort(); const logs: string[] = []; -const require = createRequire(import.meta.url); -const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../.."); -const honoEntry = toSandboxModulePath(require.resolve("hono")); -const honoNodeServerEntry = toSandboxModulePath(require.resolve("@hono/node-server")); const runtime = new NodeRuntime({ systemDriver: createNodeDriver({ - moduleAccess: { cwd: repoRoot }, useDefaultNetwork: true, - permissions: { ...allowAllFs, ...allowAllNetwork }, + permissions: { ...allowAllNetwork }, }), runtimeDriverFactory: createNodeRuntimeDriverFactory(), memoryLimit: 128, - cpuTimeLimitMs: 60_000, + cpuTimeLimitMs: 5000, }); const execPromise = runtime.exec(` - globalThis.global = globalThis; - const { Hono } = require("${honoEntry}"); - const { serve } = require("${honoNodeServerEntry}"); - - const app = new Hono(); - app.get("/", (c) => c.text("hello from sandboxed hono")); - app.get("/health", (c) => c.json({ ok: true })); + (async () => { + const { Hono } = require("hono"); + const { serve } = require("@hono/node-server"); + + const app = new Hono(); + app.get("/", (c) => c.text("hello from sandboxed hono")); + 
app.get("/health", (c) => c.json({ ok: true })); + + serve({ + fetch: app.fetch, + port: ${port}, + hostname: "${host}", + }); - serve({ - fetch: app.fetch, - port: ${port}, - hostname: "${host}", + console.log("server:listening:${port}"); + await new Promise(() => {}); + })().catch((error) => { + console.error(error); + process.exitCode = 1; }); - - console.log("server:listening:${port}"); - setInterval(() => {}, 1 << 30); `, { onStdio: (event) => logs.push(`[${event.channel}] ${event.message}`), }); @@ -64,15 +59,6 @@ try { await execPromise.catch(() => undefined); } -function toSandboxModulePath(hostPath: string): string { - const hostNodeModulesRoot = path.join(repoRoot, "node_modules"); - const relativePath = path.relative(hostNodeModulesRoot, hostPath); - if (relativePath.startsWith("..")) { - throw new Error(`Expected module inside ${hostNodeModulesRoot}: ${hostPath}`); - } - return path.posix.join("/root/node_modules", relativePath.split(path.sep).join("/")); -} - async function findOpenPort(): Promise { return new Promise((resolve, reject) => { const server = createServer(); diff --git a/examples/kitchen-sink/README.md b/examples/kitchen-sink/README.md deleted file mode 100644 index 0be9e26a..00000000 --- a/examples/kitchen-sink/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Kitchen Sink Examples - -These files mirror the examples in [docs/quickstart.mdx](../../docs/quickstart.mdx). 
- -Verify them with: - -```bash -pnpm --filter @secure-exec/example-kitchen-sink check-types -pnpm --filter @secure-exec/example-kitchen-sink verify-docs -``` diff --git a/examples/kitchen-sink/docs-gen.json b/examples/kitchen-sink/docs-gen.json deleted file mode 100644 index 4a1dd66d..00000000 --- a/examples/kitchen-sink/docs-gen.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "kind": "titledBlocks", - "docsPath": "../../docs/quickstart.mdx", - "entries": [ - { - "title": "Create Runtime", - "examplePath": "src/create-runtime.ts" - }, - { - "title": "Run & Get Exports", - "examplePath": "src/run-get-exports.ts" - }, - { - "title": "Execute & Capture Output", - "examplePath": "src/execute-capture-output.ts" - }, - { - "title": "Filesystem", - "examplePath": "src/filesystem.ts" - }, - { - "title": "Network Access", - "examplePath": "src/network-access.ts" - }, - { - "title": "ESM Modules", - "examplePath": "src/esm-modules.ts" - } - ] -} diff --git a/examples/kitchen-sink/scripts/verify-e2e.mjs b/examples/kitchen-sink/scripts/verify-e2e.mjs deleted file mode 100644 index 8807b462..00000000 --- a/examples/kitchen-sink/scripts/verify-e2e.mjs +++ /dev/null @@ -1,98 +0,0 @@ -import { spawn } from "node:child_process"; -import path from "node:path"; -import { fileURLToPath } from "node:url"; - -const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const examplesRoot = path.resolve(__dirname, ".."); - -const exampleChecks = [ - { path: "src/create-runtime.ts", contains: [] }, - { path: "src/run-get-exports.ts", contains: ["hello from secure-exec"] }, - { - path: "src/execute-capture-output.ts", - contains: ["hello from secure-exec", "exit code: 0"], - }, - { path: "src/filesystem.ts", contains: ["hello from the sandbox"] }, - { path: "src/network-access.ts", contains: ["200"] }, - { path: "src/esm-modules.ts", contains: ["42"] }, -]; - -function runExample({ path: relativePath, contains }) { - return new Promise((resolve, reject) => { - const child = spawn("pnpm", 
["exec", "tsx", relativePath], { - cwd: examplesRoot, - env: process.env, - stdio: ["ignore", "pipe", "pipe"], - }); - - let stdout = ""; - let stderr = ""; - let settled = false; - - const timeout = setTimeout(() => { - if (settled) return; - settled = true; - child.kill("SIGKILL"); - reject(new Error(`${relativePath} timed out\nstdout:\n${stdout}\nstderr:\n${stderr}`)); - }, 30_000); - - function hasExpectedOutput() { - return contains.every((value) => stdout.includes(value)); - } - - child.stdout.on("data", (chunk) => { - stdout += chunk.toString(); - - if (!settled && hasExpectedOutput()) { - settled = true; - clearTimeout(timeout); - child.kill("SIGKILL"); - resolve({ stdout, stderr }); - } - }); - - child.stderr.on("data", (chunk) => { - stderr += chunk.toString(); - }); - - child.on("error", (error) => { - if (settled) return; - settled = true; - clearTimeout(timeout); - reject(error); - }); - - child.on("close", (code) => { - clearTimeout(timeout); - if (settled) return; - settled = true; - - if (code !== 0) { - reject( - new Error( - `${relativePath} exited with code ${code}\nstdout:\n${stdout}\nstderr:\n${stderr}`, - ), - ); - return; - } - - if (!hasExpectedOutput()) { - reject( - new Error( - `${relativePath} completed without expected output\nstdout:\n${stdout}\nstderr:\n${stderr}`, - ), - ); - return; - } - - resolve({ stdout, stderr }); - }); - }); -} - -for (const example of exampleChecks) { - await runExample(example); - console.log(`${example.path}: ok`); -} - -console.log("Quickstart examples passed end-to-end."); diff --git a/examples/kitchen-sink/src/create-runtime.ts b/examples/kitchen-sink/src/create-runtime.ts deleted file mode 100644 index ec1df4b6..00000000 --- a/examples/kitchen-sink/src/create-runtime.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { - NodeRuntime, - createNodeDriver, - createNodeRuntimeDriverFactory, -} from "secure-exec"; - -const runtime = new NodeRuntime({ - systemDriver: createNodeDriver(), - runtimeDriverFactory: 
createNodeRuntimeDriverFactory(), -}); - -runtime.dispose(); diff --git a/examples/kitchen-sink/src/esm-modules.ts b/examples/kitchen-sink/src/esm-modules.ts deleted file mode 100644 index a3ea3997..00000000 --- a/examples/kitchen-sink/src/esm-modules.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { - NodeRuntime, - createNodeDriver, - createNodeRuntimeDriverFactory, -} from "secure-exec"; - -const runtime = new NodeRuntime({ - systemDriver: createNodeDriver(), - runtimeDriverFactory: createNodeRuntimeDriverFactory(), -}); - -const result = await runtime.run<{ answer: number }>( - `export const answer = 42;`, - "/entry.mjs" // .mjs extension triggers ESM mode -); - -console.log(result.exports?.answer); // 42 - -runtime.dispose(); diff --git a/examples/kitchen-sink/src/execute-capture-output.ts b/examples/kitchen-sink/src/execute-capture-output.ts deleted file mode 100644 index 68d0c3b3..00000000 --- a/examples/kitchen-sink/src/execute-capture-output.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { - NodeRuntime, - createNodeDriver, - createNodeRuntimeDriverFactory, -} from "secure-exec"; - -const runtime = new NodeRuntime({ - systemDriver: createNodeDriver(), - runtimeDriverFactory: createNodeRuntimeDriverFactory(), - onStdio: (event) => { - process.stdout.write(event.message); - }, -}); - -const result = await runtime.exec(` - console.log("hello from secure-exec"); -`); - -console.log("exit code:", result.code); // 0 - -runtime.dispose(); diff --git a/examples/kitchen-sink/src/filesystem.ts b/examples/kitchen-sink/src/filesystem.ts deleted file mode 100644 index a5b0b1e3..00000000 --- a/examples/kitchen-sink/src/filesystem.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { - NodeRuntime, - createNodeDriver, - createNodeRuntimeDriverFactory, - createInMemoryFileSystem, - allowAllFs, -} from "secure-exec"; - -const filesystem = createInMemoryFileSystem(); - -const runtime = new NodeRuntime({ - systemDriver: createNodeDriver({ - filesystem, - permissions: { ...allowAllFs }, - }), - 
runtimeDriverFactory: createNodeRuntimeDriverFactory(), -}); - -await runtime.exec(` - const fs = require("node:fs"); - fs.mkdirSync("/workspace", { recursive: true }); - fs.writeFileSync("/workspace/hello.txt", "hello from the sandbox"); -`); - -const bytes = await filesystem.readFile("/workspace/hello.txt"); -console.log(new TextDecoder().decode(bytes)); // "hello from the sandbox" - -runtime.dispose(); diff --git a/examples/kitchen-sink/src/network-access.ts b/examples/kitchen-sink/src/network-access.ts deleted file mode 100644 index 80623939..00000000 --- a/examples/kitchen-sink/src/network-access.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { - NodeRuntime, - createNodeDriver, - createNodeRuntimeDriverFactory, - allowAllNetwork, -} from "secure-exec"; - -const runtime = new NodeRuntime({ - systemDriver: createNodeDriver({ - useDefaultNetwork: true, - permissions: { ...allowAllNetwork }, - }), - runtimeDriverFactory: createNodeRuntimeDriverFactory(), - onStdio: (event) => { - process.stdout.write(event.message); - }, -}); - -await runtime.exec(` - const response = await fetch("http://example.com"); - console.log(response.status); // 200 -`, { - filePath: "/entry.mjs", // enables top-level await -}); - -runtime.dispose(); diff --git a/examples/kitchen-sink/src/run-get-exports.ts b/examples/kitchen-sink/src/run-get-exports.ts deleted file mode 100644 index 5e62ad24..00000000 --- a/examples/kitchen-sink/src/run-get-exports.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { - NodeRuntime, - createNodeDriver, - createNodeRuntimeDriverFactory, -} from "secure-exec"; - -const runtime = new NodeRuntime({ - systemDriver: createNodeDriver(), - runtimeDriverFactory: createNodeRuntimeDriverFactory(), -}); - -const result = await runtime.run<{ message: string }>( - `module.exports = { message: "hello from secure-exec" };` -); - -console.log(result.exports?.message); // "hello from secure-exec" - -runtime.dispose(); diff --git a/examples/plugin-system/docs-gen.json 
b/examples/plugin-system/docs-gen.json deleted file mode 100644 index 4b7d6cb4..00000000 --- a/examples/plugin-system/docs-gen.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "kind": "namedTsBlock", - "docsPath": "../../docs/use-cases/plugin-systems.mdx", - "title": "Plugin Runner", - "examplePath": "src/index.ts" -} diff --git a/examples/plugin-system/package.json b/examples/plugin-system/package.json index a6873e33..8c825acc 100644 --- a/examples/plugin-system/package.json +++ b/examples/plugin-system/package.json @@ -5,13 +5,12 @@ "scripts": { "check-types": "tsc --noEmit -p tsconfig.json", "dev": "tsx src/index.ts", - "verify-docs": "docs-gen verify --config docs-gen.json" + "verify-docs": "node scripts/verify-docs.mjs" }, "dependencies": { "secure-exec": "workspace:*" }, "devDependencies": { - "@secure-exec/docs-gen": "workspace:*", "@types/node": "^22.10.2", "tsx": "^4.19.2", "typescript": "^5.7.2" diff --git a/examples/plugin-system/scripts/verify-docs.mjs b/examples/plugin-system/scripts/verify-docs.mjs new file mode 100644 index 00000000..042f807b --- /dev/null +++ b/examples/plugin-system/scripts/verify-docs.mjs @@ -0,0 +1,41 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.resolve(__dirname, "../../.."); +const docsPath = path.join(repoRoot, "docs/use-cases/plugin-systems.mdx"); +const examplePath = path.join(repoRoot, "examples/plugin-system/src/index.ts"); + +function normalizeCode(source) { + const normalized = source.replace(/\r\n/g, "\n").replace(/^\n+|\n+$/g, ""); + const lines = normalized.split("\n"); + const nonEmptyLines = lines.filter((line) => line.trim().length > 0); + const minIndent = nonEmptyLines.reduce((indent, line) => { + const lineIndent = line.match(/^ */)?.[0].length ?? 
0; + return Math.min(indent, lineIndent); + }, Number.POSITIVE_INFINITY); + + if (!Number.isFinite(minIndent) || minIndent === 0) { + return normalized; + } + + return lines.map((line) => line.slice(minIndent)).join("\n"); +} + +const docsSource = await readFile(docsPath, "utf8"); +const match = docsSource.match(/^\s*```ts Plugin Runner\n([\s\S]*?)^\s*```/m); +if (!match) { + console.error("Missing docs snippet for Plugin Runner"); + process.exit(1); +} + +const docSource = normalizeCode(match[1] ?? ""); +const fileSource = normalizeCode(await readFile(examplePath, "utf8")); + +if (docSource !== fileSource) { + console.error("Snippet mismatch for Plugin Runner"); + process.exit(1); +} + +console.log("Plugin system docs match example source."); diff --git a/examples/quickstart/README.md b/examples/quickstart/README.md new file mode 100644 index 00000000..e4e51704 --- /dev/null +++ b/examples/quickstart/README.md @@ -0,0 +1,10 @@ +# Quickstart Examples + +These files mirror the examples in [docs/quickstart.mdx](../../docs/quickstart.mdx). 
+ +Verify them with: + +```bash +pnpm --filter @secure-exec/example-quickstart check-types +pnpm --filter @secure-exec/example-quickstart verify-docs +``` diff --git a/examples/kitchen-sink/package.json b/examples/quickstart/package.json similarity index 58% rename from examples/kitchen-sink/package.json rename to examples/quickstart/package.json index c18b3d3f..5d1eab63 100644 --- a/examples/kitchen-sink/package.json +++ b/examples/quickstart/package.json @@ -1,12 +1,10 @@ { - "name": "@secure-exec/example-kitchen-sink", + "name": "@secure-exec/example-quickstart", "private": true, "type": "module", "scripts": { "check-types": "tsc --noEmit -p tsconfig.json", - "verify-docs": "docs-gen verify --config docs-gen.json", - "verify-e2e": "node scripts/verify-e2e.mjs", - "test": "pnpm run verify-docs && pnpm run verify-e2e" + "verify-docs": "node scripts/verify-docs.mjs" }, "dependencies": { "@secure-exec/typescript": "workspace:*", @@ -15,7 +13,6 @@ "secure-exec": "workspace:*" }, "devDependencies": { - "@secure-exec/docs-gen": "workspace:*", "@types/node": "^22.10.2", "typescript": "^5.7.2" } diff --git a/examples/quickstart/scripts/verify-docs.mjs b/examples/quickstart/scripts/verify-docs.mjs new file mode 100644 index 00000000..405e8b9a --- /dev/null +++ b/examples/quickstart/scripts/verify-docs.mjs @@ -0,0 +1,79 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const repoRoot = path.resolve(__dirname, "../../.."); +const docsPath = path.join(repoRoot, "docs/quickstart.mdx"); + +const expectedFiles = new Map([ + ["Simple", "src/simple.ts"], + ["TypeScript", "src/typescript.ts"], + ["Logging", "src/logging.ts"], + ["Filesystem", "src/filesystem.ts"], + ["Fetch", "src/fetch.ts"], + ["HTTP Server (Hono)", "src/http-server-hono.ts"], + ["Run Command", "src/run-command.ts"], +]); + +function normalizeTitle(title) { + return 
title.trim().replace(/^"|"$/g, ""); +} + +function normalizeCode(source) { + const normalized = source.replace(/\r\n/g, "\n").replace(/^\n+|\n+$/g, ""); + const lines = normalized.split("\n"); + const nonEmptyLines = lines.filter((line) => line.trim().length > 0); + const minIndent = nonEmptyLines.reduce((indent, line) => { + const lineIndent = line.match(/^ */)?.[0].length ?? 0; + return Math.min(indent, lineIndent); + }, Number.POSITIVE_INFINITY); + + if (!Number.isFinite(minIndent) || minIndent === 0) { + return normalized; + } + + return lines.map((line) => line.slice(minIndent)).join("\n"); +} + +const docsSource = await readFile(docsPath, "utf8"); +const blockPattern = /^\s*```ts(?:\s+([^\n]+))?\n([\s\S]*?)^\s*```/gm; +const docBlocks = new Map(); + +for (const match of docsSource.matchAll(blockPattern)) { + const rawTitle = match[1]; + if (!rawTitle) { + continue; + } + + const title = normalizeTitle(rawTitle); + if (!expectedFiles.has(title)) { + continue; + } + + docBlocks.set(title, normalizeCode(match[2] ?? 
"")); +} + +const mismatches = []; + +for (const [title, relativePath] of expectedFiles) { + const filePath = path.join(path.dirname(__dirname), relativePath); + const fileSource = normalizeCode(await readFile(filePath, "utf8")); + const docSource = docBlocks.get(title); + + if (!docSource) { + mismatches.push(`Missing docs snippet for ${title}`); + continue; + } + + if (docSource !== fileSource) { + mismatches.push(`Snippet mismatch for ${title}`); + } +} + +if (mismatches.length > 0) { + console.error(mismatches.join("\n")); + process.exit(1); +} + +console.log("Quickstart docs match example sources."); diff --git a/examples/quickstart/src/fetch.ts b/examples/quickstart/src/fetch.ts new file mode 100644 index 00000000..554c0177 --- /dev/null +++ b/examples/quickstart/src/fetch.ts @@ -0,0 +1,24 @@ +import { + createKernel, + createInMemoryFileSystem, + createNodeRuntime, +} from "secure-exec"; + +const kernel = createKernel({ + filesystem: createInMemoryFileSystem(), + permissions: { + network: () => ({ allow: true }), + }, +}); +await kernel.mount(createNodeRuntime()); + +const result = await kernel.exec(`node -e " + (async () => { + const response = await fetch('https://example.com'); + console.log(response.status); + })(); +"`); + +console.log(result.stdout); // "200\n" + +await kernel.dispose(); diff --git a/examples/quickstart/src/filesystem.ts b/examples/quickstart/src/filesystem.ts new file mode 100644 index 00000000..ef532e21 --- /dev/null +++ b/examples/quickstart/src/filesystem.ts @@ -0,0 +1,25 @@ +import { + createKernel, + createInMemoryFileSystem, + createNodeRuntime, +} from "secure-exec"; + +const filesystem = createInMemoryFileSystem(); +const kernel = createKernel({ + filesystem, + permissions: { + fs: () => ({ allow: true }), + }, +}); +await kernel.mount(createNodeRuntime()); + +await kernel.exec(`node -e " + const fs = require('node:fs'); + fs.mkdirSync('/workspace', { recursive: true }); + fs.writeFileSync('/workspace/hello.txt', 'hello from 
the sandbox'); +"`); + +const bytes = await filesystem.readFile("/workspace/hello.txt"); +console.log(new TextDecoder().decode(bytes)); // "hello from the sandbox" + +await kernel.dispose(); diff --git a/examples/quickstart/src/http-server-hono.ts b/examples/quickstart/src/http-server-hono.ts new file mode 100644 index 00000000..72496098 --- /dev/null +++ b/examples/quickstart/src/http-server-hono.ts @@ -0,0 +1,50 @@ +import { + NodeRuntime, + NodeFileSystem, + allowAll, + createNodeDriver, + createNodeRuntimeDriverFactory, +} from "secure-exec"; + +const port = 3000; +const runtime = new NodeRuntime({ + systemDriver: createNodeDriver({ + filesystem: new NodeFileSystem(), + useDefaultNetwork: true, + permissions: allowAll, + }), + runtimeDriverFactory: createNodeRuntimeDriverFactory(), +}); + +// Start a Hono server inside the sandbox +const execPromise = runtime.exec(` + (async () => { + const { Hono } = require("hono"); + const { serve } = require("@hono/node-server"); + + const app = new Hono(); + app.get("/", (c) => c.text("hello from hono")); + + serve({ fetch: app.fetch, port: ${port}, hostname: "127.0.0.1" }); + await new Promise(() => {}); + })(); +`); + +// Wait for the server to be ready, then fetch from the host +const url = "http://127.0.0.1:" + port + "/"; +for (let i = 0; i < 50; i++) { + try { + const r = await runtime.network.fetch(url, { method: "GET" }); + if (r.status === 200) break; + } catch { + await new Promise((r) => setTimeout(r, 100)); + } +} + +const response = await runtime.network.fetch(url, { method: "GET" }); + +console.log(response.status); // 200 +console.log(response.body); // "hello from hono" + +await runtime.terminate(); +await execPromise.catch(() => {}); diff --git a/examples/quickstart/src/logging.ts b/examples/quickstart/src/logging.ts new file mode 100644 index 00000000..70a6ab1a --- /dev/null +++ b/examples/quickstart/src/logging.ts @@ -0,0 +1,19 @@ +import { + createKernel, + createInMemoryFileSystem, + createNodeRuntime, 
+} from "secure-exec"; + +const kernel = createKernel({ + filesystem: createInMemoryFileSystem(), +}); +await kernel.mount(createNodeRuntime()); + +const result = await kernel.exec( + "node -e \"console.log('hello from secure-exec')\"" +); + +console.log(result.stdout); // "hello from secure-exec\n" +console.log(result.stderr); // "" + +await kernel.dispose(); diff --git a/examples/quickstart/src/run-command.ts b/examples/quickstart/src/run-command.ts new file mode 100644 index 00000000..4baabdfe --- /dev/null +++ b/examples/quickstart/src/run-command.ts @@ -0,0 +1,22 @@ +import { + createKernel, + createInMemoryFileSystem, + createNodeRuntime, +} from "secure-exec"; + +const kernel = createKernel({ + filesystem: createInMemoryFileSystem(), + permissions: { + childProcess: () => ({ allow: true }), + }, +}); +await kernel.mount(createNodeRuntime()); + +const result = await kernel.exec(`node -e " + const { execSync } = require('node:child_process'); + console.log(execSync('node --version', { encoding: 'utf8' }).trim()); +"`); + +console.log(result.stdout); // e.g. 
"v22.x.x\n" + +await kernel.dispose(); diff --git a/examples/quickstart/src/simple.ts b/examples/quickstart/src/simple.ts new file mode 100644 index 00000000..629a56bd --- /dev/null +++ b/examples/quickstart/src/simple.ts @@ -0,0 +1,18 @@ +import { + createKernel, + createInMemoryFileSystem, + createNodeRuntime, +} from "secure-exec"; + +const kernel = createKernel({ + filesystem: createInMemoryFileSystem(), +}); +await kernel.mount(createNodeRuntime()); + +const result = await kernel.exec( + "node -e \"console.log('hello from secure-exec')\"" +); + +console.log(result.stdout); // "hello from secure-exec\n" + +await kernel.dispose(); diff --git a/examples/quickstart/src/typescript.ts b/examples/quickstart/src/typescript.ts new file mode 100644 index 00000000..02ba9439 --- /dev/null +++ b/examples/quickstart/src/typescript.ts @@ -0,0 +1,53 @@ +import { + NodeRuntime, + createNodeDriver, + createNodeRuntimeDriverFactory, +} from "secure-exec"; +import { createTypeScriptTools } from "@secure-exec/typescript"; + +const systemDriver = createNodeDriver(); +const runtimeDriverFactory = createNodeRuntimeDriverFactory(); + +const runtime = new NodeRuntime({ + systemDriver, + runtimeDriverFactory, +}); +const ts = createTypeScriptTools({ + systemDriver, + runtimeDriverFactory, +}); + +const sourceText = ` + const message: string = "hello from typescript"; + module.exports = { message }; +`; + +const typecheck = await ts.typecheckSource({ + sourceText, + filePath: "/root/example.ts", + compilerOptions: { + module: "commonjs", + target: "es2022", + }, +}); + +if (!typecheck.success) { + throw new Error(typecheck.diagnostics.map((d) => d.message).join("\n")); +} + +const compiled = await ts.compileSource({ + sourceText, + filePath: "/root/example.ts", + compilerOptions: { + module: "commonjs", + target: "es2022", + }, +}); + +const result = await runtime.run<{ message: string }>( + compiled.outputText ?? 
"", + "/root/example.js" +); + +const message = result.exports?.message; +// "hello from typescript" diff --git a/examples/kitchen-sink/tsconfig.json b/examples/quickstart/tsconfig.json similarity index 100% rename from examples/kitchen-sink/tsconfig.json rename to examples/quickstart/tsconfig.json diff --git a/native/v8-runtime/src/session.rs b/native/v8-runtime/src/session.rs index 55411342..a33545c5 100644 --- a/native/v8-runtime/src/session.rs +++ b/native/v8-runtime/src/session.rs @@ -684,7 +684,7 @@ pub(crate) const SYNC_BRIDGE_FNS: [&str; 32] = [ "_networkHttpServerRespondRaw", ]; -pub(crate) const ASYNC_BRIDGE_FNS: [&str; 8] = [ +pub(crate) const ASYNC_BRIDGE_FNS: [&str; 10] = [ // Module loading (async) "_dynamicImport", // Timer @@ -696,6 +696,8 @@ pub(crate) const ASYNC_BRIDGE_FNS: [&str; 8] = [ "_networkHttpServerListenRaw", "_networkHttpServerCloseRaw", "_networkHttpServerWaitRaw", + "_networkHttp2ServerWaitRaw", + "_networkHttp2SessionWaitRaw", ]; /// Run the session event loop: dispatch incoming messages to V8. 
diff --git a/native/v8-runtime/src/snapshot.rs b/native/v8-runtime/src/snapshot.rs index 4f462463..1a3c1ba2 100644 --- a/native/v8-runtime/src/snapshot.rs +++ b/native/v8-runtime/src/snapshot.rs @@ -768,7 +768,8 @@ mod tests { var asyncFns = ['_dynamicImport', '_scheduleTimer', '_networkFetchRaw', '_networkDnsLookupRaw', '_networkHttpRequestRaw', '_networkHttpServerListenRaw', - '_networkHttpServerCloseRaw']; + '_networkHttpServerCloseRaw', '_networkHttpServerWaitRaw', + '_networkHttp2ServerWaitRaw', '_networkHttp2SessionWaitRaw']; for (var i = 0; i < asyncFns.length; i++) { if (typeof globalThis[asyncFns[i]] !== 'function') { throw new Error('Missing async stub: ' + asyncFns[i] + diff --git a/native/v8-runtime/src/stream.rs b/native/v8-runtime/src/stream.rs index 004fefef..1745aeff 100644 --- a/native/v8-runtime/src/stream.rs +++ b/native/v8-runtime/src/stream.rs @@ -7,6 +7,7 @@ /// function is called: /// - "child_stdout", "child_stderr", "child_exit" → _childProcessDispatch /// - "http_request" → _httpServerDispatch +/// - "http2" → _http2Dispatch /// - "timer" → _timerDispatch pub fn dispatch_stream_event(scope: &mut v8::HandleScope, event_type: &str, payload: &[u8]) { // Look up the dispatch function on the global object @@ -16,6 +17,7 @@ pub fn dispatch_stream_event(scope: &mut v8::HandleScope, event_type: &str, payl let dispatch_name = match event_type { "child_stdout" | "child_stderr" | "child_exit" => "_childProcessDispatch", "http_request" => "_httpServerDispatch", + "http2" => "_http2Dispatch", "timer" => "_timerDispatch", _ => return, // Unknown event type — ignore }; diff --git a/packages/core/isolate-runtime/src/inject/require-setup.ts b/packages/core/isolate-runtime/src/inject/require-setup.ts index d3d8bfbb..23a91ba6 100644 --- a/packages/core/isolate-runtime/src/inject/require-setup.ts +++ b/packages/core/isolate-runtime/src/inject/require-setup.ts @@ -14,26 +14,57 @@ if ( typeof globalThis.AbortController === 'undefined' || - typeof 
globalThis.AbortSignal === 'undefined' + typeof globalThis.AbortSignal === 'undefined' || + typeof globalThis.AbortSignal?.prototype?.addEventListener !== 'function' || + typeof globalThis.AbortSignal?.prototype?.removeEventListener !== 'function' ) { + const abortSignalState = new WeakMap(); + function getAbortSignalState(signal) { + const state = abortSignalState.get(signal); + if (!state) { + throw new Error('Invalid AbortSignal'); + } + return state; + } + class AbortSignal { constructor() { - this.aborted = false; - this.reason = undefined; this.onabort = null; - this._listeners = []; + abortSignalState.set(this, { + aborted: false, + reason: undefined, + listeners: [], + }); + } + + get aborted() { + return getAbortSignalState(this).aborted; + } + + get reason() { + return getAbortSignalState(this).reason; + } + + get _listeners() { + return getAbortSignalState(this).listeners.slice(); + } + + getEventListeners(type) { + if (type !== 'abort') return []; + return getAbortSignalState(this).listeners.slice(); } addEventListener(type, listener) { if (type !== 'abort' || typeof listener !== 'function') return; - this._listeners.push(listener); + getAbortSignalState(this).listeners.push(listener); } removeEventListener(type, listener) { if (type !== 'abort' || typeof listener !== 'function') return; - const index = this._listeners.indexOf(listener); + const listeners = getAbortSignalState(this).listeners; + const index = listeners.indexOf(listener); if (index !== -1) { - this._listeners.splice(index, 1); + listeners.splice(index, 1); } } @@ -44,7 +75,7 @@ this.onabort.call(this, event); } catch {} } - const listeners = this._listeners.slice(); + const listeners = getAbortSignalState(this).listeners.slice(); for (const listener of listeners) { try { listener.call(this, event); @@ -60,9 +91,10 @@ } abort(reason) { - if (this.signal.aborted) return; - this.signal.aborted = true; - this.signal.reason = reason; + const state = getAbortSignalState(this.signal); + if 
(state.aborted) return; + state.aborted = true; + state.reason = reason; this.signal.dispatchEvent({ type: 'abort' }); } } @@ -71,6 +103,18 @@ __requireExposeCustomGlobal('AbortController', AbortController); } + if ( + typeof globalThis.AbortSignal === 'function' && + typeof globalThis.AbortController === 'function' && + typeof globalThis.AbortSignal.abort !== 'function' + ) { + globalThis.AbortSignal.abort = function abort(reason) { + const controller = new globalThis.AbortController(); + controller.abort(reason); + return controller.signal; + }; + } + if (typeof globalThis.structuredClone !== 'function') { function structuredClonePolyfill(value) { if (value === null || typeof value !== 'object') { @@ -213,6 +257,26 @@ })(enc); } } + + if (typeof BufferCtor.allocUnsafe === 'function' && !BufferCtor.allocUnsafe._secureExecPatched) { + var _origAllocUnsafe = BufferCtor.allocUnsafe; + BufferCtor.allocUnsafe = function(size) { + try { + return _origAllocUnsafe.apply(this, arguments); + } catch (error) { + if ( + error && + error.name === 'RangeError' && + typeof size === 'number' && + size > maxLength + ) { + throw new Error('Array buffer allocation failed'); + } + throw error; + } + }; + BufferCtor.allocUnsafe._secureExecPatched = true; + } } return result; @@ -229,6 +293,177 @@ } if (name === 'util') { + if ( + typeof result.inspect === 'function' && + typeof result.inspect.custom === 'undefined' + ) { + result.inspect.custom = Symbol.for('nodejs.util.inspect.custom'); + } + if ( + typeof result.inspect === 'function' && + !result.inspect._secureExecPatchedCustomInspect + ) { + const customInspectSymbol = result.inspect.custom || Symbol.for('nodejs.util.inspect.custom'); + const originalInspect = result.inspect; + const formatObjectKey = function(key) { + return /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(key) + ? 
key + : originalInspect(key); + }; + const containsCustomInspectable = function(value, depth, seen) { + if (value === null) { + return false; + } + if (typeof value !== 'object' && typeof value !== 'function') { + return false; + } + if (typeof value[customInspectSymbol] === 'function') { + return true; + } + if (depth < 0 || seen.has(value)) { + return false; + } + seen.add(value); + if (Array.isArray(value)) { + for (const entry of value) { + if (containsCustomInspectable(entry, depth - 1, seen)) { + seen.delete(value); + return true; + } + } + seen.delete(value); + return false; + } + for (const key of Object.keys(value)) { + if (containsCustomInspectable(value[key], depth - 1, seen)) { + seen.delete(value); + return true; + } + } + seen.delete(value); + return false; + }; + const inspectWithCustom = function(value, depth, options, seen) { + if (value === null || (typeof value !== 'object' && typeof value !== 'function')) { + return originalInspect(value, options); + } + if (seen.has(value)) { + return '[Circular]'; + } + if (typeof value[customInspectSymbol] === 'function') { + return value[customInspectSymbol](depth, options, result.inspect); + } + if (depth < 0) { + return originalInspect(value, options); + } + seen.add(value); + if (Array.isArray(value)) { + const items = value.map((entry) => inspectWithCustom(entry, depth - 1, options, seen)); + seen.delete(value); + return `[ ${items.join(', ')} ]`; + } + const proto = Object.getPrototypeOf(value); + if (proto === Object.prototype || proto === null) { + const entries = Object.keys(value).map( + (key) => `${formatObjectKey(key)}: ${inspectWithCustom(value[key], depth - 1, options, seen)}` + ); + seen.delete(value); + return `{ ${entries.join(', ')} }`; + } + seen.delete(value); + return originalInspect(value, options); + }; + result.inspect = function inspect(value, options) { + const inspectOptions = + typeof options === 'object' && options !== null ? 
options : {}; + const depth = + typeof inspectOptions.depth === 'number' ? inspectOptions.depth : 2; + if (!containsCustomInspectable(value, depth, new Set())) { + return originalInspect.call(this, value, options); + } + return inspectWithCustom(value, depth, inspectOptions, new Set()); + }; + result.inspect.custom = customInspectSymbol; + result.inspect._secureExecPatchedCustomInspect = true; + } + return result; + } + + if (name === 'events') { + if (typeof result.getEventListeners !== 'function') { + result.getEventListeners = function getEventListeners(target, eventName) { + if (target && typeof target.listeners === 'function') { + return target.listeners(eventName); + } + if ( + target && + typeof target.getEventListeners === 'function' + ) { + return target.getEventListeners(eventName); + } + if ( + target && + eventName === 'abort' && + Array.isArray(target._listeners) + ) { + return target._listeners.slice(); + } + return []; + }; + } + return result; + } + + if (name === 'stream') { + const ReadableCtor = result.Readable; + const readableFrom = + typeof ReadableCtor === 'function' ? ReadableCtor.from : undefined; + const readableFromSource = + typeof readableFrom === 'function' + ? 
Function.prototype.toString.call(readableFrom) + : ''; + const hasBrowserReadableFromStub = + readableFromSource.indexOf( + 'Readable.from is not available in the browser', + ) !== -1 || + readableFromSource.indexOf('require_from_browser') !== -1; + if ( + typeof ReadableCtor === 'function' && + (typeof readableFrom !== 'function' || hasBrowserReadableFromStub) + ) { + ReadableCtor.from = function from(iterable, options) { + const readable = new ReadableCtor(Object.assign({ read() {} }, options || {})); + Promise.resolve().then(async function() { + try { + if ( + iterable && + typeof iterable[Symbol.asyncIterator] === 'function' + ) { + for await (const chunk of iterable) { + readable.push(chunk); + } + } else if ( + iterable && + typeof iterable[Symbol.iterator] === 'function' + ) { + for (const chunk of iterable) { + readable.push(chunk); + } + } else { + readable.push(iterable); + } + readable.push(null); + } catch (error) { + if (typeof readable.destroy === 'function') { + readable.destroy(error); + } else { + readable.emit('error', error); + } + } + }); + return readable; + }; + } return result; } @@ -300,10 +535,8 @@ // constants object. Node.js zlib.constants bundles all Z_ values plus // DEFLATE (1), INFLATE (2), GZIP (3), DEFLATERAW (4), INFLATERAW (5), // UNZIP (6), GUNZIP (7). Packages like ssh2 destructure constants. - var zlibConstants = typeof result.constants === 'object' && result.constants !== null - ? result.constants - : {}; if (typeof result.constants !== 'object' || result.constants === null) { + var zlibConstants = {}; var constKeys = Object.keys(result); for (var ci = 0; ci < constKeys.length; ci++) { var ck = constKeys[ci]; @@ -311,32 +544,6 @@ zlibConstants[ck] = result[ck]; } } - // Add Z_* constants that esbuild may strip from the browserify-zlib bundle. 
- if (typeof zlibConstants.Z_NO_FLUSH !== 'number') zlibConstants.Z_NO_FLUSH = 0; - if (typeof zlibConstants.Z_PARTIAL_FLUSH !== 'number') zlibConstants.Z_PARTIAL_FLUSH = 1; - if (typeof zlibConstants.Z_SYNC_FLUSH !== 'number') zlibConstants.Z_SYNC_FLUSH = 2; - if (typeof zlibConstants.Z_FULL_FLUSH !== 'number') zlibConstants.Z_FULL_FLUSH = 3; - if (typeof zlibConstants.Z_FINISH !== 'number') zlibConstants.Z_FINISH = 4; - if (typeof zlibConstants.Z_BLOCK !== 'number') zlibConstants.Z_BLOCK = 5; - if (typeof zlibConstants.Z_TREES !== 'number') zlibConstants.Z_TREES = 6; - if (typeof zlibConstants.Z_OK !== 'number') zlibConstants.Z_OK = 0; - if (typeof zlibConstants.Z_STREAM_END !== 'number') zlibConstants.Z_STREAM_END = 1; - if (typeof zlibConstants.Z_NEED_DICT !== 'number') zlibConstants.Z_NEED_DICT = 2; - if (typeof zlibConstants.Z_ERRNO !== 'number') zlibConstants.Z_ERRNO = -1; - if (typeof zlibConstants.Z_STREAM_ERROR !== 'number') zlibConstants.Z_STREAM_ERROR = -2; - if (typeof zlibConstants.Z_DATA_ERROR !== 'number') zlibConstants.Z_DATA_ERROR = -3; - if (typeof zlibConstants.Z_MEM_ERROR !== 'number') zlibConstants.Z_MEM_ERROR = -4; - if (typeof zlibConstants.Z_BUF_ERROR !== 'number') zlibConstants.Z_BUF_ERROR = -5; - if (typeof zlibConstants.Z_VERSION_ERROR !== 'number') zlibConstants.Z_VERSION_ERROR = -6; - if (typeof zlibConstants.Z_NO_COMPRESSION !== 'number') zlibConstants.Z_NO_COMPRESSION = 0; - if (typeof zlibConstants.Z_BEST_SPEED !== 'number') zlibConstants.Z_BEST_SPEED = 1; - if (typeof zlibConstants.Z_BEST_COMPRESSION !== 'number') zlibConstants.Z_BEST_COMPRESSION = 9; - if (typeof zlibConstants.Z_DEFAULT_COMPRESSION !== 'number') zlibConstants.Z_DEFAULT_COMPRESSION = -1; - if (typeof zlibConstants.Z_FILTERED !== 'number') zlibConstants.Z_FILTERED = 1; - if (typeof zlibConstants.Z_HUFFMAN_ONLY !== 'number') zlibConstants.Z_HUFFMAN_ONLY = 2; - if (typeof zlibConstants.Z_RLE !== 'number') zlibConstants.Z_RLE = 3; - if (typeof zlibConstants.Z_FIXED !== 
'number') zlibConstants.Z_FIXED = 4; - if (typeof zlibConstants.Z_DEFAULT_STRATEGY !== 'number') zlibConstants.Z_DEFAULT_STRATEGY = 0; // Add mode constants that Node.js exposes but browserify-zlib does not. if (typeof zlibConstants.DEFLATE !== 'number') zlibConstants.DEFLATE = 1; if (typeof zlibConstants.INFLATE !== 'number') zlibConstants.INFLATE = 2; @@ -345,8 +552,8 @@ if (typeof zlibConstants.INFLATERAW !== 'number') zlibConstants.INFLATERAW = 5; if (typeof zlibConstants.UNZIP !== 'number') zlibConstants.UNZIP = 6; if (typeof zlibConstants.GUNZIP !== 'number') zlibConstants.GUNZIP = 7; + result.constants = zlibConstants; } - result.constants = zlibConstants; return result; } @@ -535,8 +742,6 @@ this._algorithm = algorithm; if (typeof key === 'string') { this._key = Buffer.from(key, 'utf8'); - } else if (key && typeof key === 'object' && key._raw !== undefined) { - this._key = Buffer.from(key._raw, 'base64'); } else if (key && typeof key === 'object' && key._pem !== undefined) { // SandboxKeyObject — extract underlying key material this._key = Buffer.from(key._pem, 'utf8'); @@ -1623,18 +1828,9 @@ } function scheduleCryptoCallback(callback, args) { - var invoke = function() { + setTimeout(function() { callback.apply(undefined, args); - }; - if (typeof process !== 'undefined' && process && typeof process.nextTick === 'function') { - process.nextTick(invoke); - return; - } - if (typeof queueMicrotask === 'function') { - queueMicrotask(invoke); - return; - } - Promise.resolve().then(invoke); + }, 0); } function shouldThrowCryptoValidationError(error) { @@ -2467,7 +2663,6 @@ 'diagnostics_channel', ]); const _unsupportedCoreModules = new Set([ - 'dgram', 'cluster', 'wasi', 'inspector', @@ -2658,6 +2853,56 @@ return promisesModule; } + if (name === 'stream/consumers') { + if (__internalModuleCache['stream/consumers']) return __internalModuleCache['stream/consumers']; + const consumersModule = {}; + consumersModule.buffer = async function buffer(stream) { + const 
chunks = []; + const pushChunk = function(chunk) { + if (typeof chunk === 'string') { + chunks.push(Buffer.from(chunk)); + } else if (Buffer.isBuffer(chunk)) { + chunks.push(chunk); + } else if (ArrayBuffer.isView(chunk)) { + chunks.push(Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)); + } else if (chunk instanceof ArrayBuffer) { + chunks.push(Buffer.from(new Uint8Array(chunk))); + } else { + chunks.push(Buffer.from(String(chunk))); + } + }; + if (stream && typeof stream[Symbol.asyncIterator] === 'function') { + for await (const chunk of stream) { + pushChunk(chunk); + } + return Buffer.concat(chunks); + } + return new Promise(function(resolve, reject) { + stream.on('data', pushChunk); + stream.on('end', function() { + resolve(Buffer.concat(chunks)); + }); + stream.on('error', reject); + }); + }; + consumersModule.text = async function text(stream) { + return (await consumersModule.buffer(stream)).toString('utf8'); + }; + consumersModule.json = async function json(stream) { + return JSON.parse(await consumersModule.text(stream)); + }; + consumersModule.arrayBuffer = async function arrayBuffer(stream) { + const buffer = await consumersModule.buffer(stream); + return buffer.buffer.slice( + buffer.byteOffset, + buffer.byteOffset + buffer.byteLength, + ); + }; + __internalModuleCache['stream/consumers'] = consumersModule; + _debugRequire('loaded', name, 'stream-consumers-special'); + return consumersModule; + } + // Special handling for child_process module if (name === 'child_process') { if (__internalModuleCache['child_process']) return __internalModuleCache['child_process']; @@ -2701,6 +2946,17 @@ return httpAgentModule; } + if (name === '_http_common') { + if (__internalModuleCache['_http_common']) return __internalModuleCache['_http_common']; + const httpCommonModule = { + _checkIsHttpToken: _httpModule._checkIsHttpToken, + _checkInvalidHeaderChar: _httpModule._checkInvalidHeaderChar, + }; + __internalModuleCache['_http_common'] = httpCommonModule; 
+ _debugRequire('loaded', name, 'http-common-special'); + return httpCommonModule; + } + // Special handling for https module if (name === 'https') { if (__internalModuleCache['https']) return __internalModuleCache['https']; @@ -2717,6 +2973,24 @@ return _http2Module; } + if (name === 'internal/http2/util') { + if (__internalModuleCache[name]) return __internalModuleCache[name]; + class NghttpError extends Error { + constructor(message) { + super(message); + this.name = 'Error'; + this.code = 'ERR_HTTP2_ERROR'; + } + } + const utilModule = { + kSocket: Symbol.for('secure-exec.http2.kSocket'), + NghttpError, + }; + __internalModuleCache[name] = utilModule; + _debugRequire('loaded', name, 'http2-util-special'); + return utilModule; + } + // Special handling for dns module if (name === 'dns') { if (__internalModuleCache['dns']) return __internalModuleCache['dns']; @@ -2725,6 +2999,14 @@ return _dnsModule; } + // Special handling for dgram module + if (name === 'dgram') { + if (__internalModuleCache['dgram']) return __internalModuleCache['dgram']; + __internalModuleCache['dgram'] = _dgramModule; + _debugRequire('loaded', name, 'dgram-special'); + return _dgramModule; + } + // Special handling for os module if (name === 'os') { if (__internalModuleCache['os']) return __internalModuleCache['os']; diff --git a/packages/core/src/generated/isolate-runtime.ts b/packages/core/src/generated/isolate-runtime.ts index 38fe0491..b5640811 100644 --- a/packages/core/src/generated/isolate-runtime.ts +++ b/packages/core/src/generated/isolate-runtime.ts @@ -11,7 +11,7 @@ export const ISOLATE_RUNTIME_SOURCES = { "initCommonjsModuleGlobals": "\"use strict\";\n(() => {\n // ../core/isolate-runtime/src/common/global-exposure.ts\n function defineRuntimeGlobalBinding(name, value, mutable) {\n Object.defineProperty(globalThis, name, {\n value,\n writable: mutable,\n configurable: mutable,\n enumerable: true\n });\n }\n function createRuntimeGlobalExposer(mutable) {\n return (name, value) => 
{\n defineRuntimeGlobalBinding(name, value, mutable);\n };\n }\n function getRuntimeExposeMutableGlobal() {\n if (typeof globalThis.__runtimeExposeMutableGlobal === \"function\") {\n return globalThis.__runtimeExposeMutableGlobal;\n }\n return createRuntimeGlobalExposer(true);\n }\n\n // ../core/isolate-runtime/src/inject/init-commonjs-module-globals.ts\n var __runtimeExposeMutableGlobal = getRuntimeExposeMutableGlobal();\n __runtimeExposeMutableGlobal(\"module\", { exports: {} });\n __runtimeExposeMutableGlobal(\"exports\", globalThis.module.exports);\n})();\n", "overrideProcessCwd": "\"use strict\";\n(() => {\n // ../core/isolate-runtime/src/inject/override-process-cwd.ts\n var __cwd = globalThis.__runtimeProcessCwdOverride;\n if (typeof __cwd === \"string\") {\n process.cwd = () => __cwd;\n }\n})();\n", "overrideProcessEnv": "\"use strict\";\n(() => {\n // ../core/isolate-runtime/src/inject/override-process-env.ts\n var __envPatch = globalThis.__runtimeProcessEnvOverride;\n if (__envPatch && typeof __envPatch === \"object\") {\n Object.assign(process.env, __envPatch);\n }\n})();\n", - "requireSetup": "\"use strict\";\n(() => {\n // ../core/isolate-runtime/src/inject/require-setup.ts\n var __requireExposeCustomGlobal = typeof globalThis.__runtimeExposeCustomGlobal === \"function\" ? 
globalThis.__runtimeExposeCustomGlobal : function exposeCustomGlobal(name2, value) {\n Object.defineProperty(globalThis, name2, {\n value,\n writable: false,\n configurable: false,\n enumerable: true\n });\n };\n if (typeof globalThis.AbortController === \"undefined\" || typeof globalThis.AbortSignal === \"undefined\") {\n class AbortSignal {\n constructor() {\n this.aborted = false;\n this.reason = void 0;\n this.onabort = null;\n this._listeners = [];\n }\n addEventListener(type, listener) {\n if (type !== \"abort\" || typeof listener !== \"function\") return;\n this._listeners.push(listener);\n }\n removeEventListener(type, listener) {\n if (type !== \"abort\" || typeof listener !== \"function\") return;\n const index = this._listeners.indexOf(listener);\n if (index !== -1) {\n this._listeners.splice(index, 1);\n }\n }\n dispatchEvent(event) {\n if (!event || event.type !== \"abort\") return false;\n if (typeof this.onabort === \"function\") {\n try {\n this.onabort.call(this, event);\n } catch {\n }\n }\n const listeners = this._listeners.slice();\n for (const listener of listeners) {\n try {\n listener.call(this, event);\n } catch {\n }\n }\n return true;\n }\n }\n class AbortController {\n constructor() {\n this.signal = new AbortSignal();\n }\n abort(reason) {\n if (this.signal.aborted) return;\n this.signal.aborted = true;\n this.signal.reason = reason;\n this.signal.dispatchEvent({ type: \"abort\" });\n }\n }\n __requireExposeCustomGlobal(\"AbortSignal\", AbortSignal);\n __requireExposeCustomGlobal(\"AbortController\", AbortController);\n }\n if (typeof globalThis.structuredClone !== \"function\") {\n let structuredClonePolyfill = function(value) {\n if (value === null || typeof value !== \"object\") {\n return value;\n }\n if (value instanceof ArrayBuffer) {\n return value.slice(0);\n }\n if (ArrayBuffer.isView(value)) {\n if (value instanceof Uint8Array) {\n return new Uint8Array(value);\n }\n return new value.constructor(value);\n }\n return 
JSON.parse(JSON.stringify(value));\n };\n structuredClonePolyfill2 = structuredClonePolyfill;\n __requireExposeCustomGlobal(\"structuredClone\", structuredClonePolyfill);\n }\n var structuredClonePolyfill2;\n if (typeof globalThis.SharedArrayBuffer === \"undefined\") {\n globalThis.SharedArrayBuffer = ArrayBuffer;\n __requireExposeCustomGlobal(\"SharedArrayBuffer\", ArrayBuffer);\n }\n if (typeof globalThis.btoa !== \"function\") {\n __requireExposeCustomGlobal(\"btoa\", function btoa(input) {\n return Buffer.from(String(input), \"binary\").toString(\"base64\");\n });\n }\n if (typeof globalThis.atob !== \"function\") {\n __requireExposeCustomGlobal(\"atob\", function atob(input) {\n return Buffer.from(String(input), \"base64\").toString(\"binary\");\n });\n }\n function _dirname(p) {\n const lastSlash = p.lastIndexOf(\"/\");\n if (lastSlash === -1) return \".\";\n if (lastSlash === 0) return \"/\";\n return p.slice(0, lastSlash);\n }\n if (typeof globalThis.TextDecoder === \"function\") {\n _OrigTextDecoder = globalThis.TextDecoder;\n _utf8Aliases = {\n \"utf-8\": true,\n \"utf8\": true,\n \"unicode-1-1-utf-8\": true,\n \"ascii\": true,\n \"us-ascii\": true,\n \"iso-8859-1\": true,\n \"latin1\": true,\n \"binary\": true,\n \"windows-1252\": true,\n \"utf-16le\": true,\n \"utf-16\": true,\n \"ucs-2\": true,\n \"ucs2\": true\n };\n globalThis.TextDecoder = function TextDecoder(encoding, options) {\n var label = encoding !== void 0 ? 
String(encoding).toLowerCase().replace(/\\s/g, \"\") : \"utf-8\";\n if (_utf8Aliases[label]) {\n return new _OrigTextDecoder(\"utf-8\", options);\n }\n return new _OrigTextDecoder(encoding, options);\n };\n globalThis.TextDecoder.prototype = _OrigTextDecoder.prototype;\n }\n var _OrigTextDecoder;\n var _utf8Aliases;\n function _patchPolyfill(name2, result2) {\n if (typeof result2 !== \"object\" && typeof result2 !== \"function\" || result2 === null) {\n return result2;\n }\n if (name2 === \"buffer\") {\n const maxLength = typeof result2.kMaxLength === \"number\" ? result2.kMaxLength : 2147483647;\n const maxStringLength = typeof result2.kStringMaxLength === \"number\" ? result2.kStringMaxLength : 536870888;\n if (typeof result2.constants !== \"object\" || result2.constants === null) {\n result2.constants = {};\n }\n if (typeof result2.constants.MAX_LENGTH !== \"number\") {\n result2.constants.MAX_LENGTH = maxLength;\n }\n if (typeof result2.constants.MAX_STRING_LENGTH !== \"number\") {\n result2.constants.MAX_STRING_LENGTH = maxStringLength;\n }\n if (typeof result2.kMaxLength !== \"number\") {\n result2.kMaxLength = maxLength;\n }\n if (typeof result2.kStringMaxLength !== \"number\") {\n result2.kStringMaxLength = maxStringLength;\n }\n const BufferCtor = result2.Buffer;\n if ((typeof BufferCtor === \"function\" || typeof BufferCtor === \"object\") && BufferCtor !== null) {\n if (typeof BufferCtor.kMaxLength !== \"number\") {\n BufferCtor.kMaxLength = maxLength;\n }\n if (typeof BufferCtor.kStringMaxLength !== \"number\") {\n BufferCtor.kStringMaxLength = maxStringLength;\n }\n if (typeof BufferCtor.constants !== \"object\" || BufferCtor.constants === null) {\n BufferCtor.constants = result2.constants;\n }\n var proto = BufferCtor.prototype;\n if (proto && typeof proto.utf8Slice !== \"function\") {\n var encodings = [\"utf8\", \"latin1\", \"ascii\", \"hex\", \"base64\", \"ucs2\", \"utf16le\"];\n for (var ei = 0; ei < encodings.length; ei++) {\n var enc = 
encodings[ei];\n (function(e) {\n if (typeof proto[e + \"Slice\"] !== \"function\") {\n proto[e + \"Slice\"] = function(start, end) {\n return this.toString(e, start, end);\n };\n }\n if (typeof proto[e + \"Write\"] !== \"function\") {\n proto[e + \"Write\"] = function(string, offset, length) {\n return this.write(string, offset, length, e);\n };\n }\n })(enc);\n }\n }\n }\n return result2;\n }\n if (name2 === \"util\" && typeof result2.formatWithOptions === \"undefined\" && typeof result2.format === \"function\") {\n result2.formatWithOptions = function formatWithOptions(inspectOptions, ...args) {\n return result2.format.apply(null, args);\n };\n }\n if (name2 === \"util\") {\n return result2;\n }\n if (name2 === \"url\") {\n const OriginalURL = result2.URL;\n if (typeof OriginalURL !== \"function\" || OriginalURL._patched) {\n return result2;\n }\n const PatchedURL = function PatchedURL2(url, base) {\n if (typeof url === \"string\" && url.startsWith(\"file:\") && !url.startsWith(\"file://\") && base === void 0) {\n if (typeof process !== \"undefined\" && typeof process.cwd === \"function\") {\n const cwd = process.cwd();\n if (cwd) {\n try {\n return new OriginalURL(url, \"file://\" + cwd + \"/\");\n } catch (e) {\n }\n }\n }\n }\n return base !== void 0 ? 
new OriginalURL(url, base) : new OriginalURL(url);\n };\n Object.keys(OriginalURL).forEach(function(key) {\n try {\n PatchedURL[key] = OriginalURL[key];\n } catch {\n }\n });\n Object.setPrototypeOf(PatchedURL, OriginalURL);\n PatchedURL.prototype = OriginalURL.prototype;\n PatchedURL._patched = true;\n const descriptor = Object.getOwnPropertyDescriptor(result2, \"URL\");\n if (descriptor && descriptor.configurable !== true && descriptor.writable !== true && typeof descriptor.set !== \"function\") {\n return result2;\n }\n try {\n result2.URL = PatchedURL;\n } catch {\n try {\n Object.defineProperty(result2, \"URL\", {\n value: PatchedURL,\n writable: true,\n configurable: true,\n enumerable: descriptor?.enumerable ?? true\n });\n } catch {\n }\n }\n return result2;\n }\n if (name2 === \"zlib\") {\n var zlibConstants = typeof result2.constants === \"object\" && result2.constants !== null ? result2.constants : {};\n if (typeof result2.constants !== \"object\" || result2.constants === null) {\n var constKeys = Object.keys(result2);\n for (var ci = 0; ci < constKeys.length; ci++) {\n var ck = constKeys[ci];\n if (ck.indexOf(\"Z_\") === 0 && typeof result2[ck] === \"number\") {\n zlibConstants[ck] = result2[ck];\n }\n }\n if (typeof zlibConstants.Z_NO_FLUSH !== \"number\") zlibConstants.Z_NO_FLUSH = 0;\n if (typeof zlibConstants.Z_PARTIAL_FLUSH !== \"number\") zlibConstants.Z_PARTIAL_FLUSH = 1;\n if (typeof zlibConstants.Z_SYNC_FLUSH !== \"number\") zlibConstants.Z_SYNC_FLUSH = 2;\n if (typeof zlibConstants.Z_FULL_FLUSH !== \"number\") zlibConstants.Z_FULL_FLUSH = 3;\n if (typeof zlibConstants.Z_FINISH !== \"number\") zlibConstants.Z_FINISH = 4;\n if (typeof zlibConstants.Z_BLOCK !== \"number\") zlibConstants.Z_BLOCK = 5;\n if (typeof zlibConstants.Z_TREES !== \"number\") zlibConstants.Z_TREES = 6;\n if (typeof zlibConstants.Z_OK !== \"number\") zlibConstants.Z_OK = 0;\n if (typeof zlibConstants.Z_STREAM_END !== \"number\") zlibConstants.Z_STREAM_END = 1;\n if (typeof 
zlibConstants.Z_NEED_DICT !== \"number\") zlibConstants.Z_NEED_DICT = 2;\n if (typeof zlibConstants.Z_ERRNO !== \"number\") zlibConstants.Z_ERRNO = -1;\n if (typeof zlibConstants.Z_STREAM_ERROR !== \"number\") zlibConstants.Z_STREAM_ERROR = -2;\n if (typeof zlibConstants.Z_DATA_ERROR !== \"number\") zlibConstants.Z_DATA_ERROR = -3;\n if (typeof zlibConstants.Z_MEM_ERROR !== \"number\") zlibConstants.Z_MEM_ERROR = -4;\n if (typeof zlibConstants.Z_BUF_ERROR !== \"number\") zlibConstants.Z_BUF_ERROR = -5;\n if (typeof zlibConstants.Z_VERSION_ERROR !== \"number\") zlibConstants.Z_VERSION_ERROR = -6;\n if (typeof zlibConstants.Z_NO_COMPRESSION !== \"number\") zlibConstants.Z_NO_COMPRESSION = 0;\n if (typeof zlibConstants.Z_BEST_SPEED !== \"number\") zlibConstants.Z_BEST_SPEED = 1;\n if (typeof zlibConstants.Z_BEST_COMPRESSION !== \"number\") zlibConstants.Z_BEST_COMPRESSION = 9;\n if (typeof zlibConstants.Z_DEFAULT_COMPRESSION !== \"number\") zlibConstants.Z_DEFAULT_COMPRESSION = -1;\n if (typeof zlibConstants.Z_FILTERED !== \"number\") zlibConstants.Z_FILTERED = 1;\n if (typeof zlibConstants.Z_HUFFMAN_ONLY !== \"number\") zlibConstants.Z_HUFFMAN_ONLY = 2;\n if (typeof zlibConstants.Z_RLE !== \"number\") zlibConstants.Z_RLE = 3;\n if (typeof zlibConstants.Z_FIXED !== \"number\") zlibConstants.Z_FIXED = 4;\n if (typeof zlibConstants.Z_DEFAULT_STRATEGY !== \"number\") zlibConstants.Z_DEFAULT_STRATEGY = 0;\n if (typeof zlibConstants.DEFLATE !== \"number\") zlibConstants.DEFLATE = 1;\n if (typeof zlibConstants.INFLATE !== \"number\") zlibConstants.INFLATE = 2;\n if (typeof zlibConstants.GZIP !== \"number\") zlibConstants.GZIP = 3;\n if (typeof zlibConstants.DEFLATERAW !== \"number\") zlibConstants.DEFLATERAW = 4;\n if (typeof zlibConstants.INFLATERAW !== \"number\") zlibConstants.INFLATERAW = 5;\n if (typeof zlibConstants.UNZIP !== \"number\") zlibConstants.UNZIP = 6;\n if (typeof zlibConstants.GUNZIP !== \"number\") zlibConstants.GUNZIP = 7;\n }\n result2.constants = 
zlibConstants;\n return result2;\n }\n if (name2 === \"crypto\") {\n let createCryptoRangeError2 = function(name3, message) {\n var error = new RangeError(message);\n error.code = \"ERR_OUT_OF_RANGE\";\n error.name = \"RangeError\";\n return error;\n }, createCryptoError2 = function(code, message) {\n var error = new Error(message);\n error.code = code;\n return error;\n }, encodeCryptoResult2 = function(buffer, encoding) {\n if (!encoding || encoding === \"buffer\") return buffer;\n return buffer.toString(encoding);\n }, isSharedArrayBufferInstance2 = function(value) {\n return typeof SharedArrayBuffer !== \"undefined\" && value instanceof SharedArrayBuffer;\n }, isBinaryLike2 = function(value) {\n return Buffer.isBuffer(value) || ArrayBuffer.isView(value) || value instanceof ArrayBuffer || isSharedArrayBufferInstance2(value);\n }, normalizeByteSource2 = function(value, name3, options) {\n var allowNull = options && options.allowNull;\n if (allowNull && value === null) {\n return null;\n }\n if (typeof value === \"string\") {\n return Buffer.from(value, \"utf8\");\n }\n if (Buffer.isBuffer(value)) {\n return Buffer.from(value);\n }\n if (ArrayBuffer.isView(value)) {\n return Buffer.from(value.buffer, value.byteOffset, value.byteLength);\n }\n if (value instanceof ArrayBuffer || isSharedArrayBufferInstance2(value)) {\n return Buffer.from(value);\n }\n throw createInvalidArgTypeError(\n name3,\n \"of type string or an instance of ArrayBuffer, Buffer, TypedArray, or DataView\",\n value\n );\n }, serializeCipherBridgeOptions2 = function(options) {\n if (!options) {\n return \"\";\n }\n var serialized = {};\n if (options.authTagLength !== void 0) {\n serialized.authTagLength = options.authTagLength;\n }\n if (options.authTag) {\n serialized.authTag = options.authTag.toString(\"base64\");\n }\n if (options.aad) {\n serialized.aad = options.aad.toString(\"base64\");\n }\n if (options.aadOptions !== void 0) {\n serialized.aadOptions = options.aadOptions;\n }\n if 
(options.autoPadding !== void 0) {\n serialized.autoPadding = options.autoPadding;\n }\n if (options.validateOnly !== void 0) {\n serialized.validateOnly = options.validateOnly;\n }\n return JSON.stringify(serialized);\n };\n var createCryptoRangeError = createCryptoRangeError2, createCryptoError = createCryptoError2, encodeCryptoResult = encodeCryptoResult2, isSharedArrayBufferInstance = isSharedArrayBufferInstance2, isBinaryLike = isBinaryLike2, normalizeByteSource = normalizeByteSource2, serializeCipherBridgeOptions = serializeCipherBridgeOptions2;\n var _runtimeRequire = globalThis.require;\n var _streamModule = _runtimeRequire && _runtimeRequire(\"stream\");\n var _utilModule = _runtimeRequire && _runtimeRequire(\"util\");\n var _Transform = _streamModule && _streamModule.Transform;\n var _inherits = _utilModule && _utilModule.inherits;\n if (typeof _cryptoHashDigest !== \"undefined\") {\n let SandboxHash2 = function(algorithm, options) {\n if (!(this instanceof SandboxHash2)) {\n return new SandboxHash2(algorithm, options);\n }\n if (!_Transform || !_inherits) {\n throw new Error(\"stream.Transform is required for crypto.Hash\");\n }\n if (typeof algorithm !== \"string\") {\n throw createInvalidArgTypeError(\"algorithm\", \"of type string\", algorithm);\n }\n _Transform.call(this, options);\n this._algorithm = algorithm;\n this._chunks = [];\n this._finalized = false;\n this._cachedDigest = null;\n this._allowCachedDigest = false;\n };\n var SandboxHash = SandboxHash2;\n _inherits(SandboxHash2, _Transform);\n SandboxHash2.prototype.update = function update(data, inputEncoding) {\n if (this._finalized) {\n throw createCryptoError2(\"ERR_CRYPTO_HASH_FINALIZED\", \"Digest already called\");\n }\n if (typeof data === \"string\") {\n this._chunks.push(Buffer.from(data, inputEncoding || \"utf8\"));\n } else if (isBinaryLike2(data)) {\n this._chunks.push(Buffer.from(data));\n } else {\n throw createInvalidArgTypeError(\n \"data\",\n \"one of type string, Buffer, 
TypedArray, or DataView\",\n data\n );\n }\n return this;\n };\n SandboxHash2.prototype._finishDigest = function _finishDigest() {\n if (this._cachedDigest) {\n return this._cachedDigest;\n }\n var combined = Buffer.concat(this._chunks);\n var resultBase64 = _cryptoHashDigest.applySync(void 0, [\n this._algorithm,\n combined.toString(\"base64\")\n ]);\n this._cachedDigest = Buffer.from(resultBase64, \"base64\");\n this._finalized = true;\n return this._cachedDigest;\n };\n SandboxHash2.prototype.digest = function digest(encoding) {\n if (this._finalized && !this._allowCachedDigest) {\n throw createCryptoError2(\"ERR_CRYPTO_HASH_FINALIZED\", \"Digest already called\");\n }\n var resultBuffer = this._finishDigest();\n this._allowCachedDigest = false;\n return encodeCryptoResult2(resultBuffer, encoding);\n };\n SandboxHash2.prototype.copy = function copy() {\n if (this._finalized) {\n throw createCryptoError2(\"ERR_CRYPTO_HASH_FINALIZED\", \"Digest already called\");\n }\n var c = new SandboxHash2(this._algorithm);\n c._chunks = this._chunks.slice();\n return c;\n };\n SandboxHash2.prototype._transform = function _transform(chunk, encoding, callback) {\n try {\n this.update(chunk, encoding === \"buffer\" ? 
void 0 : encoding);\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n SandboxHash2.prototype._flush = function _flush(callback) {\n try {\n var output = this._finishDigest();\n this._allowCachedDigest = true;\n this.push(output);\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n result2.createHash = function createHash(algorithm, options) {\n return new SandboxHash2(algorithm, options);\n };\n result2.Hash = SandboxHash2;\n }\n if (typeof _cryptoHmacDigest !== \"undefined\") {\n let SandboxHmac2 = function(algorithm, key) {\n this._algorithm = algorithm;\n if (typeof key === \"string\") {\n this._key = Buffer.from(key, \"utf8\");\n } else if (key && typeof key === \"object\" && key._raw !== void 0) {\n this._key = Buffer.from(key._raw, \"base64\");\n } else if (key && typeof key === \"object\" && key._pem !== void 0) {\n this._key = Buffer.from(key._pem, \"utf8\");\n } else {\n this._key = Buffer.from(key);\n }\n this._chunks = [];\n };\n var SandboxHmac = SandboxHmac2;\n SandboxHmac2.prototype.update = function update(data, inputEncoding) {\n if (typeof data === \"string\") {\n this._chunks.push(Buffer.from(data, inputEncoding || \"utf8\"));\n } else {\n this._chunks.push(Buffer.from(data));\n }\n return this;\n };\n SandboxHmac2.prototype.digest = function digest(encoding) {\n var combined = Buffer.concat(this._chunks);\n var resultBase64 = _cryptoHmacDigest.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n combined.toString(\"base64\")\n ]);\n var resultBuffer = Buffer.from(resultBase64, \"base64\");\n if (!encoding || encoding === \"buffer\") return resultBuffer;\n return resultBuffer.toString(encoding);\n };\n SandboxHmac2.prototype.copy = function copy() {\n var c = new SandboxHmac2(this._algorithm, this._key);\n c._chunks = this._chunks.slice();\n return c;\n };\n SandboxHmac2.prototype.write = function write(data, encoding) {\n this.update(data, 
encoding);\n return true;\n };\n SandboxHmac2.prototype.end = function end(data, encoding) {\n if (data) this.update(data, encoding);\n };\n result2.createHmac = function createHmac(algorithm, key) {\n return new SandboxHmac2(algorithm, key);\n };\n result2.Hmac = SandboxHmac2;\n }\n if (typeof _cryptoRandomFill !== \"undefined\") {\n result2.randomBytes = function randomBytes(size, callback) {\n if (typeof size !== \"number\" || size < 0 || size !== (size | 0)) {\n var err = new TypeError('The \"size\" argument must be of type number. Received type ' + typeof size);\n if (typeof callback === \"function\") {\n callback(err);\n return;\n }\n throw err;\n }\n if (size > 2147483647) {\n var rangeErr = new RangeError('The value of \"size\" is out of range. It must be >= 0 && <= 2147483647. Received ' + size);\n if (typeof callback === \"function\") {\n callback(rangeErr);\n return;\n }\n throw rangeErr;\n }\n var buf = Buffer.alloc(size);\n var offset = 0;\n while (offset < size) {\n var chunk = Math.min(size - offset, 65536);\n var base64 = _cryptoRandomFill.applySync(void 0, [chunk]);\n var hostBytes = Buffer.from(base64, \"base64\");\n hostBytes.copy(buf, offset);\n offset += chunk;\n }\n if (typeof callback === \"function\") {\n callback(null, buf);\n return;\n }\n return buf;\n };\n result2.randomFillSync = function randomFillSync(buffer, offset, size) {\n if (offset === void 0) offset = 0;\n var byteLength = buffer.byteLength !== void 0 ? buffer.byteLength : buffer.length;\n if (size === void 0) size = byteLength - offset;\n if (offset < 0 || size < 0 || offset + size > byteLength) {\n throw new RangeError('The value of \"offset + size\" is out of range.');\n }\n var bytes = new Uint8Array(buffer.buffer || buffer, buffer.byteOffset ? 
buffer.byteOffset + offset : offset, size);\n var filled = 0;\n while (filled < size) {\n var chunk = Math.min(size - filled, 65536);\n var base64 = _cryptoRandomFill.applySync(void 0, [chunk]);\n var hostBytes = Buffer.from(base64, \"base64\");\n bytes.set(hostBytes, filled);\n filled += chunk;\n }\n return buffer;\n };\n result2.randomFill = function randomFill(buffer, offsetOrCb, sizeOrCb, callback) {\n var offset = 0;\n var size;\n var cb;\n if (typeof offsetOrCb === \"function\") {\n cb = offsetOrCb;\n } else if (typeof sizeOrCb === \"function\") {\n offset = offsetOrCb || 0;\n cb = sizeOrCb;\n } else {\n offset = offsetOrCb || 0;\n size = sizeOrCb;\n cb = callback;\n }\n if (typeof cb !== \"function\") {\n throw new TypeError(\"Callback must be a function\");\n }\n try {\n result2.randomFillSync(buffer, offset, size);\n cb(null, buffer);\n } catch (e) {\n cb(e);\n }\n };\n result2.randomInt = function randomInt(minOrMax, maxOrCb, callback) {\n var min, max, cb;\n if (typeof maxOrCb === \"function\" || maxOrCb === void 0) {\n min = 0;\n max = minOrMax;\n cb = maxOrCb;\n } else {\n min = minOrMax;\n max = maxOrCb;\n cb = callback;\n }\n if (!Number.isSafeInteger(min)) {\n var minErr = new TypeError('The \"min\" argument must be a safe integer');\n if (typeof cb === \"function\") {\n cb(minErr);\n return;\n }\n throw minErr;\n }\n if (!Number.isSafeInteger(max)) {\n var maxErr = new TypeError('The \"max\" argument must be a safe integer');\n if (typeof cb === \"function\") {\n cb(maxErr);\n return;\n }\n throw maxErr;\n }\n if (max <= min) {\n var rangeErr2 = new RangeError('The value of \"max\" is out of range. 
It must be greater than the value of \"min\" (' + min + \")\");\n if (typeof cb === \"function\") {\n cb(rangeErr2);\n return;\n }\n throw rangeErr2;\n }\n var range = max - min;\n var bytes = 6;\n var maxValid = Math.pow(2, 48) - Math.pow(2, 48) % range;\n var val;\n do {\n var base64 = _cryptoRandomFill.applySync(void 0, [bytes]);\n var buf = Buffer.from(base64, \"base64\");\n val = buf.readUIntBE(0, bytes);\n } while (val >= maxValid);\n var result22 = min + val % range;\n if (typeof cb === \"function\") {\n cb(null, result22);\n return;\n }\n return result22;\n };\n }\n if (typeof _cryptoPbkdf2 !== \"undefined\") {\n let createPbkdf2ArgTypeError2 = function(name3, value) {\n var received;\n if (value == null) {\n received = \" Received \" + value;\n } else if (typeof value === \"object\") {\n received = value.constructor && value.constructor.name ? \" Received an instance of \" + value.constructor.name : \" Received [object Object]\";\n } else {\n var inspected = typeof value === \"string\" ? \"'\" + value + \"'\" : String(value);\n received = \" Received type \" + typeof value + \" (\" + inspected + \")\";\n }\n var error = new TypeError('The \"' + name3 + '\" argument must be of type number.' + received);\n error.code = \"ERR_INVALID_ARG_TYPE\";\n return error;\n }, validatePbkdf2Args2 = function(password, salt, iterations, keylen, digest) {\n var pwBuf = normalizeByteSource2(password, \"password\");\n var saltBuf = normalizeByteSource2(salt, \"salt\");\n if (typeof iterations !== \"number\") {\n throw createPbkdf2ArgTypeError2(\"iterations\", iterations);\n }\n if (!Number.isInteger(iterations)) {\n throw createCryptoRangeError2(\n \"iterations\",\n 'The value of \"iterations\" is out of range. It must be an integer. Received ' + iterations\n );\n }\n if (iterations < 1 || iterations > 2147483647) {\n throw createCryptoRangeError2(\n \"iterations\",\n 'The value of \"iterations\" is out of range. It must be >= 1 && <= 2147483647. 
Received ' + iterations\n );\n }\n if (typeof keylen !== \"number\") {\n throw createPbkdf2ArgTypeError2(\"keylen\", keylen);\n }\n if (!Number.isInteger(keylen)) {\n throw createCryptoRangeError2(\n \"keylen\",\n 'The value of \"keylen\" is out of range. It must be an integer. Received ' + keylen\n );\n }\n if (keylen < 0 || keylen > 2147483647) {\n throw createCryptoRangeError2(\n \"keylen\",\n 'The value of \"keylen\" is out of range. It must be >= 0 && <= 2147483647. Received ' + keylen\n );\n }\n if (typeof digest !== \"string\") {\n throw createInvalidArgTypeError(\"digest\", \"of type string\", digest);\n }\n return {\n password: pwBuf,\n salt: saltBuf\n };\n };\n var createPbkdf2ArgTypeError = createPbkdf2ArgTypeError2, validatePbkdf2Args = validatePbkdf2Args2;\n result2.pbkdf2Sync = function pbkdf2Sync(password, salt, iterations, keylen, digest) {\n var normalized = validatePbkdf2Args2(password, salt, iterations, keylen, digest);\n try {\n var resultBase64 = _cryptoPbkdf2.applySync(void 0, [\n normalized.password.toString(\"base64\"),\n normalized.salt.toString(\"base64\"),\n iterations,\n keylen,\n digest\n ]);\n return Buffer.from(resultBase64, \"base64\");\n } catch (error) {\n throw normalizeCryptoBridgeError(error);\n }\n };\n result2.pbkdf2 = function pbkdf2(password, salt, iterations, keylen, digest, callback) {\n if (typeof digest === \"function\" && callback === void 0) {\n callback = digest;\n digest = void 0;\n }\n if (typeof callback !== \"function\") {\n throw createInvalidArgTypeError(\"callback\", \"of type function\", callback);\n }\n try {\n var derived = result2.pbkdf2Sync(password, salt, iterations, keylen, digest);\n scheduleCryptoCallback(callback, [null, derived]);\n } catch (e) {\n throw normalizeCryptoBridgeError(e);\n }\n };\n }\n if (typeof _cryptoScrypt !== \"undefined\") {\n result2.scryptSync = function scryptSync(password, salt, keylen, options) {\n var pwBuf = typeof password === \"string\" ? 
Buffer.from(password, \"utf8\") : Buffer.from(password);\n var saltBuf = typeof salt === \"string\" ? Buffer.from(salt, \"utf8\") : Buffer.from(salt);\n var opts = {};\n if (options) {\n if (options.N !== void 0) opts.N = options.N;\n if (options.r !== void 0) opts.r = options.r;\n if (options.p !== void 0) opts.p = options.p;\n if (options.maxmem !== void 0) opts.maxmem = options.maxmem;\n if (options.cost !== void 0) opts.N = options.cost;\n if (options.blockSize !== void 0) opts.r = options.blockSize;\n if (options.parallelization !== void 0) opts.p = options.parallelization;\n }\n var resultBase64 = _cryptoScrypt.applySync(void 0, [\n pwBuf.toString(\"base64\"),\n saltBuf.toString(\"base64\"),\n keylen,\n JSON.stringify(opts)\n ]);\n return Buffer.from(resultBase64, \"base64\");\n };\n result2.scrypt = function scrypt(password, salt, keylen, optionsOrCb, callback) {\n var opts = optionsOrCb;\n var cb = callback;\n if (typeof optionsOrCb === \"function\") {\n opts = void 0;\n cb = optionsOrCb;\n }\n try {\n var derived = result2.scryptSync(password, salt, keylen, opts);\n cb(null, derived);\n } catch (e) {\n cb(e);\n }\n };\n }\n if (typeof _cryptoCipheriv !== \"undefined\") {\n let SandboxCipher2 = function(algorithm, key, iv, options) {\n if (!(this instanceof SandboxCipher2)) {\n return new SandboxCipher2(algorithm, key, iv, options);\n }\n if (typeof algorithm !== \"string\") {\n throw createInvalidArgTypeError(\"cipher\", \"of type string\", algorithm);\n }\n _Transform.call(this);\n this._algorithm = algorithm;\n this._key = normalizeByteSource2(key, \"key\");\n this._iv = normalizeByteSource2(iv, \"iv\", { allowNull: true });\n this._options = options || void 0;\n this._authTag = null;\n this._finalized = false;\n this._sessionCreated = false;\n this._sessionId = void 0;\n this._aad = null;\n this._aadOptions = void 0;\n this._autoPadding = void 0;\n this._chunks = [];\n this._bufferedMode = !_useSessionCipher || !!options;\n if (!this._bufferedMode) {\n 
this._ensureSession();\n } else if (!options) {\n _cryptoCipheriv.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? null : this._iv.toString(\"base64\"),\n \"\",\n serializeCipherBridgeOptions2({ validateOnly: true })\n ]);\n }\n };\n var SandboxCipher = SandboxCipher2;\n var _useSessionCipher = typeof _cryptoCipherivCreate !== \"undefined\";\n _inherits(SandboxCipher2, _Transform);\n SandboxCipher2.prototype._ensureSession = function _ensureSession() {\n if (this._bufferedMode || this._sessionCreated) {\n return;\n }\n this._sessionCreated = true;\n this._sessionId = _cryptoCipherivCreate.applySync(void 0, [\n \"cipher\",\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? null : this._iv.toString(\"base64\"),\n serializeCipherBridgeOptions2(this._getBridgeOptions())\n ]);\n };\n SandboxCipher2.prototype._getBridgeOptions = function _getBridgeOptions() {\n var options = {};\n if (this._options && this._options.authTagLength !== void 0) {\n options.authTagLength = this._options.authTagLength;\n }\n if (this._aad) {\n options.aad = this._aad;\n }\n if (this._aadOptions !== void 0) {\n options.aadOptions = this._aadOptions;\n }\n if (this._autoPadding !== void 0) {\n options.autoPadding = this._autoPadding;\n }\n return Object.keys(options).length === 0 ? 
null : options;\n };\n SandboxCipher2.prototype.update = function update(data, inputEncoding, outputEncoding) {\n if (this._finalized) {\n throw new Error(\"Attempting to call update() after final()\");\n }\n var buf;\n if (typeof data === \"string\") {\n buf = Buffer.from(data, inputEncoding || \"utf8\");\n } else {\n buf = normalizeByteSource2(data, \"data\");\n }\n if (!this._bufferedMode) {\n this._ensureSession();\n var resultBase64 = _cryptoCipherivUpdate.applySync(void 0, [this._sessionId, buf.toString(\"base64\")]);\n var resultBuffer = Buffer.from(resultBase64, \"base64\");\n return encodeCryptoResult2(resultBuffer, outputEncoding);\n }\n this._chunks.push(buf);\n return encodeCryptoResult2(Buffer.alloc(0), outputEncoding);\n };\n SandboxCipher2.prototype.final = function final(outputEncoding) {\n if (this._finalized) throw new Error(\"Attempting to call final() after already finalized\");\n this._finalized = true;\n var parsed;\n if (!this._bufferedMode) {\n this._ensureSession();\n var resultJson = _cryptoCipherivFinal.applySync(void 0, [this._sessionId]);\n parsed = JSON.parse(resultJson);\n } else {\n var combined = Buffer.concat(this._chunks);\n var resultJson2 = _cryptoCipheriv.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? 
null : this._iv.toString(\"base64\"),\n combined.toString(\"base64\"),\n serializeCipherBridgeOptions2(this._getBridgeOptions())\n ]);\n parsed = JSON.parse(resultJson2);\n }\n if (parsed.authTag) {\n this._authTag = Buffer.from(parsed.authTag, \"base64\");\n }\n var resultBuffer = Buffer.from(parsed.data, \"base64\");\n return encodeCryptoResult2(resultBuffer, outputEncoding);\n };\n SandboxCipher2.prototype.getAuthTag = function getAuthTag() {\n if (!this._finalized) throw new Error(\"Cannot call getAuthTag before final()\");\n if (!this._authTag) throw new Error(\"Auth tag is not available\");\n return this._authTag;\n };\n SandboxCipher2.prototype.setAAD = function setAAD(aad, options) {\n this._bufferedMode = true;\n this._aad = normalizeByteSource2(aad, \"buffer\");\n this._aadOptions = options;\n return this;\n };\n SandboxCipher2.prototype.setAutoPadding = function setAutoPadding(autoPadding) {\n this._bufferedMode = true;\n this._autoPadding = autoPadding !== false;\n return this;\n };\n SandboxCipher2.prototype._transform = function _transform(chunk, encoding, callback) {\n try {\n var output = this.update(chunk, encoding === \"buffer\" ? 
void 0 : encoding);\n if (output.length) {\n this.push(output);\n }\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n SandboxCipher2.prototype._flush = function _flush(callback) {\n try {\n var output = this.final();\n if (output.length) {\n this.push(output);\n }\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n result2.createCipheriv = function createCipheriv(algorithm, key, iv, options) {\n return new SandboxCipher2(algorithm, key, iv, options);\n };\n result2.Cipheriv = SandboxCipher2;\n }\n if (typeof _cryptoDecipheriv !== \"undefined\") {\n let SandboxDecipher2 = function(algorithm, key, iv, options) {\n if (!(this instanceof SandboxDecipher2)) {\n return new SandboxDecipher2(algorithm, key, iv, options);\n }\n if (typeof algorithm !== \"string\") {\n throw createInvalidArgTypeError(\"cipher\", \"of type string\", algorithm);\n }\n _Transform.call(this);\n this._algorithm = algorithm;\n this._key = normalizeByteSource2(key, \"key\");\n this._iv = normalizeByteSource2(iv, \"iv\", { allowNull: true });\n this._options = options || void 0;\n this._authTag = null;\n this._finalized = false;\n this._sessionCreated = false;\n this._aad = null;\n this._aadOptions = void 0;\n this._autoPadding = void 0;\n this._chunks = [];\n this._bufferedMode = !_useSessionCipher || !!options;\n if (!this._bufferedMode) {\n this._ensureSession();\n } else if (!options) {\n _cryptoDecipheriv.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? 
null : this._iv.toString(\"base64\"),\n \"\",\n serializeCipherBridgeOptions2({ validateOnly: true })\n ]);\n }\n };\n var SandboxDecipher = SandboxDecipher2;\n _inherits(SandboxDecipher2, _Transform);\n SandboxDecipher2.prototype._ensureSession = function _ensureSession() {\n if (!this._bufferedMode && !this._sessionCreated) {\n this._sessionCreated = true;\n this._sessionId = _cryptoCipherivCreate.applySync(void 0, [\n \"decipher\",\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? null : this._iv.toString(\"base64\"),\n serializeCipherBridgeOptions2(this._getBridgeOptions())\n ]);\n }\n };\n SandboxDecipher2.prototype._getBridgeOptions = function _getBridgeOptions() {\n var options = {};\n if (this._options && this._options.authTagLength !== void 0) {\n options.authTagLength = this._options.authTagLength;\n }\n if (this._authTag) {\n options.authTag = this._authTag;\n }\n if (this._aad) {\n options.aad = this._aad;\n }\n if (this._aadOptions !== void 0) {\n options.aadOptions = this._aadOptions;\n }\n if (this._autoPadding !== void 0) {\n options.autoPadding = this._autoPadding;\n }\n return Object.keys(options).length === 0 ? 
null : options;\n };\n SandboxDecipher2.prototype.update = function update(data, inputEncoding, outputEncoding) {\n if (this._finalized) {\n throw new Error(\"Attempting to call update() after final()\");\n }\n var buf;\n if (typeof data === \"string\") {\n buf = Buffer.from(data, inputEncoding || \"utf8\");\n } else {\n buf = normalizeByteSource2(data, \"data\");\n }\n if (!this._bufferedMode) {\n this._ensureSession();\n var resultBase64 = _cryptoCipherivUpdate.applySync(void 0, [this._sessionId, buf.toString(\"base64\")]);\n var resultBuffer = Buffer.from(resultBase64, \"base64\");\n return encodeCryptoResult2(resultBuffer, outputEncoding);\n }\n this._chunks.push(buf);\n return encodeCryptoResult2(Buffer.alloc(0), outputEncoding);\n };\n SandboxDecipher2.prototype.final = function final(outputEncoding) {\n if (this._finalized) throw new Error(\"Attempting to call final() after already finalized\");\n this._finalized = true;\n var resultBuffer;\n if (!this._bufferedMode) {\n this._ensureSession();\n var resultJson = _cryptoCipherivFinal.applySync(void 0, [this._sessionId]);\n var parsed = JSON.parse(resultJson);\n resultBuffer = Buffer.from(parsed.data, \"base64\");\n } else {\n var combined = Buffer.concat(this._chunks);\n var options = {};\n var resultBase64 = _cryptoDecipheriv.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? null : this._iv.toString(\"base64\"),\n combined.toString(\"base64\"),\n serializeCipherBridgeOptions2(this._getBridgeOptions())\n ]);\n resultBuffer = Buffer.from(resultBase64, \"base64\");\n }\n return encodeCryptoResult2(resultBuffer, outputEncoding);\n };\n SandboxDecipher2.prototype.setAuthTag = function setAuthTag(tag) {\n this._bufferedMode = true;\n this._authTag = typeof tag === \"string\" ? 
Buffer.from(tag, \"base64\") : normalizeByteSource2(tag, \"buffer\");\n return this;\n };\n SandboxDecipher2.prototype.setAAD = function setAAD(aad, options) {\n this._bufferedMode = true;\n this._aad = normalizeByteSource2(aad, \"buffer\");\n this._aadOptions = options;\n return this;\n };\n SandboxDecipher2.prototype.setAutoPadding = function setAutoPadding(autoPadding) {\n this._bufferedMode = true;\n this._autoPadding = autoPadding !== false;\n return this;\n };\n SandboxDecipher2.prototype._transform = function _transform(chunk, encoding, callback) {\n try {\n var output = this.update(chunk, encoding === \"buffer\" ? void 0 : encoding);\n if (output.length) {\n this.push(output);\n }\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n SandboxDecipher2.prototype._flush = function _flush(callback) {\n try {\n var output = this.final();\n if (output.length) {\n this.push(output);\n }\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n result2.createDecipheriv = function createDecipheriv(algorithm, key, iv, options) {\n return new SandboxDecipher2(algorithm, key, iv, options);\n };\n result2.Decipheriv = SandboxDecipher2;\n }\n if (typeof _cryptoSign !== \"undefined\") {\n result2.sign = function sign(algorithm, data, key) {\n var dataBuf = typeof data === \"string\" ? Buffer.from(data, \"utf8\") : Buffer.from(data);\n var sigBase64;\n try {\n sigBase64 = _cryptoSign.applySync(void 0, [\n algorithm === void 0 ? null : algorithm,\n dataBuf.toString(\"base64\"),\n JSON.stringify(serializeBridgeValue(key))\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError(error);\n }\n return Buffer.from(sigBase64, \"base64\");\n };\n }\n if (typeof _cryptoVerify !== \"undefined\") {\n result2.verify = function verify(algorithm, data, key, signature) {\n var dataBuf = typeof data === \"string\" ? Buffer.from(data, \"utf8\") : Buffer.from(data);\n var sigBuf = typeof signature === \"string\" ? 
Buffer.from(signature, \"base64\") : Buffer.from(signature);\n try {\n return _cryptoVerify.applySync(void 0, [\n algorithm === void 0 ? null : algorithm,\n dataBuf.toString(\"base64\"),\n JSON.stringify(serializeBridgeValue(key)),\n sigBuf.toString(\"base64\")\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError(error);\n }\n };\n }\n if (typeof _cryptoAsymmetricOp !== \"undefined\") {\n let asymmetricBridgeCall2 = function(operation, key, data) {\n var dataBuf = toRawBuffer(data);\n var resultBase64;\n try {\n resultBase64 = _cryptoAsymmetricOp.applySync(void 0, [\n operation,\n JSON.stringify(serializeBridgeValue(key)),\n dataBuf.toString(\"base64\")\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError(error);\n }\n return Buffer.from(resultBase64, \"base64\");\n };\n var asymmetricBridgeCall = asymmetricBridgeCall2;\n result2.publicEncrypt = function publicEncrypt(key, data) {\n return asymmetricBridgeCall2(\"publicEncrypt\", key, data);\n };\n result2.privateDecrypt = function privateDecrypt(key, data) {\n return asymmetricBridgeCall2(\"privateDecrypt\", key, data);\n };\n result2.privateEncrypt = function privateEncrypt(key, data) {\n return asymmetricBridgeCall2(\"privateEncrypt\", key, data);\n };\n result2.publicDecrypt = function publicDecrypt(key, data) {\n return asymmetricBridgeCall2(\"publicDecrypt\", key, data);\n };\n }\n if (typeof _cryptoDiffieHellmanSessionCreate !== \"undefined\" && typeof _cryptoDiffieHellmanSessionCall !== \"undefined\") {\n let serializeDhKeyObject2 = function(value) {\n if (value.type === \"secret\") {\n return {\n type: \"secret\",\n raw: Buffer.from(value.export()).toString(\"base64\")\n };\n }\n return {\n type: value.type,\n pem: value._pem || value.export({\n type: value.type === \"private\" ? 
\"pkcs8\" : \"spki\",\n format: \"pem\"\n })\n };\n }, serializeDhValue2 = function(value) {\n if (value === null || typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n return value;\n }\n if (Buffer.isBuffer(value)) {\n return {\n __type: \"buffer\",\n value: Buffer.from(value).toString(\"base64\")\n };\n }\n if (value instanceof ArrayBuffer) {\n return {\n __type: \"buffer\",\n value: Buffer.from(new Uint8Array(value)).toString(\"base64\")\n };\n }\n if (ArrayBuffer.isView(value)) {\n return {\n __type: \"buffer\",\n value: Buffer.from(value.buffer, value.byteOffset, value.byteLength).toString(\"base64\")\n };\n }\n if (typeof value === \"bigint\") {\n return {\n __type: \"bigint\",\n value: value.toString()\n };\n }\n if (value && typeof value === \"object\" && (value.type === \"public\" || value.type === \"private\" || value.type === \"secret\") && typeof value.export === \"function\") {\n return {\n __type: \"keyObject\",\n value: serializeDhKeyObject2(value)\n };\n }\n if (Array.isArray(value)) {\n return value.map(serializeDhValue2);\n }\n if (value && typeof value === \"object\") {\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n if (value[keys[i]] !== void 0) {\n output[keys[i]] = serializeDhValue2(value[keys[i]]);\n }\n }\n return output;\n }\n return String(value);\n }, restoreDhValue2 = function(value) {\n if (!value || typeof value !== \"object\") {\n return value;\n }\n if (value.__type === \"buffer\") {\n return Buffer.from(value.value, \"base64\");\n }\n if (value.__type === \"bigint\") {\n return BigInt(value.value);\n }\n if (Array.isArray(value)) {\n return value.map(restoreDhValue2);\n }\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n output[keys[i]] = restoreDhValue2(value[keys[i]]);\n }\n return output;\n }, createDhSession2 = function(type, name3, argsLike) {\n var args = [];\n for (var i = 0; i < 
argsLike.length; i++) {\n args.push(serializeDhValue2(argsLike[i]));\n }\n return _cryptoDiffieHellmanSessionCreate.applySync(void 0, [\n JSON.stringify({\n type,\n name: name3,\n args\n })\n ]);\n }, callDhSession2 = function(sessionId, method, argsLike) {\n var args = [];\n for (var i = 0; i < argsLike.length; i++) {\n args.push(serializeDhValue2(argsLike[i]));\n }\n var response = JSON.parse(_cryptoDiffieHellmanSessionCall.applySync(void 0, [\n sessionId,\n JSON.stringify({\n method,\n args\n })\n ]));\n if (response && response.hasResult === false) {\n return void 0;\n }\n return restoreDhValue2(response && response.result);\n }, SandboxDiffieHellman2 = function(sessionId) {\n this._sessionId = sessionId;\n }, SandboxECDH2 = function(sessionId) {\n SandboxDiffieHellman2.call(this, sessionId);\n };\n var serializeDhKeyObject = serializeDhKeyObject2, serializeDhValue = serializeDhValue2, restoreDhValue = restoreDhValue2, createDhSession = createDhSession2, callDhSession = callDhSession2, SandboxDiffieHellman = SandboxDiffieHellman2, SandboxECDH = SandboxECDH2;\n Object.defineProperty(SandboxDiffieHellman2.prototype, \"verifyError\", {\n get: function getVerifyError() {\n return callDhSession2(this._sessionId, \"verifyError\", []);\n }\n });\n SandboxDiffieHellman2.prototype.generateKeys = function generateKeys(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"generateKeys\", []);\n return callDhSession2(this._sessionId, \"generateKeys\", [encoding]);\n };\n SandboxDiffieHellman2.prototype.computeSecret = function computeSecret(key, inputEncoding, outputEncoding) {\n return callDhSession2(this._sessionId, \"computeSecret\", Array.prototype.slice.call(arguments));\n };\n SandboxDiffieHellman2.prototype.getPrime = function getPrime(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"getPrime\", []);\n return callDhSession2(this._sessionId, \"getPrime\", [encoding]);\n };\n 
SandboxDiffieHellman2.prototype.getGenerator = function getGenerator(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"getGenerator\", []);\n return callDhSession2(this._sessionId, \"getGenerator\", [encoding]);\n };\n SandboxDiffieHellman2.prototype.getPublicKey = function getPublicKey(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"getPublicKey\", []);\n return callDhSession2(this._sessionId, \"getPublicKey\", [encoding]);\n };\n SandboxDiffieHellman2.prototype.getPrivateKey = function getPrivateKey(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"getPrivateKey\", []);\n return callDhSession2(this._sessionId, \"getPrivateKey\", [encoding]);\n };\n SandboxDiffieHellman2.prototype.setPublicKey = function setPublicKey(key, encoding) {\n return callDhSession2(this._sessionId, \"setPublicKey\", Array.prototype.slice.call(arguments));\n };\n SandboxDiffieHellman2.prototype.setPrivateKey = function setPrivateKey(key, encoding) {\n return callDhSession2(this._sessionId, \"setPrivateKey\", Array.prototype.slice.call(arguments));\n };\n SandboxECDH2.prototype = Object.create(SandboxDiffieHellman2.prototype);\n SandboxECDH2.prototype.constructor = SandboxECDH2;\n SandboxECDH2.prototype.getPublicKey = function getPublicKey(encoding, format) {\n return callDhSession2(this._sessionId, \"getPublicKey\", Array.prototype.slice.call(arguments));\n };\n result2.createDiffieHellman = function createDiffieHellman() {\n return new SandboxDiffieHellman2(createDhSession2(\"dh\", void 0, arguments));\n };\n result2.getDiffieHellman = function getDiffieHellman(name3) {\n return new SandboxDiffieHellman2(createDhSession2(\"group\", name3, []));\n };\n result2.createDiffieHellmanGroup = result2.getDiffieHellman;\n result2.createECDH = function createECDH(curve) {\n return new SandboxECDH2(createDhSession2(\"ecdh\", curve, []));\n };\n if (typeof _cryptoDiffieHellman !== \"undefined\") {\n 
result2.diffieHellman = function diffieHellman(options) {\n var resultJson = _cryptoDiffieHellman.applySync(void 0, [\n JSON.stringify(serializeDhValue2(options))\n ]);\n return restoreDhValue2(JSON.parse(resultJson));\n };\n }\n result2.DiffieHellman = SandboxDiffieHellman2;\n result2.DiffieHellmanGroup = SandboxDiffieHellman2;\n result2.ECDH = SandboxECDH2;\n }\n if (typeof _cryptoGenerateKeyPairSync !== \"undefined\") {\n let restoreBridgeValue2 = function(value) {\n if (!value || typeof value !== \"object\") {\n return value;\n }\n if (value.__type === \"buffer\") {\n return Buffer.from(value.value, \"base64\");\n }\n if (value.__type === \"bigint\") {\n return BigInt(value.value);\n }\n if (Array.isArray(value)) {\n return value.map(restoreBridgeValue2);\n }\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n output[keys[i]] = restoreBridgeValue2(value[keys[i]]);\n }\n return output;\n }, cloneObject2 = function(value) {\n if (!value || typeof value !== \"object\") {\n return value;\n }\n if (Array.isArray(value)) {\n return value.map(cloneObject2);\n }\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n output[keys[i]] = cloneObject2(value[keys[i]]);\n }\n return output;\n }, createDomException2 = function(message, name3) {\n if (typeof DOMException === \"function\") {\n return new DOMException(message, name3);\n }\n var error = new Error(message);\n error.name = name3;\n return error;\n }, toRawBuffer2 = function(data, encoding) {\n if (Buffer.isBuffer(data)) {\n return Buffer.from(data);\n }\n if (data instanceof ArrayBuffer) {\n return Buffer.from(new Uint8Array(data));\n }\n if (ArrayBuffer.isView(data)) {\n return Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n if (typeof data === \"string\") {\n return Buffer.from(data, encoding || \"utf8\");\n }\n return Buffer.from(data);\n }, serializeBridgeValue2 = function(value) {\n if (value === null) {\n 
return null;\n }\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n return value;\n }\n if (typeof value === \"bigint\") {\n return {\n __type: \"bigint\",\n value: value.toString()\n };\n }\n if (Buffer.isBuffer(value)) {\n return {\n __type: \"buffer\",\n value: Buffer.from(value).toString(\"base64\")\n };\n }\n if (value instanceof ArrayBuffer) {\n return {\n __type: \"buffer\",\n value: Buffer.from(new Uint8Array(value)).toString(\"base64\")\n };\n }\n if (ArrayBuffer.isView(value)) {\n return {\n __type: \"buffer\",\n value: Buffer.from(value.buffer, value.byteOffset, value.byteLength).toString(\"base64\")\n };\n }\n if (Array.isArray(value)) {\n return value.map(serializeBridgeValue2);\n }\n if (value && typeof value === \"object\" && (value.type === \"public\" || value.type === \"private\" || value.type === \"secret\") && typeof value.export === \"function\") {\n if (value.type === \"secret\") {\n return {\n __type: \"keyObject\",\n value: {\n type: \"secret\",\n raw: Buffer.from(value.export()).toString(\"base64\")\n }\n };\n }\n return {\n __type: \"keyObject\",\n value: {\n type: value.type,\n pem: value._pem\n }\n };\n }\n if (value && typeof value === \"object\") {\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n var entry = value[keys[i]];\n if (entry !== void 0) {\n output[keys[i]] = serializeBridgeValue2(entry);\n }\n }\n return output;\n }\n return String(value);\n }, normalizeCryptoBridgeError2 = function(error) {\n if (!error || typeof error !== \"object\") {\n return error;\n }\n if (error.code === void 0 && error.message === \"error:07880109:common libcrypto routines::interrupted or cancelled\") {\n error.code = \"ERR_OSSL_CRYPTO_INTERRUPTED_OR_CANCELLED\";\n }\n return error;\n }, deserializeGeneratedKeyValue2 = function(value) {\n if (!value || typeof value !== \"object\") {\n return value;\n }\n if (value.kind === \"string\") {\n return 
value.value;\n }\n if (value.kind === \"buffer\") {\n return Buffer.from(value.value, \"base64\");\n }\n if (value.kind === \"keyObject\") {\n return createGeneratedKeyObject2(value.value);\n }\n if (value.kind === \"object\") {\n return value.value;\n }\n return value;\n }, serializeBridgeOptions2 = function(options) {\n return JSON.stringify({\n hasOptions: options !== void 0,\n options: options === void 0 ? null : serializeBridgeValue2(options)\n });\n }, createInvalidArgTypeError2 = function(name3, expected, value) {\n var received;\n if (value == null) {\n received = \" Received \" + value;\n } else if (typeof value === \"function\") {\n received = \" Received function \" + (value.name || \"anonymous\");\n } else if (typeof value === \"object\") {\n if (value.constructor && value.constructor.name) {\n received = \" Received an instance of \" + value.constructor.name;\n } else {\n received = \" Received [object Object]\";\n }\n } else {\n var inspected = typeof value === \"string\" ? 
\"'\" + value + \"'\" : String(value);\n if (inspected.length > 28) {\n inspected = inspected.slice(0, 25) + \"...\";\n }\n received = \" Received type \" + typeof value + \" (\" + inspected + \")\";\n }\n var error = new TypeError('The \"' + name3 + '\" argument must be ' + expected + \".\" + received);\n error.code = \"ERR_INVALID_ARG_TYPE\";\n return error;\n }, scheduleCryptoCallback2 = function(callback, args) {\n var invoke = function() {\n callback.apply(void 0, args);\n };\n if (typeof process !== \"undefined\" && process && typeof process.nextTick === \"function\") {\n process.nextTick(invoke);\n return;\n }\n if (typeof queueMicrotask === \"function\") {\n queueMicrotask(invoke);\n return;\n }\n Promise.resolve().then(invoke);\n }, shouldThrowCryptoValidationError2 = function(error) {\n if (!error || typeof error !== \"object\") {\n return false;\n }\n if (error.name === \"TypeError\" || error.name === \"RangeError\") {\n return true;\n }\n var code = error.code;\n return code === \"ERR_MISSING_OPTION\" || code === \"ERR_CRYPTO_UNKNOWN_DH_GROUP\" || code === \"ERR_OUT_OF_RANGE\" || typeof code === \"string\" && code.indexOf(\"ERR_INVALID_ARG_\") === 0;\n }, ensureCryptoCallback2 = function(callback, syncValidator) {\n if (typeof callback === \"function\") {\n return callback;\n }\n if (typeof syncValidator === \"function\") {\n syncValidator();\n }\n throw createInvalidArgTypeError2(\"callback\", \"of type function\", callback);\n }, SandboxKeyObject2 = function(type, handle) {\n this.type = type;\n this._pem = handle && handle.pem !== void 0 ? handle.pem : void 0;\n this._raw = handle && handle.raw !== void 0 ? handle.raw : void 0;\n this._jwk = handle && handle.jwk !== void 0 ? cloneObject2(handle.jwk) : void 0;\n this.asymmetricKeyType = handle && handle.asymmetricKeyType !== void 0 ? handle.asymmetricKeyType : void 0;\n this.asymmetricKeyDetails = handle && handle.asymmetricKeyDetails !== void 0 ? 
restoreBridgeValue2(handle.asymmetricKeyDetails) : void 0;\n this.symmetricKeySize = type === \"secret\" && handle && handle.raw !== void 0 ? Buffer.from(handle.raw, \"base64\").byteLength : void 0;\n }, normalizeNamedCurve2 = function(namedCurve) {\n if (!namedCurve) {\n return namedCurve;\n }\n var upper = String(namedCurve).toUpperCase();\n if (upper === \"PRIME256V1\" || upper === \"SECP256R1\") return \"P-256\";\n if (upper === \"SECP384R1\") return \"P-384\";\n if (upper === \"SECP521R1\") return \"P-521\";\n return namedCurve;\n }, normalizeAlgorithmInput2 = function(algorithm) {\n if (typeof algorithm === \"string\") {\n return { name: algorithm };\n }\n return Object.assign({}, algorithm);\n }, createCompatibleCryptoKey2 = function(keyData) {\n var key;\n if (globalThis.CryptoKey && globalThis.CryptoKey.prototype && globalThis.CryptoKey.prototype !== SandboxCryptoKey.prototype) {\n key = Object.create(globalThis.CryptoKey.prototype);\n key.type = keyData.type;\n key.extractable = keyData.extractable;\n key.algorithm = keyData.algorithm;\n key.usages = keyData.usages;\n key._keyData = keyData;\n key._pem = keyData._pem;\n key._jwk = keyData._jwk;\n key._raw = keyData._raw;\n key._sourceKeyObjectData = keyData._sourceKeyObjectData;\n return key;\n }\n return new SandboxCryptoKey(keyData);\n }, buildCryptoKeyFromKeyObject2 = function(keyObject, algorithm, extractable, usages) {\n var algo = normalizeAlgorithmInput2(algorithm);\n var name3 = algo.name;\n if (keyObject.type === \"secret\") {\n var secretBytes = Buffer.from(keyObject._raw || \"\", \"base64\");\n if (name3 === \"PBKDF2\") {\n if (extractable) {\n throw new SyntaxError(\"PBKDF2 keys are not extractable\");\n }\n if (usages.some(function(usage) {\n return usage !== \"deriveBits\" && usage !== \"deriveKey\";\n })) {\n throw new SyntaxError(\"Unsupported key usage for a PBKDF2 key\");\n }\n return createCompatibleCryptoKey2({\n type: \"secret\",\n extractable,\n algorithm: { name: name3 },\n usages: 
Array.from(usages),\n _raw: keyObject._raw,\n _sourceKeyObjectData: {\n type: \"secret\",\n raw: keyObject._raw\n }\n });\n }\n if (name3 === \"HMAC\") {\n if (!secretBytes.byteLength || algo.length === 0) {\n throw createDomException2(\"Zero-length key is not supported\", \"DataError\");\n }\n if (!usages.length) {\n throw new SyntaxError(\"Usages cannot be empty when importing a secret key.\");\n }\n return createCompatibleCryptoKey2({\n type: \"secret\",\n extractable,\n algorithm: {\n name: name3,\n hash: typeof algo.hash === \"string\" ? { name: algo.hash } : cloneObject2(algo.hash),\n length: secretBytes.byteLength * 8\n },\n usages: Array.from(usages),\n _raw: keyObject._raw,\n _sourceKeyObjectData: {\n type: \"secret\",\n raw: keyObject._raw\n }\n });\n }\n return createCompatibleCryptoKey2({\n type: \"secret\",\n extractable,\n algorithm: {\n name: name3,\n length: secretBytes.byteLength * 8\n },\n usages: Array.from(usages),\n _raw: keyObject._raw,\n _sourceKeyObjectData: {\n type: \"secret\",\n raw: keyObject._raw\n }\n });\n }\n var keyType = String(keyObject.asymmetricKeyType || \"\").toLowerCase();\n var algorithmName = String(name3 || \"\");\n if ((keyType === \"ed25519\" || keyType === \"ed448\" || keyType === \"x25519\" || keyType === \"x448\") && keyType !== algorithmName.toLowerCase()) {\n throw createDomException2(\"Invalid key type\", \"DataError\");\n }\n if (algorithmName === \"ECDH\") {\n if (keyObject.type === \"private\" && !usages.length) {\n throw new SyntaxError(\"Usages cannot be empty when importing a private key.\");\n }\n var actualCurve = normalizeNamedCurve2(\n keyObject.asymmetricKeyDetails && keyObject.asymmetricKeyDetails.namedCurve\n );\n if (algo.namedCurve && actualCurve && normalizeNamedCurve2(algo.namedCurve) !== actualCurve) {\n throw createDomException2(\"Named curve mismatch\", \"DataError\");\n }\n }\n var normalizedAlgo = cloneObject2(algo);\n if (typeof normalizedAlgo.hash === \"string\") {\n normalizedAlgo.hash = { 
name: normalizedAlgo.hash };\n }\n return createCompatibleCryptoKey2({\n type: keyObject.type,\n extractable,\n algorithm: normalizedAlgo,\n usages: Array.from(usages),\n _pem: keyObject._pem,\n _jwk: cloneObject2(keyObject._jwk),\n _sourceKeyObjectData: {\n type: keyObject.type,\n pem: keyObject._pem,\n jwk: cloneObject2(keyObject._jwk),\n asymmetricKeyType: keyObject.asymmetricKeyType,\n asymmetricKeyDetails: cloneObject2(keyObject.asymmetricKeyDetails)\n }\n });\n }, createAsymmetricKeyObject2 = function(type, key) {\n if (typeof key === \"string\") {\n if (key.indexOf(\"-----BEGIN\") === -1) {\n throw new TypeError(\"error:0900006e:PEM routines:OPENSSL_internal:NO_START_LINE\");\n }\n return new SandboxKeyObject2(type, { pem: key });\n }\n if (key && typeof key === \"object\" && key._pem) {\n return new SandboxKeyObject2(type, {\n pem: key._pem,\n jwk: key._jwk,\n asymmetricKeyType: key.asymmetricKeyType,\n asymmetricKeyDetails: key.asymmetricKeyDetails\n });\n }\n if (key && typeof key === \"object\" && key.key) {\n var keyData = typeof key.key === \"string\" ? 
key.key : key.key.toString(\"utf8\");\n return new SandboxKeyObject2(type, { pem: keyData });\n }\n if (Buffer.isBuffer(key)) {\n var keyStr = key.toString(\"utf8\");\n if (keyStr.indexOf(\"-----BEGIN\") === -1) {\n throw new TypeError(\"error:0900006e:PEM routines:OPENSSL_internal:NO_START_LINE\");\n }\n return new SandboxKeyObject2(type, { pem: keyStr });\n }\n return new SandboxKeyObject2(type, { pem: String(key) });\n }, createGeneratedKeyObject2 = function(value) {\n return new SandboxKeyObject2(value.type, {\n pem: value.pem,\n raw: value.raw,\n jwk: value.jwk,\n asymmetricKeyType: value.asymmetricKeyType,\n asymmetricKeyDetails: value.asymmetricKeyDetails\n });\n };\n var restoreBridgeValue = restoreBridgeValue2, cloneObject = cloneObject2, createDomException = createDomException2, toRawBuffer = toRawBuffer2, serializeBridgeValue = serializeBridgeValue2, normalizeCryptoBridgeError = normalizeCryptoBridgeError2, deserializeGeneratedKeyValue = deserializeGeneratedKeyValue2, serializeBridgeOptions = serializeBridgeOptions2, createInvalidArgTypeError = createInvalidArgTypeError2, scheduleCryptoCallback = scheduleCryptoCallback2, shouldThrowCryptoValidationError = shouldThrowCryptoValidationError2, ensureCryptoCallback = ensureCryptoCallback2, SandboxKeyObject = SandboxKeyObject2, normalizeNamedCurve = normalizeNamedCurve2, normalizeAlgorithmInput = normalizeAlgorithmInput2, createCompatibleCryptoKey = createCompatibleCryptoKey2, buildCryptoKeyFromKeyObject = buildCryptoKeyFromKeyObject2, createAsymmetricKeyObject = createAsymmetricKeyObject2, createGeneratedKeyObject = createGeneratedKeyObject2;\n Object.defineProperty(SandboxKeyObject2.prototype, Symbol.toStringTag, {\n value: \"KeyObject\",\n configurable: true\n });\n SandboxKeyObject2.prototype.export = function exportKey(options) {\n if (this.type === \"secret\") {\n return Buffer.from(this._raw || \"\", \"base64\");\n }\n if (!options || typeof options !== \"object\") {\n throw new TypeError('The 
\"options\" argument must be of type object.');\n }\n if (options.format === \"jwk\") {\n return cloneObject2(this._jwk);\n }\n if (options.format === \"der\") {\n var lines = String(this._pem || \"\").split(\"\\n\").filter(function(l) {\n return l && l.indexOf(\"-----\") !== 0;\n });\n return Buffer.from(lines.join(\"\"), \"base64\");\n }\n return this._pem;\n };\n SandboxKeyObject2.prototype.toString = function() {\n return \"[object KeyObject]\";\n };\n SandboxKeyObject2.prototype.equals = function equals(other) {\n if (!(other instanceof SandboxKeyObject2)) {\n return false;\n }\n if (this.type !== other.type) {\n return false;\n }\n if (this.type === \"secret\") {\n return (this._raw || \"\") === (other._raw || \"\");\n }\n return (this._pem || \"\") === (other._pem || \"\") && this.asymmetricKeyType === other.asymmetricKeyType;\n };\n SandboxKeyObject2.prototype.toCryptoKey = function toCryptoKey(algorithm, extractable, usages) {\n return buildCryptoKeyFromKeyObject2(this, algorithm, extractable, Array.from(usages || []));\n };\n result2.generateKeyPairSync = function generateKeyPairSync(type, options) {\n var resultJson = _cryptoGenerateKeyPairSync.applySync(void 0, [\n type,\n serializeBridgeOptions2(options)\n ]);\n var parsed = JSON.parse(resultJson);\n if (parsed.publicKey && parsed.publicKey.kind) {\n return {\n publicKey: deserializeGeneratedKeyValue2(parsed.publicKey),\n privateKey: deserializeGeneratedKeyValue2(parsed.privateKey)\n };\n }\n return {\n publicKey: createGeneratedKeyObject2(parsed.publicKey),\n privateKey: createGeneratedKeyObject2(parsed.privateKey)\n };\n };\n result2.generateKeyPair = function generateKeyPair(type, options, callback) {\n if (typeof options === \"function\") {\n callback = options;\n options = void 0;\n }\n callback = ensureCryptoCallback2(callback, function() {\n result2.generateKeyPairSync(type, options);\n });\n try {\n var pair = result2.generateKeyPairSync(type, options);\n scheduleCryptoCallback2(callback, 
[null, pair.publicKey, pair.privateKey]);\n } catch (e) {\n if (shouldThrowCryptoValidationError2(e)) {\n throw e;\n }\n scheduleCryptoCallback2(callback, [e]);\n }\n };\n if (typeof _cryptoGenerateKeySync !== \"undefined\") {\n result2.generateKeySync = function generateKeySync(type, options) {\n var resultJson;\n try {\n resultJson = _cryptoGenerateKeySync.applySync(void 0, [\n type,\n serializeBridgeOptions2(options)\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError2(error);\n }\n return createGeneratedKeyObject2(JSON.parse(resultJson));\n };\n result2.generateKey = function generateKey(type, options, callback) {\n callback = ensureCryptoCallback2(callback, function() {\n result2.generateKeySync(type, options);\n });\n try {\n var key = result2.generateKeySync(type, options);\n scheduleCryptoCallback2(callback, [null, key]);\n } catch (e) {\n if (shouldThrowCryptoValidationError2(e)) {\n throw e;\n }\n scheduleCryptoCallback2(callback, [e]);\n }\n };\n }\n if (typeof _cryptoGeneratePrimeSync !== \"undefined\") {\n result2.generatePrimeSync = function generatePrimeSync(size, options) {\n var resultJson;\n try {\n resultJson = _cryptoGeneratePrimeSync.applySync(void 0, [\n size,\n serializeBridgeOptions2(options)\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError2(error);\n }\n return restoreBridgeValue2(JSON.parse(resultJson));\n };\n result2.generatePrime = function generatePrime(size, options, callback) {\n if (typeof options === \"function\") {\n callback = options;\n options = void 0;\n }\n callback = ensureCryptoCallback2(callback, function() {\n result2.generatePrimeSync(size, options);\n });\n try {\n var prime = result2.generatePrimeSync(size, options);\n scheduleCryptoCallback2(callback, [null, prime]);\n } catch (e) {\n if (shouldThrowCryptoValidationError2(e)) {\n throw e;\n }\n scheduleCryptoCallback2(callback, [e]);\n }\n };\n }\n result2.createPublicKey = function createPublicKey(key) {\n if (typeof _cryptoCreateKeyObject !== 
\"undefined\") {\n var resultJson;\n try {\n resultJson = _cryptoCreateKeyObject.applySync(void 0, [\n \"createPublicKey\",\n JSON.stringify(serializeBridgeValue2(key))\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError2(error);\n }\n return createGeneratedKeyObject2(JSON.parse(resultJson));\n }\n return createAsymmetricKeyObject2(\"public\", key);\n };\n result2.createPrivateKey = function createPrivateKey(key) {\n if (typeof _cryptoCreateKeyObject !== \"undefined\") {\n var resultJson;\n try {\n resultJson = _cryptoCreateKeyObject.applySync(void 0, [\n \"createPrivateKey\",\n JSON.stringify(serializeBridgeValue2(key))\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError2(error);\n }\n return createGeneratedKeyObject2(JSON.parse(resultJson));\n }\n return createAsymmetricKeyObject2(\"private\", key);\n };\n result2.createSecretKey = function createSecretKey(key, encoding) {\n return new SandboxKeyObject2(\"secret\", {\n raw: toRawBuffer2(key, encoding).toString(\"base64\")\n });\n };\n SandboxKeyObject2.from = function from(key) {\n if (!key || typeof key !== \"object\" || key[Symbol.toStringTag] !== \"CryptoKey\") {\n throw new TypeError('The \"key\" argument must be an instance of CryptoKey.');\n }\n if (key._sourceKeyObjectData && key._sourceKeyObjectData.type === \"secret\") {\n return new SandboxKeyObject2(\"secret\", {\n raw: key._sourceKeyObjectData.raw\n });\n }\n return new SandboxKeyObject2(key.type, {\n pem: key._pem,\n jwk: key._jwk,\n asymmetricKeyType: key._sourceKeyObjectData && key._sourceKeyObjectData.asymmetricKeyType,\n asymmetricKeyDetails: key._sourceKeyObjectData && key._sourceKeyObjectData.asymmetricKeyDetails\n });\n };\n result2.KeyObject = SandboxKeyObject2;\n }\n if (typeof _cryptoSubtle !== \"undefined\") {\n let SandboxCryptoKey2 = function(keyData) {\n this.type = keyData.type;\n this.extractable = keyData.extractable;\n this.algorithm = keyData.algorithm;\n this.usages = keyData.usages;\n this._keyData = keyData;\n 
this._pem = keyData._pem;\n this._jwk = keyData._jwk;\n this._raw = keyData._raw;\n this._sourceKeyObjectData = keyData._sourceKeyObjectData;\n }, toBase642 = function(data) {\n if (typeof data === \"string\") return Buffer.from(data).toString(\"base64\");\n if (data instanceof ArrayBuffer) return Buffer.from(new Uint8Array(data)).toString(\"base64\");\n if (ArrayBuffer.isView(data)) return Buffer.from(new Uint8Array(data.buffer, data.byteOffset, data.byteLength)).toString(\"base64\");\n return Buffer.from(data).toString(\"base64\");\n }, subtleCall2 = function(reqObj) {\n return _cryptoSubtle.applySync(void 0, [JSON.stringify(reqObj)]);\n }, normalizeAlgo2 = function(algorithm) {\n if (typeof algorithm === \"string\") return { name: algorithm };\n return algorithm;\n };\n var SandboxCryptoKey = SandboxCryptoKey2, toBase64 = toBase642, subtleCall = subtleCall2, normalizeAlgo = normalizeAlgo2;\n Object.defineProperty(SandboxCryptoKey2.prototype, Symbol.toStringTag, {\n value: \"CryptoKey\",\n configurable: true\n });\n Object.defineProperty(SandboxCryptoKey2, Symbol.hasInstance, {\n value: function(candidate) {\n return !!(candidate && typeof candidate === \"object\" && (candidate._keyData || candidate[Symbol.toStringTag] === \"CryptoKey\"));\n },\n configurable: true\n });\n if (globalThis.CryptoKey && globalThis.CryptoKey.prototype && globalThis.CryptoKey.prototype !== SandboxCryptoKey2.prototype) {\n Object.setPrototypeOf(SandboxCryptoKey2.prototype, globalThis.CryptoKey.prototype);\n }\n if (typeof globalThis.CryptoKey === \"undefined\") {\n __requireExposeCustomGlobal(\"CryptoKey\", SandboxCryptoKey2);\n } else if (globalThis.CryptoKey !== SandboxCryptoKey2) {\n globalThis.CryptoKey = SandboxCryptoKey2;\n }\n var SandboxSubtle = {};\n SandboxSubtle.digest = function digest(algorithm, data) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var result22 = JSON.parse(subtleCall2({\n op: \"digest\",\n algorithm: algo.name,\n 
data: toBase642(data)\n }));\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.generateKey = function generateKey(algorithm, extractable, keyUsages) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.hash) reqAlgo.hash = normalizeAlgo2(reqAlgo.hash);\n if (reqAlgo.publicExponent) {\n reqAlgo.publicExponent = Buffer.from(new Uint8Array(reqAlgo.publicExponent.buffer || reqAlgo.publicExponent)).toString(\"base64\");\n }\n var result22 = JSON.parse(subtleCall2({\n op: \"generateKey\",\n algorithm: reqAlgo,\n extractable,\n usages: Array.from(keyUsages)\n }));\n if (result22.publicKey && result22.privateKey) {\n return {\n publicKey: new SandboxCryptoKey2(result22.publicKey),\n privateKey: new SandboxCryptoKey2(result22.privateKey)\n };\n }\n return new SandboxCryptoKey2(result22.key);\n });\n };\n SandboxSubtle.importKey = function importKey(format, keyData, algorithm, extractable, keyUsages) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.hash) reqAlgo.hash = normalizeAlgo2(reqAlgo.hash);\n var serializedKeyData;\n if (format === \"jwk\") {\n serializedKeyData = keyData;\n } else if (format === \"raw\") {\n serializedKeyData = toBase642(keyData);\n } else {\n serializedKeyData = toBase642(keyData);\n }\n var result22 = JSON.parse(subtleCall2({\n op: \"importKey\",\n format,\n keyData: serializedKeyData,\n algorithm: reqAlgo,\n extractable,\n usages: Array.from(keyUsages)\n }));\n return new SandboxCryptoKey2(result22.key);\n });\n };\n SandboxSubtle.exportKey = function exportKey(format, key) {\n return Promise.resolve().then(function() {\n var result22 = JSON.parse(subtleCall2({\n op: \"exportKey\",\n format,\n key: key._keyData\n }));\n if (format === \"jwk\") return 
result22.jwk;\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.encrypt = function encrypt(algorithm, key, data) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.iv) reqAlgo.iv = toBase642(reqAlgo.iv);\n if (reqAlgo.additionalData) reqAlgo.additionalData = toBase642(reqAlgo.additionalData);\n var result22 = JSON.parse(subtleCall2({\n op: \"encrypt\",\n algorithm: reqAlgo,\n key: key._keyData,\n data: toBase642(data)\n }));\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.decrypt = function decrypt(algorithm, key, data) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.iv) reqAlgo.iv = toBase642(reqAlgo.iv);\n if (reqAlgo.additionalData) reqAlgo.additionalData = toBase642(reqAlgo.additionalData);\n var result22 = JSON.parse(subtleCall2({\n op: \"decrypt\",\n algorithm: reqAlgo,\n key: key._keyData,\n data: toBase642(data)\n }));\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.sign = function sign(algorithm, key, data) {\n return Promise.resolve().then(function() {\n var result22 = JSON.parse(subtleCall2({\n op: \"sign\",\n algorithm: normalizeAlgo2(algorithm),\n key: key._keyData,\n data: toBase642(data)\n }));\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.verify = function verify(algorithm, key, signature, data) {\n return Promise.resolve().then(function() {\n var result22 = JSON.parse(subtleCall2({\n op: \"verify\",\n algorithm: normalizeAlgo2(algorithm),\n key: 
key._keyData,\n signature: toBase642(signature),\n data: toBase642(data)\n }));\n return result22.result;\n });\n };\n SandboxSubtle.deriveBits = function deriveBits(algorithm, baseKey, length) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.salt) reqAlgo.salt = toBase642(reqAlgo.salt);\n if (reqAlgo.info) reqAlgo.info = toBase642(reqAlgo.info);\n var result22 = JSON.parse(subtleCall2({\n op: \"deriveBits\",\n algorithm: reqAlgo,\n baseKey: baseKey._keyData,\n length\n }));\n return Buffer.from(result22.data, \"base64\").buffer;\n });\n };\n SandboxSubtle.deriveKey = function deriveKey(algorithm, baseKey, derivedKeyAlgorithm, extractable, keyUsages) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.salt) reqAlgo.salt = toBase642(reqAlgo.salt);\n if (reqAlgo.info) reqAlgo.info = toBase642(reqAlgo.info);\n var result22 = JSON.parse(subtleCall2({\n op: \"deriveKey\",\n algorithm: reqAlgo,\n baseKey: baseKey._keyData,\n derivedKeyAlgorithm: normalizeAlgo2(derivedKeyAlgorithm),\n extractable,\n usages: keyUsages\n }));\n return new SandboxCryptoKey2(result22.key);\n });\n };\n if (globalThis.crypto && globalThis.crypto.subtle && typeof globalThis.crypto.subtle.importKey === \"function\") {\n result2.subtle = globalThis.crypto.subtle;\n result2.webcrypto = globalThis.crypto;\n } else {\n result2.subtle = SandboxSubtle;\n result2.webcrypto = { subtle: SandboxSubtle, getRandomValues: result2.randomFillSync };\n }\n }\n if (typeof result2.getCurves !== \"function\") {\n result2.getCurves = function getCurves() {\n return [\n \"prime256v1\",\n \"secp256r1\",\n \"secp384r1\",\n \"secp521r1\",\n \"secp256k1\",\n \"secp224r1\",\n \"secp192k1\"\n ];\n };\n }\n if (typeof result2.getCiphers !== \"function\") {\n result2.getCiphers = function getCiphers() {\n return [\n \"aes-128-cbc\",\n 
\"aes-128-gcm\",\n \"aes-192-cbc\",\n \"aes-192-gcm\",\n \"aes-256-cbc\",\n \"aes-256-gcm\",\n \"aes-128-ctr\",\n \"aes-192-ctr\",\n \"aes-256-ctr\"\n ];\n };\n }\n if (typeof result2.getHashes !== \"function\") {\n result2.getHashes = function getHashes() {\n return [\"md5\", \"sha1\", \"sha256\", \"sha384\", \"sha512\"];\n };\n }\n if (typeof result2.timingSafeEqual !== \"function\") {\n result2.timingSafeEqual = function timingSafeEqual(a, b) {\n if (a.length !== b.length) {\n throw new RangeError(\"Input buffers must have the same byte length\");\n }\n var out = 0;\n for (var i = 0; i < a.length; i++) {\n out |= a[i] ^ b[i];\n }\n return out === 0;\n };\n }\n if (typeof result2.getFips !== \"function\") {\n result2.getFips = function getFips() {\n return 0;\n };\n }\n if (typeof result2.setFips !== \"function\") {\n result2.setFips = function setFips() {\n throw new Error(\"FIPS mode is not supported in sandbox\");\n };\n }\n return result2;\n }\n if (name2 === \"stream\") {\n if (typeof result2 === \"function\" && result2.prototype && typeof result2.Readable === \"function\") {\n var readableProto = result2.Readable.prototype;\n var streamProto = result2.prototype;\n if (readableProto && streamProto && !(readableProto instanceof result2)) {\n var currentParent = Object.getPrototypeOf(readableProto);\n Object.setPrototypeOf(streamProto, currentParent);\n Object.setPrototypeOf(readableProto, streamProto);\n }\n }\n return result2;\n }\n if (name2 === \"path\") {\n if (result2.win32 === null || result2.win32 === void 0) {\n result2.win32 = result2.posix || result2;\n }\n if (result2.posix === null || result2.posix === void 0) {\n result2.posix = result2;\n }\n const hasAbsoluteSegment = function(args) {\n return args.some(function(arg) {\n return typeof arg === \"string\" && arg.length > 0 && arg.charAt(0) === \"/\";\n });\n };\n const prependCwd = function(args) {\n if (hasAbsoluteSegment(args)) return;\n if (typeof process !== \"undefined\" && typeof 
process.cwd === \"function\") {\n const cwd = process.cwd();\n if (cwd && cwd.charAt(0) === \"/\") {\n args.unshift(cwd);\n }\n }\n };\n const originalResolve = result2.resolve;\n if (typeof originalResolve === \"function\" && !originalResolve._patchedForCwd) {\n const patchedResolve = function resolve2() {\n const args = Array.from(arguments);\n prependCwd(args);\n return originalResolve.apply(this, args);\n };\n patchedResolve._patchedForCwd = true;\n result2.resolve = patchedResolve;\n }\n if (result2.posix && typeof result2.posix.resolve === \"function\" && !result2.posix.resolve._patchedForCwd) {\n const originalPosixResolve = result2.posix.resolve;\n const patchedPosixResolve = function resolve2() {\n const args = Array.from(arguments);\n prependCwd(args);\n return originalPosixResolve.apply(this, args);\n };\n patchedPosixResolve._patchedForCwd = true;\n result2.posix.resolve = patchedPosixResolve;\n }\n }\n return result2;\n }\n var _deferredCoreModules = /* @__PURE__ */ new Set([\n \"readline\",\n \"perf_hooks\",\n \"async_hooks\",\n \"worker_threads\",\n \"diagnostics_channel\"\n ]);\n var _unsupportedCoreModules = /* @__PURE__ */ new Set([\n \"dgram\",\n \"cluster\",\n \"wasi\",\n \"inspector\",\n \"repl\",\n \"trace_events\",\n \"domain\"\n ]);\n function _unsupportedApiError(moduleName2, apiName) {\n return new Error(moduleName2 + \".\" + apiName + \" is not supported in sandbox\");\n }\n function _createDeferredModuleStub(moduleName2) {\n const methodCache = {};\n let stub = null;\n stub = new Proxy({}, {\n get(_target, prop) {\n if (prop === \"__esModule\") return false;\n if (prop === \"default\") return stub;\n if (prop === Symbol.toStringTag) return \"Module\";\n if (prop === \"then\") return void 0;\n if (typeof prop !== \"string\") return void 0;\n if (!methodCache[prop]) {\n methodCache[prop] = function deferredApiStub() {\n throw _unsupportedApiError(moduleName2, prop);\n };\n }\n return methodCache[prop];\n }\n });\n return stub;\n }\n var 
__internalModuleCache = _moduleCache;\n var __require = function require2(moduleName2) {\n return _requireFrom(moduleName2, _currentModule.dirname);\n };\n __requireExposeCustomGlobal(\"require\", __require);\n function _resolveFrom(moduleName2, fromDir2) {\n var resolved2;\n if (typeof _resolveModuleSync !== \"undefined\") {\n resolved2 = _resolveModuleSync.applySync(void 0, [moduleName2, fromDir2]);\n }\n if (resolved2 === null || resolved2 === void 0) {\n resolved2 = _resolveModule.applySyncPromise(void 0, [moduleName2, fromDir2, \"require\"]);\n }\n if (resolved2 === null) {\n const err = new Error(\"Cannot find module '\" + moduleName2 + \"'\");\n err.code = \"MODULE_NOT_FOUND\";\n throw err;\n }\n return resolved2;\n }\n globalThis.require.resolve = function resolve(moduleName2) {\n return _resolveFrom(moduleName2, _currentModule.dirname);\n };\n function _debugRequire(phase, moduleName2, extra) {\n if (globalThis.__sandboxRequireDebug !== true) {\n return;\n }\n if (moduleName2 !== \"rivetkit\" && moduleName2 !== \"@rivetkit/traces\" && moduleName2 !== \"@rivetkit/on-change\" && moduleName2 !== \"async_hooks\" && !moduleName2.startsWith(\"rivetkit/\") && !moduleName2.startsWith(\"@rivetkit/\")) {\n return;\n }\n if (typeof console !== \"undefined\" && typeof console.log === \"function\") {\n console.log(\n \"[sandbox.require] \" + phase + \" \" + moduleName2 + (extra ? 
\" \" + extra : \"\")\n );\n }\n }\n function _requireFrom(moduleName, fromDir) {\n _debugRequire(\"start\", moduleName, fromDir);\n const name = moduleName.replace(/^node:/, \"\");\n let cacheKey = name;\n let resolved = null;\n const isRelative = name.startsWith(\"./\") || name.startsWith(\"../\");\n if (!isRelative && __internalModuleCache[name]) {\n _debugRequire(\"cache-hit\", name, name);\n return __internalModuleCache[name];\n }\n if (name === \"fs\") {\n if (__internalModuleCache[\"fs\"]) return __internalModuleCache[\"fs\"];\n const fsModule = globalThis.bridge?.fs || globalThis.bridge?.default || globalThis._fsModule || {};\n __internalModuleCache[\"fs\"] = fsModule;\n _debugRequire(\"loaded\", name, \"fs-special\");\n return fsModule;\n }\n if (name === \"fs/promises\") {\n if (__internalModuleCache[\"fs/promises\"]) return __internalModuleCache[\"fs/promises\"];\n const fsModule = _requireFrom(\"fs\", fromDir);\n __internalModuleCache[\"fs/promises\"] = fsModule.promises;\n _debugRequire(\"loaded\", name, \"fs-promises-special\");\n return fsModule.promises;\n }\n if (name === \"stream/promises\") {\n if (__internalModuleCache[\"stream/promises\"]) return __internalModuleCache[\"stream/promises\"];\n const streamModule = _requireFrom(\"stream\", fromDir);\n const promisesModule = {\n finished(stream, options) {\n return new Promise(function(resolve2, reject) {\n if (typeof streamModule.finished !== \"function\") {\n resolve2();\n return;\n }\n if (options && typeof options === \"object\" && !Array.isArray(options)) {\n streamModule.finished(stream, options, function(error) {\n if (error) {\n reject(error);\n return;\n }\n resolve2();\n });\n return;\n }\n streamModule.finished(stream, function(error) {\n if (error) {\n reject(error);\n return;\n }\n resolve2();\n });\n });\n },\n pipeline() {\n const args = Array.prototype.slice.call(arguments);\n return new Promise(function(resolve2, reject) {\n if (typeof streamModule.pipeline !== \"function\") {\n 
reject(new Error(\"stream.pipeline is not supported in sandbox\"));\n return;\n }\n args.push(function(error) {\n if (error) {\n reject(error);\n return;\n }\n resolve2();\n });\n streamModule.pipeline.apply(streamModule, args);\n });\n }\n };\n __internalModuleCache[\"stream/promises\"] = promisesModule;\n _debugRequire(\"loaded\", name, \"stream-promises-special\");\n return promisesModule;\n }\n if (name === \"child_process\") {\n if (__internalModuleCache[\"child_process\"]) return __internalModuleCache[\"child_process\"];\n __internalModuleCache[\"child_process\"] = _childProcessModule;\n _debugRequire(\"loaded\", name, \"child-process-special\");\n return _childProcessModule;\n }\n if (name === \"net\") {\n if (__internalModuleCache[\"net\"]) return __internalModuleCache[\"net\"];\n __internalModuleCache[\"net\"] = _netModule;\n _debugRequire(\"loaded\", name, \"net-special\");\n return _netModule;\n }\n if (name === \"tls\") {\n if (__internalModuleCache[\"tls\"]) return __internalModuleCache[\"tls\"];\n __internalModuleCache[\"tls\"] = _tlsModule;\n _debugRequire(\"loaded\", name, \"tls-special\");\n return _tlsModule;\n }\n if (name === \"http\") {\n if (__internalModuleCache[\"http\"]) return __internalModuleCache[\"http\"];\n __internalModuleCache[\"http\"] = _httpModule;\n _debugRequire(\"loaded\", name, \"http-special\");\n return _httpModule;\n }\n if (name === \"_http_agent\") {\n if (__internalModuleCache[\"_http_agent\"]) return __internalModuleCache[\"_http_agent\"];\n const httpAgentModule = {\n Agent: _httpModule.Agent,\n globalAgent: _httpModule.globalAgent\n };\n __internalModuleCache[\"_http_agent\"] = httpAgentModule;\n _debugRequire(\"loaded\", name, \"http-agent-special\");\n return httpAgentModule;\n }\n if (name === \"https\") {\n if (__internalModuleCache[\"https\"]) return __internalModuleCache[\"https\"];\n __internalModuleCache[\"https\"] = _httpsModule;\n _debugRequire(\"loaded\", name, \"https-special\");\n return _httpsModule;\n 
}\n if (name === \"http2\") {\n if (__internalModuleCache[\"http2\"]) return __internalModuleCache[\"http2\"];\n __internalModuleCache[\"http2\"] = _http2Module;\n _debugRequire(\"loaded\", name, \"http2-special\");\n return _http2Module;\n }\n if (name === \"dns\") {\n if (__internalModuleCache[\"dns\"]) return __internalModuleCache[\"dns\"];\n __internalModuleCache[\"dns\"] = _dnsModule;\n _debugRequire(\"loaded\", name, \"dns-special\");\n return _dnsModule;\n }\n if (name === \"os\") {\n if (__internalModuleCache[\"os\"]) return __internalModuleCache[\"os\"];\n __internalModuleCache[\"os\"] = _osModule;\n _debugRequire(\"loaded\", name, \"os-special\");\n return _osModule;\n }\n if (name === \"module\") {\n if (__internalModuleCache[\"module\"]) return __internalModuleCache[\"module\"];\n __internalModuleCache[\"module\"] = _moduleModule;\n _debugRequire(\"loaded\", name, \"module-special\");\n return _moduleModule;\n }\n if (name === \"process\") {\n _debugRequire(\"loaded\", name, \"process-special\");\n return globalThis.process;\n }\n if (name === \"async_hooks\") {\n if (__internalModuleCache[\"async_hooks\"]) return __internalModuleCache[\"async_hooks\"];\n class AsyncLocalStorage {\n constructor() {\n this._store = void 0;\n }\n run(store, callback) {\n const previousStore = this._store;\n this._store = store;\n try {\n const args = Array.prototype.slice.call(arguments, 2);\n return callback.apply(void 0, args);\n } finally {\n this._store = previousStore;\n }\n }\n enterWith(store) {\n this._store = store;\n }\n getStore() {\n return this._store;\n }\n disable() {\n this._store = void 0;\n }\n exit(callback) {\n const previousStore = this._store;\n this._store = void 0;\n try {\n const args = Array.prototype.slice.call(arguments, 1);\n return callback.apply(void 0, args);\n } finally {\n this._store = previousStore;\n }\n }\n }\n class AsyncResource {\n constructor(type) {\n this.type = type;\n }\n runInAsyncScope(callback, thisArg) {\n const args = 
Array.prototype.slice.call(arguments, 2);\n return callback.apply(thisArg, args);\n }\n emitDestroy() {\n }\n }\n const asyncHooksModule = {\n AsyncLocalStorage,\n AsyncResource,\n createHook() {\n return {\n enable() {\n return this;\n },\n disable() {\n return this;\n }\n };\n },\n executionAsyncId() {\n return 1;\n },\n triggerAsyncId() {\n return 0;\n },\n executionAsyncResource() {\n return null;\n }\n };\n __internalModuleCache[\"async_hooks\"] = asyncHooksModule;\n _debugRequire(\"loaded\", name, \"async-hooks-special\");\n return asyncHooksModule;\n }\n if (name === \"diagnostics_channel\") {\n let _createChannel2 = function() {\n return {\n hasSubscribers: false,\n publish: function() {\n },\n subscribe: function() {\n },\n unsubscribe: function() {\n }\n };\n };\n var _createChannel = _createChannel2;\n if (__internalModuleCache[name]) return __internalModuleCache[name];\n const dcModule = {\n channel: function() {\n return _createChannel2();\n },\n hasSubscribers: function() {\n return false;\n },\n tracingChannel: function() {\n return {\n start: _createChannel2(),\n end: _createChannel2(),\n asyncStart: _createChannel2(),\n asyncEnd: _createChannel2(),\n error: _createChannel2(),\n traceSync: function(fn, context, thisArg) {\n var args = Array.prototype.slice.call(arguments, 3);\n return fn.apply(thisArg, args);\n },\n tracePromise: function(fn, context, thisArg) {\n var args = Array.prototype.slice.call(arguments, 3);\n return fn.apply(thisArg, args);\n },\n traceCallback: function(fn, context, thisArg) {\n var args = Array.prototype.slice.call(arguments, 3);\n return fn.apply(thisArg, args);\n }\n };\n },\n Channel: function Channel(name2) {\n this.hasSubscribers = false;\n this.publish = function() {\n };\n this.subscribe = function() {\n };\n this.unsubscribe = function() {\n };\n }\n };\n __internalModuleCache[name] = dcModule;\n _debugRequire(\"loaded\", name, \"diagnostics-channel-special\");\n return dcModule;\n }\n if 
(_deferredCoreModules.has(name)) {\n if (__internalModuleCache[name]) return __internalModuleCache[name];\n const deferredStub = _createDeferredModuleStub(name);\n __internalModuleCache[name] = deferredStub;\n _debugRequire(\"loaded\", name, \"deferred-stub\");\n return deferredStub;\n }\n if (_unsupportedCoreModules.has(name)) {\n throw new Error(name + \" is not supported in sandbox\");\n }\n const polyfillCode = _loadPolyfill.applySyncPromise(void 0, [name]);\n if (polyfillCode !== null) {\n if (__internalModuleCache[name]) return __internalModuleCache[name];\n const moduleObj = { exports: {} };\n _pendingModules[name] = moduleObj;\n let result = eval(polyfillCode);\n result = _patchPolyfill(name, result);\n if (typeof result === \"object\" && result !== null) {\n Object.assign(moduleObj.exports, result);\n } else {\n moduleObj.exports = result;\n }\n __internalModuleCache[name] = moduleObj.exports;\n delete _pendingModules[name];\n _debugRequire(\"loaded\", name, \"polyfill\");\n return __internalModuleCache[name];\n }\n resolved = _resolveFrom(name, fromDir);\n cacheKey = resolved;\n if (__internalModuleCache[cacheKey]) {\n _debugRequire(\"cache-hit\", name, cacheKey);\n return __internalModuleCache[cacheKey];\n }\n if (_pendingModules[cacheKey]) {\n _debugRequire(\"pending-hit\", name, cacheKey);\n return _pendingModules[cacheKey].exports;\n }\n var source;\n if (typeof _loadFileSync !== \"undefined\") {\n source = _loadFileSync.applySync(void 0, [resolved]);\n }\n if (source === null || source === void 0) {\n source = _loadFile.applySyncPromise(void 0, [resolved, \"require\"]);\n }\n if (source === null) {\n const err = new Error(\"Cannot find module '\" + resolved + \"'\");\n err.code = \"MODULE_NOT_FOUND\";\n throw err;\n }\n if (resolved.endsWith(\".json\")) {\n const parsed = JSON.parse(source);\n __internalModuleCache[cacheKey] = parsed;\n return parsed;\n }\n const normalizedSource = typeof source === \"string\" ? 
source.replace(/import\\.meta\\.url/g, \"__filename\").replace(/fileURLToPath\\(__filename\\)/g, \"__filename\").replace(/url\\.fileURLToPath\\(__filename\\)/g, \"__filename\").replace(/fileURLToPath\\.call\\(void 0, __filename\\)/g, \"__filename\") : source;\n const module = {\n exports: {},\n filename: resolved,\n dirname: _dirname(resolved),\n id: resolved,\n loaded: false\n };\n _pendingModules[cacheKey] = module;\n const prevModule = _currentModule;\n _currentModule = module;\n try {\n let wrapper;\n try {\n wrapper = new Function(\n \"exports\",\n \"require\",\n \"module\",\n \"__filename\",\n \"__dirname\",\n \"__dynamicImport\",\n normalizedSource + \"\\n//# sourceURL=\" + resolved\n );\n } catch (error) {\n const details = error && error.stack ? error.stack : String(error);\n throw new Error(\"failed to compile module \" + resolved + \": \" + details);\n }\n const moduleRequire = function(request) {\n return _requireFrom(request, module.dirname);\n };\n moduleRequire.resolve = function(request) {\n return _resolveFrom(request, module.dirname);\n };\n const moduleDynamicImport = function(specifier) {\n if (typeof globalThis.__dynamicImport === \"function\") {\n return globalThis.__dynamicImport(specifier, module.dirname);\n }\n return Promise.reject(new Error(\"Dynamic import is not initialized\"));\n };\n wrapper(\n module.exports,\n moduleRequire,\n module,\n resolved,\n module.dirname,\n moduleDynamicImport\n );\n module.loaded = true;\n } catch (error) {\n const details = error && error.stack ? 
error.stack : String(error);\n throw new Error(\"failed to execute module \" + resolved + \": \" + details);\n } finally {\n _currentModule = prevModule;\n }\n __internalModuleCache[cacheKey] = module.exports;\n delete _pendingModules[cacheKey];\n _debugRequire(\"loaded\", name, cacheKey);\n return module.exports;\n }\n __requireExposeCustomGlobal(\"_requireFrom\", _requireFrom);\n var __moduleCacheProxy = new Proxy(__internalModuleCache, {\n get(target, prop, receiver) {\n return Reflect.get(target, prop, receiver);\n },\n set(_target, prop) {\n throw new TypeError(\"Cannot set require.cache['\" + String(prop) + \"']\");\n },\n deleteProperty(_target, prop) {\n throw new TypeError(\"Cannot delete require.cache['\" + String(prop) + \"']\");\n },\n defineProperty(_target, prop) {\n throw new TypeError(\"Cannot define property '\" + String(prop) + \"' on require.cache\");\n },\n has(target, prop) {\n return Reflect.has(target, prop);\n },\n ownKeys(target) {\n return Reflect.ownKeys(target);\n },\n getOwnPropertyDescriptor(target, prop) {\n return Reflect.getOwnPropertyDescriptor(target, prop);\n }\n });\n globalThis.require.cache = __moduleCacheProxy;\n Object.defineProperty(globalThis, \"_moduleCache\", {\n value: __moduleCacheProxy,\n writable: false,\n configurable: true,\n enumerable: false\n });\n if (typeof _moduleModule !== \"undefined\") {\n if (_moduleModule.Module) {\n _moduleModule.Module._cache = __moduleCacheProxy;\n }\n _moduleModule._cache = __moduleCacheProxy;\n }\n})();\n", + "requireSetup": "\"use strict\";\n(() => {\n // ../core/isolate-runtime/src/inject/require-setup.ts\n var __requireExposeCustomGlobal = typeof globalThis.__runtimeExposeCustomGlobal === \"function\" ? 
globalThis.__runtimeExposeCustomGlobal : function exposeCustomGlobal(name2, value) {\n Object.defineProperty(globalThis, name2, {\n value,\n writable: false,\n configurable: false,\n enumerable: true\n });\n };\n if (typeof globalThis.AbortController === \"undefined\" || typeof globalThis.AbortSignal === \"undefined\" || typeof globalThis.AbortSignal?.prototype?.addEventListener !== \"function\" || typeof globalThis.AbortSignal?.prototype?.removeEventListener !== \"function\") {\n let getAbortSignalState = function(signal) {\n const state = abortSignalState.get(signal);\n if (!state) {\n throw new Error(\"Invalid AbortSignal\");\n }\n return state;\n };\n getAbortSignalState2 = getAbortSignalState;\n const abortSignalState = /* @__PURE__ */ new WeakMap();\n class AbortSignal {\n constructor() {\n this.onabort = null;\n abortSignalState.set(this, {\n aborted: false,\n reason: void 0,\n listeners: []\n });\n }\n get aborted() {\n return getAbortSignalState(this).aborted;\n }\n get reason() {\n return getAbortSignalState(this).reason;\n }\n get _listeners() {\n return getAbortSignalState(this).listeners.slice();\n }\n getEventListeners(type) {\n if (type !== \"abort\") return [];\n return getAbortSignalState(this).listeners.slice();\n }\n addEventListener(type, listener) {\n if (type !== \"abort\" || typeof listener !== \"function\") return;\n getAbortSignalState(this).listeners.push(listener);\n }\n removeEventListener(type, listener) {\n if (type !== \"abort\" || typeof listener !== \"function\") return;\n const listeners = getAbortSignalState(this).listeners;\n const index = listeners.indexOf(listener);\n if (index !== -1) {\n listeners.splice(index, 1);\n }\n }\n dispatchEvent(event) {\n if (!event || event.type !== \"abort\") return false;\n if (typeof this.onabort === \"function\") {\n try {\n this.onabort.call(this, event);\n } catch {\n }\n }\n const listeners = getAbortSignalState(this).listeners.slice();\n for (const listener of listeners) {\n try {\n 
listener.call(this, event);\n } catch {\n }\n }\n return true;\n }\n }\n class AbortController {\n constructor() {\n this.signal = new AbortSignal();\n }\n abort(reason) {\n const state = getAbortSignalState(this.signal);\n if (state.aborted) return;\n state.aborted = true;\n state.reason = reason;\n this.signal.dispatchEvent({ type: \"abort\" });\n }\n }\n __requireExposeCustomGlobal(\"AbortSignal\", AbortSignal);\n __requireExposeCustomGlobal(\"AbortController\", AbortController);\n }\n var getAbortSignalState2;\n if (typeof globalThis.AbortSignal === \"function\" && typeof globalThis.AbortController === \"function\" && typeof globalThis.AbortSignal.abort !== \"function\") {\n globalThis.AbortSignal.abort = function abort(reason) {\n const controller = new globalThis.AbortController();\n controller.abort(reason);\n return controller.signal;\n };\n }\n if (typeof globalThis.structuredClone !== \"function\") {\n let structuredClonePolyfill = function(value) {\n if (value === null || typeof value !== \"object\") {\n return value;\n }\n if (value instanceof ArrayBuffer) {\n return value.slice(0);\n }\n if (ArrayBuffer.isView(value)) {\n if (value instanceof Uint8Array) {\n return new Uint8Array(value);\n }\n return new value.constructor(value);\n }\n return JSON.parse(JSON.stringify(value));\n };\n structuredClonePolyfill2 = structuredClonePolyfill;\n __requireExposeCustomGlobal(\"structuredClone\", structuredClonePolyfill);\n }\n var structuredClonePolyfill2;\n if (typeof globalThis.SharedArrayBuffer === \"undefined\") {\n globalThis.SharedArrayBuffer = ArrayBuffer;\n __requireExposeCustomGlobal(\"SharedArrayBuffer\", ArrayBuffer);\n }\n if (typeof globalThis.btoa !== \"function\") {\n __requireExposeCustomGlobal(\"btoa\", function btoa(input) {\n return Buffer.from(String(input), \"binary\").toString(\"base64\");\n });\n }\n if (typeof globalThis.atob !== \"function\") {\n __requireExposeCustomGlobal(\"atob\", function atob(input) {\n return 
Buffer.from(String(input), \"base64\").toString(\"binary\");\n });\n }\n function _dirname(p) {\n const lastSlash = p.lastIndexOf(\"/\");\n if (lastSlash === -1) return \".\";\n if (lastSlash === 0) return \"/\";\n return p.slice(0, lastSlash);\n }\n if (typeof globalThis.TextDecoder === \"function\") {\n _OrigTextDecoder = globalThis.TextDecoder;\n _utf8Aliases = {\n \"utf-8\": true,\n \"utf8\": true,\n \"unicode-1-1-utf-8\": true,\n \"ascii\": true,\n \"us-ascii\": true,\n \"iso-8859-1\": true,\n \"latin1\": true,\n \"binary\": true,\n \"windows-1252\": true,\n \"utf-16le\": true,\n \"utf-16\": true,\n \"ucs-2\": true,\n \"ucs2\": true\n };\n globalThis.TextDecoder = function TextDecoder(encoding, options) {\n var label = encoding !== void 0 ? String(encoding).toLowerCase().replace(/\\s/g, \"\") : \"utf-8\";\n if (_utf8Aliases[label]) {\n return new _OrigTextDecoder(\"utf-8\", options);\n }\n return new _OrigTextDecoder(encoding, options);\n };\n globalThis.TextDecoder.prototype = _OrigTextDecoder.prototype;\n }\n var _OrigTextDecoder;\n var _utf8Aliases;\n function _patchPolyfill(name2, result2) {\n if (typeof result2 !== \"object\" && typeof result2 !== \"function\" || result2 === null) {\n return result2;\n }\n if (name2 === \"buffer\") {\n const maxLength = typeof result2.kMaxLength === \"number\" ? result2.kMaxLength : 2147483647;\n const maxStringLength = typeof result2.kStringMaxLength === \"number\" ? 
result2.kStringMaxLength : 536870888;\n if (typeof result2.constants !== \"object\" || result2.constants === null) {\n result2.constants = {};\n }\n if (typeof result2.constants.MAX_LENGTH !== \"number\") {\n result2.constants.MAX_LENGTH = maxLength;\n }\n if (typeof result2.constants.MAX_STRING_LENGTH !== \"number\") {\n result2.constants.MAX_STRING_LENGTH = maxStringLength;\n }\n if (typeof result2.kMaxLength !== \"number\") {\n result2.kMaxLength = maxLength;\n }\n if (typeof result2.kStringMaxLength !== \"number\") {\n result2.kStringMaxLength = maxStringLength;\n }\n const BufferCtor = result2.Buffer;\n if ((typeof BufferCtor === \"function\" || typeof BufferCtor === \"object\") && BufferCtor !== null) {\n if (typeof BufferCtor.kMaxLength !== \"number\") {\n BufferCtor.kMaxLength = maxLength;\n }\n if (typeof BufferCtor.kStringMaxLength !== \"number\") {\n BufferCtor.kStringMaxLength = maxStringLength;\n }\n if (typeof BufferCtor.constants !== \"object\" || BufferCtor.constants === null) {\n BufferCtor.constants = result2.constants;\n }\n var proto = BufferCtor.prototype;\n if (proto && typeof proto.utf8Slice !== \"function\") {\n var encodings = [\"utf8\", \"latin1\", \"ascii\", \"hex\", \"base64\", \"ucs2\", \"utf16le\"];\n for (var ei = 0; ei < encodings.length; ei++) {\n var enc = encodings[ei];\n (function(e) {\n if (typeof proto[e + \"Slice\"] !== \"function\") {\n proto[e + \"Slice\"] = function(start, end) {\n return this.toString(e, start, end);\n };\n }\n if (typeof proto[e + \"Write\"] !== \"function\") {\n proto[e + \"Write\"] = function(string, offset, length) {\n return this.write(string, offset, length, e);\n };\n }\n })(enc);\n }\n }\n if (typeof BufferCtor.allocUnsafe === \"function\" && !BufferCtor.allocUnsafe._secureExecPatched) {\n var _origAllocUnsafe = BufferCtor.allocUnsafe;\n BufferCtor.allocUnsafe = function(size) {\n try {\n return _origAllocUnsafe.apply(this, arguments);\n } catch (error) {\n if (error && error.name === 
\"RangeError\" && typeof size === \"number\" && size > maxLength) {\n throw new Error(\"Array buffer allocation failed\");\n }\n throw error;\n }\n };\n BufferCtor.allocUnsafe._secureExecPatched = true;\n }\n }\n return result2;\n }\n if (name2 === \"util\" && typeof result2.formatWithOptions === \"undefined\" && typeof result2.format === \"function\") {\n result2.formatWithOptions = function formatWithOptions(inspectOptions, ...args) {\n return result2.format.apply(null, args);\n };\n }\n if (name2 === \"util\") {\n if (typeof result2.inspect === \"function\" && typeof result2.inspect.custom === \"undefined\") {\n result2.inspect.custom = /* @__PURE__ */ Symbol.for(\"nodejs.util.inspect.custom\");\n }\n if (typeof result2.inspect === \"function\" && !result2.inspect._secureExecPatchedCustomInspect) {\n const customInspectSymbol = result2.inspect.custom || /* @__PURE__ */ Symbol.for(\"nodejs.util.inspect.custom\");\n const originalInspect = result2.inspect;\n const formatObjectKey = function(key) {\n return /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(key) ? 
key : originalInspect(key);\n };\n const containsCustomInspectable = function(value, depth, seen) {\n if (value === null) {\n return false;\n }\n if (typeof value !== \"object\" && typeof value !== \"function\") {\n return false;\n }\n if (typeof value[customInspectSymbol] === \"function\") {\n return true;\n }\n if (depth < 0 || seen.has(value)) {\n return false;\n }\n seen.add(value);\n if (Array.isArray(value)) {\n for (const entry of value) {\n if (containsCustomInspectable(entry, depth - 1, seen)) {\n seen.delete(value);\n return true;\n }\n }\n seen.delete(value);\n return false;\n }\n for (const key of Object.keys(value)) {\n if (containsCustomInspectable(value[key], depth - 1, seen)) {\n seen.delete(value);\n return true;\n }\n }\n seen.delete(value);\n return false;\n };\n const inspectWithCustom = function(value, depth, options, seen) {\n if (value === null || typeof value !== \"object\" && typeof value !== \"function\") {\n return originalInspect(value, options);\n }\n if (seen.has(value)) {\n return \"[Circular]\";\n }\n if (typeof value[customInspectSymbol] === \"function\") {\n return value[customInspectSymbol](depth, options, result2.inspect);\n }\n if (depth < 0) {\n return originalInspect(value, options);\n }\n seen.add(value);\n if (Array.isArray(value)) {\n const items = value.map((entry) => inspectWithCustom(entry, depth - 1, options, seen));\n seen.delete(value);\n return `[ ${items.join(\", \")} ]`;\n }\n const proto2 = Object.getPrototypeOf(value);\n if (proto2 === Object.prototype || proto2 === null) {\n const entries = Object.keys(value).map(\n (key) => `${formatObjectKey(key)}: ${inspectWithCustom(value[key], depth - 1, options, seen)}`\n );\n seen.delete(value);\n return `{ ${entries.join(\", \")} }`;\n }\n seen.delete(value);\n return originalInspect(value, options);\n };\n result2.inspect = function inspect(value, options) {\n const inspectOptions = typeof options === \"object\" && options !== null ? 
options : {};\n const depth = typeof inspectOptions.depth === \"number\" ? inspectOptions.depth : 2;\n if (!containsCustomInspectable(value, depth, /* @__PURE__ */ new Set())) {\n return originalInspect.call(this, value, options);\n }\n return inspectWithCustom(value, depth, inspectOptions, /* @__PURE__ */ new Set());\n };\n result2.inspect.custom = customInspectSymbol;\n result2.inspect._secureExecPatchedCustomInspect = true;\n }\n return result2;\n }\n if (name2 === \"events\") {\n if (typeof result2.getEventListeners !== \"function\") {\n result2.getEventListeners = function getEventListeners(target, eventName) {\n if (target && typeof target.listeners === \"function\") {\n return target.listeners(eventName);\n }\n if (target && typeof target.getEventListeners === \"function\") {\n return target.getEventListeners(eventName);\n }\n if (target && eventName === \"abort\" && Array.isArray(target._listeners)) {\n return target._listeners.slice();\n }\n return [];\n };\n }\n return result2;\n }\n if (name2 === \"stream\") {\n const ReadableCtor = result2.Readable;\n const readableFrom = typeof ReadableCtor === \"function\" ? ReadableCtor.from : void 0;\n const readableFromSource = typeof readableFrom === \"function\" ? 
Function.prototype.toString.call(readableFrom) : \"\";\n const hasBrowserReadableFromStub = readableFromSource.indexOf(\n \"Readable.from is not available in the browser\"\n ) !== -1 || readableFromSource.indexOf(\"require_from_browser\") !== -1;\n if (typeof ReadableCtor === \"function\" && (typeof readableFrom !== \"function\" || hasBrowserReadableFromStub)) {\n ReadableCtor.from = function from(iterable, options) {\n const readable = new ReadableCtor(Object.assign({ read() {\n } }, options || {}));\n Promise.resolve().then(async function() {\n try {\n if (iterable && typeof iterable[Symbol.asyncIterator] === \"function\") {\n for await (const chunk of iterable) {\n readable.push(chunk);\n }\n } else if (iterable && typeof iterable[Symbol.iterator] === \"function\") {\n for (const chunk of iterable) {\n readable.push(chunk);\n }\n } else {\n readable.push(iterable);\n }\n readable.push(null);\n } catch (error) {\n if (typeof readable.destroy === \"function\") {\n readable.destroy(error);\n } else {\n readable.emit(\"error\", error);\n }\n }\n });\n return readable;\n };\n }\n return result2;\n }\n if (name2 === \"url\") {\n const OriginalURL = result2.URL;\n if (typeof OriginalURL !== \"function\" || OriginalURL._patched) {\n return result2;\n }\n const PatchedURL = function PatchedURL2(url, base) {\n if (typeof url === \"string\" && url.startsWith(\"file:\") && !url.startsWith(\"file://\") && base === void 0) {\n if (typeof process !== \"undefined\" && typeof process.cwd === \"function\") {\n const cwd = process.cwd();\n if (cwd) {\n try {\n return new OriginalURL(url, \"file://\" + cwd + \"/\");\n } catch (e) {\n }\n }\n }\n }\n return base !== void 0 ? 
new OriginalURL(url, base) : new OriginalURL(url);\n };\n Object.keys(OriginalURL).forEach(function(key) {\n try {\n PatchedURL[key] = OriginalURL[key];\n } catch {\n }\n });\n Object.setPrototypeOf(PatchedURL, OriginalURL);\n PatchedURL.prototype = OriginalURL.prototype;\n PatchedURL._patched = true;\n const descriptor = Object.getOwnPropertyDescriptor(result2, \"URL\");\n if (descriptor && descriptor.configurable !== true && descriptor.writable !== true && typeof descriptor.set !== \"function\") {\n return result2;\n }\n try {\n result2.URL = PatchedURL;\n } catch {\n try {\n Object.defineProperty(result2, \"URL\", {\n value: PatchedURL,\n writable: true,\n configurable: true,\n enumerable: descriptor?.enumerable ?? true\n });\n } catch {\n }\n }\n return result2;\n }\n if (name2 === \"zlib\") {\n if (typeof result2.constants !== \"object\" || result2.constants === null) {\n var zlibConstants = {};\n var constKeys = Object.keys(result2);\n for (var ci = 0; ci < constKeys.length; ci++) {\n var ck = constKeys[ci];\n if (ck.indexOf(\"Z_\") === 0 && typeof result2[ck] === \"number\") {\n zlibConstants[ck] = result2[ck];\n }\n }\n if (typeof zlibConstants.DEFLATE !== \"number\") zlibConstants.DEFLATE = 1;\n if (typeof zlibConstants.INFLATE !== \"number\") zlibConstants.INFLATE = 2;\n if (typeof zlibConstants.GZIP !== \"number\") zlibConstants.GZIP = 3;\n if (typeof zlibConstants.DEFLATERAW !== \"number\") zlibConstants.DEFLATERAW = 4;\n if (typeof zlibConstants.INFLATERAW !== \"number\") zlibConstants.INFLATERAW = 5;\n if (typeof zlibConstants.UNZIP !== \"number\") zlibConstants.UNZIP = 6;\n if (typeof zlibConstants.GUNZIP !== \"number\") zlibConstants.GUNZIP = 7;\n result2.constants = zlibConstants;\n }\n return result2;\n }\n if (name2 === \"crypto\") {\n let createCryptoRangeError2 = function(name3, message) {\n var error = new RangeError(message);\n error.code = \"ERR_OUT_OF_RANGE\";\n error.name = \"RangeError\";\n return error;\n }, createCryptoError2 = 
function(code, message) {\n var error = new Error(message);\n error.code = code;\n return error;\n }, encodeCryptoResult2 = function(buffer, encoding) {\n if (!encoding || encoding === \"buffer\") return buffer;\n return buffer.toString(encoding);\n }, isSharedArrayBufferInstance2 = function(value) {\n return typeof SharedArrayBuffer !== \"undefined\" && value instanceof SharedArrayBuffer;\n }, isBinaryLike2 = function(value) {\n return Buffer.isBuffer(value) || ArrayBuffer.isView(value) || value instanceof ArrayBuffer || isSharedArrayBufferInstance2(value);\n }, normalizeByteSource2 = function(value, name3, options) {\n var allowNull = options && options.allowNull;\n if (allowNull && value === null) {\n return null;\n }\n if (typeof value === \"string\") {\n return Buffer.from(value, \"utf8\");\n }\n if (Buffer.isBuffer(value)) {\n return Buffer.from(value);\n }\n if (ArrayBuffer.isView(value)) {\n return Buffer.from(value.buffer, value.byteOffset, value.byteLength);\n }\n if (value instanceof ArrayBuffer || isSharedArrayBufferInstance2(value)) {\n return Buffer.from(value);\n }\n throw createInvalidArgTypeError(\n name3,\n \"of type string or an instance of ArrayBuffer, Buffer, TypedArray, or DataView\",\n value\n );\n }, serializeCipherBridgeOptions2 = function(options) {\n if (!options) {\n return \"\";\n }\n var serialized = {};\n if (options.authTagLength !== void 0) {\n serialized.authTagLength = options.authTagLength;\n }\n if (options.authTag) {\n serialized.authTag = options.authTag.toString(\"base64\");\n }\n if (options.aad) {\n serialized.aad = options.aad.toString(\"base64\");\n }\n if (options.aadOptions !== void 0) {\n serialized.aadOptions = options.aadOptions;\n }\n if (options.autoPadding !== void 0) {\n serialized.autoPadding = options.autoPadding;\n }\n if (options.validateOnly !== void 0) {\n serialized.validateOnly = options.validateOnly;\n }\n return JSON.stringify(serialized);\n };\n var createCryptoRangeError = createCryptoRangeError2, 
createCryptoError = createCryptoError2, encodeCryptoResult = encodeCryptoResult2, isSharedArrayBufferInstance = isSharedArrayBufferInstance2, isBinaryLike = isBinaryLike2, normalizeByteSource = normalizeByteSource2, serializeCipherBridgeOptions = serializeCipherBridgeOptions2;\n var _runtimeRequire = globalThis.require;\n var _streamModule = _runtimeRequire && _runtimeRequire(\"stream\");\n var _utilModule = _runtimeRequire && _runtimeRequire(\"util\");\n var _Transform = _streamModule && _streamModule.Transform;\n var _inherits = _utilModule && _utilModule.inherits;\n if (typeof _cryptoHashDigest !== \"undefined\") {\n let SandboxHash2 = function(algorithm, options) {\n if (!(this instanceof SandboxHash2)) {\n return new SandboxHash2(algorithm, options);\n }\n if (!_Transform || !_inherits) {\n throw new Error(\"stream.Transform is required for crypto.Hash\");\n }\n if (typeof algorithm !== \"string\") {\n throw createInvalidArgTypeError(\"algorithm\", \"of type string\", algorithm);\n }\n _Transform.call(this, options);\n this._algorithm = algorithm;\n this._chunks = [];\n this._finalized = false;\n this._cachedDigest = null;\n this._allowCachedDigest = false;\n };\n var SandboxHash = SandboxHash2;\n _inherits(SandboxHash2, _Transform);\n SandboxHash2.prototype.update = function update(data, inputEncoding) {\n if (this._finalized) {\n throw createCryptoError2(\"ERR_CRYPTO_HASH_FINALIZED\", \"Digest already called\");\n }\n if (typeof data === \"string\") {\n this._chunks.push(Buffer.from(data, inputEncoding || \"utf8\"));\n } else if (isBinaryLike2(data)) {\n this._chunks.push(Buffer.from(data));\n } else {\n throw createInvalidArgTypeError(\n \"data\",\n \"one of type string, Buffer, TypedArray, or DataView\",\n data\n );\n }\n return this;\n };\n SandboxHash2.prototype._finishDigest = function _finishDigest() {\n if (this._cachedDigest) {\n return this._cachedDigest;\n }\n var combined = Buffer.concat(this._chunks);\n var resultBase64 = 
_cryptoHashDigest.applySync(void 0, [\n this._algorithm,\n combined.toString(\"base64\")\n ]);\n this._cachedDigest = Buffer.from(resultBase64, \"base64\");\n this._finalized = true;\n return this._cachedDigest;\n };\n SandboxHash2.prototype.digest = function digest(encoding) {\n if (this._finalized && !this._allowCachedDigest) {\n throw createCryptoError2(\"ERR_CRYPTO_HASH_FINALIZED\", \"Digest already called\");\n }\n var resultBuffer = this._finishDigest();\n this._allowCachedDigest = false;\n return encodeCryptoResult2(resultBuffer, encoding);\n };\n SandboxHash2.prototype.copy = function copy() {\n if (this._finalized) {\n throw createCryptoError2(\"ERR_CRYPTO_HASH_FINALIZED\", \"Digest already called\");\n }\n var c = new SandboxHash2(this._algorithm);\n c._chunks = this._chunks.slice();\n return c;\n };\n SandboxHash2.prototype._transform = function _transform(chunk, encoding, callback) {\n try {\n this.update(chunk, encoding === \"buffer\" ? void 0 : encoding);\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n SandboxHash2.prototype._flush = function _flush(callback) {\n try {\n var output = this._finishDigest();\n this._allowCachedDigest = true;\n this.push(output);\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n result2.createHash = function createHash(algorithm, options) {\n return new SandboxHash2(algorithm, options);\n };\n result2.Hash = SandboxHash2;\n }\n if (typeof _cryptoHmacDigest !== \"undefined\") {\n let SandboxHmac2 = function(algorithm, key) {\n this._algorithm = algorithm;\n if (typeof key === \"string\") {\n this._key = Buffer.from(key, \"utf8\");\n } else if (key && typeof key === \"object\" && key._pem !== void 0) {\n this._key = Buffer.from(key._pem, \"utf8\");\n } else {\n this._key = Buffer.from(key);\n }\n this._chunks = [];\n };\n var SandboxHmac = SandboxHmac2;\n SandboxHmac2.prototype.update = function update(data, inputEncoding) {\n if (typeof 
data === \"string\") {\n this._chunks.push(Buffer.from(data, inputEncoding || \"utf8\"));\n } else {\n this._chunks.push(Buffer.from(data));\n }\n return this;\n };\n SandboxHmac2.prototype.digest = function digest(encoding) {\n var combined = Buffer.concat(this._chunks);\n var resultBase64 = _cryptoHmacDigest.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n combined.toString(\"base64\")\n ]);\n var resultBuffer = Buffer.from(resultBase64, \"base64\");\n if (!encoding || encoding === \"buffer\") return resultBuffer;\n return resultBuffer.toString(encoding);\n };\n SandboxHmac2.prototype.copy = function copy() {\n var c = new SandboxHmac2(this._algorithm, this._key);\n c._chunks = this._chunks.slice();\n return c;\n };\n SandboxHmac2.prototype.write = function write(data, encoding) {\n this.update(data, encoding);\n return true;\n };\n SandboxHmac2.prototype.end = function end(data, encoding) {\n if (data) this.update(data, encoding);\n };\n result2.createHmac = function createHmac(algorithm, key) {\n return new SandboxHmac2(algorithm, key);\n };\n result2.Hmac = SandboxHmac2;\n }\n if (typeof _cryptoRandomFill !== \"undefined\") {\n result2.randomBytes = function randomBytes(size, callback) {\n if (typeof size !== \"number\" || size < 0 || size !== (size | 0)) {\n var err = new TypeError('The \"size\" argument must be of type number. Received type ' + typeof size);\n if (typeof callback === \"function\") {\n callback(err);\n return;\n }\n throw err;\n }\n if (size > 2147483647) {\n var rangeErr = new RangeError('The value of \"size\" is out of range. It must be >= 0 && <= 2147483647. 
Received ' + size);\n if (typeof callback === \"function\") {\n callback(rangeErr);\n return;\n }\n throw rangeErr;\n }\n var buf = Buffer.alloc(size);\n var offset = 0;\n while (offset < size) {\n var chunk = Math.min(size - offset, 65536);\n var base64 = _cryptoRandomFill.applySync(void 0, [chunk]);\n var hostBytes = Buffer.from(base64, \"base64\");\n hostBytes.copy(buf, offset);\n offset += chunk;\n }\n if (typeof callback === \"function\") {\n callback(null, buf);\n return;\n }\n return buf;\n };\n result2.randomFillSync = function randomFillSync(buffer, offset, size) {\n if (offset === void 0) offset = 0;\n var byteLength = buffer.byteLength !== void 0 ? buffer.byteLength : buffer.length;\n if (size === void 0) size = byteLength - offset;\n if (offset < 0 || size < 0 || offset + size > byteLength) {\n throw new RangeError('The value of \"offset + size\" is out of range.');\n }\n var bytes = new Uint8Array(buffer.buffer || buffer, buffer.byteOffset ? buffer.byteOffset + offset : offset, size);\n var filled = 0;\n while (filled < size) {\n var chunk = Math.min(size - filled, 65536);\n var base64 = _cryptoRandomFill.applySync(void 0, [chunk]);\n var hostBytes = Buffer.from(base64, \"base64\");\n bytes.set(hostBytes, filled);\n filled += chunk;\n }\n return buffer;\n };\n result2.randomFill = function randomFill(buffer, offsetOrCb, sizeOrCb, callback) {\n var offset = 0;\n var size;\n var cb;\n if (typeof offsetOrCb === \"function\") {\n cb = offsetOrCb;\n } else if (typeof sizeOrCb === \"function\") {\n offset = offsetOrCb || 0;\n cb = sizeOrCb;\n } else {\n offset = offsetOrCb || 0;\n size = sizeOrCb;\n cb = callback;\n }\n if (typeof cb !== \"function\") {\n throw new TypeError(\"Callback must be a function\");\n }\n try {\n result2.randomFillSync(buffer, offset, size);\n cb(null, buffer);\n } catch (e) {\n cb(e);\n }\n };\n result2.randomInt = function randomInt(minOrMax, maxOrCb, callback) {\n var min, max, cb;\n if (typeof maxOrCb === \"function\" || maxOrCb 
=== void 0) {\n min = 0;\n max = minOrMax;\n cb = maxOrCb;\n } else {\n min = minOrMax;\n max = maxOrCb;\n cb = callback;\n }\n if (!Number.isSafeInteger(min)) {\n var minErr = new TypeError('The \"min\" argument must be a safe integer');\n if (typeof cb === \"function\") {\n cb(minErr);\n return;\n }\n throw minErr;\n }\n if (!Number.isSafeInteger(max)) {\n var maxErr = new TypeError('The \"max\" argument must be a safe integer');\n if (typeof cb === \"function\") {\n cb(maxErr);\n return;\n }\n throw maxErr;\n }\n if (max <= min) {\n var rangeErr2 = new RangeError('The value of \"max\" is out of range. It must be greater than the value of \"min\" (' + min + \")\");\n if (typeof cb === \"function\") {\n cb(rangeErr2);\n return;\n }\n throw rangeErr2;\n }\n var range = max - min;\n var bytes = 6;\n var maxValid = Math.pow(2, 48) - Math.pow(2, 48) % range;\n var val;\n do {\n var base64 = _cryptoRandomFill.applySync(void 0, [bytes]);\n var buf = Buffer.from(base64, \"base64\");\n val = buf.readUIntBE(0, bytes);\n } while (val >= maxValid);\n var result22 = min + val % range;\n if (typeof cb === \"function\") {\n cb(null, result22);\n return;\n }\n return result22;\n };\n }\n if (typeof _cryptoPbkdf2 !== \"undefined\") {\n let createPbkdf2ArgTypeError2 = function(name3, value) {\n var received;\n if (value == null) {\n received = \" Received \" + value;\n } else if (typeof value === \"object\") {\n received = value.constructor && value.constructor.name ? \" Received an instance of \" + value.constructor.name : \" Received [object Object]\";\n } else {\n var inspected = typeof value === \"string\" ? \"'\" + value + \"'\" : String(value);\n received = \" Received type \" + typeof value + \" (\" + inspected + \")\";\n }\n var error = new TypeError('The \"' + name3 + '\" argument must be of type number.' 
+ received);\n error.code = \"ERR_INVALID_ARG_TYPE\";\n return error;\n }, validatePbkdf2Args2 = function(password, salt, iterations, keylen, digest) {\n var pwBuf = normalizeByteSource2(password, \"password\");\n var saltBuf = normalizeByteSource2(salt, \"salt\");\n if (typeof iterations !== \"number\") {\n throw createPbkdf2ArgTypeError2(\"iterations\", iterations);\n }\n if (!Number.isInteger(iterations)) {\n throw createCryptoRangeError2(\n \"iterations\",\n 'The value of \"iterations\" is out of range. It must be an integer. Received ' + iterations\n );\n }\n if (iterations < 1 || iterations > 2147483647) {\n throw createCryptoRangeError2(\n \"iterations\",\n 'The value of \"iterations\" is out of range. It must be >= 1 && <= 2147483647. Received ' + iterations\n );\n }\n if (typeof keylen !== \"number\") {\n throw createPbkdf2ArgTypeError2(\"keylen\", keylen);\n }\n if (!Number.isInteger(keylen)) {\n throw createCryptoRangeError2(\n \"keylen\",\n 'The value of \"keylen\" is out of range. It must be an integer. Received ' + keylen\n );\n }\n if (keylen < 0 || keylen > 2147483647) {\n throw createCryptoRangeError2(\n \"keylen\",\n 'The value of \"keylen\" is out of range. It must be >= 0 && <= 2147483647. 
Received ' + keylen\n );\n }\n if (typeof digest !== \"string\") {\n throw createInvalidArgTypeError(\"digest\", \"of type string\", digest);\n }\n return {\n password: pwBuf,\n salt: saltBuf\n };\n };\n var createPbkdf2ArgTypeError = createPbkdf2ArgTypeError2, validatePbkdf2Args = validatePbkdf2Args2;\n result2.pbkdf2Sync = function pbkdf2Sync(password, salt, iterations, keylen, digest) {\n var normalized = validatePbkdf2Args2(password, salt, iterations, keylen, digest);\n try {\n var resultBase64 = _cryptoPbkdf2.applySync(void 0, [\n normalized.password.toString(\"base64\"),\n normalized.salt.toString(\"base64\"),\n iterations,\n keylen,\n digest\n ]);\n return Buffer.from(resultBase64, \"base64\");\n } catch (error) {\n throw normalizeCryptoBridgeError(error);\n }\n };\n result2.pbkdf2 = function pbkdf2(password, salt, iterations, keylen, digest, callback) {\n if (typeof digest === \"function\" && callback === void 0) {\n callback = digest;\n digest = void 0;\n }\n if (typeof callback !== \"function\") {\n throw createInvalidArgTypeError(\"callback\", \"of type function\", callback);\n }\n try {\n var derived = result2.pbkdf2Sync(password, salt, iterations, keylen, digest);\n scheduleCryptoCallback(callback, [null, derived]);\n } catch (e) {\n throw normalizeCryptoBridgeError(e);\n }\n };\n }\n if (typeof _cryptoScrypt !== \"undefined\") {\n result2.scryptSync = function scryptSync(password, salt, keylen, options) {\n var pwBuf = typeof password === \"string\" ? Buffer.from(password, \"utf8\") : Buffer.from(password);\n var saltBuf = typeof salt === \"string\" ? 
Buffer.from(salt, \"utf8\") : Buffer.from(salt);\n var opts = {};\n if (options) {\n if (options.N !== void 0) opts.N = options.N;\n if (options.r !== void 0) opts.r = options.r;\n if (options.p !== void 0) opts.p = options.p;\n if (options.maxmem !== void 0) opts.maxmem = options.maxmem;\n if (options.cost !== void 0) opts.N = options.cost;\n if (options.blockSize !== void 0) opts.r = options.blockSize;\n if (options.parallelization !== void 0) opts.p = options.parallelization;\n }\n var resultBase64 = _cryptoScrypt.applySync(void 0, [\n pwBuf.toString(\"base64\"),\n saltBuf.toString(\"base64\"),\n keylen,\n JSON.stringify(opts)\n ]);\n return Buffer.from(resultBase64, \"base64\");\n };\n result2.scrypt = function scrypt(password, salt, keylen, optionsOrCb, callback) {\n var opts = optionsOrCb;\n var cb = callback;\n if (typeof optionsOrCb === \"function\") {\n opts = void 0;\n cb = optionsOrCb;\n }\n try {\n var derived = result2.scryptSync(password, salt, keylen, opts);\n cb(null, derived);\n } catch (e) {\n cb(e);\n }\n };\n }\n if (typeof _cryptoCipheriv !== \"undefined\") {\n let SandboxCipher2 = function(algorithm, key, iv, options) {\n if (!(this instanceof SandboxCipher2)) {\n return new SandboxCipher2(algorithm, key, iv, options);\n }\n if (typeof algorithm !== \"string\") {\n throw createInvalidArgTypeError(\"cipher\", \"of type string\", algorithm);\n }\n _Transform.call(this);\n this._algorithm = algorithm;\n this._key = normalizeByteSource2(key, \"key\");\n this._iv = normalizeByteSource2(iv, \"iv\", { allowNull: true });\n this._options = options || void 0;\n this._authTag = null;\n this._finalized = false;\n this._sessionCreated = false;\n this._sessionId = void 0;\n this._aad = null;\n this._aadOptions = void 0;\n this._autoPadding = void 0;\n this._chunks = [];\n this._bufferedMode = !_useSessionCipher || !!options;\n if (!this._bufferedMode) {\n this._ensureSession();\n } else if (!options) {\n _cryptoCipheriv.applySync(void 0, [\n 
this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? null : this._iv.toString(\"base64\"),\n \"\",\n serializeCipherBridgeOptions2({ validateOnly: true })\n ]);\n }\n };\n var SandboxCipher = SandboxCipher2;\n var _useSessionCipher = typeof _cryptoCipherivCreate !== \"undefined\";\n _inherits(SandboxCipher2, _Transform);\n SandboxCipher2.prototype._ensureSession = function _ensureSession() {\n if (this._bufferedMode || this._sessionCreated) {\n return;\n }\n this._sessionCreated = true;\n this._sessionId = _cryptoCipherivCreate.applySync(void 0, [\n \"cipher\",\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? null : this._iv.toString(\"base64\"),\n serializeCipherBridgeOptions2(this._getBridgeOptions())\n ]);\n };\n SandboxCipher2.prototype._getBridgeOptions = function _getBridgeOptions() {\n var options = {};\n if (this._options && this._options.authTagLength !== void 0) {\n options.authTagLength = this._options.authTagLength;\n }\n if (this._aad) {\n options.aad = this._aad;\n }\n if (this._aadOptions !== void 0) {\n options.aadOptions = this._aadOptions;\n }\n if (this._autoPadding !== void 0) {\n options.autoPadding = this._autoPadding;\n }\n return Object.keys(options).length === 0 ? 
null : options;\n };\n SandboxCipher2.prototype.update = function update(data, inputEncoding, outputEncoding) {\n if (this._finalized) {\n throw new Error(\"Attempting to call update() after final()\");\n }\n var buf;\n if (typeof data === \"string\") {\n buf = Buffer.from(data, inputEncoding || \"utf8\");\n } else {\n buf = normalizeByteSource2(data, \"data\");\n }\n if (!this._bufferedMode) {\n this._ensureSession();\n var resultBase64 = _cryptoCipherivUpdate.applySync(void 0, [this._sessionId, buf.toString(\"base64\")]);\n var resultBuffer = Buffer.from(resultBase64, \"base64\");\n return encodeCryptoResult2(resultBuffer, outputEncoding);\n }\n this._chunks.push(buf);\n return encodeCryptoResult2(Buffer.alloc(0), outputEncoding);\n };\n SandboxCipher2.prototype.final = function final(outputEncoding) {\n if (this._finalized) throw new Error(\"Attempting to call final() after already finalized\");\n this._finalized = true;\n var parsed;\n if (!this._bufferedMode) {\n this._ensureSession();\n var resultJson = _cryptoCipherivFinal.applySync(void 0, [this._sessionId]);\n parsed = JSON.parse(resultJson);\n } else {\n var combined = Buffer.concat(this._chunks);\n var resultJson2 = _cryptoCipheriv.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? 
null : this._iv.toString(\"base64\"),\n combined.toString(\"base64\"),\n serializeCipherBridgeOptions2(this._getBridgeOptions())\n ]);\n parsed = JSON.parse(resultJson2);\n }\n if (parsed.authTag) {\n this._authTag = Buffer.from(parsed.authTag, \"base64\");\n }\n var resultBuffer = Buffer.from(parsed.data, \"base64\");\n return encodeCryptoResult2(resultBuffer, outputEncoding);\n };\n SandboxCipher2.prototype.getAuthTag = function getAuthTag() {\n if (!this._finalized) throw new Error(\"Cannot call getAuthTag before final()\");\n if (!this._authTag) throw new Error(\"Auth tag is not available\");\n return this._authTag;\n };\n SandboxCipher2.prototype.setAAD = function setAAD(aad, options) {\n this._bufferedMode = true;\n this._aad = normalizeByteSource2(aad, \"buffer\");\n this._aadOptions = options;\n return this;\n };\n SandboxCipher2.prototype.setAutoPadding = function setAutoPadding(autoPadding) {\n this._bufferedMode = true;\n this._autoPadding = autoPadding !== false;\n return this;\n };\n SandboxCipher2.prototype._transform = function _transform(chunk, encoding, callback) {\n try {\n var output = this.update(chunk, encoding === \"buffer\" ? 
void 0 : encoding);\n if (output.length) {\n this.push(output);\n }\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n SandboxCipher2.prototype._flush = function _flush(callback) {\n try {\n var output = this.final();\n if (output.length) {\n this.push(output);\n }\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n result2.createCipheriv = function createCipheriv(algorithm, key, iv, options) {\n return new SandboxCipher2(algorithm, key, iv, options);\n };\n result2.Cipheriv = SandboxCipher2;\n }\n if (typeof _cryptoDecipheriv !== \"undefined\") {\n let SandboxDecipher2 = function(algorithm, key, iv, options) {\n if (!(this instanceof SandboxDecipher2)) {\n return new SandboxDecipher2(algorithm, key, iv, options);\n }\n if (typeof algorithm !== \"string\") {\n throw createInvalidArgTypeError(\"cipher\", \"of type string\", algorithm);\n }\n _Transform.call(this);\n this._algorithm = algorithm;\n this._key = normalizeByteSource2(key, \"key\");\n this._iv = normalizeByteSource2(iv, \"iv\", { allowNull: true });\n this._options = options || void 0;\n this._authTag = null;\n this._finalized = false;\n this._sessionCreated = false;\n this._aad = null;\n this._aadOptions = void 0;\n this._autoPadding = void 0;\n this._chunks = [];\n this._bufferedMode = !_useSessionCipher || !!options;\n if (!this._bufferedMode) {\n this._ensureSession();\n } else if (!options) {\n _cryptoDecipheriv.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? 
null : this._iv.toString(\"base64\"),\n \"\",\n serializeCipherBridgeOptions2({ validateOnly: true })\n ]);\n }\n };\n var SandboxDecipher = SandboxDecipher2;\n _inherits(SandboxDecipher2, _Transform);\n SandboxDecipher2.prototype._ensureSession = function _ensureSession() {\n if (!this._bufferedMode && !this._sessionCreated) {\n this._sessionCreated = true;\n this._sessionId = _cryptoCipherivCreate.applySync(void 0, [\n \"decipher\",\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? null : this._iv.toString(\"base64\"),\n serializeCipherBridgeOptions2(this._getBridgeOptions())\n ]);\n }\n };\n SandboxDecipher2.prototype._getBridgeOptions = function _getBridgeOptions() {\n var options = {};\n if (this._options && this._options.authTagLength !== void 0) {\n options.authTagLength = this._options.authTagLength;\n }\n if (this._authTag) {\n options.authTag = this._authTag;\n }\n if (this._aad) {\n options.aad = this._aad;\n }\n if (this._aadOptions !== void 0) {\n options.aadOptions = this._aadOptions;\n }\n if (this._autoPadding !== void 0) {\n options.autoPadding = this._autoPadding;\n }\n return Object.keys(options).length === 0 ? 
null : options;\n };\n SandboxDecipher2.prototype.update = function update(data, inputEncoding, outputEncoding) {\n if (this._finalized) {\n throw new Error(\"Attempting to call update() after final()\");\n }\n var buf;\n if (typeof data === \"string\") {\n buf = Buffer.from(data, inputEncoding || \"utf8\");\n } else {\n buf = normalizeByteSource2(data, \"data\");\n }\n if (!this._bufferedMode) {\n this._ensureSession();\n var resultBase64 = _cryptoCipherivUpdate.applySync(void 0, [this._sessionId, buf.toString(\"base64\")]);\n var resultBuffer = Buffer.from(resultBase64, \"base64\");\n return encodeCryptoResult2(resultBuffer, outputEncoding);\n }\n this._chunks.push(buf);\n return encodeCryptoResult2(Buffer.alloc(0), outputEncoding);\n };\n SandboxDecipher2.prototype.final = function final(outputEncoding) {\n if (this._finalized) throw new Error(\"Attempting to call final() after already finalized\");\n this._finalized = true;\n var resultBuffer;\n if (!this._bufferedMode) {\n this._ensureSession();\n var resultJson = _cryptoCipherivFinal.applySync(void 0, [this._sessionId]);\n var parsed = JSON.parse(resultJson);\n resultBuffer = Buffer.from(parsed.data, \"base64\");\n } else {\n var combined = Buffer.concat(this._chunks);\n var options = {};\n var resultBase64 = _cryptoDecipheriv.applySync(void 0, [\n this._algorithm,\n this._key.toString(\"base64\"),\n this._iv === null ? null : this._iv.toString(\"base64\"),\n combined.toString(\"base64\"),\n serializeCipherBridgeOptions2(this._getBridgeOptions())\n ]);\n resultBuffer = Buffer.from(resultBase64, \"base64\");\n }\n return encodeCryptoResult2(resultBuffer, outputEncoding);\n };\n SandboxDecipher2.prototype.setAuthTag = function setAuthTag(tag) {\n this._bufferedMode = true;\n this._authTag = typeof tag === \"string\" ? 
Buffer.from(tag, \"base64\") : normalizeByteSource2(tag, \"buffer\");\n return this;\n };\n SandboxDecipher2.prototype.setAAD = function setAAD(aad, options) {\n this._bufferedMode = true;\n this._aad = normalizeByteSource2(aad, \"buffer\");\n this._aadOptions = options;\n return this;\n };\n SandboxDecipher2.prototype.setAutoPadding = function setAutoPadding(autoPadding) {\n this._bufferedMode = true;\n this._autoPadding = autoPadding !== false;\n return this;\n };\n SandboxDecipher2.prototype._transform = function _transform(chunk, encoding, callback) {\n try {\n var output = this.update(chunk, encoding === \"buffer\" ? void 0 : encoding);\n if (output.length) {\n this.push(output);\n }\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n SandboxDecipher2.prototype._flush = function _flush(callback) {\n try {\n var output = this.final();\n if (output.length) {\n this.push(output);\n }\n callback();\n } catch (error) {\n callback(normalizeCryptoBridgeError(error));\n }\n };\n result2.createDecipheriv = function createDecipheriv(algorithm, key, iv, options) {\n return new SandboxDecipher2(algorithm, key, iv, options);\n };\n result2.Decipheriv = SandboxDecipher2;\n }\n if (typeof _cryptoSign !== \"undefined\") {\n result2.sign = function sign(algorithm, data, key) {\n var dataBuf = typeof data === \"string\" ? Buffer.from(data, \"utf8\") : Buffer.from(data);\n var sigBase64;\n try {\n sigBase64 = _cryptoSign.applySync(void 0, [\n algorithm === void 0 ? null : algorithm,\n dataBuf.toString(\"base64\"),\n JSON.stringify(serializeBridgeValue(key))\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError(error);\n }\n return Buffer.from(sigBase64, \"base64\");\n };\n }\n if (typeof _cryptoVerify !== \"undefined\") {\n result2.verify = function verify(algorithm, data, key, signature) {\n var dataBuf = typeof data === \"string\" ? Buffer.from(data, \"utf8\") : Buffer.from(data);\n var sigBuf = typeof signature === \"string\" ? 
Buffer.from(signature, \"base64\") : Buffer.from(signature);\n try {\n return _cryptoVerify.applySync(void 0, [\n algorithm === void 0 ? null : algorithm,\n dataBuf.toString(\"base64\"),\n JSON.stringify(serializeBridgeValue(key)),\n sigBuf.toString(\"base64\")\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError(error);\n }\n };\n }\n if (typeof _cryptoAsymmetricOp !== \"undefined\") {\n let asymmetricBridgeCall2 = function(operation, key, data) {\n var dataBuf = toRawBuffer(data);\n var resultBase64;\n try {\n resultBase64 = _cryptoAsymmetricOp.applySync(void 0, [\n operation,\n JSON.stringify(serializeBridgeValue(key)),\n dataBuf.toString(\"base64\")\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError(error);\n }\n return Buffer.from(resultBase64, \"base64\");\n };\n var asymmetricBridgeCall = asymmetricBridgeCall2;\n result2.publicEncrypt = function publicEncrypt(key, data) {\n return asymmetricBridgeCall2(\"publicEncrypt\", key, data);\n };\n result2.privateDecrypt = function privateDecrypt(key, data) {\n return asymmetricBridgeCall2(\"privateDecrypt\", key, data);\n };\n result2.privateEncrypt = function privateEncrypt(key, data) {\n return asymmetricBridgeCall2(\"privateEncrypt\", key, data);\n };\n result2.publicDecrypt = function publicDecrypt(key, data) {\n return asymmetricBridgeCall2(\"publicDecrypt\", key, data);\n };\n }\n if (typeof _cryptoDiffieHellmanSessionCreate !== \"undefined\" && typeof _cryptoDiffieHellmanSessionCall !== \"undefined\") {\n let serializeDhKeyObject2 = function(value) {\n if (value.type === \"secret\") {\n return {\n type: \"secret\",\n raw: Buffer.from(value.export()).toString(\"base64\")\n };\n }\n return {\n type: value.type,\n pem: value._pem || value.export({\n type: value.type === \"private\" ? 
\"pkcs8\" : \"spki\",\n format: \"pem\"\n })\n };\n }, serializeDhValue2 = function(value) {\n if (value === null || typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n return value;\n }\n if (Buffer.isBuffer(value)) {\n return {\n __type: \"buffer\",\n value: Buffer.from(value).toString(\"base64\")\n };\n }\n if (value instanceof ArrayBuffer) {\n return {\n __type: \"buffer\",\n value: Buffer.from(new Uint8Array(value)).toString(\"base64\")\n };\n }\n if (ArrayBuffer.isView(value)) {\n return {\n __type: \"buffer\",\n value: Buffer.from(value.buffer, value.byteOffset, value.byteLength).toString(\"base64\")\n };\n }\n if (typeof value === \"bigint\") {\n return {\n __type: \"bigint\",\n value: value.toString()\n };\n }\n if (value && typeof value === \"object\" && (value.type === \"public\" || value.type === \"private\" || value.type === \"secret\") && typeof value.export === \"function\") {\n return {\n __type: \"keyObject\",\n value: serializeDhKeyObject2(value)\n };\n }\n if (Array.isArray(value)) {\n return value.map(serializeDhValue2);\n }\n if (value && typeof value === \"object\") {\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n if (value[keys[i]] !== void 0) {\n output[keys[i]] = serializeDhValue2(value[keys[i]]);\n }\n }\n return output;\n }\n return String(value);\n }, restoreDhValue2 = function(value) {\n if (!value || typeof value !== \"object\") {\n return value;\n }\n if (value.__type === \"buffer\") {\n return Buffer.from(value.value, \"base64\");\n }\n if (value.__type === \"bigint\") {\n return BigInt(value.value);\n }\n if (Array.isArray(value)) {\n return value.map(restoreDhValue2);\n }\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n output[keys[i]] = restoreDhValue2(value[keys[i]]);\n }\n return output;\n }, createDhSession2 = function(type, name3, argsLike) {\n var args = [];\n for (var i = 0; i < 
argsLike.length; i++) {\n args.push(serializeDhValue2(argsLike[i]));\n }\n return _cryptoDiffieHellmanSessionCreate.applySync(void 0, [\n JSON.stringify({\n type,\n name: name3,\n args\n })\n ]);\n }, callDhSession2 = function(sessionId, method, argsLike) {\n var args = [];\n for (var i = 0; i < argsLike.length; i++) {\n args.push(serializeDhValue2(argsLike[i]));\n }\n var response = JSON.parse(_cryptoDiffieHellmanSessionCall.applySync(void 0, [\n sessionId,\n JSON.stringify({\n method,\n args\n })\n ]));\n if (response && response.hasResult === false) {\n return void 0;\n }\n return restoreDhValue2(response && response.result);\n }, SandboxDiffieHellman2 = function(sessionId) {\n this._sessionId = sessionId;\n }, SandboxECDH2 = function(sessionId) {\n SandboxDiffieHellman2.call(this, sessionId);\n };\n var serializeDhKeyObject = serializeDhKeyObject2, serializeDhValue = serializeDhValue2, restoreDhValue = restoreDhValue2, createDhSession = createDhSession2, callDhSession = callDhSession2, SandboxDiffieHellman = SandboxDiffieHellman2, SandboxECDH = SandboxECDH2;\n Object.defineProperty(SandboxDiffieHellman2.prototype, \"verifyError\", {\n get: function getVerifyError() {\n return callDhSession2(this._sessionId, \"verifyError\", []);\n }\n });\n SandboxDiffieHellman2.prototype.generateKeys = function generateKeys(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"generateKeys\", []);\n return callDhSession2(this._sessionId, \"generateKeys\", [encoding]);\n };\n SandboxDiffieHellman2.prototype.computeSecret = function computeSecret(key, inputEncoding, outputEncoding) {\n return callDhSession2(this._sessionId, \"computeSecret\", Array.prototype.slice.call(arguments));\n };\n SandboxDiffieHellman2.prototype.getPrime = function getPrime(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"getPrime\", []);\n return callDhSession2(this._sessionId, \"getPrime\", [encoding]);\n };\n 
SandboxDiffieHellman2.prototype.getGenerator = function getGenerator(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"getGenerator\", []);\n return callDhSession2(this._sessionId, \"getGenerator\", [encoding]);\n };\n SandboxDiffieHellman2.prototype.getPublicKey = function getPublicKey(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"getPublicKey\", []);\n return callDhSession2(this._sessionId, \"getPublicKey\", [encoding]);\n };\n SandboxDiffieHellman2.prototype.getPrivateKey = function getPrivateKey(encoding) {\n if (arguments.length === 0) return callDhSession2(this._sessionId, \"getPrivateKey\", []);\n return callDhSession2(this._sessionId, \"getPrivateKey\", [encoding]);\n };\n SandboxDiffieHellman2.prototype.setPublicKey = function setPublicKey(key, encoding) {\n return callDhSession2(this._sessionId, \"setPublicKey\", Array.prototype.slice.call(arguments));\n };\n SandboxDiffieHellman2.prototype.setPrivateKey = function setPrivateKey(key, encoding) {\n return callDhSession2(this._sessionId, \"setPrivateKey\", Array.prototype.slice.call(arguments));\n };\n SandboxECDH2.prototype = Object.create(SandboxDiffieHellman2.prototype);\n SandboxECDH2.prototype.constructor = SandboxECDH2;\n SandboxECDH2.prototype.getPublicKey = function getPublicKey(encoding, format) {\n return callDhSession2(this._sessionId, \"getPublicKey\", Array.prototype.slice.call(arguments));\n };\n result2.createDiffieHellman = function createDiffieHellman() {\n return new SandboxDiffieHellman2(createDhSession2(\"dh\", void 0, arguments));\n };\n result2.getDiffieHellman = function getDiffieHellman(name3) {\n return new SandboxDiffieHellman2(createDhSession2(\"group\", name3, []));\n };\n result2.createDiffieHellmanGroup = result2.getDiffieHellman;\n result2.createECDH = function createECDH(curve) {\n return new SandboxECDH2(createDhSession2(\"ecdh\", curve, []));\n };\n if (typeof _cryptoDiffieHellman !== \"undefined\") {\n 
result2.diffieHellman = function diffieHellman(options) {\n var resultJson = _cryptoDiffieHellman.applySync(void 0, [\n JSON.stringify(serializeDhValue2(options))\n ]);\n return restoreDhValue2(JSON.parse(resultJson));\n };\n }\n result2.DiffieHellman = SandboxDiffieHellman2;\n result2.DiffieHellmanGroup = SandboxDiffieHellman2;\n result2.ECDH = SandboxECDH2;\n }\n if (typeof _cryptoGenerateKeyPairSync !== \"undefined\") {\n let restoreBridgeValue2 = function(value) {\n if (!value || typeof value !== \"object\") {\n return value;\n }\n if (value.__type === \"buffer\") {\n return Buffer.from(value.value, \"base64\");\n }\n if (value.__type === \"bigint\") {\n return BigInt(value.value);\n }\n if (Array.isArray(value)) {\n return value.map(restoreBridgeValue2);\n }\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n output[keys[i]] = restoreBridgeValue2(value[keys[i]]);\n }\n return output;\n }, cloneObject2 = function(value) {\n if (!value || typeof value !== \"object\") {\n return value;\n }\n if (Array.isArray(value)) {\n return value.map(cloneObject2);\n }\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n output[keys[i]] = cloneObject2(value[keys[i]]);\n }\n return output;\n }, createDomException2 = function(message, name3) {\n if (typeof DOMException === \"function\") {\n return new DOMException(message, name3);\n }\n var error = new Error(message);\n error.name = name3;\n return error;\n }, toRawBuffer2 = function(data, encoding) {\n if (Buffer.isBuffer(data)) {\n return Buffer.from(data);\n }\n if (data instanceof ArrayBuffer) {\n return Buffer.from(new Uint8Array(data));\n }\n if (ArrayBuffer.isView(data)) {\n return Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n if (typeof data === \"string\") {\n return Buffer.from(data, encoding || \"utf8\");\n }\n return Buffer.from(data);\n }, serializeBridgeValue2 = function(value) {\n if (value === null) {\n 
return null;\n }\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n return value;\n }\n if (typeof value === \"bigint\") {\n return {\n __type: \"bigint\",\n value: value.toString()\n };\n }\n if (Buffer.isBuffer(value)) {\n return {\n __type: \"buffer\",\n value: Buffer.from(value).toString(\"base64\")\n };\n }\n if (value instanceof ArrayBuffer) {\n return {\n __type: \"buffer\",\n value: Buffer.from(new Uint8Array(value)).toString(\"base64\")\n };\n }\n if (ArrayBuffer.isView(value)) {\n return {\n __type: \"buffer\",\n value: Buffer.from(value.buffer, value.byteOffset, value.byteLength).toString(\"base64\")\n };\n }\n if (Array.isArray(value)) {\n return value.map(serializeBridgeValue2);\n }\n if (value && typeof value === \"object\" && (value.type === \"public\" || value.type === \"private\" || value.type === \"secret\") && typeof value.export === \"function\") {\n if (value.type === \"secret\") {\n return {\n __type: \"keyObject\",\n value: {\n type: \"secret\",\n raw: Buffer.from(value.export()).toString(\"base64\")\n }\n };\n }\n return {\n __type: \"keyObject\",\n value: {\n type: value.type,\n pem: value._pem\n }\n };\n }\n if (value && typeof value === \"object\") {\n var output = {};\n var keys = Object.keys(value);\n for (var i = 0; i < keys.length; i++) {\n var entry = value[keys[i]];\n if (entry !== void 0) {\n output[keys[i]] = serializeBridgeValue2(entry);\n }\n }\n return output;\n }\n return String(value);\n }, normalizeCryptoBridgeError2 = function(error) {\n if (!error || typeof error !== \"object\") {\n return error;\n }\n if (error.code === void 0 && error.message === \"error:07880109:common libcrypto routines::interrupted or cancelled\") {\n error.code = \"ERR_OSSL_CRYPTO_INTERRUPTED_OR_CANCELLED\";\n }\n return error;\n }, deserializeGeneratedKeyValue2 = function(value) {\n if (!value || typeof value !== \"object\") {\n return value;\n }\n if (value.kind === \"string\") {\n return 
value.value;\n }\n if (value.kind === \"buffer\") {\n return Buffer.from(value.value, \"base64\");\n }\n if (value.kind === \"keyObject\") {\n return createGeneratedKeyObject2(value.value);\n }\n if (value.kind === \"object\") {\n return value.value;\n }\n return value;\n }, serializeBridgeOptions2 = function(options) {\n return JSON.stringify({\n hasOptions: options !== void 0,\n options: options === void 0 ? null : serializeBridgeValue2(options)\n });\n }, createInvalidArgTypeError2 = function(name3, expected, value) {\n var received;\n if (value == null) {\n received = \" Received \" + value;\n } else if (typeof value === \"function\") {\n received = \" Received function \" + (value.name || \"anonymous\");\n } else if (typeof value === \"object\") {\n if (value.constructor && value.constructor.name) {\n received = \" Received an instance of \" + value.constructor.name;\n } else {\n received = \" Received [object Object]\";\n }\n } else {\n var inspected = typeof value === \"string\" ? 
\"'\" + value + \"'\" : String(value);\n if (inspected.length > 28) {\n inspected = inspected.slice(0, 25) + \"...\";\n }\n received = \" Received type \" + typeof value + \" (\" + inspected + \")\";\n }\n var error = new TypeError('The \"' + name3 + '\" argument must be ' + expected + \".\" + received);\n error.code = \"ERR_INVALID_ARG_TYPE\";\n return error;\n }, scheduleCryptoCallback2 = function(callback, args) {\n setTimeout(function() {\n callback.apply(void 0, args);\n }, 0);\n }, shouldThrowCryptoValidationError2 = function(error) {\n if (!error || typeof error !== \"object\") {\n return false;\n }\n if (error.name === \"TypeError\" || error.name === \"RangeError\") {\n return true;\n }\n var code = error.code;\n return code === \"ERR_MISSING_OPTION\" || code === \"ERR_CRYPTO_UNKNOWN_DH_GROUP\" || code === \"ERR_OUT_OF_RANGE\" || typeof code === \"string\" && code.indexOf(\"ERR_INVALID_ARG_\") === 0;\n }, ensureCryptoCallback2 = function(callback, syncValidator) {\n if (typeof callback === \"function\") {\n return callback;\n }\n if (typeof syncValidator === \"function\") {\n syncValidator();\n }\n throw createInvalidArgTypeError2(\"callback\", \"of type function\", callback);\n }, SandboxKeyObject2 = function(type, handle) {\n this.type = type;\n this._pem = handle && handle.pem !== void 0 ? handle.pem : void 0;\n this._raw = handle && handle.raw !== void 0 ? handle.raw : void 0;\n this._jwk = handle && handle.jwk !== void 0 ? cloneObject2(handle.jwk) : void 0;\n this.asymmetricKeyType = handle && handle.asymmetricKeyType !== void 0 ? handle.asymmetricKeyType : void 0;\n this.asymmetricKeyDetails = handle && handle.asymmetricKeyDetails !== void 0 ? restoreBridgeValue2(handle.asymmetricKeyDetails) : void 0;\n this.symmetricKeySize = type === \"secret\" && handle && handle.raw !== void 0 ? 
Buffer.from(handle.raw, \"base64\").byteLength : void 0;\n }, normalizeNamedCurve2 = function(namedCurve) {\n if (!namedCurve) {\n return namedCurve;\n }\n var upper = String(namedCurve).toUpperCase();\n if (upper === \"PRIME256V1\" || upper === \"SECP256R1\") return \"P-256\";\n if (upper === \"SECP384R1\") return \"P-384\";\n if (upper === \"SECP521R1\") return \"P-521\";\n return namedCurve;\n }, normalizeAlgorithmInput2 = function(algorithm) {\n if (typeof algorithm === \"string\") {\n return { name: algorithm };\n }\n return Object.assign({}, algorithm);\n }, createCompatibleCryptoKey2 = function(keyData) {\n var key;\n if (globalThis.CryptoKey && globalThis.CryptoKey.prototype && globalThis.CryptoKey.prototype !== SandboxCryptoKey.prototype) {\n key = Object.create(globalThis.CryptoKey.prototype);\n key.type = keyData.type;\n key.extractable = keyData.extractable;\n key.algorithm = keyData.algorithm;\n key.usages = keyData.usages;\n key._keyData = keyData;\n key._pem = keyData._pem;\n key._jwk = keyData._jwk;\n key._raw = keyData._raw;\n key._sourceKeyObjectData = keyData._sourceKeyObjectData;\n return key;\n }\n return new SandboxCryptoKey(keyData);\n }, buildCryptoKeyFromKeyObject2 = function(keyObject, algorithm, extractable, usages) {\n var algo = normalizeAlgorithmInput2(algorithm);\n var name3 = algo.name;\n if (keyObject.type === \"secret\") {\n var secretBytes = Buffer.from(keyObject._raw || \"\", \"base64\");\n if (name3 === \"PBKDF2\") {\n if (extractable) {\n throw new SyntaxError(\"PBKDF2 keys are not extractable\");\n }\n if (usages.some(function(usage) {\n return usage !== \"deriveBits\" && usage !== \"deriveKey\";\n })) {\n throw new SyntaxError(\"Unsupported key usage for a PBKDF2 key\");\n }\n return createCompatibleCryptoKey2({\n type: \"secret\",\n extractable,\n algorithm: { name: name3 },\n usages: Array.from(usages),\n _raw: keyObject._raw,\n _sourceKeyObjectData: {\n type: \"secret\",\n raw: keyObject._raw\n }\n });\n }\n if (name3 === 
\"HMAC\") {\n if (!secretBytes.byteLength || algo.length === 0) {\n throw createDomException2(\"Zero-length key is not supported\", \"DataError\");\n }\n if (!usages.length) {\n throw new SyntaxError(\"Usages cannot be empty when importing a secret key.\");\n }\n return createCompatibleCryptoKey2({\n type: \"secret\",\n extractable,\n algorithm: {\n name: name3,\n hash: typeof algo.hash === \"string\" ? { name: algo.hash } : cloneObject2(algo.hash),\n length: secretBytes.byteLength * 8\n },\n usages: Array.from(usages),\n _raw: keyObject._raw,\n _sourceKeyObjectData: {\n type: \"secret\",\n raw: keyObject._raw\n }\n });\n }\n return createCompatibleCryptoKey2({\n type: \"secret\",\n extractable,\n algorithm: {\n name: name3,\n length: secretBytes.byteLength * 8\n },\n usages: Array.from(usages),\n _raw: keyObject._raw,\n _sourceKeyObjectData: {\n type: \"secret\",\n raw: keyObject._raw\n }\n });\n }\n var keyType = String(keyObject.asymmetricKeyType || \"\").toLowerCase();\n var algorithmName = String(name3 || \"\");\n if ((keyType === \"ed25519\" || keyType === \"ed448\" || keyType === \"x25519\" || keyType === \"x448\") && keyType !== algorithmName.toLowerCase()) {\n throw createDomException2(\"Invalid key type\", \"DataError\");\n }\n if (algorithmName === \"ECDH\") {\n if (keyObject.type === \"private\" && !usages.length) {\n throw new SyntaxError(\"Usages cannot be empty when importing a private key.\");\n }\n var actualCurve = normalizeNamedCurve2(\n keyObject.asymmetricKeyDetails && keyObject.asymmetricKeyDetails.namedCurve\n );\n if (algo.namedCurve && actualCurve && normalizeNamedCurve2(algo.namedCurve) !== actualCurve) {\n throw createDomException2(\"Named curve mismatch\", \"DataError\");\n }\n }\n var normalizedAlgo = cloneObject2(algo);\n if (typeof normalizedAlgo.hash === \"string\") {\n normalizedAlgo.hash = { name: normalizedAlgo.hash };\n }\n return createCompatibleCryptoKey2({\n type: keyObject.type,\n extractable,\n algorithm: normalizedAlgo,\n 
usages: Array.from(usages),\n _pem: keyObject._pem,\n _jwk: cloneObject2(keyObject._jwk),\n _sourceKeyObjectData: {\n type: keyObject.type,\n pem: keyObject._pem,\n jwk: cloneObject2(keyObject._jwk),\n asymmetricKeyType: keyObject.asymmetricKeyType,\n asymmetricKeyDetails: cloneObject2(keyObject.asymmetricKeyDetails)\n }\n });\n }, createAsymmetricKeyObject2 = function(type, key) {\n if (typeof key === \"string\") {\n if (key.indexOf(\"-----BEGIN\") === -1) {\n throw new TypeError(\"error:0900006e:PEM routines:OPENSSL_internal:NO_START_LINE\");\n }\n return new SandboxKeyObject2(type, { pem: key });\n }\n if (key && typeof key === \"object\" && key._pem) {\n return new SandboxKeyObject2(type, {\n pem: key._pem,\n jwk: key._jwk,\n asymmetricKeyType: key.asymmetricKeyType,\n asymmetricKeyDetails: key.asymmetricKeyDetails\n });\n }\n if (key && typeof key === \"object\" && key.key) {\n var keyData = typeof key.key === \"string\" ? key.key : key.key.toString(\"utf8\");\n return new SandboxKeyObject2(type, { pem: keyData });\n }\n if (Buffer.isBuffer(key)) {\n var keyStr = key.toString(\"utf8\");\n if (keyStr.indexOf(\"-----BEGIN\") === -1) {\n throw new TypeError(\"error:0900006e:PEM routines:OPENSSL_internal:NO_START_LINE\");\n }\n return new SandboxKeyObject2(type, { pem: keyStr });\n }\n return new SandboxKeyObject2(type, { pem: String(key) });\n }, createGeneratedKeyObject2 = function(value) {\n return new SandboxKeyObject2(value.type, {\n pem: value.pem,\n raw: value.raw,\n jwk: value.jwk,\n asymmetricKeyType: value.asymmetricKeyType,\n asymmetricKeyDetails: value.asymmetricKeyDetails\n });\n };\n var restoreBridgeValue = restoreBridgeValue2, cloneObject = cloneObject2, createDomException = createDomException2, toRawBuffer = toRawBuffer2, serializeBridgeValue = serializeBridgeValue2, normalizeCryptoBridgeError = normalizeCryptoBridgeError2, deserializeGeneratedKeyValue = deserializeGeneratedKeyValue2, serializeBridgeOptions = serializeBridgeOptions2, 
createInvalidArgTypeError = createInvalidArgTypeError2, scheduleCryptoCallback = scheduleCryptoCallback2, shouldThrowCryptoValidationError = shouldThrowCryptoValidationError2, ensureCryptoCallback = ensureCryptoCallback2, SandboxKeyObject = SandboxKeyObject2, normalizeNamedCurve = normalizeNamedCurve2, normalizeAlgorithmInput = normalizeAlgorithmInput2, createCompatibleCryptoKey = createCompatibleCryptoKey2, buildCryptoKeyFromKeyObject = buildCryptoKeyFromKeyObject2, createAsymmetricKeyObject = createAsymmetricKeyObject2, createGeneratedKeyObject = createGeneratedKeyObject2;\n Object.defineProperty(SandboxKeyObject2.prototype, Symbol.toStringTag, {\n value: \"KeyObject\",\n configurable: true\n });\n SandboxKeyObject2.prototype.export = function exportKey(options) {\n if (this.type === \"secret\") {\n return Buffer.from(this._raw || \"\", \"base64\");\n }\n if (!options || typeof options !== \"object\") {\n throw new TypeError('The \"options\" argument must be of type object.');\n }\n if (options.format === \"jwk\") {\n return cloneObject2(this._jwk);\n }\n if (options.format === \"der\") {\n var lines = String(this._pem || \"\").split(\"\\n\").filter(function(l) {\n return l && l.indexOf(\"-----\") !== 0;\n });\n return Buffer.from(lines.join(\"\"), \"base64\");\n }\n return this._pem;\n };\n SandboxKeyObject2.prototype.toString = function() {\n return \"[object KeyObject]\";\n };\n SandboxKeyObject2.prototype.equals = function equals(other) {\n if (!(other instanceof SandboxKeyObject2)) {\n return false;\n }\n if (this.type !== other.type) {\n return false;\n }\n if (this.type === \"secret\") {\n return (this._raw || \"\") === (other._raw || \"\");\n }\n return (this._pem || \"\") === (other._pem || \"\") && this.asymmetricKeyType === other.asymmetricKeyType;\n };\n SandboxKeyObject2.prototype.toCryptoKey = function toCryptoKey(algorithm, extractable, usages) {\n return buildCryptoKeyFromKeyObject2(this, algorithm, extractable, Array.from(usages || []));\n };\n 
result2.generateKeyPairSync = function generateKeyPairSync(type, options) {\n var resultJson = _cryptoGenerateKeyPairSync.applySync(void 0, [\n type,\n serializeBridgeOptions2(options)\n ]);\n var parsed = JSON.parse(resultJson);\n if (parsed.publicKey && parsed.publicKey.kind) {\n return {\n publicKey: deserializeGeneratedKeyValue2(parsed.publicKey),\n privateKey: deserializeGeneratedKeyValue2(parsed.privateKey)\n };\n }\n return {\n publicKey: createGeneratedKeyObject2(parsed.publicKey),\n privateKey: createGeneratedKeyObject2(parsed.privateKey)\n };\n };\n result2.generateKeyPair = function generateKeyPair(type, options, callback) {\n if (typeof options === \"function\") {\n callback = options;\n options = void 0;\n }\n callback = ensureCryptoCallback2(callback, function() {\n result2.generateKeyPairSync(type, options);\n });\n try {\n var pair = result2.generateKeyPairSync(type, options);\n scheduleCryptoCallback2(callback, [null, pair.publicKey, pair.privateKey]);\n } catch (e) {\n if (shouldThrowCryptoValidationError2(e)) {\n throw e;\n }\n scheduleCryptoCallback2(callback, [e]);\n }\n };\n if (typeof _cryptoGenerateKeySync !== \"undefined\") {\n result2.generateKeySync = function generateKeySync(type, options) {\n var resultJson;\n try {\n resultJson = _cryptoGenerateKeySync.applySync(void 0, [\n type,\n serializeBridgeOptions2(options)\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError2(error);\n }\n return createGeneratedKeyObject2(JSON.parse(resultJson));\n };\n result2.generateKey = function generateKey(type, options, callback) {\n callback = ensureCryptoCallback2(callback, function() {\n result2.generateKeySync(type, options);\n });\n try {\n var key = result2.generateKeySync(type, options);\n scheduleCryptoCallback2(callback, [null, key]);\n } catch (e) {\n if (shouldThrowCryptoValidationError2(e)) {\n throw e;\n }\n scheduleCryptoCallback2(callback, [e]);\n }\n };\n }\n if (typeof _cryptoGeneratePrimeSync !== \"undefined\") {\n 
result2.generatePrimeSync = function generatePrimeSync(size, options) {\n var resultJson;\n try {\n resultJson = _cryptoGeneratePrimeSync.applySync(void 0, [\n size,\n serializeBridgeOptions2(options)\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError2(error);\n }\n return restoreBridgeValue2(JSON.parse(resultJson));\n };\n result2.generatePrime = function generatePrime(size, options, callback) {\n if (typeof options === \"function\") {\n callback = options;\n options = void 0;\n }\n callback = ensureCryptoCallback2(callback, function() {\n result2.generatePrimeSync(size, options);\n });\n try {\n var prime = result2.generatePrimeSync(size, options);\n scheduleCryptoCallback2(callback, [null, prime]);\n } catch (e) {\n if (shouldThrowCryptoValidationError2(e)) {\n throw e;\n }\n scheduleCryptoCallback2(callback, [e]);\n }\n };\n }\n result2.createPublicKey = function createPublicKey(key) {\n if (typeof _cryptoCreateKeyObject !== \"undefined\") {\n var resultJson;\n try {\n resultJson = _cryptoCreateKeyObject.applySync(void 0, [\n \"createPublicKey\",\n JSON.stringify(serializeBridgeValue2(key))\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError2(error);\n }\n return createGeneratedKeyObject2(JSON.parse(resultJson));\n }\n return createAsymmetricKeyObject2(\"public\", key);\n };\n result2.createPrivateKey = function createPrivateKey(key) {\n if (typeof _cryptoCreateKeyObject !== \"undefined\") {\n var resultJson;\n try {\n resultJson = _cryptoCreateKeyObject.applySync(void 0, [\n \"createPrivateKey\",\n JSON.stringify(serializeBridgeValue2(key))\n ]);\n } catch (error) {\n throw normalizeCryptoBridgeError2(error);\n }\n return createGeneratedKeyObject2(JSON.parse(resultJson));\n }\n return createAsymmetricKeyObject2(\"private\", key);\n };\n result2.createSecretKey = function createSecretKey(key, encoding) {\n return new SandboxKeyObject2(\"secret\", {\n raw: toRawBuffer2(key, encoding).toString(\"base64\")\n });\n };\n SandboxKeyObject2.from = 
function from(key) {\n if (!key || typeof key !== \"object\" || key[Symbol.toStringTag] !== \"CryptoKey\") {\n throw new TypeError('The \"key\" argument must be an instance of CryptoKey.');\n }\n if (key._sourceKeyObjectData && key._sourceKeyObjectData.type === \"secret\") {\n return new SandboxKeyObject2(\"secret\", {\n raw: key._sourceKeyObjectData.raw\n });\n }\n return new SandboxKeyObject2(key.type, {\n pem: key._pem,\n jwk: key._jwk,\n asymmetricKeyType: key._sourceKeyObjectData && key._sourceKeyObjectData.asymmetricKeyType,\n asymmetricKeyDetails: key._sourceKeyObjectData && key._sourceKeyObjectData.asymmetricKeyDetails\n });\n };\n result2.KeyObject = SandboxKeyObject2;\n }\n if (typeof _cryptoSubtle !== \"undefined\") {\n let SandboxCryptoKey2 = function(keyData) {\n this.type = keyData.type;\n this.extractable = keyData.extractable;\n this.algorithm = keyData.algorithm;\n this.usages = keyData.usages;\n this._keyData = keyData;\n this._pem = keyData._pem;\n this._jwk = keyData._jwk;\n this._raw = keyData._raw;\n this._sourceKeyObjectData = keyData._sourceKeyObjectData;\n }, toBase642 = function(data) {\n if (typeof data === \"string\") return Buffer.from(data).toString(\"base64\");\n if (data instanceof ArrayBuffer) return Buffer.from(new Uint8Array(data)).toString(\"base64\");\n if (ArrayBuffer.isView(data)) return Buffer.from(new Uint8Array(data.buffer, data.byteOffset, data.byteLength)).toString(\"base64\");\n return Buffer.from(data).toString(\"base64\");\n }, subtleCall2 = function(reqObj) {\n return _cryptoSubtle.applySync(void 0, [JSON.stringify(reqObj)]);\n }, normalizeAlgo2 = function(algorithm) {\n if (typeof algorithm === \"string\") return { name: algorithm };\n return algorithm;\n };\n var SandboxCryptoKey = SandboxCryptoKey2, toBase64 = toBase642, subtleCall = subtleCall2, normalizeAlgo = normalizeAlgo2;\n Object.defineProperty(SandboxCryptoKey2.prototype, Symbol.toStringTag, {\n value: \"CryptoKey\",\n configurable: true\n });\n 
Object.defineProperty(SandboxCryptoKey2, Symbol.hasInstance, {\n value: function(candidate) {\n return !!(candidate && typeof candidate === \"object\" && (candidate._keyData || candidate[Symbol.toStringTag] === \"CryptoKey\"));\n },\n configurable: true\n });\n if (globalThis.CryptoKey && globalThis.CryptoKey.prototype && globalThis.CryptoKey.prototype !== SandboxCryptoKey2.prototype) {\n Object.setPrototypeOf(SandboxCryptoKey2.prototype, globalThis.CryptoKey.prototype);\n }\n if (typeof globalThis.CryptoKey === \"undefined\") {\n __requireExposeCustomGlobal(\"CryptoKey\", SandboxCryptoKey2);\n } else if (globalThis.CryptoKey !== SandboxCryptoKey2) {\n globalThis.CryptoKey = SandboxCryptoKey2;\n }\n var SandboxSubtle = {};\n SandboxSubtle.digest = function digest(algorithm, data) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var result22 = JSON.parse(subtleCall2({\n op: \"digest\",\n algorithm: algo.name,\n data: toBase642(data)\n }));\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.generateKey = function generateKey(algorithm, extractable, keyUsages) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.hash) reqAlgo.hash = normalizeAlgo2(reqAlgo.hash);\n if (reqAlgo.publicExponent) {\n reqAlgo.publicExponent = Buffer.from(new Uint8Array(reqAlgo.publicExponent.buffer || reqAlgo.publicExponent)).toString(\"base64\");\n }\n var result22 = JSON.parse(subtleCall2({\n op: \"generateKey\",\n algorithm: reqAlgo,\n extractable,\n usages: Array.from(keyUsages)\n }));\n if (result22.publicKey && result22.privateKey) {\n return {\n publicKey: new SandboxCryptoKey2(result22.publicKey),\n privateKey: new SandboxCryptoKey2(result22.privateKey)\n };\n }\n return new SandboxCryptoKey2(result22.key);\n });\n };\n SandboxSubtle.importKey = function 
importKey(format, keyData, algorithm, extractable, keyUsages) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.hash) reqAlgo.hash = normalizeAlgo2(reqAlgo.hash);\n var serializedKeyData;\n if (format === \"jwk\") {\n serializedKeyData = keyData;\n } else if (format === \"raw\") {\n serializedKeyData = toBase642(keyData);\n } else {\n serializedKeyData = toBase642(keyData);\n }\n var result22 = JSON.parse(subtleCall2({\n op: \"importKey\",\n format,\n keyData: serializedKeyData,\n algorithm: reqAlgo,\n extractable,\n usages: Array.from(keyUsages)\n }));\n return new SandboxCryptoKey2(result22.key);\n });\n };\n SandboxSubtle.exportKey = function exportKey(format, key) {\n return Promise.resolve().then(function() {\n var result22 = JSON.parse(subtleCall2({\n op: \"exportKey\",\n format,\n key: key._keyData\n }));\n if (format === \"jwk\") return result22.jwk;\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.encrypt = function encrypt(algorithm, key, data) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.iv) reqAlgo.iv = toBase642(reqAlgo.iv);\n if (reqAlgo.additionalData) reqAlgo.additionalData = toBase642(reqAlgo.additionalData);\n var result22 = JSON.parse(subtleCall2({\n op: \"encrypt\",\n algorithm: reqAlgo,\n key: key._keyData,\n data: toBase642(data)\n }));\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.decrypt = function decrypt(algorithm, key, data) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.iv) reqAlgo.iv = toBase642(reqAlgo.iv);\n if (reqAlgo.additionalData) 
reqAlgo.additionalData = toBase642(reqAlgo.additionalData);\n var result22 = JSON.parse(subtleCall2({\n op: \"decrypt\",\n algorithm: reqAlgo,\n key: key._keyData,\n data: toBase642(data)\n }));\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.sign = function sign(algorithm, key, data) {\n return Promise.resolve().then(function() {\n var result22 = JSON.parse(subtleCall2({\n op: \"sign\",\n algorithm: normalizeAlgo2(algorithm),\n key: key._keyData,\n data: toBase642(data)\n }));\n var buf = Buffer.from(result22.data, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n });\n };\n SandboxSubtle.verify = function verify(algorithm, key, signature, data) {\n return Promise.resolve().then(function() {\n var result22 = JSON.parse(subtleCall2({\n op: \"verify\",\n algorithm: normalizeAlgo2(algorithm),\n key: key._keyData,\n signature: toBase642(signature),\n data: toBase642(data)\n }));\n return result22.result;\n });\n };\n SandboxSubtle.deriveBits = function deriveBits(algorithm, baseKey, length) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.salt) reqAlgo.salt = toBase642(reqAlgo.salt);\n if (reqAlgo.info) reqAlgo.info = toBase642(reqAlgo.info);\n var result22 = JSON.parse(subtleCall2({\n op: \"deriveBits\",\n algorithm: reqAlgo,\n baseKey: baseKey._keyData,\n length\n }));\n return Buffer.from(result22.data, \"base64\").buffer;\n });\n };\n SandboxSubtle.deriveKey = function deriveKey(algorithm, baseKey, derivedKeyAlgorithm, extractable, keyUsages) {\n return Promise.resolve().then(function() {\n var algo = normalizeAlgo2(algorithm);\n var reqAlgo = Object.assign({}, algo);\n if (reqAlgo.salt) reqAlgo.salt = toBase642(reqAlgo.salt);\n if (reqAlgo.info) reqAlgo.info = toBase642(reqAlgo.info);\n var result22 = 
JSON.parse(subtleCall2({\n op: \"deriveKey\",\n algorithm: reqAlgo,\n baseKey: baseKey._keyData,\n derivedKeyAlgorithm: normalizeAlgo2(derivedKeyAlgorithm),\n extractable,\n usages: keyUsages\n }));\n return new SandboxCryptoKey2(result22.key);\n });\n };\n if (globalThis.crypto && globalThis.crypto.subtle && typeof globalThis.crypto.subtle.importKey === \"function\") {\n result2.subtle = globalThis.crypto.subtle;\n result2.webcrypto = globalThis.crypto;\n } else {\n result2.subtle = SandboxSubtle;\n result2.webcrypto = { subtle: SandboxSubtle, getRandomValues: result2.randomFillSync };\n }\n }\n if (typeof result2.getCurves !== \"function\") {\n result2.getCurves = function getCurves() {\n return [\n \"prime256v1\",\n \"secp256r1\",\n \"secp384r1\",\n \"secp521r1\",\n \"secp256k1\",\n \"secp224r1\",\n \"secp192k1\"\n ];\n };\n }\n if (typeof result2.getCiphers !== \"function\") {\n result2.getCiphers = function getCiphers() {\n return [\n \"aes-128-cbc\",\n \"aes-128-gcm\",\n \"aes-192-cbc\",\n \"aes-192-gcm\",\n \"aes-256-cbc\",\n \"aes-256-gcm\",\n \"aes-128-ctr\",\n \"aes-192-ctr\",\n \"aes-256-ctr\"\n ];\n };\n }\n if (typeof result2.getHashes !== \"function\") {\n result2.getHashes = function getHashes() {\n return [\"md5\", \"sha1\", \"sha256\", \"sha384\", \"sha512\"];\n };\n }\n if (typeof result2.timingSafeEqual !== \"function\") {\n result2.timingSafeEqual = function timingSafeEqual(a, b) {\n if (a.length !== b.length) {\n throw new RangeError(\"Input buffers must have the same byte length\");\n }\n var out = 0;\n for (var i = 0; i < a.length; i++) {\n out |= a[i] ^ b[i];\n }\n return out === 0;\n };\n }\n if (typeof result2.getFips !== \"function\") {\n result2.getFips = function getFips() {\n return 0;\n };\n }\n if (typeof result2.setFips !== \"function\") {\n result2.setFips = function setFips() {\n throw new Error(\"FIPS mode is not supported in sandbox\");\n };\n }\n return result2;\n }\n if (name2 === \"stream\") {\n if (typeof result2 === 
\"function\" && result2.prototype && typeof result2.Readable === \"function\") {\n var readableProto = result2.Readable.prototype;\n var streamProto = result2.prototype;\n if (readableProto && streamProto && !(readableProto instanceof result2)) {\n var currentParent = Object.getPrototypeOf(readableProto);\n Object.setPrototypeOf(streamProto, currentParent);\n Object.setPrototypeOf(readableProto, streamProto);\n }\n }\n return result2;\n }\n if (name2 === \"path\") {\n if (result2.win32 === null || result2.win32 === void 0) {\n result2.win32 = result2.posix || result2;\n }\n if (result2.posix === null || result2.posix === void 0) {\n result2.posix = result2;\n }\n const hasAbsoluteSegment = function(args) {\n return args.some(function(arg) {\n return typeof arg === \"string\" && arg.length > 0 && arg.charAt(0) === \"/\";\n });\n };\n const prependCwd = function(args) {\n if (hasAbsoluteSegment(args)) return;\n if (typeof process !== \"undefined\" && typeof process.cwd === \"function\") {\n const cwd = process.cwd();\n if (cwd && cwd.charAt(0) === \"/\") {\n args.unshift(cwd);\n }\n }\n };\n const originalResolve = result2.resolve;\n if (typeof originalResolve === \"function\" && !originalResolve._patchedForCwd) {\n const patchedResolve = function resolve2() {\n const args = Array.from(arguments);\n prependCwd(args);\n return originalResolve.apply(this, args);\n };\n patchedResolve._patchedForCwd = true;\n result2.resolve = patchedResolve;\n }\n if (result2.posix && typeof result2.posix.resolve === \"function\" && !result2.posix.resolve._patchedForCwd) {\n const originalPosixResolve = result2.posix.resolve;\n const patchedPosixResolve = function resolve2() {\n const args = Array.from(arguments);\n prependCwd(args);\n return originalPosixResolve.apply(this, args);\n };\n patchedPosixResolve._patchedForCwd = true;\n result2.posix.resolve = patchedPosixResolve;\n }\n }\n return result2;\n }\n var _deferredCoreModules = /* @__PURE__ */ new Set([\n \"readline\",\n 
\"perf_hooks\",\n \"async_hooks\",\n \"worker_threads\",\n \"diagnostics_channel\"\n ]);\n var _unsupportedCoreModules = /* @__PURE__ */ new Set([\n \"cluster\",\n \"wasi\",\n \"inspector\",\n \"repl\",\n \"trace_events\",\n \"domain\"\n ]);\n function _unsupportedApiError(moduleName2, apiName) {\n return new Error(moduleName2 + \".\" + apiName + \" is not supported in sandbox\");\n }\n function _createDeferredModuleStub(moduleName2) {\n const methodCache = {};\n let stub = null;\n stub = new Proxy({}, {\n get(_target, prop) {\n if (prop === \"__esModule\") return false;\n if (prop === \"default\") return stub;\n if (prop === Symbol.toStringTag) return \"Module\";\n if (prop === \"then\") return void 0;\n if (typeof prop !== \"string\") return void 0;\n if (!methodCache[prop]) {\n methodCache[prop] = function deferredApiStub() {\n throw _unsupportedApiError(moduleName2, prop);\n };\n }\n return methodCache[prop];\n }\n });\n return stub;\n }\n var __internalModuleCache = _moduleCache;\n var __require = function require2(moduleName2) {\n return _requireFrom(moduleName2, _currentModule.dirname);\n };\n __requireExposeCustomGlobal(\"require\", __require);\n function _resolveFrom(moduleName2, fromDir2) {\n var resolved2;\n if (typeof _resolveModuleSync !== \"undefined\") {\n resolved2 = _resolveModuleSync.applySync(void 0, [moduleName2, fromDir2]);\n }\n if (resolved2 === null || resolved2 === void 0) {\n resolved2 = _resolveModule.applySyncPromise(void 0, [moduleName2, fromDir2, \"require\"]);\n }\n if (resolved2 === null) {\n const err = new Error(\"Cannot find module '\" + moduleName2 + \"'\");\n err.code = \"MODULE_NOT_FOUND\";\n throw err;\n }\n return resolved2;\n }\n globalThis.require.resolve = function resolve(moduleName2) {\n return _resolveFrom(moduleName2, _currentModule.dirname);\n };\n function _debugRequire(phase, moduleName2, extra) {\n if (globalThis.__sandboxRequireDebug !== true) {\n return;\n }\n if (moduleName2 !== \"rivetkit\" && moduleName2 !== 
\"@rivetkit/traces\" && moduleName2 !== \"@rivetkit/on-change\" && moduleName2 !== \"async_hooks\" && !moduleName2.startsWith(\"rivetkit/\") && !moduleName2.startsWith(\"@rivetkit/\")) {\n return;\n }\n if (typeof console !== \"undefined\" && typeof console.log === \"function\") {\n console.log(\n \"[sandbox.require] \" + phase + \" \" + moduleName2 + (extra ? \" \" + extra : \"\")\n );\n }\n }\n function _requireFrom(moduleName, fromDir) {\n _debugRequire(\"start\", moduleName, fromDir);\n const name = moduleName.replace(/^node:/, \"\");\n let cacheKey = name;\n let resolved = null;\n const isRelative = name.startsWith(\"./\") || name.startsWith(\"../\");\n if (!isRelative && __internalModuleCache[name]) {\n _debugRequire(\"cache-hit\", name, name);\n return __internalModuleCache[name];\n }\n if (name === \"fs\") {\n if (__internalModuleCache[\"fs\"]) return __internalModuleCache[\"fs\"];\n const fsModule = globalThis.bridge?.fs || globalThis.bridge?.default || globalThis._fsModule || {};\n __internalModuleCache[\"fs\"] = fsModule;\n _debugRequire(\"loaded\", name, \"fs-special\");\n return fsModule;\n }\n if (name === \"fs/promises\") {\n if (__internalModuleCache[\"fs/promises\"]) return __internalModuleCache[\"fs/promises\"];\n const fsModule = _requireFrom(\"fs\", fromDir);\n __internalModuleCache[\"fs/promises\"] = fsModule.promises;\n _debugRequire(\"loaded\", name, \"fs-promises-special\");\n return fsModule.promises;\n }\n if (name === \"stream/promises\") {\n if (__internalModuleCache[\"stream/promises\"]) return __internalModuleCache[\"stream/promises\"];\n const streamModule = _requireFrom(\"stream\", fromDir);\n const promisesModule = {\n finished(stream, options) {\n return new Promise(function(resolve2, reject) {\n if (typeof streamModule.finished !== \"function\") {\n resolve2();\n return;\n }\n if (options && typeof options === \"object\" && !Array.isArray(options)) {\n streamModule.finished(stream, options, function(error) {\n if (error) {\n 
reject(error);\n return;\n }\n resolve2();\n });\n return;\n }\n streamModule.finished(stream, function(error) {\n if (error) {\n reject(error);\n return;\n }\n resolve2();\n });\n });\n },\n pipeline() {\n const args = Array.prototype.slice.call(arguments);\n return new Promise(function(resolve2, reject) {\n if (typeof streamModule.pipeline !== \"function\") {\n reject(new Error(\"stream.pipeline is not supported in sandbox\"));\n return;\n }\n args.push(function(error) {\n if (error) {\n reject(error);\n return;\n }\n resolve2();\n });\n streamModule.pipeline.apply(streamModule, args);\n });\n }\n };\n __internalModuleCache[\"stream/promises\"] = promisesModule;\n _debugRequire(\"loaded\", name, \"stream-promises-special\");\n return promisesModule;\n }\n if (name === \"stream/consumers\") {\n if (__internalModuleCache[\"stream/consumers\"]) return __internalModuleCache[\"stream/consumers\"];\n const consumersModule = {};\n consumersModule.buffer = async function buffer(stream) {\n const chunks = [];\n const pushChunk = function(chunk) {\n if (typeof chunk === \"string\") {\n chunks.push(Buffer.from(chunk));\n } else if (Buffer.isBuffer(chunk)) {\n chunks.push(chunk);\n } else if (ArrayBuffer.isView(chunk)) {\n chunks.push(Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength));\n } else if (chunk instanceof ArrayBuffer) {\n chunks.push(Buffer.from(new Uint8Array(chunk)));\n } else {\n chunks.push(Buffer.from(String(chunk)));\n }\n };\n if (stream && typeof stream[Symbol.asyncIterator] === \"function\") {\n for await (const chunk of stream) {\n pushChunk(chunk);\n }\n return Buffer.concat(chunks);\n }\n return new Promise(function(resolve2, reject) {\n stream.on(\"data\", pushChunk);\n stream.on(\"end\", function() {\n resolve2(Buffer.concat(chunks));\n });\n stream.on(\"error\", reject);\n });\n };\n consumersModule.text = async function text(stream) {\n return (await consumersModule.buffer(stream)).toString(\"utf8\");\n };\n consumersModule.json = async 
function json(stream) {\n return JSON.parse(await consumersModule.text(stream));\n };\n consumersModule.arrayBuffer = async function arrayBuffer(stream) {\n const buffer = await consumersModule.buffer(stream);\n return buffer.buffer.slice(\n buffer.byteOffset,\n buffer.byteOffset + buffer.byteLength\n );\n };\n __internalModuleCache[\"stream/consumers\"] = consumersModule;\n _debugRequire(\"loaded\", name, \"stream-consumers-special\");\n return consumersModule;\n }\n if (name === \"child_process\") {\n if (__internalModuleCache[\"child_process\"]) return __internalModuleCache[\"child_process\"];\n __internalModuleCache[\"child_process\"] = _childProcessModule;\n _debugRequire(\"loaded\", name, \"child-process-special\");\n return _childProcessModule;\n }\n if (name === \"net\") {\n if (__internalModuleCache[\"net\"]) return __internalModuleCache[\"net\"];\n __internalModuleCache[\"net\"] = _netModule;\n _debugRequire(\"loaded\", name, \"net-special\");\n return _netModule;\n }\n if (name === \"tls\") {\n if (__internalModuleCache[\"tls\"]) return __internalModuleCache[\"tls\"];\n __internalModuleCache[\"tls\"] = _tlsModule;\n _debugRequire(\"loaded\", name, \"tls-special\");\n return _tlsModule;\n }\n if (name === \"http\") {\n if (__internalModuleCache[\"http\"]) return __internalModuleCache[\"http\"];\n __internalModuleCache[\"http\"] = _httpModule;\n _debugRequire(\"loaded\", name, \"http-special\");\n return _httpModule;\n }\n if (name === \"_http_agent\") {\n if (__internalModuleCache[\"_http_agent\"]) return __internalModuleCache[\"_http_agent\"];\n const httpAgentModule = {\n Agent: _httpModule.Agent,\n globalAgent: _httpModule.globalAgent\n };\n __internalModuleCache[\"_http_agent\"] = httpAgentModule;\n _debugRequire(\"loaded\", name, \"http-agent-special\");\n return httpAgentModule;\n }\n if (name === \"_http_common\") {\n if (__internalModuleCache[\"_http_common\"]) return __internalModuleCache[\"_http_common\"];\n const httpCommonModule = {\n 
_checkIsHttpToken: _httpModule._checkIsHttpToken,\n _checkInvalidHeaderChar: _httpModule._checkInvalidHeaderChar\n };\n __internalModuleCache[\"_http_common\"] = httpCommonModule;\n _debugRequire(\"loaded\", name, \"http-common-special\");\n return httpCommonModule;\n }\n if (name === \"https\") {\n if (__internalModuleCache[\"https\"]) return __internalModuleCache[\"https\"];\n __internalModuleCache[\"https\"] = _httpsModule;\n _debugRequire(\"loaded\", name, \"https-special\");\n return _httpsModule;\n }\n if (name === \"http2\") {\n if (__internalModuleCache[\"http2\"]) return __internalModuleCache[\"http2\"];\n __internalModuleCache[\"http2\"] = _http2Module;\n _debugRequire(\"loaded\", name, \"http2-special\");\n return _http2Module;\n }\n if (name === \"internal/http2/util\") {\n if (__internalModuleCache[name]) return __internalModuleCache[name];\n class NghttpError extends Error {\n constructor(message) {\n super(message);\n this.name = \"Error\";\n this.code = \"ERR_HTTP2_ERROR\";\n }\n }\n const utilModule = {\n kSocket: /* @__PURE__ */ Symbol.for(\"secure-exec.http2.kSocket\"),\n NghttpError\n };\n __internalModuleCache[name] = utilModule;\n _debugRequire(\"loaded\", name, \"http2-util-special\");\n return utilModule;\n }\n if (name === \"dns\") {\n if (__internalModuleCache[\"dns\"]) return __internalModuleCache[\"dns\"];\n __internalModuleCache[\"dns\"] = _dnsModule;\n _debugRequire(\"loaded\", name, \"dns-special\");\n return _dnsModule;\n }\n if (name === \"dgram\") {\n if (__internalModuleCache[\"dgram\"]) return __internalModuleCache[\"dgram\"];\n __internalModuleCache[\"dgram\"] = _dgramModule;\n _debugRequire(\"loaded\", name, \"dgram-special\");\n return _dgramModule;\n }\n if (name === \"os\") {\n if (__internalModuleCache[\"os\"]) return __internalModuleCache[\"os\"];\n __internalModuleCache[\"os\"] = _osModule;\n _debugRequire(\"loaded\", name, \"os-special\");\n return _osModule;\n }\n if (name === \"module\") {\n if 
(__internalModuleCache[\"module\"]) return __internalModuleCache[\"module\"];\n __internalModuleCache[\"module\"] = _moduleModule;\n _debugRequire(\"loaded\", name, \"module-special\");\n return _moduleModule;\n }\n if (name === \"process\") {\n _debugRequire(\"loaded\", name, \"process-special\");\n return globalThis.process;\n }\n if (name === \"async_hooks\") {\n if (__internalModuleCache[\"async_hooks\"]) return __internalModuleCache[\"async_hooks\"];\n class AsyncLocalStorage {\n constructor() {\n this._store = void 0;\n }\n run(store, callback) {\n const previousStore = this._store;\n this._store = store;\n try {\n const args = Array.prototype.slice.call(arguments, 2);\n return callback.apply(void 0, args);\n } finally {\n this._store = previousStore;\n }\n }\n enterWith(store) {\n this._store = store;\n }\n getStore() {\n return this._store;\n }\n disable() {\n this._store = void 0;\n }\n exit(callback) {\n const previousStore = this._store;\n this._store = void 0;\n try {\n const args = Array.prototype.slice.call(arguments, 1);\n return callback.apply(void 0, args);\n } finally {\n this._store = previousStore;\n }\n }\n }\n class AsyncResource {\n constructor(type) {\n this.type = type;\n }\n runInAsyncScope(callback, thisArg) {\n const args = Array.prototype.slice.call(arguments, 2);\n return callback.apply(thisArg, args);\n }\n emitDestroy() {\n }\n }\n const asyncHooksModule = {\n AsyncLocalStorage,\n AsyncResource,\n createHook() {\n return {\n enable() {\n return this;\n },\n disable() {\n return this;\n }\n };\n },\n executionAsyncId() {\n return 1;\n },\n triggerAsyncId() {\n return 0;\n },\n executionAsyncResource() {\n return null;\n }\n };\n __internalModuleCache[\"async_hooks\"] = asyncHooksModule;\n _debugRequire(\"loaded\", name, \"async-hooks-special\");\n return asyncHooksModule;\n }\n if (name === \"diagnostics_channel\") {\n let _createChannel2 = function() {\n return {\n hasSubscribers: false,\n publish: function() {\n },\n subscribe: 
function() {\n },\n unsubscribe: function() {\n }\n };\n };\n var _createChannel = _createChannel2;\n if (__internalModuleCache[name]) return __internalModuleCache[name];\n const dcModule = {\n channel: function() {\n return _createChannel2();\n },\n hasSubscribers: function() {\n return false;\n },\n tracingChannel: function() {\n return {\n start: _createChannel2(),\n end: _createChannel2(),\n asyncStart: _createChannel2(),\n asyncEnd: _createChannel2(),\n error: _createChannel2(),\n traceSync: function(fn, context, thisArg) {\n var args = Array.prototype.slice.call(arguments, 3);\n return fn.apply(thisArg, args);\n },\n tracePromise: function(fn, context, thisArg) {\n var args = Array.prototype.slice.call(arguments, 3);\n return fn.apply(thisArg, args);\n },\n traceCallback: function(fn, context, thisArg) {\n var args = Array.prototype.slice.call(arguments, 3);\n return fn.apply(thisArg, args);\n }\n };\n },\n Channel: function Channel(name2) {\n this.hasSubscribers = false;\n this.publish = function() {\n };\n this.subscribe = function() {\n };\n this.unsubscribe = function() {\n };\n }\n };\n __internalModuleCache[name] = dcModule;\n _debugRequire(\"loaded\", name, \"diagnostics-channel-special\");\n return dcModule;\n }\n if (_deferredCoreModules.has(name)) {\n if (__internalModuleCache[name]) return __internalModuleCache[name];\n const deferredStub = _createDeferredModuleStub(name);\n __internalModuleCache[name] = deferredStub;\n _debugRequire(\"loaded\", name, \"deferred-stub\");\n return deferredStub;\n }\n if (_unsupportedCoreModules.has(name)) {\n throw new Error(name + \" is not supported in sandbox\");\n }\n const polyfillCode = _loadPolyfill.applySyncPromise(void 0, [name]);\n if (polyfillCode !== null) {\n if (__internalModuleCache[name]) return __internalModuleCache[name];\n const moduleObj = { exports: {} };\n _pendingModules[name] = moduleObj;\n let result = eval(polyfillCode);\n result = _patchPolyfill(name, result);\n if (typeof result === 
\"object\" && result !== null) {\n Object.assign(moduleObj.exports, result);\n } else {\n moduleObj.exports = result;\n }\n __internalModuleCache[name] = moduleObj.exports;\n delete _pendingModules[name];\n _debugRequire(\"loaded\", name, \"polyfill\");\n return __internalModuleCache[name];\n }\n resolved = _resolveFrom(name, fromDir);\n cacheKey = resolved;\n if (__internalModuleCache[cacheKey]) {\n _debugRequire(\"cache-hit\", name, cacheKey);\n return __internalModuleCache[cacheKey];\n }\n if (_pendingModules[cacheKey]) {\n _debugRequire(\"pending-hit\", name, cacheKey);\n return _pendingModules[cacheKey].exports;\n }\n var source;\n if (typeof _loadFileSync !== \"undefined\") {\n source = _loadFileSync.applySync(void 0, [resolved]);\n }\n if (source === null || source === void 0) {\n source = _loadFile.applySyncPromise(void 0, [resolved, \"require\"]);\n }\n if (source === null) {\n const err = new Error(\"Cannot find module '\" + resolved + \"'\");\n err.code = \"MODULE_NOT_FOUND\";\n throw err;\n }\n if (resolved.endsWith(\".json\")) {\n const parsed = JSON.parse(source);\n __internalModuleCache[cacheKey] = parsed;\n return parsed;\n }\n const normalizedSource = typeof source === \"string\" ? source.replace(/import\\.meta\\.url/g, \"__filename\").replace(/fileURLToPath\\(__filename\\)/g, \"__filename\").replace(/url\\.fileURLToPath\\(__filename\\)/g, \"__filename\").replace(/fileURLToPath\\.call\\(void 0, __filename\\)/g, \"__filename\") : source;\n const module = {\n exports: {},\n filename: resolved,\n dirname: _dirname(resolved),\n id: resolved,\n loaded: false\n };\n _pendingModules[cacheKey] = module;\n const prevModule = _currentModule;\n _currentModule = module;\n try {\n let wrapper;\n try {\n wrapper = new Function(\n \"exports\",\n \"require\",\n \"module\",\n \"__filename\",\n \"__dirname\",\n \"__dynamicImport\",\n normalizedSource + \"\\n//# sourceURL=\" + resolved\n );\n } catch (error) {\n const details = error && error.stack ? 
error.stack : String(error);\n throw new Error(\"failed to compile module \" + resolved + \": \" + details);\n }\n const moduleRequire = function(request) {\n return _requireFrom(request, module.dirname);\n };\n moduleRequire.resolve = function(request) {\n return _resolveFrom(request, module.dirname);\n };\n const moduleDynamicImport = function(specifier) {\n if (typeof globalThis.__dynamicImport === \"function\") {\n return globalThis.__dynamicImport(specifier, module.dirname);\n }\n return Promise.reject(new Error(\"Dynamic import is not initialized\"));\n };\n wrapper(\n module.exports,\n moduleRequire,\n module,\n resolved,\n module.dirname,\n moduleDynamicImport\n );\n module.loaded = true;\n } catch (error) {\n const details = error && error.stack ? error.stack : String(error);\n throw new Error(\"failed to execute module \" + resolved + \": \" + details);\n } finally {\n _currentModule = prevModule;\n }\n __internalModuleCache[cacheKey] = module.exports;\n delete _pendingModules[cacheKey];\n _debugRequire(\"loaded\", name, cacheKey);\n return module.exports;\n }\n __requireExposeCustomGlobal(\"_requireFrom\", _requireFrom);\n var __moduleCacheProxy = new Proxy(__internalModuleCache, {\n get(target, prop, receiver) {\n return Reflect.get(target, prop, receiver);\n },\n set(_target, prop) {\n throw new TypeError(\"Cannot set require.cache['\" + String(prop) + \"']\");\n },\n deleteProperty(_target, prop) {\n throw new TypeError(\"Cannot delete require.cache['\" + String(prop) + \"']\");\n },\n defineProperty(_target, prop) {\n throw new TypeError(\"Cannot define property '\" + String(prop) + \"' on require.cache\");\n },\n has(target, prop) {\n return Reflect.has(target, prop);\n },\n ownKeys(target) {\n return Reflect.ownKeys(target);\n },\n getOwnPropertyDescriptor(target, prop) {\n return Reflect.getOwnPropertyDescriptor(target, prop);\n }\n });\n globalThis.require.cache = __moduleCacheProxy;\n Object.defineProperty(globalThis, \"_moduleCache\", {\n 
value: __moduleCacheProxy,\n writable: false,\n configurable: true,\n enumerable: false\n });\n if (typeof _moduleModule !== \"undefined\") {\n if (_moduleModule.Module) {\n _moduleModule.Module._cache = __moduleCacheProxy;\n }\n _moduleModule._cache = __moduleCacheProxy;\n }\n})();\n", "setCommonjsFileGlobals": "\"use strict\";\n(() => {\n // ../core/isolate-runtime/src/common/global-exposure.ts\n function defineRuntimeGlobalBinding(name, value, mutable) {\n Object.defineProperty(globalThis, name, {\n value,\n writable: mutable,\n configurable: mutable,\n enumerable: true\n });\n }\n function createRuntimeGlobalExposer(mutable) {\n return (name, value) => {\n defineRuntimeGlobalBinding(name, value, mutable);\n };\n }\n function getRuntimeExposeMutableGlobal() {\n if (typeof globalThis.__runtimeExposeMutableGlobal === \"function\") {\n return globalThis.__runtimeExposeMutableGlobal;\n }\n return createRuntimeGlobalExposer(true);\n }\n\n // ../core/isolate-runtime/src/inject/set-commonjs-file-globals.ts\n var __runtimeExposeMutableGlobal = getRuntimeExposeMutableGlobal();\n var __commonJsFileConfig = globalThis.__runtimeCommonJsFileConfig ?? {};\n var __filePath = typeof __commonJsFileConfig.filePath === \"string\" ? __commonJsFileConfig.filePath : \"/.js\";\n var __dirname = typeof __commonJsFileConfig.dirname === \"string\" ? 
__commonJsFileConfig.dirname : \"/\";\n __runtimeExposeMutableGlobal(\"__filename\", __filePath);\n __runtimeExposeMutableGlobal(\"__dirname\", __dirname);\n var __currentModule = globalThis._currentModule;\n if (__currentModule) {\n __currentModule.dirname = __dirname;\n __currentModule.filename = __filePath;\n }\n})();\n", "setStdinData": "\"use strict\";\n(() => {\n // ../core/isolate-runtime/src/inject/set-stdin-data.ts\n if (typeof globalThis._stdinData !== \"undefined\") {\n globalThis._stdinData = globalThis.__runtimeStdinData;\n globalThis._stdinPosition = 0;\n globalThis._stdinEnded = false;\n globalThis._stdinFlowMode = false;\n }\n})();\n", "setupDynamicImport": "\"use strict\";\n(() => {\n // ../core/isolate-runtime/src/common/global-access.ts\n function isObjectLike(value) {\n return value !== null && (typeof value === \"object\" || typeof value === \"function\");\n }\n\n // ../core/isolate-runtime/src/common/global-exposure.ts\n function defineRuntimeGlobalBinding(name, value, mutable) {\n Object.defineProperty(globalThis, name, {\n value,\n writable: mutable,\n configurable: mutable,\n enumerable: true\n });\n }\n function createRuntimeGlobalExposer(mutable) {\n return (name, value) => {\n defineRuntimeGlobalBinding(name, value, mutable);\n };\n }\n function getRuntimeExposeCustomGlobal() {\n if (typeof globalThis.__runtimeExposeCustomGlobal === \"function\") {\n return globalThis.__runtimeExposeCustomGlobal;\n }\n return createRuntimeGlobalExposer(false);\n }\n\n // ../core/isolate-runtime/src/inject/setup-dynamic-import.ts\n var __runtimeExposeCustomGlobal = getRuntimeExposeCustomGlobal();\n var __dynamicImportConfig = globalThis.__runtimeDynamicImportConfig ?? {};\n var __fallbackReferrer = typeof __dynamicImportConfig.referrerPath === \"string\" && __dynamicImportConfig.referrerPath.length > 0 ? 
__dynamicImportConfig.referrerPath : \"/\";\n var __dynamicImportCache = /* @__PURE__ */ new Map();\n var __resolveDynamicImportPath = function(request, referrer) {\n if (!request.startsWith(\"./\") && !request.startsWith(\"../\") && !request.startsWith(\"/\")) {\n return request;\n }\n const baseDir = referrer.endsWith(\"/\") ? referrer : referrer.slice(0, referrer.lastIndexOf(\"/\")) || \"/\";\n const segments = baseDir.split(\"/\").filter(Boolean);\n for (const part of request.split(\"/\")) {\n if (part === \".\" || part.length === 0) continue;\n if (part === \"..\") {\n segments.pop();\n continue;\n }\n segments.push(part);\n }\n return `/${segments.join(\"/\")}`;\n };\n var __dynamicImportHandler = function(specifier, fromPath) {\n const request = String(specifier);\n const referrer = typeof fromPath === \"string\" && fromPath.length > 0 ? fromPath : __fallbackReferrer;\n let resolved = null;\n if (typeof globalThis._resolveModuleSync !== \"undefined\") {\n resolved = globalThis._resolveModuleSync.applySync(\n void 0,\n [request, referrer, \"import\"]\n );\n }\n const resolvedPath = typeof resolved === \"string\" && resolved.length > 0 ? resolved : __resolveDynamicImportPath(request, referrer);\n const cacheKey = typeof resolved === \"string\" && resolved.length > 0 ? resolved : `${referrer}\\0${request}`;\n const cached = __dynamicImportCache.get(cacheKey);\n if (cached) return Promise.resolve(cached);\n if (typeof globalThis._requireFrom !== \"function\") {\n throw new Error(\"Cannot load module: \" + resolvedPath);\n }\n let mod;\n try {\n mod = globalThis._requireFrom(resolved ?? request, referrer);\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error);\n if (error && typeof error === \"object\" && \"code\" in error && error.code === \"MODULE_NOT_FOUND\") {\n throw new Error(\"Cannot load module: \" + resolvedPath);\n }\n if (message.startsWith(\"Cannot find module \")) {\n throw new Error(\"Cannot load module: \" + resolvedPath);\n }\n throw error;\n }\n const namespaceFallback = { default: mod };\n if (isObjectLike(mod)) {\n for (const key of Object.keys(mod)) {\n if (!(key in namespaceFallback)) {\n namespaceFallback[key] = mod[key];\n }\n }\n }\n __dynamicImportCache.set(cacheKey, namespaceFallback);\n return Promise.resolve(namespaceFallback);\n };\n __runtimeExposeCustomGlobal(\"__dynamicImport\", __dynamicImportHandler);\n})();\n", diff --git a/packages/core/src/kernel/kernel.ts b/packages/core/src/kernel/kernel.ts index e3fdc678..df2dd167 100644 --- a/packages/core/src/kernel/kernel.ts +++ b/packages/core/src/kernel/kernel.ts @@ -110,6 +110,7 @@ class KernelImpl implements Kernel { fs = createProcLayer(fs, { processTable: this.processTable, fdTableManager: this.fdTableManager, + hostname: options.env?.HOSTNAME, }); // Apply permission wrapping diff --git a/packages/core/src/kernel/proc-layer.ts b/packages/core/src/kernel/proc-layer.ts index 57b39b2a..69b8b43d 100644 --- a/packages/core/src/kernel/proc-layer.ts +++ b/packages/core/src/kernel/proc-layer.ts @@ -8,16 +8,30 @@ const S_IFDIR = 0o040000; const S_IFLNK = 0o120000; const PROC_INO_BASE = 0xfffe_0000; const PROC_SELF_PREFIX = "/proc/self"; +const PROC_SYS_PREFIX = "/proc/sys"; +const PROC_SYS_KERNEL_PREFIX = "/proc/sys/kernel"; +const PROC_SYS_KERNEL_HOSTNAME_PATH = "/proc/sys/kernel/hostname"; const PROC_PID_ENTRIES: VirtualDirEntry[] = [ { name: "fd", isDirectory: true }, { name: "cwd", isDirectory: false, isSymbolicLink: true }, { name: "exe", isDirectory: false, isSymbolicLink: true }, { name: "environ", isDirectory: false }, ]; +const PROC_ROOT_ENTRIES: VirtualDirEntry[] = [ + { name: "self", isDirectory: false, 
isSymbolicLink: true }, + { name: "sys", isDirectory: true }, +]; +const PROC_SYS_ENTRIES: VirtualDirEntry[] = [ + { name: "kernel", isDirectory: true }, +]; +const PROC_SYS_KERNEL_ENTRIES: VirtualDirEntry[] = [ + { name: "hostname", isDirectory: false }, +]; export interface ProcLayerOptions { processTable: ProcessTable; fdTableManager: FDTableManager; + hostname?: string; } function normalizePath(path: string): string { @@ -190,6 +204,7 @@ export function createProcLayer(vfs: VirtualFileSystem, options: ProcLayerOption const syncVfs = vfs as VirtualFileSystem & { prepareOpenSync?: (path: string, flags: number) => boolean; }; + const kernelHostname = encodeText(`${options.hostname ?? "sandbox"}\n`); const getProcess = (pid: number) => { const entry = options.processTable.get(pid); @@ -235,6 +250,11 @@ export function createProcLayer(vfs: VirtualFileSystem, options: ProcLayerOption const getProcStat = async (path: string, followSymlinks: boolean): Promise => { const normalized = normalizePath(path); if (normalized === "/proc") return dirStat("proc"); + if (normalized === PROC_SYS_PREFIX) return dirStat("proc:sys"); + if (normalized === PROC_SYS_KERNEL_PREFIX) return dirStat("proc:sys:kernel"); + if (normalized === PROC_SYS_KERNEL_HOSTNAME_PATH) { + return fileStat("proc:sys:kernel:hostname", kernelHostname.length); + } if (normalized === PROC_SELF_PREFIX) { return followSymlinks ? 
dirStat("proc-self") : linkStat("proc-self-link", PROC_SELF_PREFIX); } @@ -286,6 +306,12 @@ export function createProcLayer(vfs: VirtualFileSystem, options: ProcLayerOption if (normalized === "/proc" || normalized === PROC_SELF_PREFIX) { throw new KernelError("EISDIR", `illegal operation on a directory, read '${normalized}'`); } + if (normalized === PROC_SYS_PREFIX || normalized === PROC_SYS_KERNEL_PREFIX) { + throw new KernelError("EISDIR", `illegal operation on a directory, read '${normalized}'`); + } + if (normalized === PROC_SYS_KERNEL_HOSTNAME_PATH) { + return kernelHostname; + } const parsed = parseProcPath(normalized); if (!parsed) throw new KernelError("ENOENT", `no such file or directory: ${normalized}`); const { pid, tail } = parsed; @@ -315,10 +341,16 @@ export function createProcLayer(vfs: VirtualFileSystem, options: ProcLayerOption if (!isProcPath(normalized)) return vfs.readDirWithTypes(clonePathArg(path, normalized)); if (normalized === "/proc") { return [ - { name: "self", isDirectory: false, isSymbolicLink: true }, + ...PROC_ROOT_ENTRIES, ...listPids().map((pid) => ({ name: String(pid), isDirectory: true })), ]; } + if (normalized === PROC_SYS_PREFIX) { + return PROC_SYS_ENTRIES; + } + if (normalized === PROC_SYS_KERNEL_PREFIX) { + return PROC_SYS_KERNEL_ENTRIES; + } if (normalized === PROC_SELF_PREFIX) { throw new KernelError("ENOENT", `no such file or directory: ${normalized}`); } @@ -354,7 +386,15 @@ export function createProcLayer(vfs: VirtualFileSystem, options: ProcLayerOption async exists(path) { const normalized = normalizePath(path); if (!isProcPath(normalized)) return vfs.exists(clonePathArg(path, normalized)); - if (normalized === "/proc" || normalized === PROC_SELF_PREFIX) return true; + if ( + normalized === "/proc" || + normalized === PROC_SELF_PREFIX || + normalized === PROC_SYS_PREFIX || + normalized === PROC_SYS_KERNEL_PREFIX || + normalized === PROC_SYS_KERNEL_HOSTNAME_PATH + ) { + return true; + } const parsed = 
parseProcPath(normalized); if (!parsed) return false; const { pid, tail } = parsed; @@ -397,7 +437,15 @@ export function createProcLayer(vfs: VirtualFileSystem, options: ProcLayerOption async realpath(path) { const normalized = normalizePath(path); if (!isProcPath(normalized)) return vfs.realpath(clonePathArg(path, normalized)); - if (normalized === "/proc" || normalized === PROC_SELF_PREFIX) return normalized; + if ( + normalized === "/proc" || + normalized === PROC_SELF_PREFIX || + normalized === PROC_SYS_PREFIX || + normalized === PROC_SYS_KERNEL_PREFIX || + normalized === PROC_SYS_KERNEL_HOSTNAME_PATH + ) { + return normalized; + } const parsed = parseProcPath(normalized); if (!parsed) throw new KernelError("ENOENT", `no such file or directory: ${normalized}`); const { pid, tail } = parsed; diff --git a/packages/core/src/kernel/socket-table.ts b/packages/core/src/kernel/socket-table.ts index 828c197a..c5968347 100644 --- a/packages/core/src/kernel/socket-table.ts +++ b/packages/core/src/kernel/socket-table.ts @@ -259,7 +259,7 @@ export class SocketTable { * For Unix domain sockets (UnixAddr), creates a socket file in the * VFS if one is configured. 
*/ - async bind(socketId: number, addr: SockAddr): Promise { + async bind(socketId: number, addr: SockAddr, options?: { mode?: number }): Promise { const socket = this.requireSocket(socketId); if (socket.state !== "created") { throw new KernelError("EINVAL", "socket must be in created state to bind"); @@ -283,7 +283,7 @@ export class SocketTable { this.udpBindings.set(addrKey(boundAddr), socketId); // Create socket file in VFS for Unix dgram sockets if (isUnixAddr(boundAddr) && this.vfs) { - await this.createSocketFile(boundAddr.path); + await this.createSocketFile(boundAddr.path, options?.mode); } return; } @@ -298,7 +298,7 @@ export class SocketTable { // Create socket file in VFS for Unix stream sockets if (isUnixAddr(boundAddr) && this.vfs) { - await this.createSocketFile(boundAddr.path); + await this.createSocketFile(boundAddr.path, options?.mode); } } @@ -544,6 +544,18 @@ export class SocketTable { throw new KernelError("EINVAL", "socket must be in created or bound state to connect"); } + // Mirror POSIX auto-bind behavior so connected client sockets always + // expose a concrete local address/port to both peers. + if (!socket.localAddr && isInetAddr(addr)) { + socket.localAddr = this.assignEphemeralPort( + { + host: addr.host.includes(":") ? "::1" : "127.0.0.1", + port: 0, + }, + socket, + ); + } + // Unix domain sockets: check VFS for socket file existence if (isUnixAddr(addr) && this.vfs) { if (!await this.vfs.exists(addr.path)) { @@ -763,7 +775,7 @@ export class SocketTable { if (target.datagramQueue.length >= MAX_UDP_QUEUE_DEPTH) { return data.length; // Silently drop } - const srcAddr: SockAddr = socket.localAddr ?? 
{ host: "127.0.0.1", port: 0 }; + const srcAddr: SockAddr = this.getUdpSourceAddr(socket, destAddr); target.datagramQueue.push({ data: new Uint8Array(data), srcAddr }); target.readWaiters.wakeOne(); return data.length; @@ -784,6 +796,28 @@ export class SocketTable { return data.length; } + private getUdpSourceAddr(socket: KernelSocket, destAddr: SockAddr): SockAddr { + if (!socket.localAddr) { + return isInetAddr(destAddr) + ? { + host: destAddr.host.includes(":") ? "::1" : "127.0.0.1", + port: 0, + } + : { path: destAddr.path }; + } + if ( + isInetAddr(socket.localAddr) && + isInetAddr(destAddr) && + (socket.localAddr.host === "0.0.0.0" || socket.localAddr.host === "::") + ) { + return { + host: destAddr.host, + port: socket.localAddr.port, + }; + } + return socket.localAddr; + } + /** * Receive a datagram from a UDP socket. Returns the datagram and the * source address, or null if no datagram is queued. @@ -942,10 +976,10 @@ export class SocketTable { // ----------------------------------------------------------------------- /** Create a socket file in the VFS with S_IFSOCK mode. 
*/ - private async createSocketFile(path: string): Promise { + private async createSocketFile(path: string, mode: number = 0o755): Promise { if (!this.vfs) return; await this.vfs.writeFile(path, new Uint8Array(0)); - await this.vfs.chmod(path, S_IFSOCK | 0o755); + await this.vfs.chmod(path, S_IFSOCK | (mode & 0o777)); } private requireSocket(socketId: number): KernelSocket { diff --git a/packages/core/src/shared/bridge-contract.ts b/packages/core/src/shared/bridge-contract.ts index 31684146..157d0460 100644 --- a/packages/core/src/shared/bridge-contract.ts +++ b/packages/core/src/shared/bridge-contract.ts @@ -80,14 +80,53 @@ export const HOST_BRIDGE_GLOBAL_KEYS = { networkHttpServerCloseRaw: "_networkHttpServerCloseRaw", networkHttpServerRespondRaw: "_networkHttpServerRespondRaw", networkHttpServerWaitRaw: "_networkHttpServerWaitRaw", + networkHttp2ServerListenRaw: "_networkHttp2ServerListenRaw", + networkHttp2ServerCloseRaw: "_networkHttp2ServerCloseRaw", + networkHttp2ServerWaitRaw: "_networkHttp2ServerWaitRaw", + networkHttp2SessionConnectRaw: "_networkHttp2SessionConnectRaw", + networkHttp2SessionRequestRaw: "_networkHttp2SessionRequestRaw", + networkHttp2SessionSettingsRaw: "_networkHttp2SessionSettingsRaw", + networkHttp2SessionSetLocalWindowSizeRaw: "_networkHttp2SessionSetLocalWindowSizeRaw", + networkHttp2SessionGoawayRaw: "_networkHttp2SessionGoawayRaw", + networkHttp2SessionCloseRaw: "_networkHttp2SessionCloseRaw", + networkHttp2SessionDestroyRaw: "_networkHttp2SessionDestroyRaw", + networkHttp2SessionWaitRaw: "_networkHttp2SessionWaitRaw", + networkHttp2ServerPollRaw: "_networkHttp2ServerPollRaw", + networkHttp2SessionPollRaw: "_networkHttp2SessionPollRaw", + networkHttp2StreamRespondRaw: "_networkHttp2StreamRespondRaw", + networkHttp2StreamPushStreamRaw: "_networkHttp2StreamPushStreamRaw", + networkHttp2StreamWriteRaw: "_networkHttp2StreamWriteRaw", + networkHttp2StreamEndRaw: "_networkHttp2StreamEndRaw", + networkHttp2StreamPauseRaw: 
"_networkHttp2StreamPauseRaw", + networkHttp2StreamResumeRaw: "_networkHttp2StreamResumeRaw", + networkHttp2StreamRespondWithFileRaw: "_networkHttp2StreamRespondWithFileRaw", + networkHttp2ServerRespondRaw: "_networkHttp2ServerRespondRaw", upgradeSocketWriteRaw: "_upgradeSocketWriteRaw", upgradeSocketEndRaw: "_upgradeSocketEndRaw", upgradeSocketDestroyRaw: "_upgradeSocketDestroyRaw", netSocketConnectRaw: "_netSocketConnectRaw", + netSocketWaitConnectRaw: "_netSocketWaitConnectRaw", + netSocketReadRaw: "_netSocketReadRaw", + netSocketSetNoDelayRaw: "_netSocketSetNoDelayRaw", + netSocketSetKeepAliveRaw: "_netSocketSetKeepAliveRaw", netSocketWriteRaw: "_netSocketWriteRaw", netSocketEndRaw: "_netSocketEndRaw", netSocketDestroyRaw: "_netSocketDestroyRaw", netSocketUpgradeTlsRaw: "_netSocketUpgradeTlsRaw", + netSocketGetTlsClientHelloRaw: "_netSocketGetTlsClientHelloRaw", + netSocketTlsQueryRaw: "_netSocketTlsQueryRaw", + tlsGetCiphersRaw: "_tlsGetCiphersRaw", + netServerListenRaw: "_netServerListenRaw", + netServerAcceptRaw: "_netServerAcceptRaw", + netServerCloseRaw: "_netServerCloseRaw", + dgramSocketCreateRaw: "_dgramSocketCreateRaw", + dgramSocketBindRaw: "_dgramSocketBindRaw", + dgramSocketRecvRaw: "_dgramSocketRecvRaw", + dgramSocketSendRaw: "_dgramSocketSendRaw", + dgramSocketCloseRaw: "_dgramSocketCloseRaw", + dgramSocketAddressRaw: "_dgramSocketAddressRaw", + dgramSocketSetBufferSizeRaw: "_dgramSocketSetBufferSizeRaw", + dgramSocketGetBufferSizeRaw: "_dgramSocketGetBufferSizeRaw", resolveModuleSync: "_resolveModuleSync", loadFileSync: "_loadFileSync", ptySetRawMode: "_ptySetRawMode", @@ -111,8 +150,11 @@ export const RUNTIME_BRIDGE_GLOBAL_KEYS = { httpsModule: "_httpsModule", http2Module: "_http2Module", dnsModule: "_dnsModule", + dgramModule: "_dgramModule", httpServerDispatch: "_httpServerDispatch", httpServerUpgradeDispatch: "_httpServerUpgradeDispatch", + httpServerConnectDispatch: "_httpServerConnectDispatch", + http2Dispatch: "_http2Dispatch", 
timerDispatch: "_timerDispatch", upgradeSocketData: "_upgradeSocketData", upgradeSocketEnd: "_upgradeSocketEnd", @@ -318,14 +360,110 @@ export type NetworkHttpServerRespondRawBridgeRef = BridgeApplySyncRef< void >; export type NetworkHttpServerWaitRawBridgeRef = BridgeApplyRef<[number], void>; +export type NetworkHttp2ServerListenRawBridgeRef = BridgeApplySyncPromiseRef< + [string], + string +>; +export type NetworkHttp2ServerCloseRawBridgeRef = BridgeApplyRef<[number], void>; +export type NetworkHttp2ServerWaitRawBridgeRef = BridgeApplyRef<[number], void>; +export type NetworkHttp2SessionConnectRawBridgeRef = BridgeApplySyncPromiseRef< + [string], + string +>; +export type NetworkHttp2SessionRequestRawBridgeRef = BridgeApplySyncRef< + [number, string, string], + number +>; +export type NetworkHttp2SessionSettingsRawBridgeRef = BridgeApplySyncRef< + [number, string], + void +>; +export type NetworkHttp2SessionSetLocalWindowSizeRawBridgeRef = BridgeApplySyncRef< + [number, number], + string +>; +export type NetworkHttp2SessionGoawayRawBridgeRef = BridgeApplySyncRef< + [number, number, number, string | null], + void +>; +export type NetworkHttp2SessionCloseRawBridgeRef = BridgeApplySyncRef< + [number], + void +>; +export type NetworkHttp2SessionDestroyRawBridgeRef = BridgeApplySyncRef< + [number], + void +>; +export type NetworkHttp2SessionWaitRawBridgeRef = BridgeApplyRef<[number], void>; +export type NetworkHttp2ServerPollRawBridgeRef = BridgeApplySyncRef< + [number], + string | null +>; +export type NetworkHttp2SessionPollRawBridgeRef = BridgeApplySyncRef< + [number], + string | null +>; +export type NetworkHttp2StreamRespondRawBridgeRef = BridgeApplySyncRef< + [number, string], + void +>; +export type NetworkHttp2StreamPushStreamRawBridgeRef = BridgeApplySyncPromiseRef< + [number, string, string], + string +>; +export type NetworkHttp2StreamWriteRawBridgeRef = BridgeApplySyncRef< + [number, string], + boolean +>; +export type NetworkHttp2StreamEndRawBridgeRef = 
BridgeApplySyncRef< + [number, string | null], + void +>; +export type NetworkHttp2StreamPauseRawBridgeRef = BridgeApplySyncRef<[number], void>; +export type NetworkHttp2StreamResumeRawBridgeRef = BridgeApplySyncRef<[number], void>; +export type NetworkHttp2StreamRespondWithFileRawBridgeRef = BridgeApplySyncRef< + [number, string, string, string], + void +>; +export type NetworkHttp2ServerRespondRawBridgeRef = BridgeApplySyncRef< + [number, number, string], + void +>; export type UpgradeSocketWriteRawBridgeRef = BridgeApplySyncRef<[number, string], void>; export type UpgradeSocketEndRawBridgeRef = BridgeApplySyncRef<[number], void>; export type UpgradeSocketDestroyRawBridgeRef = BridgeApplySyncRef<[number], void>; -export type NetSocketConnectRawBridgeRef = BridgeApplySyncRef<[string, number], number>; +export type NetSocketConnectRawBridgeRef = BridgeApplySyncRef<[string], number>; +export type NetSocketWaitConnectRawBridgeRef = BridgeApplyRef<[number], string>; +export type NetSocketReadRawBridgeRef = BridgeApplySyncRef<[number], string | null>; +export type NetSocketSetNoDelayRawBridgeRef = BridgeApplySyncRef<[number, boolean], void>; +export type NetSocketSetKeepAliveRawBridgeRef = BridgeApplySyncRef<[number, boolean, number], void>; export type NetSocketWriteRawBridgeRef = BridgeApplySyncRef<[number, string], void>; export type NetSocketEndRawBridgeRef = BridgeApplySyncRef<[number], void>; export type NetSocketDestroyRawBridgeRef = BridgeApplySyncRef<[number], void>; export type NetSocketUpgradeTlsRawBridgeRef = BridgeApplySyncRef<[number, string], void>; +export type NetSocketGetTlsClientHelloRawBridgeRef = BridgeApplySyncRef<[number], string>; +export type NetSocketTlsQueryRawBridgeRef = BridgeApplySyncRef< + [number, string, boolean?], + string +>; +export type TlsGetCiphersRawBridgeRef = BridgeApplySyncRef<[], string>; +export type NetServerListenRawBridgeRef = BridgeApplySyncPromiseRef<[string], string>; +export type NetServerAcceptRawBridgeRef = 
BridgeApplySyncRef<[number], string | null>; +export type NetServerCloseRawBridgeRef = BridgeApplyRef<[number], void>; +export type DgramSocketCreateRawBridgeRef = BridgeApplySyncRef<[string], number>; +export type DgramSocketBindRawBridgeRef = BridgeApplySyncPromiseRef<[number, string], string>; +export type DgramSocketRecvRawBridgeRef = BridgeApplySyncRef<[number], string | null>; +export type DgramSocketSendRawBridgeRef = BridgeApplySyncPromiseRef<[number, string], number>; +export type DgramSocketCloseRawBridgeRef = BridgeApplySyncPromiseRef<[number], void>; +export type DgramSocketAddressRawBridgeRef = BridgeApplySyncRef<[number], string>; +export type DgramSocketSetBufferSizeRawBridgeRef = BridgeApplySyncRef< + [number, "recv" | "send", number], + void +>; +export type DgramSocketGetBufferSizeRawBridgeRef = BridgeApplySyncRef< + [number, "recv" | "send"], + number +>; export type ResolveModuleSyncBridgeRef = BridgeApplySyncRef< [string, string], string | null diff --git a/packages/core/src/shared/global-exposure.ts b/packages/core/src/shared/global-exposure.ts index a85689c1..0d2a651d 100644 --- a/packages/core/src/shared/global-exposure.ts +++ b/packages/core/src/shared/global-exposure.ts @@ -93,6 +93,11 @@ export const NODE_CUSTOM_GLOBAL_INVENTORY: readonly CustomGlobalInventoryEntry[] classification: "hardened", rationale: "Bridge-owned dns module handle for require resolution.", }, + { + name: "_dgramModule", + classification: "hardened", + rationale: "Bridge-owned dgram module handle for require resolution.", + }, { name: "_netModule", classification: "hardened", @@ -118,6 +123,16 @@ export const NODE_CUSTOM_GLOBAL_INVENTORY: readonly CustomGlobalInventoryEntry[] classification: "hardened", rationale: "Host-to-sandbox HTTP upgrade dispatch entrypoint.", }, + { + name: "_httpServerConnectDispatch", + classification: "hardened", + rationale: "Host-to-sandbox HTTP CONNECT dispatch entrypoint.", + }, + { + name: "_http2Dispatch", + classification: 
"hardened", + rationale: "Host-to-sandbox HTTP/2 event dispatch entrypoint.", + }, { name: "_timerDispatch", classification: "hardened", @@ -453,6 +468,76 @@ export const NODE_CUSTOM_GLOBAL_INVENTORY: readonly CustomGlobalInventoryEntry[] classification: "hardened", rationale: "Host network bridge reference for sandbox HTTP server lifetime tracking.", }, + { + name: "_networkHttp2ServerListenRaw", + classification: "hardened", + rationale: "Host HTTP/2 server listen bridge reference.", + }, + { + name: "_networkHttp2ServerCloseRaw", + classification: "hardened", + rationale: "Host HTTP/2 server close bridge reference.", + }, + { + name: "_networkHttp2ServerWaitRaw", + classification: "hardened", + rationale: "Host HTTP/2 server lifetime bridge reference.", + }, + { + name: "_networkHttp2SessionConnectRaw", + classification: "hardened", + rationale: "Host HTTP/2 session connect bridge reference.", + }, + { + name: "_networkHttp2SessionRequestRaw", + classification: "hardened", + rationale: "Host HTTP/2 session request bridge reference.", + }, + { + name: "_networkHttp2SessionSettingsRaw", + classification: "hardened", + rationale: "Host HTTP/2 session settings bridge reference.", + }, + { + name: "_networkHttp2SessionGoawayRaw", + classification: "hardened", + rationale: "Host HTTP/2 session GOAWAY bridge reference.", + }, + { + name: "_networkHttp2SessionCloseRaw", + classification: "hardened", + rationale: "Host HTTP/2 session close bridge reference.", + }, + { + name: "_networkHttp2SessionDestroyRaw", + classification: "hardened", + rationale: "Host HTTP/2 session destroy bridge reference.", + }, + { + name: "_networkHttp2SessionWaitRaw", + classification: "hardened", + rationale: "Host HTTP/2 session lifetime bridge reference.", + }, + { + name: "_networkHttp2StreamRespondRaw", + classification: "hardened", + rationale: "Host HTTP/2 stream respond bridge reference.", + }, + { + name: "_networkHttp2StreamPushStreamRaw", + classification: "hardened", + rationale: 
"Host HTTP/2 push stream bridge reference.", + }, + { + name: "_networkHttp2StreamWriteRaw", + classification: "hardened", + rationale: "Host HTTP/2 stream write bridge reference.", + }, + { + name: "_networkHttp2StreamEndRaw", + classification: "hardened", + rationale: "Host HTTP/2 stream end bridge reference.", + }, { name: "_upgradeSocketWriteRaw", classification: "hardened", @@ -473,6 +558,26 @@ export const NODE_CUSTOM_GLOBAL_INVENTORY: readonly CustomGlobalInventoryEntry[] classification: "hardened", rationale: "Host net socket connect bridge reference.", }, + { + name: "_netSocketWaitConnectRaw", + classification: "hardened", + rationale: "Host net socket connect-wait bridge reference.", + }, + { + name: "_netSocketReadRaw", + classification: "hardened", + rationale: "Host net socket read bridge reference.", + }, + { + name: "_netSocketSetNoDelayRaw", + classification: "hardened", + rationale: "Host net socket no-delay bridge reference.", + }, + { + name: "_netSocketSetKeepAliveRaw", + classification: "hardened", + rationale: "Host net socket keepalive bridge reference.", + }, { name: "_netSocketWriteRaw", classification: "hardened", @@ -493,6 +598,36 @@ export const NODE_CUSTOM_GLOBAL_INVENTORY: readonly CustomGlobalInventoryEntry[] classification: "hardened", rationale: "Host net socket TLS-upgrade bridge reference.", }, + { + name: "_netSocketGetTlsClientHelloRaw", + classification: "hardened", + rationale: "Host loopback TLS client-hello bridge reference.", + }, + { + name: "_netSocketTlsQueryRaw", + classification: "hardened", + rationale: "Host TLS socket query bridge reference.", + }, + { + name: "_tlsGetCiphersRaw", + classification: "hardened", + rationale: "Host TLS cipher-list bridge reference.", + }, + { + name: "_netServerListenRaw", + classification: "hardened", + rationale: "Host net server listen bridge reference.", + }, + { + name: "_netServerAcceptRaw", + classification: "hardened", + rationale: "Host net server accept bridge reference.", + 
}, + { + name: "_netServerCloseRaw", + classification: "hardened", + rationale: "Host net server close bridge reference.", + }, { name: "_batchResolveModules", classification: "hardened", diff --git a/packages/nodejs/src/bridge-contract.ts b/packages/nodejs/src/bridge-contract.ts index 6499fdcd..e324e780 100644 --- a/packages/nodejs/src/bridge-contract.ts +++ b/packages/nodejs/src/bridge-contract.ts @@ -76,14 +76,53 @@ export const HOST_BRIDGE_GLOBAL_KEYS = { networkHttpServerCloseRaw: "_networkHttpServerCloseRaw", networkHttpServerRespondRaw: "_networkHttpServerRespondRaw", networkHttpServerWaitRaw: "_networkHttpServerWaitRaw", + networkHttp2ServerListenRaw: "_networkHttp2ServerListenRaw", + networkHttp2ServerCloseRaw: "_networkHttp2ServerCloseRaw", + networkHttp2ServerWaitRaw: "_networkHttp2ServerWaitRaw", + networkHttp2SessionConnectRaw: "_networkHttp2SessionConnectRaw", + networkHttp2SessionRequestRaw: "_networkHttp2SessionRequestRaw", + networkHttp2SessionSettingsRaw: "_networkHttp2SessionSettingsRaw", + networkHttp2SessionSetLocalWindowSizeRaw: "_networkHttp2SessionSetLocalWindowSizeRaw", + networkHttp2SessionGoawayRaw: "_networkHttp2SessionGoawayRaw", + networkHttp2SessionCloseRaw: "_networkHttp2SessionCloseRaw", + networkHttp2SessionDestroyRaw: "_networkHttp2SessionDestroyRaw", + networkHttp2SessionWaitRaw: "_networkHttp2SessionWaitRaw", + networkHttp2ServerPollRaw: "_networkHttp2ServerPollRaw", + networkHttp2SessionPollRaw: "_networkHttp2SessionPollRaw", + networkHttp2StreamRespondRaw: "_networkHttp2StreamRespondRaw", + networkHttp2StreamPushStreamRaw: "_networkHttp2StreamPushStreamRaw", + networkHttp2StreamWriteRaw: "_networkHttp2StreamWriteRaw", + networkHttp2StreamEndRaw: "_networkHttp2StreamEndRaw", + networkHttp2StreamPauseRaw: "_networkHttp2StreamPauseRaw", + networkHttp2StreamResumeRaw: "_networkHttp2StreamResumeRaw", + networkHttp2StreamRespondWithFileRaw: "_networkHttp2StreamRespondWithFileRaw", + networkHttp2ServerRespondRaw: 
"_networkHttp2ServerRespondRaw", upgradeSocketWriteRaw: "_upgradeSocketWriteRaw", upgradeSocketEndRaw: "_upgradeSocketEndRaw", upgradeSocketDestroyRaw: "_upgradeSocketDestroyRaw", netSocketConnectRaw: "_netSocketConnectRaw", + netSocketWaitConnectRaw: "_netSocketWaitConnectRaw", + netSocketReadRaw: "_netSocketReadRaw", + netSocketSetNoDelayRaw: "_netSocketSetNoDelayRaw", + netSocketSetKeepAliveRaw: "_netSocketSetKeepAliveRaw", netSocketWriteRaw: "_netSocketWriteRaw", netSocketEndRaw: "_netSocketEndRaw", netSocketDestroyRaw: "_netSocketDestroyRaw", netSocketUpgradeTlsRaw: "_netSocketUpgradeTlsRaw", + netSocketGetTlsClientHelloRaw: "_netSocketGetTlsClientHelloRaw", + netSocketTlsQueryRaw: "_netSocketTlsQueryRaw", + tlsGetCiphersRaw: "_tlsGetCiphersRaw", + netServerListenRaw: "_netServerListenRaw", + netServerAcceptRaw: "_netServerAcceptRaw", + netServerCloseRaw: "_netServerCloseRaw", + dgramSocketCreateRaw: "_dgramSocketCreateRaw", + dgramSocketBindRaw: "_dgramSocketBindRaw", + dgramSocketRecvRaw: "_dgramSocketRecvRaw", + dgramSocketSendRaw: "_dgramSocketSendRaw", + dgramSocketCloseRaw: "_dgramSocketCloseRaw", + dgramSocketAddressRaw: "_dgramSocketAddressRaw", + dgramSocketSetBufferSizeRaw: "_dgramSocketSetBufferSizeRaw", + dgramSocketGetBufferSizeRaw: "_dgramSocketGetBufferSizeRaw", resolveModuleSync: "_resolveModuleSync", loadFileSync: "_loadFileSync", ptySetRawMode: "_ptySetRawMode", @@ -116,8 +155,11 @@ export const RUNTIME_BRIDGE_GLOBAL_KEYS = { httpsModule: "_httpsModule", http2Module: "_http2Module", dnsModule: "_dnsModule", + dgramModule: "_dgramModule", httpServerDispatch: "_httpServerDispatch", httpServerUpgradeDispatch: "_httpServerUpgradeDispatch", + httpServerConnectDispatch: "_httpServerConnectDispatch", + http2Dispatch: "_http2Dispatch", timerDispatch: "_timerDispatch", upgradeSocketData: "_upgradeSocketData", upgradeSocketEnd: "_upgradeSocketEnd", @@ -323,14 +365,110 @@ export type NetworkHttpServerRespondRawBridgeRef = BridgeApplySyncRef< void >; 
export type NetworkHttpServerWaitRawBridgeRef = BridgeApplyRef<[number], void>; +export type NetworkHttp2ServerListenRawBridgeRef = BridgeApplySyncPromiseRef< + [string], + string +>; +export type NetworkHttp2ServerCloseRawBridgeRef = BridgeApplyRef<[number], void>; +export type NetworkHttp2ServerWaitRawBridgeRef = BridgeApplyRef<[number], void>; +export type NetworkHttp2SessionConnectRawBridgeRef = BridgeApplySyncPromiseRef< + [string], + string +>; +export type NetworkHttp2SessionRequestRawBridgeRef = BridgeApplySyncRef< + [number, string, string], + number +>; +export type NetworkHttp2SessionSettingsRawBridgeRef = BridgeApplySyncRef< + [number, string], + void +>; +export type NetworkHttp2SessionSetLocalWindowSizeRawBridgeRef = BridgeApplySyncRef< + [number, number], + string +>; +export type NetworkHttp2SessionGoawayRawBridgeRef = BridgeApplySyncRef< + [number, number, number, string | null], + void +>; +export type NetworkHttp2SessionCloseRawBridgeRef = BridgeApplySyncRef< + [number], + void +>; +export type NetworkHttp2SessionDestroyRawBridgeRef = BridgeApplySyncRef< + [number], + void +>; +export type NetworkHttp2SessionWaitRawBridgeRef = BridgeApplyRef<[number], void>; +export type NetworkHttp2ServerPollRawBridgeRef = BridgeApplySyncRef< + [number], + string | null +>; +export type NetworkHttp2SessionPollRawBridgeRef = BridgeApplySyncRef< + [number], + string | null +>; +export type NetworkHttp2StreamRespondRawBridgeRef = BridgeApplySyncRef< + [number, string], + void +>; +export type NetworkHttp2StreamPushStreamRawBridgeRef = BridgeApplySyncPromiseRef< + [number, string, string], + string +>; +export type NetworkHttp2StreamWriteRawBridgeRef = BridgeApplySyncRef< + [number, string], + boolean +>; +export type NetworkHttp2StreamEndRawBridgeRef = BridgeApplySyncRef< + [number, string | null], + void +>; +export type NetworkHttp2StreamPauseRawBridgeRef = BridgeApplySyncRef<[number], void>; +export type NetworkHttp2StreamResumeRawBridgeRef = 
BridgeApplySyncRef<[number], void>; +export type NetworkHttp2StreamRespondWithFileRawBridgeRef = BridgeApplySyncRef< + [number, string, string, string], + void +>; +export type NetworkHttp2ServerRespondRawBridgeRef = BridgeApplySyncRef< + [number, number, string], + void +>; export type UpgradeSocketWriteRawBridgeRef = BridgeApplySyncRef<[number, string], void>; export type UpgradeSocketEndRawBridgeRef = BridgeApplySyncRef<[number], void>; export type UpgradeSocketDestroyRawBridgeRef = BridgeApplySyncRef<[number], void>; -export type NetSocketConnectRawBridgeRef = BridgeApplySyncRef<[string, number], number>; +export type NetSocketConnectRawBridgeRef = BridgeApplySyncRef<[string], number>; +export type NetSocketWaitConnectRawBridgeRef = BridgeApplyRef<[number], string>; +export type NetSocketReadRawBridgeRef = BridgeApplySyncRef<[number], string | null>; +export type NetSocketSetNoDelayRawBridgeRef = BridgeApplySyncRef<[number, boolean], void>; +export type NetSocketSetKeepAliveRawBridgeRef = BridgeApplySyncRef<[number, boolean, number], void>; export type NetSocketWriteRawBridgeRef = BridgeApplySyncRef<[number, string], void>; export type NetSocketEndRawBridgeRef = BridgeApplySyncRef<[number], void>; export type NetSocketDestroyRawBridgeRef = BridgeApplySyncRef<[number], void>; export type NetSocketUpgradeTlsRawBridgeRef = BridgeApplySyncRef<[number, string], void>; +export type NetSocketGetTlsClientHelloRawBridgeRef = BridgeApplySyncRef<[number], string>; +export type NetSocketTlsQueryRawBridgeRef = BridgeApplySyncRef< + [number, string, boolean?], + string +>; +export type TlsGetCiphersRawBridgeRef = BridgeApplySyncRef<[], string>; +export type NetServerListenRawBridgeRef = BridgeApplySyncPromiseRef<[string], string>; +export type NetServerAcceptRawBridgeRef = BridgeApplySyncRef<[number], string | null>; +export type NetServerCloseRawBridgeRef = BridgeApplyRef<[number], void>; +export type DgramSocketCreateRawBridgeRef = BridgeApplySyncRef<[string], number>; 
+export type DgramSocketBindRawBridgeRef = BridgeApplySyncPromiseRef<[number, string], string>; +export type DgramSocketRecvRawBridgeRef = BridgeApplySyncRef<[number], string | null>; +export type DgramSocketSendRawBridgeRef = BridgeApplySyncPromiseRef<[number, string], number>; +export type DgramSocketCloseRawBridgeRef = BridgeApplySyncPromiseRef<[number], void>; +export type DgramSocketAddressRawBridgeRef = BridgeApplySyncRef<[number], string>; +export type DgramSocketSetBufferSizeRawBridgeRef = BridgeApplySyncRef< + [number, "recv" | "send", number], + void +>; +export type DgramSocketGetBufferSizeRawBridgeRef = BridgeApplySyncRef< + [number, "recv" | "send"], + number +>; export type ResolveModuleSyncBridgeRef = BridgeApplySyncRef< [string, string], string | null diff --git a/packages/nodejs/src/bridge-handlers.ts b/packages/nodejs/src/bridge-handlers.ts index b434b7a1..e55b2661 100644 --- a/packages/nodejs/src/bridge-handlers.ts +++ b/packages/nodejs/src/bridge-handlers.ts @@ -5,8 +5,9 @@ import * as net from "node:net"; import * as http from "node:http"; +import * as http2 from "node:http2"; import * as tls from "node:tls"; -import { Duplex } from "node:stream"; +import { Duplex, PassThrough } from "node:stream"; import { readFileSync, realpathSync, existsSync } from "node:fs"; import { dirname as pathDirname, join as pathJoin, resolve as pathResolve } from "node:path"; import { createRequire } from "node:module"; @@ -48,6 +49,9 @@ import { } from "./bridge-contract.js"; import { AF_INET, + AF_INET6, + AF_UNIX, + SOCK_DGRAM, SOCK_STREAM, mkdir, FDTableManager, @@ -92,6 +96,13 @@ import type { } from "@secure-exec/core/internal/shared/api-types"; import type { BudgetState } from "./isolate-bootstrap.js"; +const SOL_SOCKET = 1; +const IPPROTO_TCP = 6; +const SO_KEEPALIVE = 9; +const SO_RCVBUF = 8; +const SO_SNDBUF = 7; +const TCP_NODELAY = 1; + /** A bridge handler function invoked when sandbox code calls a bridge global. 
*/ export type BridgeHandler = (...args: unknown[]) => unknown | Promise<unknown>; @@ -1885,11 +1896,391 @@ function buildKernelSocketBridgeHandlers( ): NetSocketBridgeResult { const handlers: BridgeHandlers = {}; const K = HOST_BRIDGE_GLOBAL_KEYS; + const NET_BRIDGE_TIMEOUT_SENTINEL = "__secure_exec_net_timeout__"; // Track active kernel socket IDs for cleanup const activeSocketIds = new Set(); + const activeServerIds = new Set(); + const activeDgramIds = new Set(); // Track TLS-upgraded sockets that bypass kernel recv (host-side TLS) - const tlsSockets = new Map(); + const tlsSockets = new Map(); + const loopbackTlsTransports = new Map(); + const loopbackTlsClientHello = new Map(); + const pendingConnects = new Map<number, Promise<{ ok: true } | { ok: false; error: string }>>(); + + type SerializedNetSocketInfo = { + localAddress: string; + localPort: number; + localFamily: string; + localPath?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + remotePath?: string; + }; + + type SerializedNetConnectOptions = { + host?: string; + port?: number; + path?: string; + }; + + type SerializedNetListenOptions = { + host?: string; + port?: number; + path?: string; + backlog?: number; + readableAll?: boolean; + writableAll?: boolean; + }; + + type SerializedDgramBindOptions = { + port?: number; + address?: string; + }; + + type SerializedDgramSendOptions = { + data: string; + port: number; + address: string; + }; + + type SerializedTlsDataValue = + | { + kind: "buffer"; + data: string; + } + | { + kind: "string"; + data: string; + }; + + type SerializedTlsMaterial = SerializedTlsDataValue | SerializedTlsDataValue[]; + + type SerializedTlsUpgradeOptions = { + isServer?: boolean; + servername?: string; + rejectUnauthorized?: boolean; + requestCert?: boolean; + session?: string; + key?: SerializedTlsMaterial; + cert?: SerializedTlsMaterial; + ca?: SerializedTlsMaterial; + passphrase?: string; + ciphers?: string; + ALPNProtocols?: string[]; + minVersion?: tls.SecureVersion; + maxVersion?: tls.SecureVersion; + };
+ + type SerializedTlsClientHello = { + servername?: string; + ALPNProtocols?: string[]; + }; + + type SerializedTlsBridgeValue = + | null + | boolean + | number + | string + | { + type: "undefined"; + } + | { + type: "buffer"; + data: string; + } + | { + type: "array"; + value: SerializedTlsBridgeValue[]; + } + | { + type: "object"; + id: number; + value: Record<string, SerializedTlsBridgeValue>; + } + | { + type: "ref"; + id: number; + }; + + type KernelSocketLike = NonNullable<ReturnType<typeof socketTable.get>>; + + function addressFamily(host?: string): string { + return host?.includes(":") ? "IPv6" : "IPv4"; + } + + function decodeTlsMaterial( + value: SerializedTlsMaterial | undefined, + ): string | Buffer | Array<string | Buffer> | undefined { + if (value === undefined) { + return undefined; + } + const decodeOne = (entry: SerializedTlsDataValue): string | Buffer => + entry.kind === "buffer" ? Buffer.from(entry.data, "base64") : entry.data; + return Array.isArray(value) ? value.map(decodeOne) : decodeOne(value); + } + + function buildHostTlsOptions( + options: SerializedTlsUpgradeOptions, + ): Record<string, unknown> { + const hostOptions: Record<string, unknown> = {}; + const key = decodeTlsMaterial(options.key); + const cert = decodeTlsMaterial(options.cert); + const ca = decodeTlsMaterial(options.ca); + if (key !== undefined) hostOptions.key = key; + if (cert !== undefined) hostOptions.cert = cert; + if (ca !== undefined) hostOptions.ca = ca; + if (typeof options.passphrase === "string") hostOptions.passphrase = options.passphrase; + if (typeof options.ciphers === "string") hostOptions.ciphers = options.ciphers; + if (typeof options.session === "string") hostOptions.session = Buffer.from(options.session, "base64"); + if (Array.isArray(options.ALPNProtocols) && options.ALPNProtocols.length > 0) { + hostOptions.ALPNProtocols = [...options.ALPNProtocols]; + } + if (typeof options.minVersion === "string") hostOptions.minVersion = options.minVersion; + if (typeof options.maxVersion === "string") hostOptions.maxVersion = options.maxVersion; + if (typeof options.servername === 
"string") hostOptions.servername = options.servername; + if (typeof options.requestCert === "boolean") hostOptions.requestCert = options.requestCert; + return hostOptions; + } + + function getLoopbackTlsKey(socketId: number, peerId: number): string { + return socketId < peerId ? `${socketId}:${peerId}` : `${peerId}:${socketId}`; + } + + function createTlsTransportEndpoint( + readable: PassThrough, + writable: PassThrough, + ): Duplex { + const duplex = new Duplex({ + read() { + let chunk: Buffer | null; + while ((chunk = readable.read() as Buffer | null) !== null) { + if (!this.push(chunk)) { + return; + } + } + }, + write(chunk, _encoding, callback) { + if (!writable.write(chunk)) { + writable.once("drain", callback); + return; + } + callback(); + }, + final(callback) { + writable.end(); + callback(); + }, + destroy(error, callback) { + readable.destroy(error ?? undefined); + writable.destroy(error ?? undefined); + callback(error ?? null); + }, + }); + + readable.on("readable", () => { + let chunk: Buffer | null; + while ((chunk = readable.read() as Buffer | null) !== null) { + if (!duplex.push(chunk)) { + return; + } + } + }); + readable.on("end", () => duplex.push(null)); + readable.on("error", (error) => duplex.destroy(error)); + + return duplex; + } + + function getLoopbackTlsTransport(socket: KernelSocketLike): Duplex { + if (socket.peerId === undefined) { + throw new Error(`Socket ${socket.id} has no loopback peer for TLS upgrade`); + } + const key = getLoopbackTlsKey(socket.id, socket.peerId); + let pair = loopbackTlsTransports.get(key); + if (!pair) { + const aIn = new PassThrough(); + const bIn = new PassThrough(); + pair = { + a: createTlsTransportEndpoint(aIn, bIn), + b: createTlsTransportEndpoint(bIn, aIn), + }; + loopbackTlsTransports.set(key, pair); + } + return socket.id < socket.peerId ? 
pair.a : pair.b; + } + + function cleanupLoopbackTlsTransport(socketId: number, peerId?: number): void { + if (peerId === undefined) { + return; + } + if (tlsSockets.has(socketId) || tlsSockets.has(peerId)) { + return; + } + const key = getLoopbackTlsKey(socketId, peerId); + const pair = loopbackTlsTransports.get(key); + if (!pair) { + return; + } + pair.a.destroy(); + pair.b.destroy(); + loopbackTlsTransports.delete(key); + loopbackTlsClientHello.delete(key); + } + + function serializeTlsState(tlsSocket: tls.TLSSocket): string { + let cipher: Record<string, unknown> | null = null; + try { + const details = tlsSocket.getCipher(); + if (details) { + const standardName = (details as { standardName?: string }).standardName ?? details.name; + cipher = { + name: details.name, + standardName, + version: details.version, + }; + } + } catch { + cipher = null; + } + return JSON.stringify({ + authorized: tlsSocket.authorized === true, + authorizationError: + typeof tlsSocket.authorizationError === "string" + ? tlsSocket.authorizationError + : undefined, + alpnProtocol: tlsSocket.alpnProtocol || false, + servername: (tlsSocket as tls.TLSSocket & { servername?: string }).servername, + protocol: tlsSocket.getProtocol?.() ?? 
null, + sessionReused: tlsSocket.isSessionReused?.() === true, + cipher, + }); + } + + function serializeTlsBridgeValue( + value: unknown, + seen = new Map<object, number>(), + ): SerializedTlsBridgeValue { + if (value === undefined) { + return { type: "undefined" }; + } + if ( + value === null || + typeof value === "boolean" || + typeof value === "number" || + typeof value === "string" + ) { + return value; + } + if (Buffer.isBuffer(value) || value instanceof Uint8Array) { + return { + type: "buffer", + data: Buffer.from(value).toString("base64"), + }; + } + if (Array.isArray(value)) { + return { + type: "array", + value: value.map((entry) => serializeTlsBridgeValue(entry, seen)), + }; + } + if (typeof value === "object") { + const existingId = seen.get(value); + if (existingId !== undefined) { + return { type: "ref", id: existingId }; + } + const id = seen.size + 1; + seen.set(value, id); + const serialized: Record<string, SerializedTlsBridgeValue> = {}; + for (const [key, entry] of Object.entries(value as Record<string, unknown>)) { + serialized[key] = serializeTlsBridgeValue(entry, seen); + } + return { + type: "object", + id, + value: serialized, + }; + } + return String(value); + } + + function serializeTlsError(error: unknown, tlsSocket?: tls.TLSSocket): string { + const err = + error instanceof Error ? error : new Error(typeof error === "string" ? 
error : String(error)); + const payload: Record<string, unknown> = { + message: err.message, + name: err.name, + stack: err.stack, + }; + const code = (err as { code?: unknown }).code; + if (typeof code === "string") { + payload.code = code; + } + if (tlsSocket) { + payload.authorized = tlsSocket.authorized === true; + if (typeof tlsSocket.authorizationError === "string") { + payload.authorizationError = tlsSocket.authorizationError; + } + } + return JSON.stringify(payload); + } + + function serializeSocketInfo(socketId: number): SerializedNetSocketInfo { + const socket = socketTable.get(socketId); + const localAddr = socket?.localAddr; + const remoteAddr = socket?.remoteAddr; + return { + localAddress: + localAddr && typeof localAddr === "object" && "host" in localAddr + ? localAddr.host + : localAddr && typeof localAddr === "object" && "path" in localAddr + ? localAddr.path + : "0.0.0.0", + localPort: + localAddr && typeof localAddr === "object" && "port" in localAddr + ? localAddr.port + : 0, + localFamily: + localAddr && typeof localAddr === "object" && "host" in localAddr + ? addressFamily(localAddr.host) + : localAddr && typeof localAddr === "object" && "path" in localAddr + ? "Unix" + : "IPv4", + ...(localAddr && typeof localAddr === "object" && "path" in localAddr + ? { localPath: localAddr.path } + : {}), + ...(remoteAddr && typeof remoteAddr === "object" && "host" in remoteAddr + ? { + remoteAddress: remoteAddr.host, + remotePort: remoteAddr.port, + remoteFamily: addressFamily(remoteAddr.host), + } + : remoteAddr && typeof remoteAddr === "object" && "path" in remoteAddr + ? 
{ + remoteAddress: remoteAddr.path, + remoteFamily: "Unix", + remotePath: remoteAddr.path, + } + : {}), + }; + } + + function getBackingSocket(socketId: number): net.Socket | undefined { + const tlsSocket = tlsSockets.get(socketId); + if (tlsSocket) { + return tlsSocket; + } + const socket = socketTable.get(socketId); + const hostSocket = socket?.hostSocket as { socket?: net.Socket } | undefined; + return hostSocket?.socket; + } + + function dispatchAsync(socketId: number, event: string, data?: string): void { + setTimeout(() => { + dispatch(socketId, event, data); + }, 0); + } /** Background read pump: polls kernel recv() and dispatches data/end/close. */ function startReadPump(socketId: number): void { @@ -1906,7 +2297,7 @@ function buildKernelSocketBridgeHandlers( } if (data !== null) { - dispatch(socketId, "data", Buffer.from(data).toString("base64")); + dispatchAsync(socketId, "data", Buffer.from(data).toString("base64")); continue; } @@ -1914,16 +2305,16 @@ function buildKernelSocketBridgeHandlers( const socket = socketTable.get(socketId); if (!socket) break; if (socket.state === "closed" || socket.state === "read-closed") { - dispatch(socketId, "end"); + dispatchAsync(socketId, "end"); break; } if (socket.peerWriteClosed || (socket.peerId === undefined && !socket.external)) { - dispatch(socketId, "end"); + dispatchAsync(socketId, "end"); break; } // For external sockets, check hostSocket EOF via readBuffer state if (socket.external && socket.readBuffer.length === 0 && socket.peerWriteClosed) { - dispatch(socketId, "end"); + dispatchAsync(socketId, "end"); break; } @@ -1936,34 +2327,109 @@ function buildKernelSocketBridgeHandlers( } // Dispatch close if socket was active if (activeSocketIds.delete(socketId)) { - dispatch(socketId, "close"); + dispatchAsync(socketId, "close"); } }; pump(); } // Connect — create kernel socket and start async connect + read pump - handlers[K.netSocketConnectRaw] = (host: unknown, port: unknown) => { - const socketId = 
socketTable.create(AF_INET, SOCK_STREAM, 0, pid); + handlers[K.netSocketConnectRaw] = (optionsJson: unknown) => { + const options = parseJsonWithLimit( + "net.socket.connect options", + String(optionsJson), + 128 * 1024, + ); + const isUnixPath = typeof options.path === "string" && options.path.length > 0; + const host = String(options.host ?? "127.0.0.1"); + const port = Number(options.port ?? 0); + const socketId = socketTable.create( + isUnixPath ? AF_UNIX : host.includes(":") ? AF_INET6 : AF_INET, + SOCK_STREAM, + 0, + pid, + ); activeSocketIds.add(socketId); - // Async connect — dispatch 'connect' on success, 'error' on failure - socketTable.connect(socketId, { host: String(host), port: Number(port) }) - .then(() => { - if (!activeSocketIds.has(socketId)) return; - dispatch(socketId, "connect"); - startReadPump(socketId); - }) - .catch((err: Error) => { - if (!activeSocketIds.has(socketId)) return; - dispatch(socketId, "error", err.message); - activeSocketIds.delete(socketId); - dispatch(socketId, "close"); - }); + // Async connect completion is polled from the isolate via waitConnectRaw. + pendingConnects.set( + socketId, + socketTable.connect( + socketId, + isUnixPath ? { path: options.path! } : { host, port }, + ).then( + () => ({ ok: true } as const), + (error) => ({ + ok: false as const, + error: error instanceof Error ? 
error.message : String(error), + }), + ), + ); return socketId; + }; + handlers[K.netSocketWaitConnectRaw] = async (socketId: unknown): Promise<string> => { + const id = Number(socketId); + const pending = pendingConnects.get(id); + try { + if (pending) { + const result = await pending; + if (!result.ok) { + throw new Error(result.error); + } + } + return JSON.stringify(serializeSocketInfo(id)); + } finally { + pendingConnects.delete(id); + } + }; + + handlers[K.netSocketReadRaw] = (socketId: unknown): string | null => { + const id = Number(socketId); + if (!activeSocketIds.has(id)) { + return null; + } + try { + const chunk = socketTable.recv(id, 65536, 0); + if (chunk !== null) { + return Buffer.from(chunk).toString("base64"); + } + const socket = socketTable.get(id); + if ( + !socket || + socket.state === "closed" || + socket.state === "read-closed" || + socket.peerWriteClosed + ) { + return null; + } + return NET_BRIDGE_TIMEOUT_SENTINEL; + } catch (error) { + if (error instanceof Error && error.message.includes("EAGAIN")) { + return NET_BRIDGE_TIMEOUT_SENTINEL; + } + return null; + } + }; + + handlers[K.netSocketSetNoDelayRaw] = (socketId: unknown, enable: unknown) => { + const id = Number(socketId); + socketTable.setsockopt(id, IPPROTO_TCP, TCP_NODELAY, enable ? 1 : 0); + getBackingSocket(id)?.setNoDelay(Boolean(enable)); + }; + + handlers[K.netSocketSetKeepAliveRaw] = ( + socketId: unknown, + enable: unknown, + initialDelaySeconds: unknown, + ) => { + const id = Number(socketId); + const delaySeconds = Math.max(0, Number(initialDelaySeconds) || 0); + socketTable.setsockopt(id, SOL_SOCKET, SO_KEEPALIVE, enable ? 
1 : 0); + getBackingSocket(id)?.setKeepAlive(Boolean(enable), delaySeconds * 1000); + }; + // Write — send data through kernel socket handlers[K.netSocketWriteRaw] = ( socketId: unknown, @@ -1998,11 +2464,14 @@ function buildKernelSocketBridgeHandlers( // Destroy — close kernel socket handlers[K.netSocketDestroyRaw] = (socketId: unknown) => { const id = Number(socketId); + const socket = socketTable.get(id); const tlsSocket = tlsSockets.get(id); if (tlsSocket) { tlsSocket.destroy(); tlsSockets.delete(id); } + cleanupLoopbackTlsTransport(id, socket?.peerId); + socketTable.get(id)?.readWaiters.wakeAll(); if (activeSocketIds.has(id)) { activeSocketIds.delete(id); try { @@ -2023,50 +2492,335 @@ function buildKernelSocketBridgeHandlers( const socket = socketTable.get(id); if (!socket) throw new Error(`Socket ${id} not found for TLS upgrade`); - // TLS only works for external sockets with a real host socket - if (!socket.external || !socket.hostSocket) { - throw new Error(`Socket ${id} cannot be TLS-upgraded (loopback socket)`); + const options = optionsJson + ? parseJsonWithLimit( + "net.socket.upgradeTls options", + String(optionsJson), + 256 * 1024, + ) + : {}; + const hostTlsOptions = buildHostTlsOptions(options); + const peerId = socket.peerId; + const loopbackTlsKey = peerId === undefined ? undefined : getLoopbackTlsKey(id, peerId); + + if (!options.isServer && loopbackTlsKey) { + loopbackTlsClientHello.set(loopbackTlsKey, { + servername: options.servername, + ALPNProtocols: options.ALPNProtocols, + }); } - const options = optionsJson ? 
JSON.parse(String(optionsJson)) : {}; - - // Access the underlying net.Socket from the host adapter - const hostSocket = socket.hostSocket as unknown as { socket?: net.Socket }; - const realSocket = (hostSocket as any).socket as net.Socket | undefined; - if (!realSocket) { - throw new Error(`Socket ${id} has no underlying TCP socket for TLS upgrade`); + let transport: net.Socket | Duplex; + if (socket.external && socket.hostSocket) { + const hostSocket = socket.hostSocket as unknown as { socket?: net.Socket }; + const realSocket = hostSocket.socket; + if (!realSocket) { + throw new Error(`Socket ${id} has no underlying TCP socket for TLS upgrade`); + } + socket.hostSocket = undefined; + transport = realSocket; + } else { + transport = getLoopbackTlsTransport(socket); } - // Detach the kernel read pump by clearing the host socket ref - socket.hostSocket = undefined; - - const tlsSocket = tls.connect({ - socket: realSocket, - rejectUnauthorized: options.rejectUnauthorized ?? false, - servername: options.servername, - ...( options.minVersion ? { minVersion: options.minVersion } : {}), - ...( options.maxVersion ? { maxVersion: options.maxVersion } : {}), - }); + const tlsSocket = options.isServer + ? 
new tls.TLSSocket(transport, { + isServer: true, + secureContext: tls.createSecureContext(hostTlsOptions), + requestCert: options.requestCert === true, + rejectUnauthorized: options.rejectUnauthorized === true, + }) + : tls.connect({ + socket: transport, + ...hostTlsOptions, + rejectUnauthorized: options.rejectUnauthorized !== false, + }); // Track TLS socket for write/end/destroy bypass - tlsSockets.set(id, tlsSocket as unknown as net.Socket); + tlsSockets.set(id, tlsSocket); - tlsSocket.on("secureConnect", () => dispatch(id, "secureConnect")); + tlsSocket.on("secureConnect", () => + dispatchAsync(id, "secureConnect", serializeTlsState(tlsSocket)), + ); + tlsSocket.on("secure", () => + dispatchAsync(id, "secure", serializeTlsState(tlsSocket)), + ); + tlsSocket.on("session", (session: Buffer) => + dispatchAsync(id, "session", session.toString("base64")), + ); tlsSocket.on("data", (chunk: Buffer) => - dispatch(id, "data", chunk.toString("base64")), + dispatchAsync(id, "data", chunk.toString("base64")), ); - tlsSocket.on("end", () => dispatch(id, "end")); + tlsSocket.on("end", () => dispatchAsync(id, "end")); tlsSocket.on("error", (err: Error) => - dispatch(id, "error", err.message), + dispatchAsync(id, "error", serializeTlsError(err, tlsSocket)), ); tlsSocket.on("close", () => { tlsSockets.delete(id); activeSocketIds.delete(id); - dispatch(id, "close"); + cleanupLoopbackTlsTransport(id, peerId); + dispatchAsync(id, "close"); + }); + }; + + handlers[K.netSocketGetTlsClientHelloRaw] = (socketId: unknown): string => { + const id = Number(socketId); + const socket = socketTable.get(id); + if (!socket || socket.peerId === undefined) { + return "{}"; + } + const entry = loopbackTlsClientHello.get(getLoopbackTlsKey(id, socket.peerId)); + return JSON.stringify(entry ?? 
{}); + }; + + handlers[K.netSocketTlsQueryRaw] = ( + socketId: unknown, + query: unknown, + detailed?: unknown, + ): string => { + const tlsSocket = tlsSockets.get(Number(socketId)) as tls.TLSSocket | undefined; + if (!tlsSocket) { + return JSON.stringify({ type: "undefined" }); + } + let result: unknown; + switch (String(query)) { + case "getSession": + result = tlsSocket.getSession(); + break; + case "isSessionReused": + result = tlsSocket.isSessionReused(); + break; + case "getPeerCertificate": + result = tlsSocket.getPeerCertificate(Boolean(detailed)); + break; + case "getCertificate": + result = tlsSocket.getCertificate(); + break; + case "getProtocol": + result = tlsSocket.getProtocol(); + break; + case "getCipher": + result = tlsSocket.getCipher(); + break; + default: + result = undefined; + break; + } + return JSON.stringify(serializeTlsBridgeValue(result)); + }; + + handlers[K.tlsGetCiphersRaw] = (): string => JSON.stringify(tls.getCiphers()); + + handlers[K.netServerListenRaw] = async (optionsJson: unknown): Promise => { + const options = parseJsonWithLimit( + "net.server.listen options", + String(optionsJson), + 128 * 1024, + ); + const isUnixPath = typeof options.path === "string" && options.path.length > 0; + const host = String(options.host ?? "127.0.0.1"); + const serverId = socketTable.create( + isUnixPath ? AF_UNIX : host.includes(":") ? AF_INET6 : AF_INET, + SOCK_STREAM, + 0, + pid, + ); + activeServerIds.add(serverId); + const socketMode = + options.readableAll || options.writableAll + ? 0o600 | + (options.readableAll ? 0o044 : 0) | + (options.writableAll ? 0o022 : 0) + : undefined; + await socketTable.bind( + serverId, + isUnixPath + ? { path: options.path! } + : { + host, + port: Number(options.port ?? 0), + }, + socketMode === undefined ? undefined : { mode: socketMode }, + ); + await socketTable.listen(serverId, Number(options.backlog ?? 
511)); + return JSON.stringify({ + serverId, + address: serializeSocketInfo(serverId), + }); + }; + + handlers[K.netServerAcceptRaw] = (serverId: unknown): string | null => { + const id = Number(serverId); + if (!activeServerIds.has(id)) { + return null; + } + const listener = socketTable.get(id); + if (!listener || listener.state !== "listening") { + return null; + } + const acceptedId = socketTable.accept(id); + if (acceptedId === null) { + return NET_BRIDGE_TIMEOUT_SENTINEL; + } + activeSocketIds.add(acceptedId); + return JSON.stringify({ + socketId: acceptedId, + info: serializeSocketInfo(acceptedId), + }); + }; + + handlers[K.netServerCloseRaw] = async (serverId: unknown): Promise => { + const id = Number(serverId); + activeServerIds.delete(id); + socketTable.get(id)?.acceptWaiters.wakeAll(); + try { + socketTable.close(id, pid); + } catch { + // Already closed + } + }; + + handlers[K.dgramSocketCreateRaw] = (type: unknown): number => { + const socketType = String(type); + const domain = socketType === "udp6" ? AF_INET6 : AF_INET; + const socketId = socketTable.create(domain, SOCK_DGRAM, 0, pid); + activeDgramIds.add(socketId); + return socketId; + }; + + handlers[K.dgramSocketBindRaw] = async ( + socketId: unknown, + optionsJson: unknown, + ): Promise => { + const id = Number(socketId); + const socket = socketTable.get(id); + if (!socket) { + throw new Error(`UDP socket ${id} not found`); + } + const options = parseJsonWithLimit( + "dgram.socket.bind options", + String(optionsJson), + 128 * 1024, + ); + const host = String( + options.address ?? + (socket.domain === AF_INET6 ? "::" : "0.0.0.0"), + ); + await socketTable.bind(id, { + host, + port: Number(options.port ?? 
0), + }); + return JSON.stringify(serializeSocketInfo(id)); + }; + + handlers[K.dgramSocketRecvRaw] = (socketId: unknown): string | null => { + const id = Number(socketId); + if (!activeDgramIds.has(id)) { + return null; + } + try { + const socket = socketTable.get(id); + if (!socket || socket.state === "closed") { + return null; + } + const message = socketTable.recvFrom(id, 65535, 0); + if (message === null) { + return NET_BRIDGE_TIMEOUT_SENTINEL; + } + return JSON.stringify({ + data: Buffer.from(message.data).toString("base64"), + rinfo: + "path" in message.srcAddr + ? { + address: message.srcAddr.path, + family: "unix", + port: 0, + size: message.data.length, + } + : { + address: message.srcAddr.host, + family: addressFamily(message.srcAddr.host), + port: message.srcAddr.port, + size: message.data.length, + }, + }); + } catch (error) { + if (error instanceof Error && error.message.includes("EAGAIN")) { + return NET_BRIDGE_TIMEOUT_SENTINEL; + } + return null; + } + }; + + handlers[K.dgramSocketSendRaw] = async ( + socketId: unknown, + optionsJson: unknown, + ): Promise => { + const id = Number(socketId); + const options = parseJsonWithLimit( + "dgram.socket.send options", + String(optionsJson), + 256 * 1024, + ); + const data = Buffer.from(options.data, "base64"); + return socketTable.sendTo( + id, + new Uint8Array(data), + 0, + { host: String(options.address), port: Number(options.port) }, + ); + }; + + handlers[K.dgramSocketCloseRaw] = async (socketId: unknown): Promise => { + const id = Number(socketId); + activeDgramIds.delete(id); + socketTable.get(id)?.readWaiters.wakeAll(); + try { + socketTable.close(id, pid); + } catch { + // Already closed + } + }; + + handlers[K.dgramSocketAddressRaw] = (socketId: unknown): string => { + const id = Number(socketId); + const socket = socketTable.get(id); + if (!socket?.localAddr || "path" in socket.localAddr) { + throw new Error("getsockname EBADF"); + } + return JSON.stringify({ + address: socket.localAddr.host, + 
family: addressFamily(socket.localAddr.host), + port: socket.localAddr.port, }); }; + handlers[K.dgramSocketSetBufferSizeRaw] = ( + socketId: unknown, + which: unknown, + size: unknown, + ): void => { + const optname = which === "send" ? SO_SNDBUF : SO_RCVBUF; + socketTable.setsockopt(Number(socketId), SOL_SOCKET, optname, Number(size)); + }; + + handlers[K.dgramSocketGetBufferSizeRaw] = ( + socketId: unknown, + which: unknown, + ): number => { + const optname = which === "send" ? SO_SNDBUF : SO_RCVBUF; + return socketTable.getsockopt(Number(socketId), SOL_SOCKET, optname) ?? 0; + }; + const dispose = () => { + for (const id of activeServerIds) { + try { socketTable.close(id, pid); } catch { /* best effort */ } + } + activeServerIds.clear(); + for (const id of activeDgramIds) { + try { socketTable.close(id, pid); } catch { /* best effort */ } + } + activeDgramIds.clear(); for (const id of activeSocketIds) { try { socketTable.close(id, pid); } catch { /* best effort */ } } @@ -2075,6 +2829,12 @@ function buildKernelSocketBridgeHandlers( socket.destroy(); } tlsSockets.clear(); + for (const pair of loopbackTlsTransports.values()) { + pair.a.destroy(); + pair.b.destroy(); + } + loopbackTlsTransports.clear(); + loopbackTlsClientHello.clear(); }; return { handlers, dispose }; @@ -2759,7 +3519,7 @@ export function buildFsBridgeHandlers(deps: FsBridgeDeps): BridgeHandlers { handlers[K.fsReadFile] = async (path: unknown) => { checkBridgeBudget(deps); - const text = await fs.readTextFile(String(path)); + const text = await readStandaloneProcAwareTextFile(fs, String(path)); assertTextPayloadSize(`fs.readFile ${path}`, text, jsonLimit); return text; }; @@ -2771,7 +3531,7 @@ export function buildFsBridgeHandlers(deps: FsBridgeDeps): BridgeHandlers { handlers[K.fsReadFileBinary] = async (path: unknown) => { checkBridgeBudget(deps); - const data = await fs.readFile(String(path)); + const data = await readStandaloneProcAwareFile(fs, String(path)); 
assertPayloadByteLength(`fs.readFileBinary ${path}`, getBase64EncodedByteLength(data.byteLength), base64Limit); return Buffer.from(data).toString("base64"); }; @@ -2805,12 +3565,12 @@ export function buildFsBridgeHandlers(deps: FsBridgeDeps): BridgeHandlers { handlers[K.fsExists] = async (path: unknown) => { checkBridgeBudget(deps); - return fs.exists(String(path)); + return standaloneProcAwareExists(fs, String(path)); }; handlers[K.fsStat] = async (path: unknown) => { checkBridgeBudget(deps); - const s = await fs.stat(String(path)); + const s = await standaloneProcAwareStat(fs, String(path)); return JSON.stringify({ mode: s.mode, size: s.size, isDirectory: s.isDirectory, atimeMs: s.atimeMs, mtimeMs: s.mtimeMs, ctimeMs: s.ctimeMs, birthtimeMs: s.birthtimeMs }); }; @@ -3161,10 +3921,25 @@ function createKernelSocketDuplex( }); // Socket-like properties for Node http module - (duplex as any).remoteAddress = "127.0.0.1"; - (duplex as any).remotePort = 0; - (duplex as any).localAddress = "127.0.0.1"; - (duplex as any).localPort = 0; + const socket = socketTable.get(socketId); + const localAddr = socket?.localAddr; + const remoteAddr = socket?.remoteAddr; + (duplex as any).remoteAddress = + remoteAddr && typeof remoteAddr === "object" && "host" in remoteAddr + ? remoteAddr.host + : "127.0.0.1"; + (duplex as any).remotePort = + remoteAddr && typeof remoteAddr === "object" && "port" in remoteAddr + ? remoteAddr.port + : 0; + (duplex as any).localAddress = + localAddr && typeof localAddr === "object" && "host" in localAddr + ? localAddr.host + : "127.0.0.1"; + (duplex as any).localPort = + localAddr && typeof localAddr === "object" && "port" in localAddr + ? 
localAddr.port + : 0; (duplex as any).encrypted = false; (duplex as any).setNoDelay = () => duplex; (duplex as any).setKeepAlive = () => duplex; @@ -3226,24 +4001,222 @@ export function buildNetworkBridgeHandlers(deps: NetworkBridgeDeps): NetworkBrid const adapter = deps.networkAdapter; const jsonLimit = deps.isolateJsonPayloadLimitBytes; const ownedHttpServers = new Set(); + const ownedHttp2Servers = new Set(); const { socketTable, pid } = deps; // Track kernel HTTP servers for cleanup const kernelHttpServers = new Map(); + type KernelHttp2ServerState = { + listenSocketId: number; + server: http2.Http2Server | http2.Http2SecureServer; + sessions: Set; + acceptLoopActive: boolean; + closedPromise: Promise; + resolveClosed: () => void; + }; + type SerializedHttp2SocketState = { + encrypted?: boolean; + allowHalfOpen?: boolean; + localAddress?: string; + localPort?: number; + localFamily?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + servername?: string; + alpnProtocol?: string | false; + }; + type SerializedHttp2SessionState = { + encrypted?: boolean; + alpnProtocol?: string | false; + originSet?: string[]; + localSettings?: Record>; + remoteSettings?: Record>; + state?: { + effectiveLocalWindowSize?: number; + localWindowSize?: number; + remoteWindowSize?: number; + nextStreamID?: number; + outboundQueueSize?: number; + deflateDynamicTableSize?: number; + inflateDynamicTableSize?: number; + }; + socket?: SerializedHttp2SocketState; + }; + type SerializedTlsDataValue = + | { kind: "buffer"; data: string } + | { kind: "string"; data: string }; + type SerializedTlsMaterial = SerializedTlsDataValue | SerializedTlsDataValue[]; + type SerializedTlsBridgeOptions = { + isServer?: boolean; + servername?: string; + rejectUnauthorized?: boolean; + requestCert?: boolean; + session?: string; + key?: SerializedTlsMaterial; + cert?: SerializedTlsMaterial; + ca?: SerializedTlsMaterial; + passphrase?: string; + ciphers?: string; + 
ALPNProtocols?: string[]; + minVersion?: tls.SecureVersion; + maxVersion?: tls.SecureVersion; + }; + const kernelHttp2Servers = new Map(); + type KernelHttp2ClientSessionState = { + session: http2.ClientHttp2Session; + closedPromise: Promise; + resolveClosed: () => void; + }; + const kernelHttp2ClientSessions = new Map(); + const http2Sessions = new Map(); + const http2Streams = new Map(); + const http2ServerSessionIds = new WeakMap(); + let nextHttp2SessionId = 1; + let nextHttp2StreamId = 1; const kernelUpgradeSockets = new Map(); let nextKernelUpgradeSocketId = 1; const loopbackAwareAdapter = adapter as NetworkAdapter & { __setLoopbackPortChecker?: (checker: (hostname: string, port: number) => boolean) => void; }; - // Let host-side runtime.network.fetch/httpRequest reach only the HTTP - // listeners owned by this execution. - loopbackAwareAdapter.__setLoopbackPortChecker?.((_hostname, port) => { - for (const state of kernelHttpServers.values()) { - const socket = socketTable.get(state.listenSocketId); - const localAddr = socket?.localAddr; - if (localAddr && typeof localAddr === "object" && "port" in localAddr) { - if (localAddr.port === port) { + const decodeTlsMaterial = ( + value: SerializedTlsMaterial | undefined, + ): string | Buffer | Array | undefined => { + if (value === undefined) { + return undefined; + } + const decodeOne = (entry: SerializedTlsDataValue): string | Buffer => + entry.kind === "buffer" ? Buffer.from(entry.data, "base64") : entry.data; + return Array.isArray(value) ? 
value.map(decodeOne) : decodeOne(value); + }; + + const buildHostTlsOptions = ( + options: SerializedTlsBridgeOptions | undefined, + ): Record => { + if (!options) { + return {}; + } + const hostOptions: Record = {}; + const key = decodeTlsMaterial(options.key); + const cert = decodeTlsMaterial(options.cert); + const ca = decodeTlsMaterial(options.ca); + if (key !== undefined) hostOptions.key = key; + if (cert !== undefined) hostOptions.cert = cert; + if (ca !== undefined) hostOptions.ca = ca; + if (typeof options.passphrase === "string") hostOptions.passphrase = options.passphrase; + if (typeof options.ciphers === "string") hostOptions.ciphers = options.ciphers; + if (typeof options.session === "string") hostOptions.session = Buffer.from(options.session, "base64"); + if (Array.isArray(options.ALPNProtocols) && options.ALPNProtocols.length > 0) { + hostOptions.ALPNProtocols = [...options.ALPNProtocols]; + } + if (typeof options.minVersion === "string") hostOptions.minVersion = options.minVersion; + if (typeof options.maxVersion === "string") hostOptions.maxVersion = options.maxVersion; + if (typeof options.servername === "string") hostOptions.servername = options.servername; + if (typeof options.requestCert === "boolean") hostOptions.requestCert = options.requestCert; + if (typeof options.rejectUnauthorized === "boolean") { + hostOptions.rejectUnauthorized = options.rejectUnauthorized; + } + return hostOptions; + }; + + const debugHttp2Bridge = (...args: unknown[]): void => { + if (process.env.SECURE_EXEC_DEBUG_HTTP2_BRIDGE === "1") { + console.error("[secure-exec http2 bridge]", ...args); + } + }; + + const emitHttp2Event = (...fields: Array): void => { + const [kind, id, data, extra, extraNumber, extraHeaders, flags] = fields; + debugHttp2Bridge("emit", kind, id); + deps.sendStreamEvent("http2", Buffer.from(JSON.stringify({ + kind, + id, + data, + extra, + extraNumber, + extraHeaders, + flags, + }))); + }; + + const serializeHttp2SocketState = ( + socket: Pick & 
+ Partial, + ): string => JSON.stringify({ + encrypted: socket.encrypted === true, + allowHalfOpen: socket.allowHalfOpen === true, + localAddress: socket.localAddress, + localPort: socket.localPort, + localFamily: socket.localAddress?.includes(":") ? "IPv6" : "IPv4", + remoteAddress: socket.remoteAddress, + remotePort: socket.remotePort, + remoteFamily: socket.remoteAddress?.includes(":") ? "IPv6" : "IPv4", + servername: + typeof (socket as tls.TLSSocket & { servername?: string }).servername === "string" + ? (socket as tls.TLSSocket & { servername?: string }).servername + : undefined, + alpnProtocol: socket.alpnProtocol || false, + } satisfies SerializedHttp2SocketState); + + const serializeHttp2SessionState = ( + session: http2.ClientHttp2Session | http2.ServerHttp2Session, + ): string => JSON.stringify({ + encrypted: session.encrypted === true, + alpnProtocol: session.alpnProtocol || (session.encrypted ? "h2" : "h2c"), + originSet: Array.isArray(session.originSet) ? [...session.originSet] : undefined, + localSettings: + session.localSettings && typeof session.localSettings === "object" + ? session.localSettings as Record> + : undefined, + remoteSettings: + session.remoteSettings && typeof session.remoteSettings === "object" + ? session.remoteSettings as Record> + : undefined, + state: + session.state && typeof session.state === "object" + ? { + effectiveLocalWindowSize: + typeof session.state.effectiveLocalWindowSize === "number" + ? session.state.effectiveLocalWindowSize + : undefined, + localWindowSize: + typeof session.state.localWindowSize === "number" + ? session.state.localWindowSize + : undefined, + remoteWindowSize: + typeof session.state.remoteWindowSize === "number" + ? session.state.remoteWindowSize + : undefined, + nextStreamID: + typeof session.state.nextStreamID === "number" + ? session.state.nextStreamID + : undefined, + outboundQueueSize: + typeof session.state.outboundQueueSize === "number" + ? 
session.state.outboundQueueSize + : undefined, + deflateDynamicTableSize: + typeof session.state.deflateDynamicTableSize === "number" + ? session.state.deflateDynamicTableSize + : undefined, + inflateDynamicTableSize: + typeof session.state.inflateDynamicTableSize === "number" + ? session.state.inflateDynamicTableSize + : undefined, + } + : undefined, + socket: session.socket ? JSON.parse(serializeHttp2SocketState(session.socket as net.Socket & tls.TLSSocket)) : undefined, + } satisfies SerializedHttp2SessionState); + + // Let host-side runtime.network.fetch/httpRequest reach only the HTTP + // listeners owned by this execution. + loopbackAwareAdapter.__setLoopbackPortChecker?.((_hostname, port) => { + for (const state of kernelHttpServers.values()) { + const socket = socketTable.get(state.listenSocketId); + const localAddr = socket?.localAddr; + if (localAddr && typeof localAddr === "object" && "port" in localAddr) { + if (localAddr.port === port) { return true; } } @@ -3423,10 +4396,36 @@ export function buildNetworkBridgeHandlers(deps: NetworkBridgeDeps): NetworkBrid const response = parseJsonWithLimit<{ status: number; headers?: Array<[string, string]>; + rawHeaders?: string[]; + informational?: Array<{ + status: number; + statusText?: string; + headers?: Array<[string, string]>; + rawHeaders?: string[]; + }>; body?: string; bodyEncoding?: "utf8" | "base64"; }>("network.httpServer response", responseJson, jsonLimit); + for (const informational of response.informational || []) { + const rawHeaderLines = informational.rawHeaders && informational.rawHeaders.length > 0 + ? informational.rawHeaders + : (informational.headers || []).flatMap(([key, value]) => [key, value]); + const statusText = + informational.statusText || + http.STATUS_CODES[informational.status] || + ""; + const rawFrame = + `HTTP/1.1 ${informational.status} ${statusText}\r\n` + + rawHeaderLines.reduce((acc, entry, index) => + index % 2 === 0 + ? `${acc}${entry}: ${rawHeaderLines[index + 1] ?? 
""}\r\n` + : acc, + "") + + "\r\n"; + (res as http.ServerResponse & { _writeRaw?: (chunk: string) => void })._writeRaw?.(rawFrame); + } + res.statusCode = response.status || 200; for (const [key, value] of response.headers || []) { res.setHeader(key, value); @@ -3472,6 +4471,26 @@ export function buildNetworkBridgeHandlers(deps: NetworkBridgeDeps): NetworkBrid }))); }); + httpServer.on("connect", (req, socket, head) => { + const connectHeaders: Record = {}; + Object.entries(req.headers).forEach(([key, value]) => { + if (typeof value === "string") connectHeaders[key] = value; + else if (Array.isArray(value)) connectHeaders[key] = value[0] ?? ""; + }); + const connectSocketId = registerKernelUpgradeSocket(socket as Duplex); + deps.sendStreamEvent("httpServerConnect", Buffer.from(JSON.stringify({ + serverId: options.serverId, + request: JSON.stringify({ + method: req.method || "CONNECT", + url: req.url || "/", + headers: connectHeaders, + rawHeaders: req.rawHeaders || [], + }), + head: head.toString("base64"), + socketId: connectSocketId, + }))); + }); + let resolveClosed!: () => void; const closedPromise = new Promise((resolve) => { resolveClosed = resolve; @@ -3520,6 +4539,716 @@ export function buildNetworkBridgeHandlers(deps: NetworkBridgeDeps): NetworkBrid return closeKernelServer(id); }; + const closeKernelHttp2Server = async (serverId: number): Promise => { + const state = kernelHttp2Servers.get(serverId); + if (!state) { + return; + } + state.acceptLoopActive = false; + try { + socketTable.close(state.listenSocketId, pid); + } catch { + // Listener already closed. + } + for (const session of [...state.sessions]) { + try { + session.close(); + } catch { + // Ignore already-closing sessions. 
+ } + } + await new Promise((resolve) => { + try { + state.server.close(() => resolve()); + } catch { + resolve(); + } + }); + kernelHttp2Servers.delete(serverId); + ownedHttp2Servers.delete(serverId); + deps.activeHttpServerIds.delete(serverId); + deps.activeHttpServerClosers.delete(serverId); + state.resolveClosed(); + }; + + const startKernelHttp2AcceptLoop = async ( + state: KernelHttp2ServerState, + ): Promise => { + try { + while (state.acceptLoopActive) { + const listenSocket = socketTable.get(state.listenSocketId); + if (!listenSocket || listenSocket.state !== "listening") { + break; + } + + const acceptedId = socketTable.accept(state.listenSocketId); + if (acceptedId !== null) { + const duplex = createKernelSocketDuplex(acceptedId, socketTable, pid); + state.server.emit("connection", duplex); + continue; + } + + const handle = listenSocket.acceptWaiters.enqueue(); + const acceptedAfterEnqueue = socketTable.accept(state.listenSocketId); + if (acceptedAfterEnqueue !== null) { + handle.wake(); + const duplex = createKernelSocketDuplex(acceptedAfterEnqueue, socketTable, pid); + state.server.emit("connection", duplex); + continue; + } + + await handle.wait(); + } + } catch { + // Listener closed. + } + }; + + const normalizeHttp2EventHeaders = ( + headers: http2.IncomingHttpHeaders | http2.OutgoingHttpHeaders, + ): Record => { + const normalizedHeaders: Record = {}; + for (const [key, value] of Object.entries(headers)) { + if (value !== undefined) { + normalizedHeaders[key] = value as string | string[] | number; + } + } + return normalizedHeaders; + }; + + const emitHttp2SerializedError = (kind: string, id: number, error: unknown): void => { + const err = error instanceof Error ? 
error : new Error(String(error)); + emitHttp2Event(kind, id, JSON.stringify({ + message: err.message, + name: err.name, + code: (err as { code?: unknown }).code, + })); + }; + + const attachHttp2ClientStreamListeners = ( + streamId: number, + stream: http2.ClientHttp2Stream, + ): void => { + stream.on("response", (headers) => { + emitHttp2Event( + "clientResponseHeaders", + streamId, + JSON.stringify(normalizeHttp2EventHeaders(headers)), + ); + }); + stream.on("push", (headers, flags) => { + setImmediate(() => { + emitHttp2Event( + "clientPushHeaders", + streamId, + JSON.stringify(normalizeHttp2EventHeaders(headers)), + undefined, + String(flags ?? 0), + ); + }); + }); + stream.on("data", (chunk) => { + emitHttp2Event( + "clientData", + streamId, + (Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)).toString("base64"), + ); + }); + stream.on("end", () => { + debugHttp2Bridge("client response end", streamId); + setImmediate(() => { + emitHttp2Event("clientEnd", streamId); + }); + }); + stream.on("close", () => { + setImmediate(() => { + emitHttp2Event("clientClose", streamId, undefined, undefined, String(stream.rstCode ?? 0)); + http2Streams.delete(streamId); + }); + }); + stream.on("error", (error) => { + emitHttp2SerializedError("clientError", streamId, error); + }); + stream.resume(); + }; + + const attachHttp2SessionListeners = ( + sessionId: number, + session: http2.ClientHttp2Session | http2.ServerHttp2Session, + onClose?: () => void, + ): void => { + session.on("close", () => { + debugHttp2Bridge("session close", sessionId); + emitHttp2Event("sessionClose", sessionId); + http2Sessions.delete(sessionId); + onClose?.(); + }); + session.on("error", (error) => { + debugHttp2Bridge("session error", sessionId, error instanceof Error ? 
error.message : String(error)); + emitHttp2SerializedError("sessionError", sessionId, error); + }); + session.on("localSettings", (settings) => { + emitHttp2Event("sessionLocalSettings", sessionId, JSON.stringify(settings)); + }); + session.on("remoteSettings", (settings) => { + emitHttp2Event("sessionRemoteSettings", sessionId, JSON.stringify(settings)); + }); + session.on("goaway", (errorCode, lastStreamID, opaqueData) => { + emitHttp2Event( + "sessionGoaway", + sessionId, + Buffer.isBuffer(opaqueData) ? opaqueData.toString("base64") : undefined, + undefined, + String(errorCode), + undefined, + String(lastStreamID), + ); + }); + }; + + handlers[K.networkHttp2ServerListenRaw] = (optionsJson: unknown): Promise => { + const options = parseJsonWithLimit<{ + serverId: number; + secure?: boolean; + port?: number; + host?: string; + backlog?: number; + allowHalfOpen?: boolean; + allowHTTP1?: boolean; + timeout?: number; + settings?: Record; + remoteCustomSettings?: number[]; + tls?: SerializedTlsBridgeOptions; + }>("network.http2Server.listen options", String(optionsJson), jsonLimit); + + return (async () => { + debugHttp2Bridge("server listen start", options.serverId, options.secure, options.host, options.port); + const host = normalizeLoopbackHostname(options.host); + const listenSocketId = socketTable.create(AF_INET, SOCK_STREAM, 0, pid); + await socketTable.bind(listenSocketId, { host, port: options.port ?? 0 }); + await socketTable.listen(listenSocketId, options.backlog ?? 128, { external: true }); + + const listenSocket = socketTable.get(listenSocketId); + const addr = listenSocket?.localAddr as { host: string; port: number } | undefined; + const address = addr ? { + address: addr.host, + family: addr.host.includes(":") ? "IPv6" : "IPv4", + port: addr.port, + } : null; + + const server = options.secure + ? 
http2.createSecureServer({ + allowHTTP1: options.allowHTTP1 === true, + settings: options.settings as http2.Settings, + remoteCustomSettings: options.remoteCustomSettings, + ...buildHostTlsOptions(options.tls), + } as http2.SecureServerOptions) + : http2.createServer({ + allowHTTP1: options.allowHTTP1 === true, + settings: options.settings as http2.Settings, + remoteCustomSettings: options.remoteCustomSettings, + } as http2.ServerOptions); + + if (typeof options.timeout === "number" && options.timeout > 0) { + server.setTimeout(options.timeout); + } + + server.on("timeout", () => { + emitHttp2Event("serverTimeout", options.serverId); + }); + server.on("connection", (socket) => { + emitHttp2Event("serverConnection", options.serverId, serializeHttp2SocketState(socket)); + }); + if (options.secure) { + server.on("secureConnection", (socket) => { + emitHttp2Event("serverSecureConnection", options.serverId, serializeHttp2SocketState(socket)); + }); + } + server.on("request", (req, res) => { + if (req.httpVersionMajor === 2) { + return; + } + void (async () => { + const chunks: Buffer[] = []; + for await (const chunk of req) { + chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)); + } + + const headers: Record = {}; + Object.entries(req.headers).forEach(([key, value]) => { + if (typeof value === "string") headers[key] = value; + else if (Array.isArray(value)) headers[key] = value[0] ?? ""; + }); + + const requestJson = JSON.stringify({ + method: req.method || "GET", + url: req.url || "/", + headers, + rawHeaders: req.rawHeaders || [], + bodyBase64: chunks.length > 0 ? 
Buffer.concat(chunks).toString("base64") : undefined, + }); + const requestId = nextHttp2CompatRequestId++; + const responsePromise = new Promise((resolve) => { + registerPendingHttp2CompatResponse(options.serverId, requestId, resolve); + }); + emitHttp2Event("serverCompatRequest", options.serverId, requestJson, undefined, String(requestId)); + const responseJson = await responsePromise; + const response = parseJsonWithLimit<{ + status: number; + headers?: Array<[string, string]>; + body?: string; + bodyEncoding?: "utf8" | "base64"; + }>("network.http2Server.compat response", responseJson, jsonLimit); + res.statusCode = response.status || 200; + for (const [key, value] of response.headers || []) { + res.setHeader(key, value); + } + if (response.bodyEncoding === "base64" && typeof response.body === "string") { + res.end(Buffer.from(response.body, "base64")); + } else if (typeof response.body === "string") { + res.end(response.body); + } else { + res.end(); + } + })().catch((error) => { + try { + res.statusCode = 500; + res.end(error instanceof Error ? error.message : String(error)); + } catch { + // Response already closed. 
+ } + }); + }); + server.on("stream", (stream, headers, flags) => { + debugHttp2Bridge("server stream", options.serverId, flags); + const streamSession = stream.session as http2.ServerHttp2Session | undefined; + if (!streamSession) { + return; + } + let sessionId = http2ServerSessionIds.get(streamSession); + if (sessionId === undefined) { + sessionId = nextHttp2SessionId++; + http2ServerSessionIds.set(streamSession, sessionId); + http2Sessions.set(sessionId, streamSession); + attachHttp2SessionListeners(sessionId, streamSession); + emitHttp2Event("serverSession", options.serverId, serializeHttp2SessionState(streamSession), undefined, String(sessionId)); + } + + const streamId = nextHttp2StreamId++; + http2Streams.set(streamId, stream); + stream.pause(); + stream.on("data", (chunk) => { + emitHttp2Event( + "serverStreamData", + streamId, + (Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)).toString("base64"), + ); + }); + stream.on("end", () => { + emitHttp2Event("serverStreamEnd", streamId); + }); + stream.on("drain", () => { + emitHttp2Event("serverStreamDrain", streamId); + }); + stream.on("error", (error) => { + emitHttp2SerializedError("serverStreamError", streamId, error); + }); + stream.on("close", () => { + emitHttp2Event("serverStreamClose", streamId, undefined, undefined, String(stream.rstCode ?? 0)); + http2Streams.delete(streamId); + }); + emitHttp2Event( + "serverStream", + options.serverId, + String(streamId), + serializeHttp2SessionState(streamSession), + String(sessionId), + JSON.stringify(normalizeHttp2EventHeaders(headers)), + String(flags ?? 
0), + ); + }); + server.on("close", () => { + debugHttp2Bridge("server close", options.serverId); + emitHttp2Event("serverClose", options.serverId); + }); + + let resolveClosed!: () => void; + const closedPromise = new Promise((resolve) => { + resolveClosed = resolve; + }); + const state: KernelHttp2ServerState = { + listenSocketId, + server, + sessions: new Set(), + acceptLoopActive: true, + closedPromise, + resolveClosed, + }; + server.on("session", (session) => { + state.sessions.add(session); + session.once("close", () => { + state.sessions.delete(session); + }); + }); + kernelHttp2Servers.set(options.serverId, state); + ownedHttp2Servers.add(options.serverId); + deps.activeHttpServerIds.add(options.serverId); + deps.activeHttpServerClosers.set( + options.serverId, + () => closeKernelHttp2Server(options.serverId), + ); + void startKernelHttp2AcceptLoop(state); + return JSON.stringify({ address }); + })(); + }; + + handlers[K.networkHttp2ServerCloseRaw] = (serverId: unknown): Promise => { + const id = Number(serverId); + if (!ownedHttp2Servers.has(id)) { + throw new Error(`Cannot close HTTP/2 server ${id}: not owned by this execution context`); + } + return closeKernelHttp2Server(id); + }; + + handlers[K.networkHttp2ServerWaitRaw] = (serverId: unknown): Promise => { + const state = kernelHttp2Servers.get(Number(serverId)); + return state?.closedPromise ?? Promise.resolve(); + }; + + handlers[K.networkHttp2SessionConnectRaw] = (optionsJson: unknown): Promise => { + const options = parseJsonWithLimit<{ + authority: string; + protocol: string; + host?: string; + port?: number | string; + localAddress?: string; + family?: number; + socketId?: number; + settings?: Record; + remoteCustomSettings?: number[]; + tls?: SerializedTlsBridgeOptions; + }>("network.http2Session.connect options", String(optionsJson), jsonLimit); + + return (async () => { + const authority = String(options.authority); + debugHttp2Bridge("session connect start", authority, options.socketId ?? 
null); + const sessionId = nextHttp2SessionId++; + let transport: Duplex; + if (typeof options.socketId === "number") { + transport = createKernelSocketDuplex(options.socketId, socketTable, pid); + } else { + const host = String(options.host ?? "127.0.0.1"); + const port = Number(options.port ?? 0); + const socketId = socketTable.create( + host.includes(":") ? AF_INET6 : AF_INET, + SOCK_STREAM, + 0, + pid, + ); + if (typeof options.localAddress === "string" && options.localAddress.length > 0) { + await socketTable.bind(socketId, { + host: options.localAddress, + port: 0, + }); + } + await socketTable.connect(socketId, { host, port }); + transport = createKernelSocketDuplex(socketId, socketTable, pid); + } + + const session = http2.connect(authority, { + settings: options.settings as http2.Settings, + remoteCustomSettings: options.remoteCustomSettings, + createConnection: () => { + debugHttp2Bridge("createConnection", authority, options.protocol); + if (options.protocol === "https:") { + return tls.connect({ + socket: transport, + ALPNProtocols: ["h2"], + servername: + typeof options.tls?.servername === "string" && options.tls.servername.length > 0 + ? 
options.tls.servername + : undefined, + ...buildHostTlsOptions(options.tls), + }); + } + return transport; + }, + }); + + let resolveClosed!: () => void; + const closedPromise = new Promise((resolve) => { + resolveClosed = resolve; + }); + http2Sessions.set(sessionId, session); + kernelHttp2ClientSessions.set(sessionId, { + session, + closedPromise, + resolveClosed, + }); + session.on("connect", () => { + debugHttp2Bridge("session connect", sessionId, authority); + emitHttp2Event("sessionConnect", sessionId, serializeHttp2SessionState(session)); + }); + attachHttp2SessionListeners(sessionId, session, () => { + kernelHttp2ClientSessions.get(sessionId)?.resolveClosed(); + kernelHttp2ClientSessions.delete(sessionId); + }); + session.on("stream", (stream, headers, flags) => { + const streamId = nextHttp2StreamId++; + http2Streams.set(streamId, stream); + attachHttp2ClientStreamListeners(streamId, stream); + emitHttp2Event( + "clientPushStream", + sessionId, + String(streamId), + undefined, + undefined, + JSON.stringify(normalizeHttp2EventHeaders(headers)), + String(flags ?? 
0), + ); + }); + + return JSON.stringify({ + sessionId, + state: serializeHttp2SessionState(session), + }); + })(); + }; + + handlers[K.networkHttp2SessionRequestRaw] = ( + sessionId: unknown, + headersJson: unknown, + optionsJson: unknown, + ): number => { + const session = http2Sessions.get(Number(sessionId)) as http2.ClientHttp2Session | undefined; + if (!session) { + throw new Error(`HTTP/2 session ${String(sessionId)} not found`); + } + const headers = parseJsonWithLimit>( + "network.http2Session.request headers", + String(headersJson), + jsonLimit, + ); + const requestOptions = parseJsonWithLimit>( + "network.http2Session.request options", + String(optionsJson), + jsonLimit, + ); + const stream = session.request(headers, requestOptions as http2.ClientSessionRequestOptions); + debugHttp2Bridge("session request", sessionId, stream.id); + const streamId = nextHttp2StreamId++; + http2Streams.set(streamId, stream); + attachHttp2ClientStreamListeners(streamId, stream); + return streamId; + }; + + handlers[K.networkHttp2SessionCloseRaw] = (sessionId: unknown): void => { + http2Sessions.get(Number(sessionId))?.close(); + }; + + handlers[K.networkHttp2SessionSettingsRaw] = ( + sessionId: unknown, + settingsJson: unknown, + ): void => { + const session = http2Sessions.get(Number(sessionId)); + if (!session) { + throw new Error(`HTTP/2 session ${String(sessionId)} not found`); + } + const settings = parseJsonWithLimit>( + "network.http2Session.settings settings", + String(settingsJson), + jsonLimit, + ); + session.settings(settings as http2.Settings, () => { + emitHttp2Event("sessionSettingsAck", Number(sessionId)); + }); + }; + + handlers[K.networkHttp2SessionSetLocalWindowSizeRaw] = ( + sessionId: unknown, + windowSize: unknown, + ): string => { + const session = http2Sessions.get(Number(sessionId)); + if (!session) { + throw new Error(`HTTP/2 session ${String(sessionId)} not found`); + } + session.setLocalWindowSize(Number(windowSize)); + return 
serializeHttp2SessionState(session); + }; + + handlers[K.networkHttp2SessionGoawayRaw] = ( + sessionId: unknown, + errorCode: unknown, + lastStreamID: unknown, + opaqueDataBase64: unknown, + ): void => { + const session = http2Sessions.get(Number(sessionId)); + if (!session) { + throw new Error(`HTTP/2 session ${String(sessionId)} not found`); + } + session.goaway( + Number(errorCode), + Number(lastStreamID), + typeof opaqueDataBase64 === "string" && opaqueDataBase64.length > 0 + ? Buffer.from(opaqueDataBase64, "base64") + : undefined, + ); + }; + + handlers[K.networkHttp2SessionDestroyRaw] = (sessionId: unknown): void => { + http2Sessions.get(Number(sessionId))?.destroy(); + }; + + handlers[K.networkHttp2SessionWaitRaw] = (sessionId: unknown): Promise => { + const state = kernelHttp2ClientSessions.get(Number(sessionId)); + return state?.closedPromise ?? Promise.resolve(); + }; + + handlers[K.networkHttp2StreamRespondRaw] = ( + streamId: unknown, + headersJson: unknown, + ): void => { + const stream = http2Streams.get(Number(streamId)) as http2.ServerHttp2Stream | undefined; + if (!stream) { + throw new Error(`HTTP/2 stream ${String(streamId)} not found`); + } + const headers = parseJsonWithLimit>( + "network.http2Stream.respond headers", + String(headersJson), + jsonLimit, + ); + stream.respond(headers); + }; + + handlers[K.networkHttp2StreamPushStreamRaw] = async ( + streamId: unknown, + headersJson: unknown, + optionsJson: unknown, + ): Promise => { + const stream = http2Streams.get(Number(streamId)) as http2.ServerHttp2Stream | undefined; + if (!stream) { + throw new Error(`HTTP/2 stream ${String(streamId)} not found`); + } + const headers = parseJsonWithLimit>( + "network.http2Stream.pushStream headers", + String(headersJson), + jsonLimit, + ); + const options = parseJsonWithLimit>( + "network.http2Stream.pushStream options", + String(optionsJson), + jsonLimit, + ); + return await new Promise((resolve, reject) => { + try { + stream.pushStream( + headers, + 
options as http2.StreamPriorityOptions, + (error, pushStream, pushHeaders) => { + if (error) { + resolve(JSON.stringify({ + error: JSON.stringify({ + message: error.message, + name: error.name, + code: (error as { code?: unknown }).code, + }), + })); + return; + } + if (!pushStream) { + reject(new Error("HTTP/2 push stream callback returned no stream")); + return; + } + const pushStreamId = nextHttp2StreamId++; + http2Streams.set(pushStreamId, pushStream); + pushStream.on("close", () => { + http2Streams.delete(pushStreamId); + }); + resolve(JSON.stringify({ + streamId: pushStreamId, + headers: JSON.stringify(normalizeHttp2EventHeaders(pushHeaders ?? {})), + })); + }, + ); + } catch (error) { + reject(error); + } + }); + }; + + handlers[K.networkHttp2StreamWriteRaw] = ( + streamId: unknown, + dataBase64: unknown, + ): boolean => { + const stream = http2Streams.get(Number(streamId)); + if (!stream) { + throw new Error(`HTTP/2 stream ${String(streamId)} not found`); + } + return stream.write(Buffer.from(String(dataBase64), "base64")); + }; + + handlers[K.networkHttp2StreamEndRaw] = ( + streamId: unknown, + dataBase64: unknown, + ): void => { + const stream = http2Streams.get(Number(streamId)); + if (!stream) { + throw new Error(`HTTP/2 stream ${String(streamId)} not found`); + } + if (typeof dataBase64 === "string" && dataBase64.length > 0) { + stream.end(Buffer.from(dataBase64, "base64")); + return; + } + stream.end(); + }; + + handlers[K.networkHttp2StreamPauseRaw] = (streamId: unknown): void => { + http2Streams.get(Number(streamId))?.pause(); + }; + + handlers[K.networkHttp2StreamResumeRaw] = (streamId: unknown): void => { + http2Streams.get(Number(streamId))?.resume(); + }; + + handlers[K.networkHttp2StreamRespondWithFileRaw] = ( + streamId: unknown, + filePath: unknown, + headersJson: unknown, + optionsJson: unknown, + ): void => { + const stream = http2Streams.get(Number(streamId)) as http2.ServerHttp2Stream | undefined; + if (!stream) { + throw new 
Error(`HTTP/2 stream ${String(streamId)} not found`); + } + const headers = parseJsonWithLimit>( + "network.http2Stream.respondWithFile headers", + String(headersJson), + jsonLimit, + ); + const options = parseJsonWithLimit>( + "network.http2Stream.respondWithFile options", + String(optionsJson), + jsonLimit, + ); + stream.respondWithFile( + String(filePath), + headers as http2.OutgoingHttpHeaders, + options as http2.ServerStreamFileResponseOptionsWithError, + ); + }; + + handlers[K.networkHttp2ServerRespondRaw] = ( + serverId: unknown, + requestId: unknown, + responseJson: unknown, + ): void => { + resolveHttp2CompatResponse({ + serverId: Number(serverId), + requestId: Number(requestId), + responseJson: String(responseJson), + }); + }; + handlers[K.upgradeSocketWriteRaw] = ( socketId: unknown, dataBase64: unknown, @@ -3569,6 +5298,19 @@ export function buildNetworkBridgeHandlers(deps: NetworkBridgeDeps): NetworkBrid for (const serverId of Array.from(kernelHttpServers.keys())) { await closeKernelServer(serverId); } + for (const serverId of Array.from(kernelHttp2Servers.keys())) { + await closeKernelHttp2Server(serverId); + } + for (const session of http2Sessions.values()) { + try { + session.destroy(); + } catch { + // Session already closed. + } + } + kernelHttp2ClientSessions.clear(); + http2Sessions.clear(); + http2Streams.clear(); for (const socket of kernelUpgradeSockets.values()) { socket.destroy(); } @@ -3626,11 +5368,19 @@ type PendingHttpResponse = { resolve: (response: string) => void; }; +type PendingHttp2CompatResponse = { + serverId: number; + resolve: (response: string) => void; +}; + // Track request IDs directly, but also keep per-server FIFO queues so older // callbacks that only report serverId still resolve the correct pending waiters. 
const pendingHttpResponses = new Map(); const pendingHttpResponsesByServer = new Map(); let nextHttpRequestId = 1; +const pendingHttp2CompatResponses = new Map(); +const pendingHttp2CompatResponsesByServer = new Map(); +let nextHttp2CompatRequestId = 1; function registerPendingHttpResponse( serverId: number, @@ -3677,6 +5427,56 @@ function takePendingHttpResponseByServer(serverId: number): PendingHttpResponse return pending; } +function registerPendingHttp2CompatResponse( + serverId: number, + requestId: number, + resolve: (response: string) => void, +): void { + pendingHttp2CompatResponses.set(requestId, { serverId, resolve }); + const queue = pendingHttp2CompatResponsesByServer.get(serverId); + if (queue) { + queue.push(requestId); + } else { + pendingHttp2CompatResponsesByServer.set(serverId, [requestId]); + } +} + +function removePendingHttp2CompatResponse( + serverId: number, + requestId: number, +): PendingHttp2CompatResponse | undefined { + const pending = pendingHttp2CompatResponses.get(requestId); + if (!pending) return undefined; + + pendingHttp2CompatResponses.delete(requestId); + + const queue = pendingHttp2CompatResponsesByServer.get(serverId); + if (queue) { + const index = queue.indexOf(requestId); + if (index !== -1) queue.splice(index, 1); + if (queue.length === 0) pendingHttp2CompatResponsesByServer.delete(serverId); + } + + return pending; +} + +function takePendingHttp2CompatResponseByServer( + serverId: number, +): PendingHttp2CompatResponse | undefined { + const queue = pendingHttp2CompatResponsesByServer.get(serverId); + if (!queue || queue.length === 0) return undefined; + + const requestId = queue.shift()!; + if (queue.length === 0) pendingHttp2CompatResponsesByServer.delete(serverId); + + const pending = pendingHttp2CompatResponses.get(requestId); + if (pending) { + pendingHttp2CompatResponses.delete(requestId); + } + + return pending; +} + /** Resolve a pending HTTP server response (called from stream callback handler). 
*/ export function resolveHttpServerResponse(options: { requestId?: number; @@ -3695,6 +5495,23 @@ export function resolveHttpServerResponse(options: { pending?.resolve(options.responseJson); } +export function resolveHttp2CompatResponse(options: { + requestId?: number; + serverId?: number; + responseJson: string; +}): void { + const pending = + options.requestId !== undefined + ? removePendingHttp2CompatResponse( + options.serverId ?? pendingHttp2CompatResponses.get(options.requestId)?.serverId ?? -1, + options.requestId, + ) + : options.serverId !== undefined + ? takePendingHttp2CompatResponseByServer(options.serverId) + : undefined; + pending?.resolve(options.responseJson); +} + /** Dependencies for PTY bridge handlers. */ export interface PtyBridgeDeps { onPtySetRawMode?: (mode: boolean) => void; @@ -3740,6 +5557,82 @@ function canWrite(flags: number): boolean { return access === O_WRONLY || access === O_RDWR; } +const PROC_SYS_KERNEL_HOSTNAME_PATH = "/proc/sys/kernel/hostname"; + +function getStandaloneProcFileContent(path: string): Uint8Array | null { + if (path === PROC_SYS_KERNEL_HOSTNAME_PATH) { + return Buffer.from("sandbox\n", "utf8"); + } + return null; +} + +function getStandaloneProcFileStat( + path: string, +): import("@secure-exec/core").VirtualStat | null { + const content = getStandaloneProcFileContent(path); + if (!content) return null; + const now = Date.now(); + return { + mode: 0o100444, + size: content.length, + isDirectory: false, + isSymbolicLink: false, + atimeMs: now, + mtimeMs: now, + ctimeMs: now, + birthtimeMs: now, + ino: 0xfffe0001, + nlink: 1, + uid: 0, + gid: 0, + }; +} + +async function readStandaloneProcAwareFile( + vfs: VirtualFileSystem, + path: string, +): Promise { + return getStandaloneProcFileContent(path) ?? 
vfs.readFile(path); +} + +async function readStandaloneProcAwareTextFile( + vfs: VirtualFileSystem, + path: string, +): Promise { + const content = getStandaloneProcFileContent(path); + if (content) return new TextDecoder().decode(content); + return vfs.readTextFile(path); +} + +async function standaloneProcAwareExists( + vfs: VirtualFileSystem, + path: string, +): Promise { + if (getStandaloneProcFileContent(path)) return true; + return vfs.exists(path); +} + +async function standaloneProcAwareStat( + vfs: VirtualFileSystem, + path: string, +): Promise { + return getStandaloneProcFileStat(path) ?? vfs.stat(path); +} + +async function standaloneProcAwarePread( + vfs: VirtualFileSystem, + path: string, + offset: number, + length: number, +): Promise { + const content = getStandaloneProcFileContent(path); + if (content) { + if (offset >= content.length) return new Uint8Array(0); + return content.slice(offset, offset + length); + } + return vfs.pread(path, offset, length); +} + /** * Build kernel FD table bridge handlers. * @@ -3765,7 +5658,7 @@ export function buildKernelFdBridgeHandlers(deps: KernelFdBridgeDeps): KernelFdB const numFlags = Number(flags); const numMode = mode !== undefined && mode !== null ? Number(mode) : undefined; - const exists = await vfs.exists(pathStr); + const exists = await standaloneProcAwareExists(vfs, pathStr); // O_CREAT: create if doesn't exist if ((numFlags & O_CREAT) && !exists) { @@ -3810,7 +5703,7 @@ export function buildKernelFdBridgeHandlers(deps: KernelFdBridgeDeps): KernelFdB ? 
Number(position) : Number(entry.description.cursor); - const data = await vfs.pread(entry.description.path, pos, len); + const data = await standaloneProcAwarePread(vfs, entry.description.path, pos, len); // Update cursor only when no explicit position if (position === null || position === undefined) { @@ -3833,7 +5726,7 @@ export function buildKernelFdBridgeHandlers(deps: KernelFdBridgeDeps): KernelFdB // Read existing content let content: Uint8Array; try { - content = await vfs.readFile(entry.description.path); + content = await readStandaloneProcAwareFile(vfs, entry.description.path); } catch { content = new Uint8Array(0); } @@ -3870,7 +5763,7 @@ export function buildKernelFdBridgeHandlers(deps: KernelFdBridgeDeps): KernelFdB const entry = fdTable.get(fdNum); if (!entry) throw new Error("EBADF: bad file descriptor, fstat"); - const stat = await vfs.stat(entry.description.path); + const stat = await standaloneProcAwareStat(vfs, entry.description.path); return JSON.stringify({ dev: 0, ino: stat.ino ?? 0, @@ -3899,7 +5792,7 @@ export function buildKernelFdBridgeHandlers(deps: KernelFdBridgeDeps): KernelFdB const newLen = (len !== undefined && len !== null) ? 
Number(len) : 0; let content: Uint8Array; try { - content = await vfs.readFile(entry.description.path); + content = await readStandaloneProcAwareFile(vfs, entry.description.path); } catch { content = new Uint8Array(0); } diff --git a/packages/nodejs/src/bridge/active-handles.ts b/packages/nodejs/src/bridge/active-handles.ts index d67817f7..42254f9e 100644 --- a/packages/nodejs/src/bridge/active-handles.ts +++ b/packages/nodejs/src/bridge/active-handles.ts @@ -20,6 +20,22 @@ const HANDLE_DISPATCH = { // Resolvers waiting for all handles to complete let _waitResolvers: Array<() => void> = []; +let _handlePollTimer: ReturnType | null = null; + +function ensureHandlePollTimer(): void { + if (_handlePollTimer !== null) { + return; + } + _handlePollTimer = setInterval(() => { + if (_getActiveHandles().length > 0) { + return; + } + if (_handlePollTimer !== null) { + clearInterval(_handlePollTimer); + _handlePollTimer = null; + } + }, 25); +} /** * Register an active handle that keeps the sandbox alive. 
@@ -30,6 +46,7 @@ let _waitResolvers: Array<() => void> = []; export function _registerHandle(id: string, description: string): void { try { bridgeDispatchSync(HANDLE_DISPATCH.register, id, description); + ensureHandlePollTimer(); } catch (error) { if (error instanceof Error && error.message.includes("EAGAIN")) { throw new Error( @@ -46,6 +63,10 @@ export function _registerHandle(id: string, description: string): void { */ export function _unregisterHandle(id: string): void { const remaining = bridgeDispatchSync(HANDLE_DISPATCH.unregister, id); + if (remaining === 0 && _handlePollTimer !== null) { + clearInterval(_handlePollTimer); + _handlePollTimer = null; + } if (remaining === 0 && _waitResolvers.length > 0) { const resolvers = _waitResolvers; _waitResolvers = []; @@ -62,7 +83,26 @@ export function _waitForActiveHandles(): Promise { return Promise.resolve(); } return new Promise((resolve) => { - _waitResolvers.push(resolve); + let settled = false; + const complete = () => { + if (settled) { + return; + } + settled = true; + resolve(); + }; + _waitResolvers.push(complete); + const poll = () => { + if (settled) { + return; + } + if (_getActiveHandles().length === 0) { + complete(); + return; + } + setTimeout(poll, 10); + }; + setTimeout(poll, 10); }); } diff --git a/packages/nodejs/src/bridge/fs.ts b/packages/nodejs/src/bridge/fs.ts index ec7ef431..b645c356 100644 --- a/packages/nodejs/src/bridge/fs.ts +++ b/packages/nodejs/src/bridge/fs.ts @@ -192,109 +192,231 @@ class Dir { } } -// ReadStream class for createReadStream -// Provides a proper readable stream implementation that works with stream.pipeline -class ReadStream { - // ReadStream-specific properties - bytesRead: number = 0; - path: string | Buffer; - pending: boolean = true; - - // Readable stream properties - readable: boolean = true; - readableAborted: boolean = false; - readableDidRead: boolean = false; - readableEncoding: BufferEncoding | null = null; - readableEnded: boolean = false; - 
readableFlowing: boolean | null = null; - readableHighWaterMark: number = 65536; - readableLength: number = 0; - readableObjectMode: boolean = false; - destroyed: boolean = false; - closed: boolean = false; - errored: Error | null = null; +const FILE_HANDLE_READ_CHUNK_BYTES = 64 * 1024; +const FILE_HANDLE_READ_BUFFER_BYTES = 16 * 1024; +const FILE_HANDLE_MAX_READ_BYTES = 2 ** 31 - 1; + +function createAbortError(reason?: unknown): Error & { name: string; code?: string; cause?: unknown } { + const error = new Error("The operation was aborted") as Error & { + name: string; + code?: string; + cause?: unknown; + }; + error.name = "AbortError"; + error.code = "ABORT_ERR"; + if (reason !== undefined) { + error.cause = reason; + } + return error; +} - // Internal state - private _content: Buffer | null = null; - private _listeners: Map void>> = new Map(); - private _started: boolean = false; +function validateAbortSignal(signal: unknown): AbortSignal | undefined { + if (signal === undefined) { + return undefined; + } + if ( + signal === null || + typeof signal !== "object" || + typeof (signal as AbortSignal).aborted !== "boolean" || + typeof (signal as AbortSignal).addEventListener !== "function" || + typeof (signal as AbortSignal).removeEventListener !== "function" + ) { + const error = new TypeError( + 'The "signal" argument must be an instance of AbortSignal' + ) as TypeError & { code?: string }; + error.code = "ERR_INVALID_ARG_TYPE"; + throw error; + } + return signal as AbortSignal; +} - constructor(filePath: string | Buffer, private _options?: { encoding?: BufferEncoding; start?: number; end?: number; highWaterMark?: number }) { - this.path = filePath; - if (_options?.encoding) { - this.readableEncoding = _options.encoding; - } - if (_options?.highWaterMark) { - this.readableHighWaterMark = _options.highWaterMark; - } +function throwIfAborted(signal?: AbortSignal): void { + if (signal?.aborted) { + throw createAbortError(signal.reason); } +} - private 
_loadContent(): Buffer { - if (this._content === null) { - const pathStr = typeof this.path === 'string' ? this.path : this.path.toString(); - // readFileSync already normalizes the path - this._content = fs.readFileSync(pathStr) as Buffer; - this.pending = false; +function waitForNextTick(): Promise { + return new Promise((resolve) => process.nextTick(resolve)); +} + +function createInternalAssertionError(message: string): Error & { code: string } { + const error = new Error(message) as Error & { code: string }; + error.code = "ERR_INTERNAL_ASSERTION"; + return error; +} + +function createOutOfRangeError(name: string, range: string, received: unknown): RangeError & { code: string } { + const error = new RangeError( + `The value of "${name}" is out of range. It must be ${range}. Received ${String(received)}` + ) as RangeError & { code: string }; + error.code = "ERR_OUT_OF_RANGE"; + return error; +} + +function formatInvalidArgReceived(actual: unknown): string { + if (actual === null) { + return "Received null"; + } + if (actual === undefined) { + return "Received undefined"; + } + if (typeof actual === "string") { + return `Received type string ('${actual}')`; + } + if (typeof actual === "number") { + return `Received type number (${String(actual)})`; + } + if (typeof actual === "boolean") { + return `Received type boolean (${String(actual)})`; + } + if (typeof actual === "bigint") { + return `Received type bigint (${actual.toString()}n)`; + } + if (typeof actual === "symbol") { + return `Received type symbol (${String(actual)})`; + } + if (typeof actual === "function") { + return actual.name ? 
`Received function ${actual.name}` : "Received function"; + } + if (Array.isArray(actual)) { + return "Received an instance of Array"; + } + if (actual && typeof actual === "object") { + const constructorName = (actual as { constructor?: { name?: string } }).constructor?.name; + if (constructorName) { + return `Received an instance of ${constructorName}`; } - return this._content; } + return `Received type ${typeof actual} (${String(actual)})`; +} - // Start reading - called when 'data' listener is added or resume() is called - private _startReading(): void { - if (this._started || this.destroyed) return; - this._started = true; - this.readableFlowing = true; +function createInvalidArgTypeError(name: string, expected: string, actual: unknown): TypeError & { code: string } { + const error = new TypeError( + `The "${name}" argument must be ${expected}. ${formatInvalidArgReceived(actual)}` + ) as TypeError & { code: string }; + error.code = "ERR_INVALID_ARG_TYPE"; + return error; +} - Promise.resolve().then(() => { - try { - const content = this._loadContent(); - this.readableDidRead = true; +function createInvalidArgValueError(name: string, message: string): TypeError & { code: string } { + const error = new TypeError( + `The argument '${name}' ${message}` + ) as TypeError & { code: string }; + error.code = "ERR_INVALID_ARG_VALUE"; + return error; +} - // Determine start/end positions - const start = this._options?.start ?? 0; - const end = this._options?.end ?? content.length; - const chunk = content.slice(start, end); +function createInvalidEncodingError(encoding: unknown): TypeError & { code: string } { + const printable = + typeof encoding === "string" + ? `'${encoding}'` + : encoding === undefined + ? "undefined" + : encoding === null + ? "null" + : String(encoding); + const error = new TypeError( + `The argument 'encoding' is invalid encoding. 
Received ${printable}` + ) as TypeError & { code: string }; + error.code = "ERR_INVALID_ARG_VALUE"; + return error; +} - this.bytesRead = chunk.length; +function toUint8ArrayChunk(chunk: unknown, encoding?: BufferEncoding): Uint8Array { + if (typeof chunk === "string") { + return Buffer.from(chunk, encoding ?? "utf8"); + } + if (Buffer.isBuffer(chunk)) { + return new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + if (chunk instanceof Uint8Array) { + return chunk; + } + if (ArrayBuffer.isView(chunk)) { + return new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + throw createInvalidArgTypeError("data", "a string, Buffer, TypedArray, or DataView", chunk); +} - // Emit data event - this.emit('data', chunk); +async function *iterateWriteChunks( + data: unknown, + encoding?: BufferEncoding +): AsyncGenerator { + if (typeof data === "string" || ArrayBuffer.isView(data)) { + yield toUint8ArrayChunk(data, encoding); + return; + } + if (data && typeof (data as AsyncIterable)[Symbol.asyncIterator] === "function") { + for await (const chunk of data as AsyncIterable) { + yield toUint8ArrayChunk(chunk, encoding); + } + return; + } + if (data && typeof (data as Iterable)[Symbol.iterator] === "function") { + for (const chunk of data as Iterable) { + yield toUint8ArrayChunk(chunk, encoding); + } + return; + } + throw createInvalidArgTypeError("data", "a string, Buffer, TypedArray, DataView, or Iterable", data); +} - // Emit end and close - Promise.resolve().then(() => { - this.readable = false; - this.readableEnded = true; - this.emit('end'); - Promise.resolve().then(() => { - this.closed = true; - this.emit('close'); - }); - }); - } catch (err) { - this.errored = err as Error; - this.emit('error', err); - this.destroy(err as Error); - } - }); +type FileHandleReadFileOptions = nodeFs.ObjectEncodingOptions & { signal?: AbortSignal | undefined }; +type FileHandleWriteFileOptions = nodeFs.ObjectEncodingOptions & { signal?: AbortSignal | 
undefined }; + +class FileHandle { + private _fd: number; + private _closing = false; + private _closed = false; + private _listeners: Map void>> = new Map(); + + constructor(fd: number) { + this._fd = fd; } - // Event handling - on(event: string | symbol, listener: (...args: unknown[]) => void): this { - if (!this._listeners.has(event)) { - this._listeners.set(event, []); + private static _assertHandle(handle: unknown): FileHandle { + if (!(handle instanceof FileHandle)) { + throw createInternalAssertionError("handle must be an instance of FileHandle"); } - this._listeners.get(event)!.push(listener); + return handle; + } + + private _emitCloseOnce(): void { + if (this._closed) { + this._fd = -1; + this.emit("close"); + return; + } + this._closed = true; + this._fd = -1; + this.emit("close"); + } - // Start reading when 'data' listener is added (flowing mode) - if (event === 'data' && !this._started) { - this._startReading(); + private _resolvePath(): string | null { + if (this._fd < 0) { + return null; } + return _fdGetPath.applySync(undefined, [this._fd]); + } + + get fd(): number { + return this._fd; + } + + get closed(): boolean { + return this._closed; + } + on(event: string | symbol, listener: (...args: unknown[]) => void): this { + const listeners = this._listeners.get(event) ?? 
[]; + listeners.push(listener); + this._listeners.set(event, listeners); return this; } once(event: string | symbol, listener: (...args: unknown[]) => void): this { - const wrapper = (...args: unknown[]): void => { + const wrapper = (...args: unknown[]) => { this.off(event, wrapper); listener(...args); }; @@ -304,11 +426,16 @@ class ReadStream { off(event: string | symbol, listener: (...args: unknown[]) => void): this { const listeners = this._listeners.get(event); - if (listeners) { - const idx = listeners.findIndex( - fn => fn === listener || (fn as { _originalListener?: typeof listener })._originalListener === listener - ); - if (idx !== -1) listeners.splice(idx, 1); + if (!listeners) { + return this; + } + const index = listeners.findIndex( + (candidate) => + candidate === listener || + (candidate as { _originalListener?: typeof listener })._originalListener === listener + ); + if (index !== -1) { + listeners.splice(index, 1); } return this; } @@ -317,81 +444,1143 @@ class ReadStream { return this.off(event, listener); } - removeAllListeners(event?: string | symbol): this { - if (event) { - this._listeners.delete(event); - } else { - this._listeners.clear(); + emit(event: string | symbol, ...args: unknown[]): boolean { + const listeners = this._listeners.get(event); + if (!listeners || listeners.length === 0) { + return false; } - return this; + for (const listener of listeners.slice()) { + listener(...args); + } + return true; } - emit(event: string | symbol, ...args: unknown[]): boolean { - const listeners = this._listeners.get(event); - if (listeners && listeners.length > 0) { - listeners.slice().forEach(fn => fn(...args)); - return true; + async close(): Promise { + const handle = FileHandle._assertHandle(this); + if (handle._closing || handle._closed) { + if (handle._fd < 0) { + throw createFsError("EBADF", "EBADF: bad file descriptor, close", "close"); + } + } + handle._closing = true; + try { + fs.closeSync(handle._fd); + handle._emitCloseOnce(); + } 
finally { + handle._closing = false; } - return false; } - // Readable methods - read(_size?: number): Buffer | string | null { - if (this.readableEnded || this.destroyed) return null; + async stat(): Promise { + const handle = FileHandle._assertHandle(this); + return fs.fstatSync(handle.fd); + } - try { - const content = this._loadContent(); - const start = this._options?.start ?? 0; - const end = this._options?.end ?? content.length; - const chunk = content.slice(start, end); + async sync(): Promise { + const handle = FileHandle._assertHandle(this); + fs.fsyncSync(handle.fd); + } - this.bytesRead = chunk.length; - this.readableDidRead = true; - this.readable = false; - this.readableEnded = true; + async datasync(): Promise { + return this.sync(); + } + + async truncate(len?: number): Promise { + const handle = FileHandle._assertHandle(this); + fs.ftruncateSync(handle.fd, len); + } + + async chmod(mode: Mode): Promise { + const handle = FileHandle._assertHandle(this); + const path = handle._resolvePath(); + if (!path) { + throw createFsError("EBADF", "EBADF: bad file descriptor", "chmod"); + } + fs.chmodSync(path, mode); + } + + async chown(uid: number, gid: number): Promise { + const handle = FileHandle._assertHandle(this); + const path = handle._resolvePath(); + if (!path) { + throw createFsError("EBADF", "EBADF: bad file descriptor", "chown"); + } + fs.chownSync(path, uid, gid); + } + + async utimes(atime: string | number | Date, mtime: string | number | Date): Promise { + const handle = FileHandle._assertHandle(this); + const path = handle._resolvePath(); + if (!path) { + throw createFsError("EBADF", "EBADF: bad file descriptor", "utimes"); + } + fs.utimesSync(path, atime, mtime); + } + + async read( + buffer: NodeJS.ArrayBufferView | null, + offset?: number, + length?: number, + position?: number | null + ): Promise<{ bytesRead: number; buffer: NodeJS.ArrayBufferView }> { + const handle = FileHandle._assertHandle(this); + let target = buffer; + if (target === 
null) { + target = Buffer.alloc(FILE_HANDLE_READ_BUFFER_BYTES); + } + if (!ArrayBuffer.isView(target)) { + throw createInvalidArgTypeError("buffer", "an instance of ArrayBufferView", target); + } + const readOffset = offset ?? 0; + const readLength = length ?? (target.byteLength - readOffset); + const bytesRead = fs.readSync(handle.fd, target, readOffset, readLength, position ?? null); + return { bytesRead, buffer: target }; + } + + async write( + buffer: string | NodeJS.ArrayBufferView, + offsetOrPosition?: number, + lengthOrEncoding?: number | BufferEncoding, + position?: number + ): Promise<{ bytesWritten: number; buffer: string | NodeJS.ArrayBufferView }> { + const handle = FileHandle._assertHandle(this); + if (typeof buffer === "string") { + const encoding = typeof lengthOrEncoding === "string" ? lengthOrEncoding : "utf8"; + if (encoding === "hex" && buffer.length % 2 !== 0) { + throw createInvalidArgValueError("encoding", `is invalid for data of length ${buffer.length}`); + } + const bytesWritten = fs.writeSync(handle.fd, Buffer.from(buffer, encoding), 0, undefined, offsetOrPosition ?? null); + return { bytesWritten, buffer }; + } + if (!ArrayBuffer.isView(buffer)) { + throw createInvalidArgTypeError("buffer", "a string, Buffer, TypedArray, or DataView", buffer); + } + const offset = offsetOrPosition ?? 0; + const length = typeof lengthOrEncoding === "number" ? lengthOrEncoding : undefined; + const bytesWritten = fs.writeSync(handle.fd, buffer, offset, length, position ?? null); + return { bytesWritten, buffer }; + } + + async readFile(options?: BufferEncoding | FileHandleReadFileOptions | null): Promise { + const handle = FileHandle._assertHandle(this); + const normalized = + typeof options === "string" ? { encoding: options } : (options ?? undefined); + const signal = validateAbortSignal(normalized?.signal); + const encoding = normalized?.encoding ?? 
undefined; + const stats = await handle.stat(); + if (stats.size > FILE_HANDLE_MAX_READ_BYTES) { + const error = new RangeError("File size is greater than 2 GiB") as RangeError & { code: string }; + error.code = "ERR_FS_FILE_TOO_LARGE"; + throw error; + } + await waitForNextTick(); + throwIfAborted(signal); + + const chunks: Buffer[] = []; + let totalLength = 0; + while (true) { + throwIfAborted(signal); + const chunk = Buffer.alloc(FILE_HANDLE_READ_CHUNK_BYTES); + const { bytesRead } = await handle.read(chunk, 0, chunk.byteLength, null); + if (bytesRead === 0) { + break; + } + chunks.push(chunk.subarray(0, bytesRead)); + totalLength += bytesRead; + if (totalLength > FILE_HANDLE_MAX_READ_BYTES) { + const error = new RangeError("File size is greater than 2 GiB") as RangeError & { code: string }; + error.code = "ERR_FS_FILE_TOO_LARGE"; + throw error; + } + await waitForNextTick(); + } + const result = Buffer.concat(chunks, totalLength); + return encoding ? result.toString(encoding) : result; + } + + async writeFile( + data: unknown, + options?: BufferEncoding | FileHandleWriteFileOptions | null + ): Promise { + const handle = FileHandle._assertHandle(this); + const normalized = + typeof options === "string" ? { encoding: options } : (options ?? undefined); + const signal = validateAbortSignal(normalized?.signal); + const encoding = normalized?.encoding ?? 
undefined; + await waitForNextTick(); + throwIfAborted(signal); + for await (const chunk of iterateWriteChunks(data, encoding)) { + throwIfAborted(signal); + await handle.write(chunk, 0, chunk.byteLength, undefined); + await waitForNextTick(); + } + } + + async appendFile( + data: unknown, + options?: BufferEncoding | FileHandleWriteFileOptions | null + ): Promise { + return this.writeFile(data, options); + } + + createReadStream( + options?: { + encoding?: BufferEncoding; + start?: number; + end?: number; + highWaterMark?: number; + signal?: AbortSignal; + } + ): ReadStream { + FileHandle._assertHandle(this); + return new ReadStream(null, { ...(options ?? {}), fd: this }); + } + + createWriteStream( + options?: { encoding?: BufferEncoding; flags?: string; mode?: number } + ): WriteStream { + FileHandle._assertHandle(this); + return new WriteStream(null, { ...(options ?? {}), fd: this }); + } +} + +type StreamFsMethods = { + open?: (...args: unknown[]) => unknown; + close?: (...args: unknown[]) => unknown; + read?: (...args: unknown[]) => unknown; + write?: (...args: unknown[]) => unknown; + writev?: (...args: unknown[]) => unknown; +}; + +function isArrayBufferView(value: unknown): value is NodeJS.ArrayBufferView { + return ArrayBuffer.isView(value); +} + +function createInvalidPropertyTypeError(propertyPath: string, actual: unknown): TypeError & { code: string } { + let received: string; + if (actual === null) { + received = "Received null"; + } else if (typeof actual === "string") { + received = `Received type string ('${actual}')`; + } else { + received = `Received type ${typeof actual} (${String(actual)})`; + } + const error = new TypeError( + `The "${propertyPath}" property must be of type function. 
${received}` + ) as TypeError & { code: string }; + error.code = "ERR_INVALID_ARG_TYPE"; + return error; +} + +function validateCallback(callback: unknown, name: string = "cb"): asserts callback is (...args: unknown[]) => void { + if (typeof callback !== "function") { + throw createInvalidArgTypeError(name, "of type function", callback); + } +} + +function validateEncodingValue(encoding: unknown): asserts encoding is BufferEncoding { + if (encoding === undefined || encoding === null) { + return; + } + if (typeof encoding !== "string" || !Buffer.isEncoding(encoding)) { + throw createInvalidEncodingError(encoding); + } +} + +function validateEncodingOption(options: unknown): void { + if (typeof options === "string") { + validateEncodingValue(options); + return; + } + if (options && typeof options === "object" && "encoding" in options) { + validateEncodingValue((options as { encoding?: unknown }).encoding); + } +} + +function normalizePathLike(path: unknown, name: string = "path"): string { + if (typeof path === "string") { + return path; + } + if (Buffer.isBuffer(path)) { + return path.toString("utf8"); + } + if (path instanceof URL) { + if (path.protocol === "file:") { + return path.pathname; + } + throw createInvalidArgTypeError(name, "of type string or an instance of Buffer or URL", path); + } + throw createInvalidArgTypeError(name, "of type string or an instance of Buffer or URL", path); +} + +function tryNormalizeExistsPath(path: unknown): string | null { + try { + return normalizePathLike(path); + } catch { + return null; + } +} + +function normalizeNumberArgument( + name: string, + value: unknown, + options: { min?: number; max?: number; allowNegativeOne?: boolean } = {}, +): number { + const { min = 0, max = 0x7fffffff, allowNegativeOne = false } = options; + if (typeof value !== "number") { + throw createInvalidArgTypeError(name, "of type number", value); + } + if (!Number.isFinite(value) || !Number.isInteger(value)) { + throw createOutOfRangeError(name, "an 
integer", value); + } + if ((allowNegativeOne && value === -1) || (value >= min && value <= max)) { + return value; + } + throw createOutOfRangeError(name, `>= ${min} && <= ${max}`, value); +} + +function normalizeModeArgument(mode: unknown, name: string = "mode"): number { + if (typeof mode === "string") { + if (!/^[0-7]+$/.test(mode)) { + throw createInvalidArgValueError(name, "must be a 32-bit unsigned integer or an octal string. Received '" + mode + "'"); + } + return parseInt(mode, 8); + } + return normalizeNumberArgument(name, mode, { min: 0, max: 0xffffffff }); +} + +function normalizeOpenModeArgument(mode: unknown): number | undefined { + if (mode === undefined || mode === null) { + return undefined; + } + return normalizeModeArgument(mode); +} + +function validateWriteStreamStartOption(options: Record | undefined): void { + if (options?.start === undefined) { + return; + } + if (typeof options.start !== "number") { + throw createInvalidArgTypeError("start", "of type number", options.start); + } + if (!Number.isFinite(options.start) || !Number.isInteger(options.start) || options.start < 0) { + throw createOutOfRangeError("start", ">= 0", options.start); + } +} + +function validateBooleanOption(name: string, value: unknown): boolean | undefined { + if (value === undefined) { + return undefined; + } + if (typeof value !== "boolean") { + throw createInvalidArgTypeError(name, "of type boolean", value); + } + return value; +} + +function validateAbortSignalOption(name: string, signal: unknown): AbortSignal | undefined { + if (signal === undefined) { + return undefined; + } + if ( + signal === null || + typeof signal !== "object" || + typeof (signal as AbortSignal).aborted !== "boolean" || + typeof (signal as AbortSignal).addEventListener !== "function" || + typeof (signal as AbortSignal).removeEventListener !== "function" + ) { + const error = new TypeError( + `The "${name}" property must be an instance of AbortSignal. 
${formatInvalidArgReceived(signal)}` + ) as TypeError & { code?: string }; + error.code = "ERR_INVALID_ARG_TYPE"; + throw error; + } + return signal as AbortSignal; +} + +function createUnsupportedWatcherError(api: "watch" | "watchFile" | "unwatchFile" | "promises.watch"): Error { + return new Error(`fs.${api} is not supported in sandbox — use polling`); +} + +function normalizeWatchOptions( + options: unknown, + allowString: boolean, +): { + persistent?: boolean; + recursive?: boolean; + encoding?: BufferEncoding; + signal?: AbortSignal; +} { + let normalized: Record; + if (options === undefined || options === null) { + normalized = {}; + } else if (typeof options === "string") { + if (!allowString) { + throw createInvalidArgTypeError("options", "of type object", options); + } + validateEncodingValue(options); + normalized = { encoding: options }; + } else if (typeof options === "object") { + normalized = options as Record; + } else { + throw createInvalidArgTypeError( + "options", + allowString ? 
"one of type string or object" : "of type object", + options + ); + } + + validateBooleanOption("options.persistent", normalized.persistent); + validateBooleanOption("options.recursive", normalized.recursive); + validateEncodingOption(normalized); + const signal = validateAbortSignalOption("options.signal", normalized.signal); + + return { + persistent: normalized.persistent as boolean | undefined, + recursive: normalized.recursive as boolean | undefined, + encoding: normalized.encoding as BufferEncoding | undefined, + signal, + }; +} + +function normalizeWatchArguments( + path: unknown, + optionsOrListener?: unknown, + listener?: unknown, +): { + persistent?: boolean; + recursive?: boolean; + encoding?: BufferEncoding; + signal?: AbortSignal; +} { + normalizePathLike(path); + + let options = optionsOrListener; + let resolvedListener = listener; + if (typeof optionsOrListener === "function") { + options = undefined; + resolvedListener = optionsOrListener; + } + + if (resolvedListener !== undefined && typeof resolvedListener !== "function") { + throw createInvalidArgTypeError("listener", "of type function", resolvedListener); + } + + return normalizeWatchOptions(options, true); +} + +function normalizeWatchFileArguments( + path: unknown, + optionsOrListener?: unknown, + listener?: unknown, +): void { + normalizePathLike(path); + + let options: Record = {}; + let resolvedListener = listener; + + if (typeof optionsOrListener === "function") { + resolvedListener = optionsOrListener; + } else if (optionsOrListener === undefined || optionsOrListener === null) { + options = {}; + } else if (typeof optionsOrListener === "object") { + options = optionsOrListener as Record; + } else { + throw createInvalidArgTypeError("listener", "of type function", optionsOrListener); + } + + if (typeof resolvedListener !== "function") { + throw createInvalidArgTypeError("listener", "of type function", resolvedListener); + } + + if (options.interval !== undefined && typeof options.interval 
!== "number") { + throw createInvalidArgTypeError("interval", "of type number", options.interval); + } +} + +async function *createUnsupportedPromisesWatchIterator( + path: unknown, + options?: unknown, +): AsyncIterableIterator<{ eventType: string; filename: string | Buffer | null }> { + const normalized = normalizeWatchOptions(options, false); + normalizePathLike(path); + throwIfAborted(normalized.signal); + throw createUnsupportedWatcherError("promises.watch"); +} + +function isReadWriteOptionsObject(value: unknown): value is Record { + return value === null || value === undefined || (typeof value === "object" && !Array.isArray(value)); +} + +function normalizeOptionalPosition(value: unknown): number | null { + if (value === undefined || value === null || value === -1) { + return null; + } + if (typeof value === "bigint") { + return Number(value); + } + if (typeof value !== "number" || !Number.isInteger(value)) { + throw createInvalidArgTypeError("position", "an integer", value); + } + return value; +} + +function normalizeOffsetLength( + bufferByteLength: number, + offsetValue: unknown, + lengthValue: unknown, +): { offset: number; length: number } { + const offset = offsetValue ?? 0; + if (typeof offset !== "number" || !Number.isInteger(offset)) { + throw createInvalidArgTypeError("offset", "an integer", offset); + } + if (offset < 0 || offset > bufferByteLength) { + throw createOutOfRangeError("offset", `>= 0 && <= ${bufferByteLength}`, offset); + } + + const defaultLength = bufferByteLength - offset; + const length = lengthValue ?? 
defaultLength; + if (typeof length !== "number" || !Number.isInteger(length)) { + throw createInvalidArgTypeError("length", "an integer", length); + } + if (length < 0 || length > 0x7fffffff) { + throw createOutOfRangeError("length", ">= 0 && <= 2147483647", length); + } + if (offset + length > bufferByteLength) { + throw createOutOfRangeError("length", `>= 0 && <= ${bufferByteLength - offset}`, length); + } + + return { offset, length }; +} - // Schedule end event - Promise.resolve().then(() => { - this.emit('end'); - Promise.resolve().then(() => { +function normalizeReadSyncArgs( + buffer: unknown, + offsetOrOptions?: number | Record | null, + length?: number | null, + position?: nodeFs.ReadPosition | null, +): { + buffer: NodeJS.ArrayBufferView; + offset: number; + length: number; + position: number | null; +} { + if (!isArrayBufferView(buffer)) { + throw createInvalidArgTypeError("buffer", "an instance of Buffer, TypedArray, or DataView", buffer); + } + + if ( + length === undefined && + position === undefined && + isReadWriteOptionsObject(offsetOrOptions) + ) { + const options = (offsetOrOptions ?? 
{}) as Record; + const { offset, length } = normalizeOffsetLength( + buffer.byteLength, + options.offset, + options.length, + ); + return { + buffer, + offset, + length, + position: normalizeOptionalPosition(options.position), + }; + } + + const { offset, length: normalizedLength } = normalizeOffsetLength( + buffer.byteLength, + offsetOrOptions, + length, + ); + return { + buffer, + offset, + length: normalizedLength, + position: normalizeOptionalPosition(position), + }; +} + +function normalizeWriteSyncArgs( + buffer: unknown, + offsetOrPosition?: number | Record | null, + lengthOrEncoding?: number | BufferEncoding | null, + position?: number | null, +): { + buffer: string | NodeJS.ArrayBufferView; + offset: number; + length: number; + position: number | null; + encoding?: BufferEncoding; +} { + if (typeof buffer === "string") { + if ( + lengthOrEncoding === undefined && + position === undefined && + isReadWriteOptionsObject(offsetOrPosition) + ) { + const options = (offsetOrPosition ?? {}) as Record; + const encoding = typeof options.encoding === "string" ? (options.encoding as BufferEncoding) : undefined; + return { + buffer, + offset: 0, + length: Buffer.byteLength(buffer, encoding), + position: normalizeOptionalPosition(options.position), + encoding, + }; + } + + if ( + offsetOrPosition !== undefined && + offsetOrPosition !== null && + typeof offsetOrPosition !== "number" + ) { + throw createInvalidArgTypeError("position", "an integer", offsetOrPosition); + } + + return { + buffer, + offset: 0, + length: Buffer.byteLength(buffer, typeof lengthOrEncoding === "string" ? lengthOrEncoding : undefined), + position: normalizeOptionalPosition(offsetOrPosition), + encoding: typeof lengthOrEncoding === "string" ? 
lengthOrEncoding : undefined, + }; + } + + if (!isArrayBufferView(buffer)) { + throw createInvalidArgTypeError("buffer", "a string, Buffer, TypedArray, or DataView", buffer); + } + + if ( + lengthOrEncoding === undefined && + position === undefined && + isReadWriteOptionsObject(offsetOrPosition) + ) { + const options = (offsetOrPosition ?? {}) as Record; + const { offset, length } = normalizeOffsetLength( + buffer.byteLength, + options.offset, + options.length, + ); + return { + buffer, + offset, + length, + position: normalizeOptionalPosition(options.position), + }; + } + + const { offset, length } = normalizeOffsetLength( + buffer.byteLength, + offsetOrPosition, + typeof lengthOrEncoding === "number" ? lengthOrEncoding : undefined, + ); + return { + buffer, + offset, + length, + position: normalizeOptionalPosition(position), + }; +} + +function normalizeFdInteger(fd: unknown): number { + return normalizeNumberArgument("fd", fd); +} + +function normalizeIoVectorBuffers(buffers: unknown): ArrayBufferView[] { + if (!Array.isArray(buffers)) { + throw createInvalidArgTypeError("buffers", "an ArrayBufferView[]", buffers); + } + for (const buffer of buffers) { + if (!isArrayBufferView(buffer)) { + throw createInvalidArgTypeError("buffers", "an ArrayBufferView[]", buffers); + } + } + return buffers as ArrayBufferView[]; +} + +function validateStreamFsOverride(streamFs: unknown, required: Array): StreamFsMethods | undefined { + if (streamFs === undefined) { + return undefined; + } + if (streamFs === null || typeof streamFs !== "object") { + throw createInvalidArgTypeError("options.fs", "an object", streamFs); + } + const typed = streamFs as StreamFsMethods; + for (const key of required) { + if (typeof typed[key] !== "function") { + throw createInvalidPropertyTypeError(`options.fs.${String(key)}`, typed[key]); + } + } + return typed; +} + +function normalizeStreamFd(fd: unknown): number | FileHandle | undefined { + if (fd === undefined) { + return undefined; + } + if (fd 
instanceof FileHandle) { + return fd; + } + return normalizeNumberArgument("fd", fd); +} + +function normalizeStreamPath(pathValue: nodeFs.PathLike | null, fd: number | FileHandle | undefined): string | Buffer | null { + if (pathValue === null) { + if (fd === undefined) { + throw createInvalidArgTypeError("path", "of type string or an instance of Buffer or URL", pathValue); + } + return null; + } + if (typeof pathValue === "string" || Buffer.isBuffer(pathValue)) { + return pathValue; + } + if (pathValue instanceof URL) { + if (pathValue.protocol === "file:") { + return pathValue.pathname; + } + throw createInvalidArgTypeError("path", "of type string or an instance of Buffer or URL", pathValue); + } + throw createInvalidArgTypeError("path", "of type string or an instance of Buffer or URL", pathValue); +} + +function normalizeStreamStartEnd(options: Record | undefined): { + start: number | undefined; + end: number | undefined; + highWaterMark: number; + autoClose: boolean; +} { + const start = options?.start; + const end = options?.end; + + if (start !== undefined && typeof start !== "number") { + throw createInvalidArgTypeError("start", "of type number", start); + } + if (end !== undefined && typeof end !== "number") { + throw createInvalidArgTypeError("end", "of type number", end); + } + + const normalizedStart = start; + const normalizedEnd = end; + + if (normalizedStart !== undefined && (!Number.isFinite(normalizedStart) || normalizedStart < 0)) { + throw createOutOfRangeError("start", ">= 0", start); + } + if (normalizedEnd !== undefined && (!Number.isFinite(normalizedEnd) || normalizedEnd < 0)) { + throw createOutOfRangeError("end", ">= 0", end); + } + if ( + normalizedStart !== undefined && + normalizedEnd !== undefined && + normalizedStart > normalizedEnd + ) { + throw createOutOfRangeError("start", `<= "end" (here: ${normalizedEnd})`, normalizedStart); + } + + const highWaterMarkCandidate = options?.highWaterMark ?? 
options?.bufferSize; + const highWaterMark = + typeof highWaterMarkCandidate === "number" && Number.isFinite(highWaterMarkCandidate) && highWaterMarkCandidate > 0 + ? Math.floor(highWaterMarkCandidate) + : 65536; + + return { + start: normalizedStart, + end: normalizedEnd, + highWaterMark, + autoClose: options?.autoClose !== false, + }; +} + +class ReadStream { + bytesRead = 0; + path: string | Buffer | null; + pending = true; + readable = true; + readableAborted = false; + readableDidRead = false; + readableEncoding: BufferEncoding | null = null; + readableEnded = false; + readableFlowing: boolean | null = null; + readableHighWaterMark = 65536; + readableLength = 0; + readableObjectMode = false; + destroyed = false; + closed = false; + errored: Error | null = null; + fd: number | null = null; + autoClose = true; + start: number | undefined; + end: number | undefined; + + private _listeners: Map void>> = new Map(); + private _started = false; + private _reading = false; + private _readScheduled = false; + private _opening = false; + private _remaining: number | null = null; + private _position: number | null = null; + private _fileHandle: FileHandle | null = null; + private _streamFs?: StreamFsMethods; + private _signal?: AbortSignal; + private _handleCloseListener?: () => void; + + constructor( + filePath: string | Buffer | null, + private _options?: { + encoding?: BufferEncoding; + start?: number; + end?: number; + highWaterMark?: number; + bufferSize?: number; + autoClose?: boolean; + fd?: number | FileHandle; + fs?: unknown; + signal?: AbortSignal; + } + ) { + const fdOption = normalizeStreamFd(_options?.fd); + const optionsRecord = (_options ?? {}) as Record; + const streamState = normalizeStreamStartEnd(optionsRecord); + this.path = filePath; + this.start = streamState.start; + this.end = streamState.end; + this.autoClose = streamState.autoClose; + this.readableHighWaterMark = streamState.highWaterMark; + this.readableEncoding = _options?.encoding ?? 
null; + this._position = this.start ?? null; + this._remaining = + this.end !== undefined ? this.end - (this.start ?? 0) + 1 : null; + this._signal = validateAbortSignal(_options?.signal); + + if (fdOption instanceof FileHandle) { + if (_options?.fs !== undefined) { + const error = new Error("The FileHandle with fs method is not implemented") as Error & { code?: string }; + error.code = "ERR_METHOD_NOT_IMPLEMENTED"; + throw error; + } + this._fileHandle = fdOption; + this.fd = fdOption.fd; + this.pending = false; + this._handleCloseListener = () => { + if (!this.closed) { this.closed = true; - this.emit('close'); + this.destroyed = true; + this.readable = false; + this.emit("close"); + } + }; + this._fileHandle.on("close", this._handleCloseListener); + } else { + this._streamFs = validateStreamFsOverride(_options?.fs, ["open", "read", "close"]); + if (typeof fdOption === "number") { + this.fd = fdOption; + this.pending = false; + } + } + + if (this._signal) { + if (this._signal.aborted) { + queueMicrotask(() => { + void this._abort(this._signal?.reason); }); + } else { + this._signal.addEventListener("abort", () => { + void this._abort(this._signal?.reason); + }); + } + } + + if (this.fd === null) { + queueMicrotask(() => { + void this._openIfNeeded(); }); + } + } - return this.readableEncoding ? chunk.toString(this.readableEncoding) : chunk; - } catch (err) { - this.errored = err as Error; - this.emit('error', err); - return null; + private _emitOpen(fd: number): void { + this.fd = fd; + this.pending = false; + this.emit("open", fd); + if (this._started || this.readableFlowing) { + this._scheduleRead(); } } - pipe(destination: T, _options?: { end?: boolean }): T { - const content = this._loadContent(); - const start = this._options?.start ?? 0; - const end = this._options?.end ?? 
content.length; - const chunk = content.slice(start, end); + private async _openIfNeeded(): Promise { + if (this.fd !== null || this._opening || this.destroyed || this.closed) { + return; + } + const pathStr = + typeof this.path === "string" + ? this.path + : this.path instanceof Buffer + ? this.path.toString() + : null; + if (!pathStr) { + this._handleStreamError(createFsError("EBADF", "EBADF: bad file descriptor", "read")); + return; + } - this.bytesRead = chunk.length; - this.readableDidRead = true; + this._opening = true; + const opener = (this._streamFs?.open ?? fs.open).bind(this._streamFs ?? fs); + opener(pathStr, "r", 0o666, (error: Error | null, fd?: number) => { + this._opening = false; + if (error || typeof fd !== "number") { + this._handleStreamError((error as Error) ?? createFsError("EBADF", "EBADF: bad file descriptor", "open")); + return; + } + this._emitOpen(fd); + }); + } - if (typeof destination.write === 'function') { - destination.write(chunk as unknown as string); + private async _closeUnderlying(): Promise { + if (this._fileHandle) { + if (!this._fileHandle.closed) { + await this._fileHandle.close(); + } + return; } - if (typeof destination.end === 'function') { - Promise.resolve().then(() => destination.end()); + if (this.fd !== null && this.fd >= 0) { + const fd = this.fd; + const closer = (this._streamFs?.close ?? fs.close).bind(this._streamFs ?? 
fs); + await new Promise((resolve) => { + closer(fd, () => resolve()); + }); + this.fd = -1; + } + } + + private _scheduleRead(): void { + if (this._readScheduled || this._reading || this.readableFlowing === false || this.destroyed || this.closed) { + return; + } + this._readScheduled = true; + queueMicrotask(() => { + this._readScheduled = false; + void this._readNextChunk(); + }); + } + + private async _readNextChunk(): Promise { + if (this._reading || this.destroyed || this.closed || this.readableFlowing === false) { + return; + } + throwIfAborted(this._signal); + if (this.fd === null) { + await this._openIfNeeded(); + return; + } + if (this._remaining === 0) { + await this._finishReadable(); + return; + } + + const nextLength = this._remaining === null + ? this.readableHighWaterMark + : Math.min(this.readableHighWaterMark, this._remaining); + const target = Buffer.alloc(nextLength); + + this._reading = true; + const onRead = async (error: Error | null, bytesRead: number = 0): Promise => { + this._reading = false; + if (error) { + this._handleStreamError(error); + return; + } + if (bytesRead === 0) { + await this._finishReadable(); + return; + } + + this.bytesRead += bytesRead; + this.readableDidRead = true; + if (typeof this._position === "number") { + this._position += bytesRead; + } + if (this._remaining !== null) { + this._remaining -= bytesRead; + } + + const chunk = target.subarray(0, bytesRead); + this.emit("data", this.readableEncoding ? chunk.toString(this.readableEncoding) : Buffer.from(chunk)); + + if (this._remaining === 0) { + await this._finishReadable(); + return; + } + this._scheduleRead(); + }; + + if (this._fileHandle) { + try { + const result = await this._fileHandle.read(target, 0, nextLength, this._position); + await onRead(null, result.bytesRead); + } catch (error) { + await onRead(error as Error); + } + return; } + const reader = (this._streamFs?.read ?? fs.read).bind(this._streamFs ?? 
fs); + reader(this.fd, target, 0, nextLength, this._position, (error: Error | null, bytesRead?: number) => { + void onRead(error, bytesRead ?? 0); + }); + } + + private async _finishReadable(): Promise { + if (this.readableEnded) { + return; + } this.readable = false; this.readableEnded = true; + this.emit("end"); + if (this.autoClose) { + this.destroy(); + } + } + + private _handleStreamError(error: Error): void { + if (this.closed) { + return; + } + this.errored = error; + this.emit("error", error); + if (this.autoClose) { + this.destroy(); + } else { + this.readable = false; + } + } + + private async _abort(reason?: unknown): Promise { + if (this.closed || this.destroyed) { + return; + } + this.readableAborted = true; + this.errored = createAbortError(reason); + this.emit("error", this.errored); + if (this._fileHandle) { + this.destroyed = true; + this.readable = false; + this.closed = true; + this.emit("close"); + return; + } + if (this.autoClose) { + this.destroy(); + return; + } this.closed = true; + this.emit("close"); + } - Promise.resolve().then(() => { - this.emit('end'); - this.emit('close'); - }); + private async _readAllContent(): Promise { + const chunks: Buffer[] = []; + let totalLength = 0; + const savedFlowing = this.readableFlowing; + this.readableFlowing = false; + while (this._remaining !== 0) { + if (this.fd === null) { + await this._openIfNeeded(); + } + if (this.fd === null) { + break; + } + const nextLength = this._remaining === null + ? 
FILE_HANDLE_READ_CHUNK_BYTES + : Math.min(FILE_HANDLE_READ_CHUNK_BYTES, this._remaining); + const target = Buffer.alloc(nextLength); + let bytesRead = 0; + if (this._fileHandle) { + bytesRead = (await this._fileHandle.read(target, 0, nextLength, this._position)).bytesRead; + } else { + bytesRead = fs.readSync(this.fd, target, 0, nextLength, this._position); + } + if (bytesRead === 0) { + break; + } + const chunk = target.subarray(0, bytesRead); + chunks.push(chunk); + totalLength += bytesRead; + if (typeof this._position === "number") { + this._position += bytesRead; + } + if (this._remaining !== null) { + this._remaining -= bytesRead; + } + } + this.readableFlowing = savedFlowing; + return Buffer.concat(chunks, totalLength); + } + on(event: string | symbol, listener: (...args: unknown[]) => void): this { + const listeners = this._listeners.get(event) ?? []; + listeners.push(listener); + this._listeners.set(event, listeners); + if (event === "data") { + this._started = true; + this.readableFlowing = true; + this._scheduleRead(); + } + return this; + } + + once(event: string | symbol, listener: (...args: unknown[]) => void): this { + const wrapper = (...args: unknown[]): void => { + this.off(event, wrapper); + listener(...args); + }; + (wrapper as { _originalListener?: typeof listener })._originalListener = listener; + return this.on(event, wrapper); + } + + off(event: string | symbol, listener: (...args: unknown[]) => void): this { + const listeners = this._listeners.get(event); + if (!listeners) { + return this; + } + const index = listeners.findIndex( + (fn) => fn === listener || (fn as { _originalListener?: typeof listener })._originalListener === listener, + ); + if (index >= 0) { + listeners.splice(index, 1); + } + return this; + } + + removeListener(event: string | symbol, listener: (...args: unknown[]) => void): this { + return this.off(event, listener); + } + + removeAllListeners(event?: string | symbol): this { + if (event === undefined) { + 
this._listeners.clear(); + } else { + this._listeners.delete(event); + } + return this; + } + + emit(event: string | symbol, ...args: unknown[]): boolean { + const listeners = this._listeners.get(event); + if (!listeners?.length) { + return false; + } + listeners.slice().forEach((listener) => listener(...args)); + return true; + } + + read(): Buffer | string | null { + return null; + } + + pipe(destination: T, _options?: { end?: boolean }): T { + this.on("data", (chunk) => { + destination.write(chunk as string); + }); + this.on("end", () => { + destination.end?.(); + }); + this.resume(); return destination; } @@ -405,10 +1594,9 @@ class ReadStream { } resume(): this { + this._started = true; this.readableFlowing = true; - if (!this._started) { - this._startReading(); - } + this._scheduleRead(); return this; } @@ -418,135 +1606,179 @@ class ReadStream { } destroy(error?: Error): this { - if (this.destroyed) return this; + if (this.destroyed) { + return this; + } this.destroyed = true; this.readable = false; if (error) { this.errored = error; - this.emit('error', error); + this.emit("error", error); } - this.emit('close'); - this.closed = true; + queueMicrotask(() => { + void this._closeUnderlying().then(() => { + if (!this.closed) { + this.closed = true; + this.emit("close"); + } + }); + }); return this; } close(callback?: (err?: Error | null) => void): void { - if (this.closed) { - if (callback) Promise.resolve().then(() => callback(null)); - return; + this.destroy(); + if (callback) { + queueMicrotask(() => callback(null)); } - this.closed = true; - this.readable = false; - this.destroyed = true; - Promise.resolve().then(() => { - this.emit('close'); - if (callback) callback(null); - }); } - // Symbol.asyncIterator for async iteration async *[Symbol.asyncIterator](): AsyncIterator { - const content = this._loadContent(); - const start = this._options?.start ?? 0; - const end = this._options?.end ?? 
content.length; - const chunk = content.slice(start, end); - yield this.readableEncoding ? chunk.toString(this.readableEncoding) : chunk; + const content = await this._readAllContent(); + yield this.readableEncoding ? content.toString(this.readableEncoding) : content; } } -// WriteStream class for createWriteStream -// This provides a type-safe implementation that satisfies nodeFs.WriteStream -const MAX_WRITE_STREAM_BYTES = 16 * 1024 * 1024; // 16MB cap to prevent memory exhaustion -// We use 'as' assertion at the return site since the full interface is complex +const MAX_WRITE_STREAM_BYTES = 16 * 1024 * 1024; + class WriteStream { - // WriteStream-specific properties - bytesWritten: number = 0; - path: string | Buffer; - pending: boolean = false; - - // Writable stream properties - writable: boolean = true; - writableAborted: boolean = false; - writableEnded: boolean = false; - writableFinished: boolean = false; - writableHighWaterMark: number = 16384; - writableLength: number = 0; - writableObjectMode: boolean = false; - writableCorked: number = 0; - destroyed: boolean = false; - closed: boolean = false; + bytesWritten = 0; + path: string | Buffer | null; + pending = false; + writable = true; + writableAborted = false; + writableEnded = false; + writableFinished = false; + writableHighWaterMark = 16384; + writableLength = 0; + writableObjectMode = false; + writableCorked = 0; + destroyed = false; + closed = false; errored: Error | null = null; - writableNeedDrain: boolean = false; + writableNeedDrain = false; + fd: number | null = null; + autoClose = true; - // Internal state private _chunks: Uint8Array[] = []; private _listeners: Map void>> = new Map(); - - constructor(filePath: string | Buffer, _options?: { encoding?: BufferEncoding; flags?: string; mode?: number }) { + private _fileHandle: FileHandle | null = null; + private _streamFs?: StreamFsMethods; + + constructor( + filePath: string | Buffer | null, + private _options?: { encoding?: BufferEncoding; 
flags?: string; mode?: number; fd?: number | FileHandle; fs?: unknown; autoClose?: boolean } + ) { + const fdOption = normalizeStreamFd(_options?.fd); this.path = filePath; + this.autoClose = _options?.autoClose !== false; + this._streamFs = validateStreamFsOverride(_options?.fs, ["open", "close", "write"]); + if (_options?.fs !== undefined) { + validateStreamFsOverride(_options?.fs, ["writev"]); + } + if (fdOption instanceof FileHandle) { + this._fileHandle = fdOption; + this.fd = fdOption.fd; + return; + } + if (typeof fdOption === "number") { + this.fd = fdOption; + return; + } + + const pathStr = + typeof this.path === "string" + ? this.path + : this.path instanceof Buffer + ? this.path.toString() + : null; + if (!pathStr) { + throw createFsError("EBADF", "EBADF: bad file descriptor", "write"); + } + this.fd = fs.openSync(pathStr, _options?.flags ?? "w", _options?.mode); + queueMicrotask(() => { + if (this.fd !== null && this.fd >= 0) { + this.emit("open", this.fd); + } + }); } - // WriteStream-specific methods - close(callback?: (err?: NodeJS.ErrnoException | null) => void): void { - if (this.closed) { - if (callback) Promise.resolve().then(() => callback(null)); + private async _closeUnderlying(): Promise { + if (this._fileHandle) { + if (!this._fileHandle.closed) { + await this._fileHandle.close(); + } return; } - this.closed = true; - this.writable = false; - Promise.resolve().then(() => { - this.emit("close"); - if (callback) callback(null); + if (this.fd !== null && this.fd >= 0) { + const fd = this.fd; + const closer = (this._streamFs?.close ?? fs.close).bind(this._streamFs ?? 
fs); + await new Promise((resolve) => { + closer(fd, () => resolve()); + }); + this.fd = -1; + } + } + + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void { + queueMicrotask(() => { + void this._closeUnderlying().then(() => { + if (!this.closed) { + this.closed = true; + this.writable = false; + this.emit("close"); + } + callback?.(null); + }); }); } - // Writable methods - write(chunk: unknown, encodingOrCallback?: BufferEncoding | ((error: Error | null | undefined) => void), callback?: (error: Error | null | undefined) => void): boolean { + write( + chunk: unknown, + encodingOrCallback?: BufferEncoding | ((error: Error | null | undefined) => void), + callback?: (error: Error | null | undefined) => void + ): boolean { if (this.writableEnded || this.destroyed) { - const err = new Error("write after end"); - if (typeof encodingOrCallback === "function") { - Promise.resolve().then(() => encodingOrCallback(err)); - } else if (callback) { - Promise.resolve().then(() => callback(err)); - } + const error = new Error("write after end"); + const cb = typeof encodingOrCallback === "function" ? encodingOrCallback : callback; + queueMicrotask(() => cb?.(error)); return false; } let data: Uint8Array; if (typeof chunk === "string") { data = Buffer.from(chunk, typeof encodingOrCallback === "string" ? 
encodingOrCallback : "utf8"); - } else if (Buffer.isBuffer(chunk)) { + } else if (isArrayBufferView(chunk)) { data = new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } else if (chunk instanceof Uint8Array) { - data = chunk; } else { - data = Buffer.from(String(chunk)); + throw createInvalidArgTypeError("chunk", "a string, Buffer, TypedArray, or DataView", chunk); } - // Cap buffered data to prevent memory exhaustion if (this.writableLength + data.length > MAX_WRITE_STREAM_BYTES) { - const err = new Error(`WriteStream buffer exceeded ${MAX_WRITE_STREAM_BYTES} bytes`); - this.errored = err; + const error = new Error(`WriteStream buffer exceeded ${MAX_WRITE_STREAM_BYTES} bytes`); + this.errored = error; this.destroyed = true; this.writable = false; const cb = typeof encodingOrCallback === "function" ? encodingOrCallback : callback; - if (cb) Promise.resolve().then(() => cb(err)); - Promise.resolve().then(() => this.emit("error", err)); + queueMicrotask(() => { + cb?.(error); + this.emit("error", error); + }); return false; } this._chunks.push(data); this.bytesWritten += data.length; this.writableLength += data.length; - const cb = typeof encodingOrCallback === "function" ? 
encodingOrCallback : callback; - if (cb) Promise.resolve().then(() => cb(null)); - + queueMicrotask(() => cb?.(null)); return true; } end(chunkOrCb?: unknown, encodingOrCallback?: BufferEncoding | (() => void), callback?: () => void): this { - if (this.writableEnded) return this; + if (this.writableEnded) { + return this; + } let cb: (() => void) | undefined; if (typeof chunkOrCb === "function") { @@ -564,29 +1796,50 @@ class WriteStream { } this.writableEnded = true; - - // Concatenate and write all chunks - const totalLength = this._chunks.reduce((sum, c) => sum + c.length, 0); - const result = new Uint8Array(totalLength); - let offset = 0; - for (const c of this._chunks) { - result.set(c, offset); - offset += c.length; - } - - // Write to filesystem - const pathStr = typeof this.path === "string" ? this.path : this.path.toString(); - fs.writeFileSync(pathStr, result); - this.writable = false; this.writableFinished = true; this.writableLength = 0; - Promise.resolve().then(() => { - this.emit("finish"); - this.emit("close"); - this.closed = true; - if (cb) cb(); + queueMicrotask(() => { + void (async () => { + try { + if (this._fileHandle) { + for (const chunk of this._chunks) { + await this._fileHandle.write(chunk, 0, chunk.byteLength, undefined); + } + if (this.autoClose && !this._fileHandle.closed) { + await this._fileHandle.close(); + } + } else if (this.fd !== null && this.fd >= 0) { + for (const chunk of this._chunks) { + fs.writeSync(this.fd, chunk, 0, chunk.byteLength, null); + } + if (this.autoClose) { + await this._closeUnderlying(); + } + } else { + const pathStr = + typeof this.path === "string" + ? this.path + : this.path instanceof Buffer + ? 
this.path.toString() + : null; + if (!pathStr) { + throw createFsError("EBADF", "EBADF: bad file descriptor", "write"); + } + fs.writeFileSync(pathStr, Buffer.concat(this._chunks.map((chunk) => Buffer.from(chunk)))); + } + this.emit("finish"); + if (this.autoClose && !this.closed) { + this.closed = true; + this.emit("close"); + } + cb?.(); + } catch (error) { + this.errored = error as Error; + this.emit("error", error); + } + })(); }); return this; @@ -601,82 +1854,59 @@ class WriteStream { } uncork(): void { - if (this.writableCorked > 0) this.writableCorked--; + if (this.writableCorked > 0) { + this.writableCorked--; + } } destroy(error?: Error): this { - if (this.destroyed) return this; + if (this.destroyed) { + return this; + } this.destroyed = true; this.writable = false; if (error) { this.errored = error; - Promise.resolve().then(() => { - this.emit("error", error); - this.emit("close"); - this.closed = true; - }); - } else { - Promise.resolve().then(() => { - this.emit("close"); - this.closed = true; - }); + this.emit("error", error); } + queueMicrotask(() => { + void this._closeUnderlying().then(() => { + if (!this.closed) { + this.closed = true; + this.emit("close"); + } + }); + }); return this; } - // Internal methods (required by Writable interface but not typically called directly) - _write(_chunk: unknown, _encoding: BufferEncoding, callback: (error?: Error | null) => void): void { - callback(); - } - - _destroy(_error: Error | null, callback: (error?: Error | null) => void): void { - callback(); - } - - _final(callback: (error?: Error | null) => void): void { - callback(); - } - - // EventEmitter methods addListener(event: string | symbol, listener: (...args: unknown[]) => void): this { return this.on(event, listener); } on(event: string | symbol, listener: (...args: unknown[]) => void): this { - const listeners = this._listeners.get(event) || []; + const listeners = this._listeners.get(event) ?? 
[]; listeners.push(listener); this._listeners.set(event, listeners); return this; } once(event: string | symbol, listener: (...args: unknown[]) => void): this { - const wrapper = (...args: unknown[]) => { + const wrapper = (...args: unknown[]): void => { this.removeListener(event, wrapper); listener(...args); }; return this.on(event, wrapper); } - prependListener(event: string | symbol, listener: (...args: unknown[]) => void): this { - const listeners = this._listeners.get(event) || []; - listeners.unshift(listener); - this._listeners.set(event, listeners); - return this; - } - - prependOnceListener(event: string | symbol, listener: (...args: unknown[]) => void): this { - const wrapper = (...args: unknown[]) => { - this.removeListener(event, wrapper); - listener(...args); - }; - return this.prependListener(event, wrapper); - } - removeListener(event: string | symbol, listener: (...args: unknown[]) => void): this { const listeners = this._listeners.get(event); - if (listeners) { - const idx = listeners.indexOf(listener); - if (idx !== -1) listeners.splice(idx, 1); + if (!listeners) { + return this; + } + const index = listeners.indexOf(listener); + if (index >= 0) { + listeners.splice(index, 1); } return this; } @@ -686,48 +1916,23 @@ class WriteStream { } removeAllListeners(event?: string | symbol): this { - if (event !== undefined) { - this._listeners.delete(event); - } else { + if (event === undefined) { this._listeners.clear(); + } else { + this._listeners.delete(event); } return this; } emit(event: string | symbol, ...args: unknown[]): boolean { const listeners = this._listeners.get(event); - if (listeners && listeners.length > 0) { - listeners.slice().forEach(l => l(...args)); - return true; + if (!listeners?.length) { + return false; } - return false; - } - - listeners(event: string | symbol): Function[] { - return [...(this._listeners.get(event) || [])]; - } - - rawListeners(event: string | symbol): Function[] { - return this.listeners(event); - } - - 
listenerCount(event: string | symbol): number { - return (this._listeners.get(event) || []).length; - } - - eventNames(): (string | symbol)[] { - return [...this._listeners.keys()]; - } - - getMaxListeners(): number { - return 10; - } - - setMaxListeners(_n: number): this { - return this; + listeners.slice().forEach((listener) => listener(...args)); + return true; } - // Pipe methods (minimal implementation) pipe(destination: T, _options?: { end?: boolean }): T { return destination; } @@ -736,22 +1941,58 @@ class WriteStream { return this; } - // Additional required methods - compose(_stream: T | Iterable | AsyncIterable, _options?: { signal: AbortSignal }): T { - throw new Error("compose not implemented in sandbox"); - } - [Symbol.asyncDispose](): Promise { return Promise.resolve(); } } +const ReadStreamClass = ReadStream; +const WriteStreamClass = WriteStream; + +const ReadStreamFactory = function ReadStream( + path: string | Buffer | null, + options?: { + encoding?: BufferEncoding; + start?: number; + end?: number; + highWaterMark?: number; + bufferSize?: number; + autoClose?: boolean; + fd?: number | FileHandle; + fs?: unknown; + signal?: AbortSignal; + }, +): ReadStream { + validateEncodingOption(options); + return new ReadStreamClass(path, options); +}; +ReadStreamFactory.prototype = ReadStream.prototype; + +const WriteStreamFactory = function WriteStream( + path: string | Buffer | null, + options?: { + encoding?: BufferEncoding; + flags?: string; + mode?: number; + fd?: number | FileHandle; + fs?: unknown; + autoClose?: boolean; + }, +): WriteStream { + validateEncodingOption(options); + validateWriteStreamStartOption((options ?? 
{}) as Record); + return new WriteStreamClass(path, options); +}; +WriteStreamFactory.prototype = WriteStream.prototype; + // Parse flags string to number function parseFlags(flags: OpenMode): number { if (typeof flags === "number") return flags; const flagMap: Record = { r: O_RDONLY, "r+": O_RDWR, + rs: O_RDONLY, + "rs+": O_RDWR, w: O_WRONLY | O_CREAT | O_TRUNC, "w+": O_RDWR | O_CREAT | O_TRUNC, a: O_WRONLY | O_APPEND | O_CREAT, @@ -944,10 +2185,7 @@ type NodeCallback = (err: NodeJS.ErrnoException | null, result?: T) => void; // Helper to convert PathLike to string function toPathString(path: PathLike): string { - if (typeof path === "string") return path; - if (Buffer.isBuffer(path)) return path.toString("utf8"); - if (path instanceof URL) return path.pathname; - return String(path); + return normalizePathLike(path); } // Note: Path normalization is handled by VirtualFileSystem, not here. @@ -1015,7 +2253,10 @@ const fs = { // Sync methods readFileSync(path: PathOrFileDescriptor, options?: ReadFileOptions): string | Buffer { - const rawPath = typeof path === "number" ? _fdGetPath.applySync(undefined, [path]) : toPathString(path); + validateEncodingOption(options); + const rawPath = typeof path === "number" + ? _fdGetPath.applySync(undefined, [normalizeFdInteger(path)]) + : normalizePathLike(path); if (!rawPath) throw createFsError("EBADF", "EBADF: bad file descriptor", "read"); const pathStr = rawPath; const encoding = @@ -1065,7 +2306,10 @@ const fs = { data: string | NodeJS.ArrayBufferView, _options?: WriteFileOptions ): void { - const rawPath = typeof file === "number" ? _fdGetPath.applySync(undefined, [file]) : toPathString(file); + validateEncodingOption(_options); + const rawPath = typeof file === "number" + ? 
_fdGetPath.applySync(undefined, [normalizeFdInteger(file)]) + : normalizePathLike(file); if (!rawPath) throw createFsError("EBADF", "EBADF: bad file descriptor", "write"); const pathStr = rawPath; @@ -1089,6 +2333,7 @@ const fs = { data: string | Uint8Array, options?: WriteFileOptions ): void { + validateEncodingOption(options); const existing = fs.existsSync(path as PathLike) ? (fs.readFileSync(path, "utf8") as string) : ""; @@ -1097,7 +2342,8 @@ const fs = { }, readdirSync(path: PathLike, options?: nodeFs.ObjectEncodingOptions & { withFileTypes?: boolean; recursive?: boolean }): string[] | Dirent[] { - const rawPath = toPathString(path); + validateEncodingOption(options); + const rawPath = normalizePathLike(path); const pathStr = rawPath; let entriesJson: string; try { @@ -1126,7 +2372,7 @@ const fs = { }, mkdirSync(path: PathLike, options?: MakeDirectoryOptions | Mode): string | undefined { - const rawPath = toPathString(path); + const rawPath = normalizePathLike(path); const pathStr = rawPath; const recursive = typeof options === "object" ? options?.recursive ?? 
false : false; _fs.mkdir.applySyncPromise(undefined, [pathStr, recursive]); @@ -1134,7 +2380,7 @@ const fs = { }, rmdirSync(path: PathLike, _options?: RmDirOptions): void { - const pathStr = toPathString(path); + const pathStr = normalizePathLike(path); _fs.rmdir.applySyncPromise(undefined, [pathStr]); }, @@ -1172,12 +2418,15 @@ const fs = { }, existsSync(path: PathLike): boolean { - const pathStr = toPathString(path); + const pathStr = tryNormalizeExistsPath(path); + if (!pathStr) { + return false; + } return _fs.exists.applySyncPromise(undefined, [pathStr]); }, statSync(path: PathLike, _options?: nodeFs.StatSyncOptions): Stats { - const rawPath = toPathString(path); + const rawPath = normalizePathLike(path); const pathStr = rawPath; let statJson: string; try { @@ -1212,7 +2461,7 @@ const fs = { }, lstatSync(path: PathLike, _options?: nodeFs.StatSyncOptions): Stats { - const pathStr = toPathString(path); + const pathStr = normalizePathLike(path); const statJson = bridgeCall(() => _fs.lstat.applySyncPromise(undefined, [pathStr]), "lstat", pathStr); const stat = JSON.parse(statJson) as { mode: number; @@ -1228,13 +2477,13 @@ const fs = { }, unlinkSync(path: PathLike): void { - const pathStr = toPathString(path); + const pathStr = normalizePathLike(path); _fs.unlink.applySyncPromise(undefined, [pathStr]); }, renameSync(oldPath: PathLike, newPath: PathLike): void { - const oldPathStr = toPathString(oldPath); - const newPathStr = toPathString(newPath); + const oldPathStr = normalizePathLike(oldPath, "oldPath"); + const newPathStr = normalizePathLike(newPath, "newPath"); _fs.rename.applySyncPromise(undefined, [oldPathStr, newPathStr]); }, @@ -1293,6 +2542,7 @@ const fs = { // Temp directory creation mkdtempSync(prefix: string, _options?: nodeFs.EncodingOption): string { + validateEncodingOption(_options); const suffix = Math.random().toString(36).slice(2, 8); const dirPath = prefix + suffix; fs.mkdirSync(dirPath, { recursive: true }); @@ -1301,7 +2551,7 @@ const fs = { 
// Directory handle (sync) opendirSync(path: PathLike, _options?: nodeFs.OpenDirOptions): Dir { - const pathStr = toPathString(path); + const pathStr = normalizePathLike(path); // Verify directory exists const stat = fs.statSync(pathStr); if (!stat.isDirectory()) { @@ -1317,12 +2567,10 @@ const fs = { // File descriptor methods - openSync(path: PathLike, flags: OpenMode, _mode?: Mode | null): number { - const pathStr = toPathString(path); - const numFlags = parseFlags(flags); - const modeNum = _mode !== null && _mode !== undefined - ? (typeof _mode === "string" ? parseInt(_mode as string, 8) : _mode as number) - : undefined; + openSync(path: PathLike, flags?: OpenMode, _mode?: Mode | null): number { + const pathStr = normalizePathLike(path); + const numFlags = parseFlags(flags ?? "r"); + const modeNum = normalizeOpenModeArgument(_mode); try { return _fdOpen.applySyncPromise(undefined, [pathStr, numFlags, modeNum]); } catch (e: any) { @@ -1334,6 +2582,7 @@ const fs = { }, closeSync(fd: number): void { + normalizeFdInteger(fd); try { _fdClose.applySyncPromise(undefined, [fd]); } catch (e: any) { @@ -1346,17 +2595,15 @@ const fs = { readSync( fd: number, buffer: NodeJS.ArrayBufferView, - offset?: number | null, + offset?: number | Record | null, length?: number | null, position?: nodeFs.ReadPosition | null ): number { - const readOffset = offset ?? 0; - const readLength = length ?? (buffer.byteLength - readOffset); - const pos = (position !== null && position !== undefined) ? Number(position) : undefined; + const normalized = normalizeReadSyncArgs(buffer, offset, length, position); let base64: string; try { - base64 = _fdRead.applySyncPromise(undefined, [fd, readLength, pos ?? null]); + base64 = _fdRead.applySyncPromise(undefined, [fd, normalized.length, normalized.position ?? null]); } catch (e: any) { const msg = e?.message ?? 
String(e); if (msg.includes("EBADF")) throw createFsError("EBADF", msg, "read"); @@ -1364,9 +2611,13 @@ const fs = { } const bytes = Buffer.from(base64, "base64"); - const targetBuffer = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); - for (let i = 0; i < bytes.length && i < readLength; i++) { - targetBuffer[readOffset + i] = bytes[i]; + const targetBuffer = new Uint8Array( + normalized.buffer.buffer, + normalized.buffer.byteOffset, + normalized.buffer.byteLength, + ); + for (let i = 0; i < bytes.length && i < normalized.length; i++) { + targetBuffer[normalized.offset + i] = bytes[i]; } return bytes.length; }, @@ -1374,27 +2625,24 @@ const fs = { writeSync( fd: number, buffer: string | NodeJS.ArrayBufferView, - offsetOrPosition?: number | null, + offsetOrPosition?: number | Record | null, lengthOrEncoding?: number | BufferEncoding | null, position?: number | null ): number { - - // Encode data as base64 for bridge transfer + const normalized = normalizeWriteSyncArgs(buffer, offsetOrPosition, lengthOrEncoding, position); let dataBytes: Uint8Array; - let writePosition: number | null | undefined; - - if (typeof buffer === "string") { - dataBytes = Buffer.from(buffer); - writePosition = offsetOrPosition; + if (typeof normalized.buffer === "string") { + dataBytes = Buffer.from(normalized.buffer, normalized.encoding); } else { - const offset = offsetOrPosition ?? 0; - const length = (typeof lengthOrEncoding === "number" ? lengthOrEncoding : null) ?? (buffer.byteLength - offset); - dataBytes = new Uint8Array(buffer.buffer, buffer.byteOffset + offset, length); - writePosition = position; + dataBytes = new Uint8Array( + normalized.buffer.buffer, + normalized.buffer.byteOffset + normalized.offset, + normalized.length, + ); } const base64 = Buffer.from(dataBytes).toString("base64"); - const pos = (writePosition !== null && writePosition !== undefined) ? writePosition : null; + const pos = normalized.position ?? 
null; try { return _fdWrite.applySyncPromise(undefined, [fd, base64, pos]); @@ -1406,6 +2654,7 @@ const fs = { }, fstatSync(fd: number): Stats { + normalizeFdInteger(fd); let raw: string; try { raw = _fdFstat.applySyncPromise(undefined, [fd]); @@ -1418,6 +2667,7 @@ const fs = { }, ftruncateSync(fd: number, len?: number): void { + normalizeFdInteger(fd); try { _fdFtruncate.applySyncPromise(undefined, [fd, len]); } catch (e: any) { @@ -1429,6 +2679,7 @@ const fs = { // fsync / fdatasync — no-op for in-memory VFS (validates FD exists) fsyncSync(fd: number): void { + normalizeFdInteger(fd); try { _fdFsync.applySyncPromise(undefined, [fd]); } catch (e: any) { @@ -1439,6 +2690,7 @@ const fs = { }, fdatasyncSync(fd: number): void { + normalizeFdInteger(fd); try { _fdFsync.applySyncPromise(undefined, [fd]); } catch (e: any) { @@ -1450,15 +2702,19 @@ const fs = { // readv — scatter-read into multiple buffers (delegates to readSync) readvSync(fd: number, buffers: ArrayBufferView[], position?: number | null): number { + const normalizedFd = normalizeFdInteger(fd); + const normalizedBuffers = normalizeIoVectorBuffers(buffers); let totalBytesRead = 0; - for (const buffer of buffers) { + const normalizedPosition = normalizeOptionalPosition(position); + let nextPosition = normalizedPosition; + for (const buffer of normalizedBuffers) { const target = buffer instanceof Uint8Array ? 
buffer : new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); - const bytesRead = fs.readSync(fd, target, 0, target.byteLength, position); + const bytesRead = fs.readSync(normalizedFd, target, 0, target.byteLength, nextPosition); totalBytesRead += bytesRead; - if (position !== null && position !== undefined) { - position += bytesRead; + if (nextPosition !== null) { + nextPosition += bytesRead; } // EOF — stop filling further buffers if (bytesRead < target.byteLength) break; @@ -1468,7 +2724,7 @@ const fs = { // statfs — return synthetic filesystem stats for the in-memory VFS statfsSync(path: PathLike, _options?: nodeFs.StatFsOptions): nodeFs.StatsFs { - const pathStr = toPathString(path); + const pathStr = normalizePathLike(path); // Verify path exists if (!fs.existsSync(pathStr)) { throw createFsError( @@ -1502,40 +2758,68 @@ const fs = { // Metadata and link sync methods — delegate to VFS via host refs chmodSync(path: PathLike, mode: Mode): void { - const pathStr = toPathString(path); - const modeNum = typeof mode === "string" ? 
parseInt(mode, 8) : mode; + const pathStr = normalizePathLike(path); + const modeNum = normalizeModeArgument(mode); bridgeCall(() => _fs.chmod.applySyncPromise(undefined, [pathStr, modeNum]), "chmod", pathStr); }, chownSync(path: PathLike, uid: number, gid: number): void { - const pathStr = toPathString(path); - bridgeCall(() => _fs.chown.applySyncPromise(undefined, [pathStr, uid, gid]), "chown", pathStr); + const pathStr = normalizePathLike(path); + const normalizedUid = normalizeNumberArgument("uid", uid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + const normalizedGid = normalizeNumberArgument("gid", gid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + bridgeCall(() => _fs.chown.applySyncPromise(undefined, [pathStr, normalizedUid, normalizedGid]), "chown", pathStr); + }, + + fchmodSync(fd: number, mode: Mode): void { + const normalizedFd = normalizeFdInteger(fd); + const pathStr = _fdGetPath.applySync(undefined, [normalizedFd]); + if (!pathStr) { + throw createFsError("EBADF", "EBADF: bad file descriptor", "chmod"); + } + fs.chmodSync(pathStr, normalizeModeArgument(mode)); + }, + + fchownSync(fd: number, uid: number, gid: number): void { + const normalizedFd = normalizeFdInteger(fd); + const pathStr = _fdGetPath.applySync(undefined, [normalizedFd]); + if (!pathStr) { + throw createFsError("EBADF", "EBADF: bad file descriptor", "chown"); + } + fs.chownSync(pathStr, uid, gid); + }, + + lchownSync(path: PathLike, uid: number, gid: number): void { + const pathStr = normalizePathLike(path); + const normalizedUid = normalizeNumberArgument("uid", uid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + const normalizedGid = normalizeNumberArgument("gid", gid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + bridgeCall(() => _fs.chown.applySyncPromise(undefined, [pathStr, normalizedUid, normalizedGid]), "chown", pathStr); }, linkSync(existingPath: PathLike, newPath: PathLike): void { - const existingStr = toPathString(existingPath); - 
const newStr = toPathString(newPath); + const existingStr = normalizePathLike(existingPath, "existingPath"); + const newStr = normalizePathLike(newPath, "newPath"); bridgeCall(() => _fs.link.applySyncPromise(undefined, [existingStr, newStr]), "link", newStr); }, symlinkSync(target: PathLike, path: PathLike, _type?: string | null): void { - const targetStr = toPathString(target); - const pathStr = toPathString(path); + const targetStr = normalizePathLike(target, "target"); + const pathStr = normalizePathLike(path); bridgeCall(() => _fs.symlink.applySyncPromise(undefined, [targetStr, pathStr]), "symlink", pathStr); }, readlinkSync(path: PathLike, _options?: nodeFs.EncodingOption): string { - const pathStr = toPathString(path); + validateEncodingOption(_options); + const pathStr = normalizePathLike(path); return bridgeCall(() => _fs.readlink.applySyncPromise(undefined, [pathStr]), "readlink", pathStr); }, truncateSync(path: PathLike, len?: number | null): void { - const pathStr = toPathString(path); + const pathStr = normalizePathLike(path); bridgeCall(() => _fs.truncate.applySyncPromise(undefined, [pathStr, len ?? 0]), "truncate", pathStr); }, utimesSync(path: PathLike, atime: string | number | Date, mtime: string | number | Date): void { - const pathStr = toPathString(path); + const pathStr = normalizePathLike(path); const atimeNum = typeof atime === "number" ? atime : new Date(atime).getTime() / 1000; const mtimeNum = typeof mtime === "number" ? 
mtime : new Date(mtime).getTime() / 1000; bridgeCall(() => _fs.utimes.applySyncPromise(undefined, [pathStr, atimeNum, mtimeNum]), "utimes", pathStr); @@ -1573,6 +2857,8 @@ const fs = { options = undefined; } if (callback) { + normalizePathLike(path); + validateEncodingOption(options); try { callback(null, fs.readFileSync(path, options)); } catch (e) { @@ -1594,6 +2880,8 @@ const fs = { options = undefined; } if (callback) { + normalizePathLike(path); + validateEncodingOption(options); try { fs.writeFileSync(path, data, options); callback(null); @@ -1618,6 +2906,8 @@ const fs = { options = undefined; } if (callback) { + normalizePathLike(path); + validateEncodingOption(options); try { fs.appendFileSync(path, data, options); callback(null); @@ -1641,6 +2931,8 @@ const fs = { options = undefined; } if (callback) { + normalizePathLike(path); + validateEncodingOption(options); try { callback(null, fs.readdirSync(path, options)); } catch (e) { @@ -1663,6 +2955,7 @@ const fs = { options = undefined; } if (callback) { + normalizePathLike(path); try { fs.mkdirSync(path, options); callback(null); @@ -1677,6 +2970,7 @@ const fs = { rmdir(path: string, callback?: NodeCallback): Promise | void { if (callback) { + normalizePathLike(path); // Defer callback to next tick to allow event loop to process stream events const cb = callback; try { @@ -1754,25 +3048,22 @@ const fs = { }, exists(path: string, callback?: (exists: boolean) => void): Promise | void { - if (callback) { - callback(fs.existsSync(path)); - } else { - return Promise.resolve(fs.existsSync(path)); + validateCallback(callback, "cb"); + if (path === undefined) { + throw createInvalidArgTypeError("path", "of type string or an instance of Buffer or URL", path); } + queueMicrotask(() => callback(Boolean(tryNormalizeExistsPath(path) && fs.existsSync(path)))); }, stat(path: string, callback?: NodeCallback): Promise | void { - if (callback) { - // Defer callback to next tick to allow event loop to process stream events - 
const cb = callback; - try { - const stats = fs.statSync(path); - queueMicrotask(() => cb(null, stats)); - } catch (e) { - queueMicrotask(() => cb(e as Error)); - } - } else { - return Promise.resolve(fs.statSync(path)); + validateCallback(callback, "cb"); + normalizePathLike(path); + const cb = callback; + try { + const stats = fs.statSync(path); + queueMicrotask(() => cb(null, stats)); + } catch (e) { + queueMicrotask(() => cb(e as Error)); } }, @@ -1793,6 +3084,7 @@ const fs = { unlink(path: string, callback?: NodeCallback): Promise | void { if (callback) { + normalizePathLike(path); // Defer callback to next tick to allow event loop to process stream events const cb = callback; try { @@ -1812,6 +3104,8 @@ const fs = { callback?: NodeCallback ): Promise | void { if (callback) { + normalizePathLike(oldPath, "oldPath"); + normalizePathLike(newPath, "newPath"); // Defer callback to next tick to allow event loop to process stream events const cb = callback; try { @@ -1873,14 +3167,12 @@ const fs = { callback = options; options = undefined; } - if (callback) { - try { - callback(null, fs.mkdtempSync(prefix, options as nodeFs.EncodingOption)); - } catch (e) { - callback(e as Error); - } - } else { - return Promise.resolve(fs.mkdtempSync(prefix, options as nodeFs.EncodingOption)); + validateCallback(callback, "cb"); + validateEncodingOption(options); + try { + callback(null, fs.mkdtempSync(prefix, options as nodeFs.EncodingOption)); + } catch (e) { + callback(e as Error); } }, @@ -1906,40 +3198,43 @@ const fs = { open( path: string, - flags: OpenFlags, + flags?: OpenFlags | NodeCallback, mode?: number | NodeCallback, callback?: NodeCallback ): Promise | void { + let resolvedFlags: OpenFlags = "r"; + let resolvedMode: number | null | undefined = mode as number | null | undefined; + if (typeof flags === "function") { + callback = flags; + resolvedMode = undefined; + } else { + resolvedFlags = flags ?? 
"r"; + } if (typeof mode === "function") { callback = mode; - mode = undefined; + resolvedMode = undefined; } - if (callback) { - // Defer callback to next tick to allow event loop to process stream events - const cb = callback; - try { - const fd = fs.openSync(path, flags, mode); - queueMicrotask(() => cb(null, fd)); - } catch (e) { - queueMicrotask(() => cb(e as Error)); - } - } else { - return Promise.resolve(fs.openSync(path, flags, mode)); + validateCallback(callback, "cb"); + normalizePathLike(path); + normalizeOpenModeArgument(resolvedMode); + const cb = callback; + try { + const fd = fs.openSync(path, resolvedFlags, resolvedMode); + queueMicrotask(() => cb(null, fd)); + } catch (e) { + queueMicrotask(() => cb(e as Error)); } }, close(fd: number, callback?: NodeCallback): Promise | void { - if (callback) { - // Defer callback to next tick to allow event loop to process stream events - const cb = callback; - try { - fs.closeSync(fd); - queueMicrotask(() => cb(null)); - } catch (e) { - queueMicrotask(() => cb(e as Error)); - } - } else { - return Promise.resolve(fs.closeSync(fd)); + normalizeFdInteger(fd); + validateCallback(callback, "cb"); + const cb = callback; + try { + fs.closeSync(fd); + queueMicrotask(() => cb(null)); + } catch (e) { + queueMicrotask(() => cb(e as Error)); } }, @@ -1968,8 +3263,8 @@ const fs = { write( fd: number, buffer: string | Uint8Array, - offset?: number | NodeCallback, - length?: number | NodeCallback, + offset?: number | Record | NodeCallback, + length?: number | BufferEncoding | NodeCallback, position?: number | null | NodeCallback, callback?: NodeCallback ): Promise | void { @@ -1987,16 +3282,34 @@ const fs = { position = undefined; } if (callback) { + const normalized = normalizeWriteSyncArgs( + buffer, + offset as number | Record | null | undefined, + length as number | BufferEncoding | null | undefined, + position as number | null | undefined, + ); // Defer callback to next tick to allow event loop to process stream events 
const cb = callback; try { - const bytesWritten = fs.writeSync( - fd, - buffer, - offset as number | undefined, - length as number | undefined, - position as number | null | undefined - ); + const bytesWritten = typeof normalized.buffer === "string" + ? _fdWrite.applySyncPromise( + undefined, + [fd, Buffer.from(normalized.buffer, normalized.encoding).toString("base64"), normalized.position ?? null], + ) + : _fdWrite.applySyncPromise( + undefined, + [ + fd, + Buffer.from( + new Uint8Array( + normalized.buffer.buffer, + normalized.buffer.byteOffset + normalized.offset, + normalized.length, + ), + ).toString("base64"), + normalized.position ?? null, + ], + ); queueMicrotask(() => cb(null, bytesWritten)); } catch (e) { queueMicrotask(() => cb(e as Error)); @@ -2025,23 +3338,29 @@ const fs = { callback = position; position = null; } + const normalizedFd = normalizeFdInteger(fd); + const normalizedBuffers = normalizeIoVectorBuffers(buffers); + const normalizedPosition = normalizeOptionalPosition(position); if (callback) { try { - const bytesWritten = fs.writevSync(fd, buffers, position as number | null); - callback(null, bytesWritten, buffers); + const bytesWritten = fs.writevSync(normalizedFd, normalizedBuffers, normalizedPosition); + queueMicrotask(() => callback(null, bytesWritten, normalizedBuffers)); } catch (e) { - callback(e as Error); + queueMicrotask(() => callback(e as Error)); } } }, writevSync(fd: number, buffers: ArrayBufferView[], position?: number | null): number { + const normalizedFd = normalizeFdInteger(fd); + const normalizedBuffers = normalizeIoVectorBuffers(buffers); + let nextPosition = normalizeOptionalPosition(position); let totalBytesWritten = 0; - for (const buffer of buffers) { + for (const buffer of normalizedBuffers) { const bytes = buffer instanceof Uint8Array ? 
buffer : new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); - totalBytesWritten += fs.writeSync(fd, bytes, 0, bytes.length, position); - if (position !== null && position !== undefined) { - position += bytes.length; + totalBytesWritten += fs.writeSync(normalizedFd, bytes, 0, bytes.length, nextPosition); + if (nextPosition !== null) { + nextPosition += bytes.length; } } return totalBytesWritten; @@ -2061,28 +3380,24 @@ const fs = { // fsync / fdatasync async callback forms fsync(fd: number, callback?: NodeCallback): Promise | void { - if (callback) { - try { - fs.fsyncSync(fd); - callback(null); - } catch (e) { - callback(e as Error); - } - } else { - return Promise.resolve(fs.fsyncSync(fd)); + normalizeFdInteger(fd); + validateCallback(callback, "cb"); + try { + fs.fsyncSync(fd); + callback(null); + } catch (e) { + callback(e as Error); } }, fdatasync(fd: number, callback?: NodeCallback): Promise | void { - if (callback) { - try { - fs.fdatasyncSync(fd); - callback(null); - } catch (e) { - callback(e as Error); - } - } else { - return Promise.resolve(fs.fdatasyncSync(fd)); + normalizeFdInteger(fd); + validateCallback(callback, "cb"); + try { + fs.fdatasyncSync(fd); + callback(null); + } catch (e) { + callback(e as Error); } }, @@ -2097,12 +3412,15 @@ const fs = { callback = position; position = null; } + const normalizedFd = normalizeFdInteger(fd); + const normalizedBuffers = normalizeIoVectorBuffers(buffers); + const normalizedPosition = normalizeOptionalPosition(position); if (callback) { try { - const bytesRead = fs.readvSync(fd, buffers, position as number | null); - callback(null, bytesRead, buffers); + const bytesRead = fs.readvSync(normalizedFd, normalizedBuffers, normalizedPosition); + queueMicrotask(() => callback(null, bytesRead, normalizedBuffers)); } catch (e) { - callback(e as Error); + queueMicrotask(() => callback(e as Error)); } } }, @@ -2150,14 +3468,23 @@ const fs = { // fs.promises API // Note: Using async functions to properly 
catch sync errors and return rejected promises promises: { - async readFile(path: string, options?: ReadFileOptions) { + async readFile(path: string | FileHandle, options?: ReadFileOptions | FileHandleReadFileOptions) { + if (path instanceof FileHandle) { + return path.readFile(options as FileHandleReadFileOptions); + } return fs.readFileSync(path, options); }, - async writeFile(path: string, data: string | Uint8Array, options?: WriteFileOptions) { - return fs.writeFileSync(path, data, options); + async writeFile(path: string | FileHandle, data: unknown, options?: WriteFileOptions | FileHandleWriteFileOptions) { + if (path instanceof FileHandle) { + return path.writeFile(data, options as FileHandleWriteFileOptions); + } + return fs.writeFileSync(path, data as string | Uint8Array, options); }, - async appendFile(path: string, data: string | Uint8Array, options?: WriteFileOptions) { - return fs.appendFileSync(path, data, options); + async appendFile(path: string | FileHandle, data: unknown, options?: WriteFileOptions | FileHandleWriteFileOptions) { + if (path instanceof FileHandle) { + return path.appendFile(data, options as FileHandleWriteFileOptions); + } + return fs.appendFileSync(path, data as string | Uint8Array, options); }, async readdir(path: string, options?: ReaddirOptions) { return fs.readdirSync(path, options); @@ -2192,6 +3519,9 @@ const fs = { async opendir(path: string, options?: nodeFs.OpenDirOptions) { return fs.opendirSync(path, options); }, + async open(path: string, flags?: OpenFlags, mode?: Mode): Promise { + return new FileHandle(fs.openSync(path, flags ?? 
"r", mode)); + }, async statfs(path: string, options?: nodeFs.StatFsOptions) { return fs.statfsSync(path, options); }, @@ -2217,6 +3547,9 @@ const fs = { async chown(path: string, uid: number, gid: number): Promise { return fs.chownSync(path, uid, gid); }, + async lchown(path: string, uid: number, gid: number): Promise { + return fs.lchownSync(path, uid, gid); + }, async link(existingPath: string, newPath: string): Promise { return fs.linkSync(existingPath, newPath); }, @@ -2232,6 +3565,9 @@ const fs = { async utimes(path: string, atime: string | number | Date, mtime: string | number | Date): Promise { return fs.utimesSync(path, atime, mtime); }, + watch(path: unknown, options?: unknown) { + return createUnsupportedPromisesWatchIterator(path, options); + }, }, // Compatibility methods @@ -2270,11 +3606,12 @@ const fs = { }, realpathSync: Object.assign( - function realpathSync(path: PathLike): string { + function realpathSync(path: PathLike, options?: nodeFs.EncodingOption): string { + validateEncodingOption(options); // Resolve symlinks by walking each path component via lstat + readlink const MAX_SYMLINK_DEPTH = 40; let symlinksFollowed = 0; - const raw = toPathString(path); + const raw = normalizePathLike(path); // Build initial queue: normalize . and .. 
segments const pending: string[] = []; @@ -2331,68 +3668,121 @@ const fs = { return "/" + resolved.join("/") || "/"; }, { - native(path: PathLike): string { + native(path: PathLike, options?: nodeFs.EncodingOption): string { + validateEncodingOption(options); return fs.realpathSync(path); } } ), realpath: Object.assign( - function realpath(path: PathLike, callback?: NodeCallback): Promise | void { + function realpath( + path: PathLike, + optionsOrCallback?: nodeFs.EncodingOption | NodeCallback, + callback?: NodeCallback, + ): Promise | void { + let options: nodeFs.EncodingOption | undefined; + if (typeof optionsOrCallback === "function") { + callback = optionsOrCallback; + } else { + options = optionsOrCallback; + } if (callback) { - callback(null, fs.realpathSync(path)); + validateEncodingOption(options); + callback(null, fs.realpathSync(path, options)); } else { - return Promise.resolve(fs.realpathSync(path)); + return Promise.resolve(fs.realpathSync(path, options)); } }, { - native(path: PathLike, callback?: NodeCallback): Promise | void { + native( + path: PathLike, + optionsOrCallback?: nodeFs.EncodingOption | NodeCallback, + callback?: NodeCallback, + ): Promise | void { + let options: nodeFs.EncodingOption | undefined; + if (typeof optionsOrCallback === "function") { + callback = optionsOrCallback; + } else { + options = optionsOrCallback; + } if (callback) { - callback(null, fs.realpathSync.native(path)); + validateEncodingOption(options); + callback(null, fs.realpathSync.native(path, options)); } else { - return Promise.resolve(fs.realpathSync.native(path)); + return Promise.resolve(fs.realpathSync.native(path, options)); } } } ), - createReadStream( + ReadStream: ReadStreamFactory, + WriteStream: WriteStreamFactory, + + createReadStream: function createReadStream( path: nodeFs.PathLike, - options?: BufferEncoding | { encoding?: BufferEncoding; start?: number; end?: number; highWaterMark?: number } + options?: BufferEncoding | { + encoding?: 
BufferEncoding; + start?: number; + end?: number; + highWaterMark?: number; + bufferSize?: number; + autoClose?: boolean; + fd?: number | FileHandle; + fs?: unknown; + signal?: AbortSignal; + } ): nodeFs.ReadStream { - const pathStr = typeof path === "string" ? path : path instanceof Buffer ? path.toString() : String(path); const opts = typeof options === "string" ? { encoding: options } : options; + validateEncodingOption(opts); + const fd = normalizeStreamFd(opts?.fd); + const pathLike = normalizeStreamPath(path as nodeFs.PathLike | null, fd); // Use type assertion since our ReadStream has all the methods npm needs // but not all the complex overloaded signatures of the full Node.js interface - return new ReadStream(pathStr, opts) as unknown as nodeFs.ReadStream; + return new ReadStream(pathLike, opts) as unknown as nodeFs.ReadStream; }, - createWriteStream( + createWriteStream: function createWriteStream( path: nodeFs.PathLike, - options?: BufferEncoding | { encoding?: BufferEncoding; flags?: string; mode?: number } + options?: BufferEncoding | { + encoding?: BufferEncoding; + flags?: string; + mode?: number; + autoClose?: boolean; + fd?: number | FileHandle; + fs?: unknown; + } ): nodeFs.WriteStream { - const pathStr = typeof path === "string" ? path : path instanceof Buffer ? path.toString() : String(path); const opts = typeof options === "string" ? { encoding: options } : options; + validateEncodingOption(opts); + validateWriteStreamStartOption((opts ?? 
{}) as Record); + const fd = normalizeStreamFd(opts?.fd); + const pathLike = normalizeStreamPath(path as nodeFs.PathLike | null, fd); // Use type assertion since our WriteStream has all the methods npm needs // but not all the complex overloaded signatures of the full Node.js interface - return new WriteStream(pathStr, opts) as unknown as nodeFs.WriteStream; + return new WriteStream(pathLike, opts) as unknown as nodeFs.WriteStream; }, // Unsupported fs APIs — watch requires kernel-level inotify, use polling instead watch(..._args: unknown[]): never { - throw new Error("fs.watch is not supported in sandbox — use polling"); + normalizeWatchArguments(_args[0], _args[1], _args[2]); + throw createUnsupportedWatcherError("watch"); }, watchFile(..._args: unknown[]): never { - throw new Error("fs.watchFile is not supported in sandbox — use polling"); + normalizeWatchFileArguments(_args[0], _args[1], _args[2]); + throw createUnsupportedWatcherError("watchFile"); }, unwatchFile(..._args: unknown[]): never { - throw new Error("fs.unwatchFile is not supported in sandbox — use polling"); + normalizePathLike(_args[0]); + throw createUnsupportedWatcherError("unwatchFile"); }, chmod(path: PathLike, mode: Mode, callback?: NodeCallback): Promise | void { if (callback) { + normalizePathLike(path); + normalizeModeArgument(mode); try { fs.chmodSync(path, mode); callback(null); @@ -2406,6 +3796,9 @@ const fs = { chown(path: PathLike, uid: number, gid: number, callback?: NodeCallback): Promise | void { if (callback) { + normalizePathLike(path); + normalizeNumberArgument("uid", uid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + normalizeNumberArgument("gid", gid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); try { fs.chownSync(path, uid, gid); callback(null); @@ -2417,8 +3810,63 @@ const fs = { } }, + fchmod(fd: number, mode: Mode, callback?: NodeCallback): Promise | void { + if (callback) { + normalizeFdInteger(fd); + normalizeModeArgument(mode); + try { + 
fs.fchmodSync(fd, mode); + callback(null); + } catch (e) { + callback(e as Error); + } + } else { + normalizeFdInteger(fd); + normalizeModeArgument(mode); + return Promise.resolve(fs.fchmodSync(fd, mode)); + } + }, + + fchown(fd: number, uid: number, gid: number, callback?: NodeCallback): Promise | void { + if (callback) { + normalizeFdInteger(fd); + normalizeNumberArgument("uid", uid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + normalizeNumberArgument("gid", gid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + try { + fs.fchownSync(fd, uid, gid); + callback(null); + } catch (e) { + callback(e as Error); + } + } else { + normalizeFdInteger(fd); + normalizeNumberArgument("uid", uid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + normalizeNumberArgument("gid", gid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + return Promise.resolve(fs.fchownSync(fd, uid, gid)); + } + }, + + lchown(path: PathLike, uid: number, gid: number, callback?: NodeCallback): Promise | void { + if (arguments.length >= 4) { + validateCallback(callback, "cb"); + normalizePathLike(path); + normalizeNumberArgument("uid", uid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + normalizeNumberArgument("gid", gid, { min: -1, max: 0xffffffff, allowNegativeOne: true }); + try { + fs.lchownSync(path, uid, gid); + callback(null); + } catch (e) { + callback(e as Error); + } + } else { + return Promise.resolve(fs.lchownSync(path, uid, gid)); + } + }, + link(existingPath: PathLike, newPath: PathLike, callback?: NodeCallback): Promise | void { if (callback) { + normalizePathLike(existingPath, "existingPath"); + normalizePathLike(newPath, "newPath"); try { fs.linkSync(existingPath, newPath); callback(null); @@ -2449,15 +3897,18 @@ const fs = { readlink(path: PathLike, optionsOrCb?: nodeFs.EncodingOption | NodeCallback, callback?: NodeCallback): Promise | void { if (typeof optionsOrCb === "function") { callback = optionsOrCb; + optionsOrCb = undefined; } if 
(callback) { + normalizePathLike(path); + validateEncodingOption(optionsOrCb); try { - callback(null, fs.readlinkSync(path)); + callback(null, fs.readlinkSync(path, optionsOrCb)); } catch (e) { callback(e as Error); } } else { - return Promise.resolve(fs.readlinkSync(path)); + return Promise.resolve(fs.readlinkSync(path, optionsOrCb)); } }, diff --git a/packages/nodejs/src/bridge/module.ts b/packages/nodejs/src/bridge/module.ts index 3ce28ebb..5061c21e 100644 --- a/packages/nodejs/src/bridge/module.ts +++ b/packages/nodejs/src/bridge/module.ts @@ -88,8 +88,10 @@ export function createRequire(filename: string | URL): RequireFunction { "events", "util", "http", + "_http_common", "https", "dns", + "dgram", "child_process", "stream", "buffer", diff --git a/packages/nodejs/src/bridge/network.ts b/packages/nodejs/src/bridge/network.ts index 40667f8e..12a2e699 100644 --- a/packages/nodejs/src/bridge/network.ts +++ b/packages/nodejs/src/bridge/network.ts @@ -6,8 +6,10 @@ const MAX_HTTP_BODY_BYTES = 50 * 1024 * 1024; // 50 MB import type * as nodeHttp from "http"; import type * as nodeDns from "dns"; +import type * as nodeDgram from "node:dgram"; import { exposeCustomGlobal } from "@secure-exec/core/internal/shared/global-exposure"; import type { + FsFacadeBridge, NetworkDnsLookupRawBridgeRef, NetworkFetchRawBridgeRef, NetworkHttpRequestRawBridgeRef, @@ -15,18 +17,60 @@ import type { NetworkHttpServerListenRawBridgeRef, NetworkHttpServerRespondRawBridgeRef, NetworkHttpServerWaitRawBridgeRef, + NetworkHttp2ServerCloseRawBridgeRef, + NetworkHttp2ServerListenRawBridgeRef, + NetworkHttp2ServerWaitRawBridgeRef, + NetworkHttp2SessionCloseRawBridgeRef, + NetworkHttp2SessionConnectRawBridgeRef, + NetworkHttp2SessionRequestRawBridgeRef, + NetworkHttp2SessionSettingsRawBridgeRef, + NetworkHttp2SessionSetLocalWindowSizeRawBridgeRef, + NetworkHttp2SessionGoawayRawBridgeRef, + NetworkHttp2SessionDestroyRawBridgeRef, + NetworkHttp2SessionWaitRawBridgeRef, + 
NetworkHttp2ServerRespondRawBridgeRef, + NetworkHttp2StreamEndRawBridgeRef, + NetworkHttp2StreamPauseRawBridgeRef, + NetworkHttp2StreamResumeRawBridgeRef, + NetworkHttp2StreamRespondWithFileRawBridgeRef, + NetworkHttp2StreamPushStreamRawBridgeRef, + NetworkHttp2StreamRespondRawBridgeRef, + NetworkHttp2StreamWriteRawBridgeRef, RegisterHandleBridgeFn, UnregisterHandleBridgeFn, UpgradeSocketWriteRawBridgeRef, UpgradeSocketEndRawBridgeRef, UpgradeSocketDestroyRawBridgeRef, NetSocketConnectRawBridgeRef, + NetSocketWaitConnectRawBridgeRef, + NetSocketReadRawBridgeRef, + NetSocketSetNoDelayRawBridgeRef, + NetSocketSetKeepAliveRawBridgeRef, NetSocketWriteRawBridgeRef, NetSocketEndRawBridgeRef, NetSocketDestroyRawBridgeRef, NetSocketUpgradeTlsRawBridgeRef, + NetSocketGetTlsClientHelloRawBridgeRef, + NetSocketTlsQueryRawBridgeRef, + NetServerListenRawBridgeRef, + NetServerAcceptRawBridgeRef, + NetServerCloseRawBridgeRef, + TlsGetCiphersRawBridgeRef, + DgramSocketCreateRawBridgeRef, + DgramSocketBindRawBridgeRef, + DgramSocketRecvRawBridgeRef, + DgramSocketSendRawBridgeRef, + DgramSocketCloseRawBridgeRef, + DgramSocketAddressRawBridgeRef, + DgramSocketSetBufferSizeRawBridgeRef, + DgramSocketGetBufferSizeRawBridgeRef, } from "../bridge-contract.js"; +declare const _fdGetPath: { + applySync(t: undefined, a: [number]): string | null; +}; +declare const _fs: FsFacadeBridge; + // Declare host bridge References declare const _networkFetchRaw: NetworkFetchRawBridgeRef; @@ -50,10 +94,102 @@ declare const _networkHttpServerWaitRaw: | NetworkHttpServerWaitRawBridgeRef | undefined; +declare const _networkHttp2ServerListenRaw: + | NetworkHttp2ServerListenRawBridgeRef + | undefined; + +declare const _networkHttp2ServerCloseRaw: + | NetworkHttp2ServerCloseRawBridgeRef + | undefined; + +declare const _networkHttp2ServerWaitRaw: + | NetworkHttp2ServerWaitRawBridgeRef + | undefined; + +declare const _networkHttp2SessionConnectRaw: + | NetworkHttp2SessionConnectRawBridgeRef + | undefined; + 
+declare const _networkHttp2SessionRequestRaw: + | NetworkHttp2SessionRequestRawBridgeRef + | undefined; + +declare const _networkHttp2SessionSettingsRaw: + | NetworkHttp2SessionSettingsRawBridgeRef + | undefined; + +declare const _networkHttp2SessionSetLocalWindowSizeRaw: + | NetworkHttp2SessionSetLocalWindowSizeRawBridgeRef + | undefined; + +declare const _networkHttp2SessionGoawayRaw: + | NetworkHttp2SessionGoawayRawBridgeRef + | undefined; + +declare const _networkHttp2SessionCloseRaw: + | NetworkHttp2SessionCloseRawBridgeRef + | undefined; + +declare const _networkHttp2SessionDestroyRaw: + | NetworkHttp2SessionDestroyRawBridgeRef + | undefined; + +declare const _networkHttp2SessionWaitRaw: + | NetworkHttp2SessionWaitRawBridgeRef + | undefined; + +declare const _networkHttp2ServerRespondRaw: + | NetworkHttp2ServerRespondRawBridgeRef + | undefined; + +declare const _networkHttp2StreamRespondRaw: + | NetworkHttp2StreamRespondRawBridgeRef + | undefined; + +declare const _networkHttp2StreamPushStreamRaw: + | NetworkHttp2StreamPushStreamRawBridgeRef + | undefined; + +declare const _networkHttp2StreamWriteRaw: + | NetworkHttp2StreamWriteRawBridgeRef + | undefined; + +declare const _networkHttp2StreamEndRaw: + | NetworkHttp2StreamEndRawBridgeRef + | undefined; + +declare const _networkHttp2StreamPauseRaw: + | NetworkHttp2StreamPauseRawBridgeRef + | undefined; + +declare const _networkHttp2StreamResumeRaw: + | NetworkHttp2StreamResumeRawBridgeRef + | undefined; + +declare const _networkHttp2StreamRespondWithFileRaw: + | NetworkHttp2StreamRespondWithFileRawBridgeRef + | undefined; + declare const _netSocketConnectRaw: | NetSocketConnectRawBridgeRef | undefined; +declare const _netSocketWaitConnectRaw: + | NetSocketWaitConnectRawBridgeRef + | undefined; + +declare const _netSocketReadRaw: + | NetSocketReadRawBridgeRef + | undefined; + +declare const _netSocketSetNoDelayRaw: + | NetSocketSetNoDelayRawBridgeRef + | undefined; + +declare const _netSocketSetKeepAliveRaw: + | 
NetSocketSetKeepAliveRawBridgeRef + | undefined; + declare const _netSocketWriteRaw: | NetSocketWriteRawBridgeRef | undefined; @@ -70,6 +206,62 @@ declare const _netSocketUpgradeTlsRaw: | NetSocketUpgradeTlsRawBridgeRef | undefined; +declare const _netSocketGetTlsClientHelloRaw: + | NetSocketGetTlsClientHelloRawBridgeRef + | undefined; + +declare const _netSocketTlsQueryRaw: + | NetSocketTlsQueryRawBridgeRef + | undefined; + +declare const _netServerListenRaw: + | NetServerListenRawBridgeRef + | undefined; + +declare const _netServerAcceptRaw: + | NetServerAcceptRawBridgeRef + | undefined; + +declare const _netServerCloseRaw: + | NetServerCloseRawBridgeRef + | undefined; + +declare const _dgramSocketCreateRaw: + | DgramSocketCreateRawBridgeRef + | undefined; + +declare const _dgramSocketBindRaw: + | DgramSocketBindRawBridgeRef + | undefined; + +declare const _dgramSocketRecvRaw: + | DgramSocketRecvRawBridgeRef + | undefined; + +declare const _dgramSocketSendRaw: + | DgramSocketSendRawBridgeRef + | undefined; + +declare const _dgramSocketCloseRaw: + | DgramSocketCloseRawBridgeRef + | undefined; + +declare const _dgramSocketAddressRaw: + | DgramSocketAddressRawBridgeRef + | undefined; + +declare const _dgramSocketSetBufferSizeRaw: + | DgramSocketSetBufferSizeRawBridgeRef + | undefined; + +declare const _dgramSocketGetBufferSizeRaw: + | DgramSocketGetBufferSizeRawBridgeRef + | undefined; + +declare const _tlsGetCiphersRaw: + | TlsGetCiphersRawBridgeRef + | undefined; + declare const _upgradeSocketWriteRaw: | UpgradeSocketWriteRawBridgeRef | undefined; @@ -425,6 +617,43 @@ export const dns = { // Event listener type type EventListener = (...args: unknown[]) => void; +type RequestSocketLike = { + destroyed: boolean; + readable?: boolean; + writable?: boolean; + timeout?: number; + _freeTimer?: ReturnType | null; + on(event: string, listener: EventListener): unknown; + once(event: string, listener: EventListener): unknown; + off?(event: string, listener: EventListener): 
unknown; + removeListener?(event: string, listener: EventListener): unknown; + removeAllListeners?(event?: string): unknown; + emit?(event: string, ...args: unknown[]): boolean; + listeners?(event: string): EventListener[]; + listenerCount?(event: string): number; + setTimeout?(timeout: number, callback?: () => void): unknown; + setNoDelay?(noDelay?: boolean): unknown; + setKeepAlive?(enable?: boolean, delay?: number): unknown; + end?(...args: unknown[]): unknown; + destroy(error?: Error): unknown; +}; + +function createConnResetError(message = "socket hang up"): Error & { code: string } { + const error = new Error(message) as Error & { code: string }; + error.code = "ECONNRESET"; + return error; +} + +function createAbortError(): Error & { code: string; name: string } { + const error = new Error("The operation was aborted") as Error & { + code: string; + name: string; + }; + error.name = "AbortError"; + error.code = "ABORT_ERR"; + return error; +} + // Module-level globalAgent used by ClientRequest when no agent option is provided. // Initialized lazily after Agent class is defined; set by createHttpModule(). let _moduleGlobalAgent: Agent | null = null; @@ -436,7 +665,7 @@ let _moduleGlobalAgent: Agent | null = null; * `x-body-encoding` header. 
*/ export class IncomingMessage { - headers: Record; + headers: Record; rawHeaders: string[]; trailers: Record; rawTrailers: string[]; @@ -452,7 +681,7 @@ export class IncomingMessage { private _listeners: Record; complete: boolean; aborted: boolean; - socket: FakeSocket | UpgradeSocket | null; + socket: FakeSocket | UpgradeSocket | DirectTunnelSocket | null; private _bodyConsumed: boolean; private _ended: boolean; private _flowing: boolean; @@ -461,12 +690,51 @@ export class IncomingMessage { readableFlowing: boolean | null; destroyed: boolean; private _encoding?: string; + private _closeEmitted: boolean; - constructor(response?: { headers?: Record; url?: string; status?: number; statusText?: string; body?: string; trailers?: Record; bodyEncoding?: "utf8" | "base64" }) { - this.headers = response?.headers || {}; - this.rawHeaders = []; - if (this.headers && typeof this.headers === "object") { + constructor(response?: { + headers?: Record | Array<[string, string]>; + rawHeaders?: string[]; + url?: string; + status?: number; + statusText?: string; + body?: string; + trailers?: Record; + bodyEncoding?: "utf8" | "base64"; + }) { + const normalizedHeaders: Record = {}; + if (Array.isArray(response?.headers)) { + response.headers.forEach(([key, value]) => { + appendNormalizedHeader(normalizedHeaders, key.toLowerCase(), value); + }); + } else if (response?.headers) { + Object.entries(response.headers).forEach(([key, value]) => { + normalizedHeaders[key] = Array.isArray(value) ? [...value] : value; + }); + } + this.rawHeaders = Array.isArray(response?.rawHeaders) + ? 
[...response.rawHeaders] + : []; + if (this.rawHeaders.length > 0) { + this.headers = {}; + for (let index = 0; index < this.rawHeaders.length; index += 2) { + const key = this.rawHeaders[index]; + const value = this.rawHeaders[index + 1]; + if (key !== undefined && value !== undefined) { + appendNormalizedHeader(this.headers, key.toLowerCase(), value); + } + } + } else { + this.headers = normalizedHeaders; + } + if (this.rawHeaders.length === 0 && this.headers && typeof this.headers === "object") { Object.entries(this.headers).forEach(([k, v]) => { + if (Array.isArray(v)) { + v.forEach((entry) => { + this.rawHeaders.push(k, entry); + }); + return; + } this.rawHeaders.push(k, v); }); } @@ -489,7 +757,10 @@ export class IncomingMessage { this.statusCode = response?.status; this.statusMessage = response?.statusText; // Decode base64 body if x-body-encoding header is set - const bodyEncoding = response?.bodyEncoding || this.headers['x-body-encoding']; + const bodyEncodingHeader = this.headers["x-body-encoding"]; + const bodyEncoding = + response?.bodyEncoding || + (Array.isArray(bodyEncodingHeader) ? bodyEncodingHeader[0] : bodyEncodingHeader); if (bodyEncoding === 'base64' && response?.body && typeof Buffer !== 'undefined') { this._body = Buffer.from(response.body, 'base64').toString('binary'); this._isBinary = true; @@ -508,6 +779,7 @@ export class IncomingMessage { this.readableEnded = false; this.readableFlowing = null; this.destroyed = false; + this._closeEmitted = false; } on(event: string, listener: EventListener): this { @@ -662,17 +934,19 @@ export class IncomingMessage { resume(): this { this._flowing = true; this.readableFlowing = true; - if (!this._bodyConsumed && this._body) { + if (!this._bodyConsumed) { Promise.resolve().then(() => { if (!this._bodyConsumed) { this._bodyConsumed = true; - let buf: Buffer | string; - if (typeof Buffer !== "undefined") { - buf = this._isBinary ? 
Buffer.from(this._body, 'binary') : Buffer.from(this._body); - } else { - buf = this._body; + if (this._body) { + let buf: Buffer | string; + if (typeof Buffer !== "undefined") { + buf = this._isBinary ? Buffer.from(this._body, 'binary') : Buffer.from(this._body); + } else { + buf = this._body; + } + this.emit("data", buf); } - this.emit("data", buf); Promise.resolve().then(() => { if (!this._ended) { this._ended = true; @@ -696,10 +970,34 @@ export class IncomingMessage { this.destroyed = true; this.readable = false; if (err) this.emit("error", err); - this.emit("close"); + this._emitClose(); return this; } + _abort(err: Error = createConnResetError("aborted")): void { + if (this.aborted) { + return; + } + this.aborted = true; + this.complete = false; + this.destroyed = true; + this.readable = false; + this.readableEnded = true; + this.emit("aborted"); + if (err) { + this.emit("error", err); + } + this._emitClose(); + } + + private _emitClose(): void { + if (this._closeEmitted) { + return; + } + this._closeEmitted = true; + this.emit("close"); + } + [Symbol.asyncIterator](): AsyncIterator { const self = this; let dataEmitted = false; @@ -753,23 +1051,51 @@ export class ClientRequest { private _options: nodeHttp.RequestOptions; private _callback?: (res: IncomingMessage) => void; private _listeners: Record = {}; + private _headers: NormalizedHeaders = {}; + private _rawHeaderNames = new Map(); private _body = ""; private _bodyBytes = 0; private _ended = false; private _agent: Agent | null; private _hostKey: string; private _socketEndListener: EventListener | null = null; - socket!: FakeSocket; + private _socketCloseListener: EventListener | null = null; + private _loopbackAbort?: () => void; + private _response: IncomingMessage | null = null; + private _closeEmitted = false; + private _abortEmitted = false; + private _signalAbortHandler?: () => void; + private _signalPollTimer: ReturnType | null = null; + private _skipExecute = false; + private _destroyError: Error 
| undefined; + private _errorEmitted = false; + socket!: RequestSocketLike; finished = false; aborted = false; + destroyed = false; + path: string; + method: string; reusedSocket = false; + timeoutCb?: () => void; constructor(options: nodeHttp.RequestOptions, callback?: (res: IncomingMessage) => void) { - this._options = options; + const normalizedMethod = validateRequestMethod(options.method); + this._options = { + ...options, + method: normalizedMethod, + path: validateRequestPath(options.path), + }; this._callback = callback; + this._validateTimeoutOption(); + this._setOutgoingHeaders(options.headers); + if (!this._headers.host) { + this._setHeaderValue("Host", buildHostHeader(this._options)); + } + this.path = String(this._options.path || "/"); + this.method = String(this._options.method || "GET").toUpperCase(); // Resolve agent: false = no agent, undefined = globalAgent, or explicit Agent - const agentOpt = options.agent; + const agentOpt = this._options.agent; if (agentOpt === false) { this._agent = null; } else if (agentOpt instanceof Agent) { @@ -777,16 +1103,20 @@ export class ClientRequest { } else { this._agent = _moduleGlobalAgent; } - this._hostKey = this._agent ? this._agent._getHostKey(options as { hostname?: string; host?: string; port?: string | number }) : ""; + this._hostKey = this._agent ? 
this._agent._getHostKey(this._options as { hostname?: string; host?: string; port?: string | number }) : ""; + this._bindAbortSignal(); + if (typeof this._options.timeout === "number") { + this.setTimeout(this._options.timeout); + } // Execute request asynchronously Promise.resolve().then(() => this._execute()); } - _assignSocket(socket: FakeSocket, reusedSocket: boolean): void { + _assignSocket(socket: RequestSocketLike, reusedSocket: boolean): void { this.socket = socket; this.reusedSocket = reusedSocket; - const trackedSocket = socket as FakeSocket & { + const trackedSocket = socket as RequestSocketLike & { _agentPermanentListenersInstalled?: boolean; }; if (!trackedSocket._agentPermanentListenersInstalled) { @@ -796,7 +1126,24 @@ export class ClientRequest { } this._socketEndListener = () => {}; socket.on("end", this._socketEndListener); + this._socketCloseListener = () => { + this.destroyed = true; + this._clearTimeout(); + this._emitClose(); + }; + socket.on("close", this._socketCloseListener); + this._applyTimeoutToSocket(socket); this._emit("socket", socket); + if (this.destroyed) { + if (this._destroyError && !this._errorEmitted) { + this._errorEmitted = true; + queueMicrotask(() => { + this._emit("error", this._destroyError); + }); + } + socket.destroy(); + return; + } void this._dispatchWithSocket(socket); } @@ -805,21 +1152,27 @@ export class ClientRequest { } private _finalizeSocket( - socket: FakeSocket, + socket: RequestSocketLike, keepSocketAlive: boolean, ): void { if (this._socketEndListener) { - socket.off("end", this._socketEndListener); + socket.off?.("end", this._socketEndListener); + socket.removeListener?.("end", this._socketEndListener); this._socketEndListener = null; } + if (this._socketCloseListener) { + socket.off?.("close", this._socketCloseListener); + socket.removeListener?.("close", this._socketCloseListener); + this._socketCloseListener = null; + } if (this._agent) { - this._agent._releaseSocket(this._hostKey, socket, 
this._options, keepSocketAlive); + this._agent._releaseSocket(this._hostKey, socket as FakeSocket, this._options, keepSocketAlive); } else if (!socket.destroyed) { socket.destroy(); } } - private async _dispatchWithSocket(socket: FakeSocket): Promise { + private async _dispatchWithSocket(socket: RequestSocketLike): Promise { try { if (typeof _networkHttpRequestRaw === 'undefined') { console.error('http/https request requires NetworkAdapter to be configured'); @@ -832,21 +1185,93 @@ export class ClientRequest { tls.rejectUnauthorized = (this._options as Record).rejectUnauthorized; } const normalizedHeaders = normalizeRequestHeaders(this._options.headers); + const requestMethod = String(this._options.method || "GET").toUpperCase(); + const loopbackServerByPort = findLoopbackServerByPort(this._options); + const directLoopbackConnectServer = + requestMethod === "CONNECT" + ? loopbackServerByPort + : null; + const directLoopbackUpgradeServer = + requestMethod !== "CONNECT" && + hasUpgradeRequestHeaders(normalizedHeaders) && + loopbackServerByPort?.listenerCount("upgrade") + ? loopbackServerByPort + : null; + + if (directLoopbackConnectServer) { + const response = await dispatchLoopbackConnectRequest( + directLoopbackConnectServer, + this._options, + ); + this.finished = true; + this.socket = response.socket; + response.response.socket = response.socket; + response.socket.once("close", () => { + this._emit("close"); + }); + this._emit("connect", response.response, response.socket, response.head); + process.nextTick(() => { + this._finalizeSocket(socket, false); + }); + return; + } - const directLoopbackServer = findLoopbackServerForRequest(this._options); - const responseJson = directLoopbackServer - ? 
await dispatchServerRequest( + if (directLoopbackUpgradeServer) { + const response = await dispatchLoopbackUpgradeRequest( + directLoopbackUpgradeServer, + this._options, + this._body, + ); + this.finished = true; + this.socket = response.socket; + response.response.socket = response.socket; + response.socket.once("close", () => { + this._emit("close"); + }); + this._emit("upgrade", response.response, response.socket, response.head); + process.nextTick(() => { + this._finalizeSocket(socket, false); + }); + return; + } + + const directLoopbackServer = + requestMethod !== "CONNECT" && + hasUpgradeRequestHeaders(normalizedHeaders) && + !directLoopbackUpgradeServer + ? loopbackServerByPort + : findLoopbackServerForRequest(this._options); + const directLoopbackHttp2CompatServer = + !directLoopbackServer && + requestMethod !== "CONNECT" && + !hasUpgradeRequestHeaders(normalizedHeaders) + ? findLoopbackHttp2CompatibilityServer(this._options) + : null; + const serializedRequest = JSON.stringify({ + method: requestMethod, + url: this._options.path || "/", + headers: normalizedHeaders, + rawHeaders: flattenRawHeaders(normalizedHeaders), + bodyBase64: this._body + ? Buffer.from(this._body).toString("base64") + : undefined, + } satisfies SerializedServerRequest); + const loopbackResponse = directLoopbackServer + ? await dispatchLoopbackServerRequest( directLoopbackServer._bridgeServerId, - JSON.stringify({ - method: this._options.method || "GET", - url: this._options.path || "/", - headers: normalizedHeaders, - rawHeaders: flattenRawHeaders(normalizedHeaders), - bodyBase64: this._body - ? Buffer.from(this._body).toString("base64") - : undefined, - } satisfies SerializedServerRequest), + serializedRequest, ) + : directLoopbackHttp2CompatServer + ? 
await dispatchLoopbackHttp2CompatibilityRequest( + directLoopbackHttp2CompatServer, + serializedRequest, + ) + : null; + if (loopbackResponse) { + this._loopbackAbort = loopbackResponse.abortRequest; + } + const responseJson = loopbackResponse + ? loopbackResponse.responseJson : await _networkHttpRequestRaw.apply(undefined, [url, JSON.stringify({ method: this._options.method || "GET", headers: normalizedHeaders, @@ -856,25 +1281,28 @@ export class ClientRequest { result: { promise: true }, }); const response = JSON.parse(responseJson) as { - headers?: Record; + headers?: Record; + rawHeaders?: string[]; url?: string; status?: number; statusText?: string; body?: string; bodyEncoding?: "utf8" | "base64"; trailers?: Record; + informational?: SerializedInformationalResponse[]; upgradeSocketId?: number; connectionEnded?: boolean; connectionReset?: boolean; }; this.finished = true; + this._clearTimeout(); // 101 Switching Protocols → fire 'upgrade' event if (response.status === 101) { const res = new IncomingMessage(response); // Use UpgradeSocket for bidirectional data relay when socketId is available - let upgradeSocket: FakeSocket | UpgradeSocket = socket; + let upgradeSocket: FakeSocket | UpgradeSocket | DirectTunnelSocket = socket as FakeSocket; if (response.upgradeSocketId != null) { upgradeSocket = new UpgradeSocket(response.upgradeSocketId, { host: this._options.hostname as string, @@ -886,46 +1314,120 @@ export class ClientRequest { ? (response.body ? 
Buffer.from(response.body, "base64") : Buffer.alloc(0)) : new Uint8Array(0); res.socket = upgradeSocket; + upgradeSocket.once("close", () => { + this._emit("close"); + }); + if (this._listenerCount("upgrade") === 0) { + process.nextTick(() => { + this._finalizeSocket(socket, false); + }); + upgradeSocket.destroy(); + return; + } this._emit("upgrade", res, upgradeSocket, head); + process.nextTick(() => { + this._finalizeSocket(socket, false); + }); + return; + } + + if (requestMethod === "CONNECT" && response.upgradeSocketId != null) { + const res = new IncomingMessage(response); + const connectSocket = new UpgradeSocket(response.upgradeSocketId, { + host: this._options.hostname as string, + port: Number(this._options.port) || 80, + }); + upgradeSocketInstances.set(response.upgradeSocketId, connectSocket); + const head = typeof Buffer !== "undefined" + ? (response.body ? Buffer.from(response.body, "base64") : Buffer.alloc(0)) + : new Uint8Array(0); + res.socket = connectSocket; + connectSocket.once("close", () => { + this._emit("close"); + }); + this._emit("connect", res, connectSocket, head); + process.nextTick(() => { + this._finalizeSocket(socket, false); + }); return; } if (response.connectionReset) { - const error = new Error("socket hang up"); + const error = createConnResetError(); this._emit("error", error); - setTimeout(() => socket.destroy(), 0); + process.nextTick(() => { + this._finalizeSocket(socket, false); + }); return; } + for (const informational of response.informational || []) { + this._emit("information", new IncomingMessage({ + headers: Object.fromEntries(informational.headers || []), + rawHeaders: informational.rawHeaders, + status: informational.status, + statusText: informational.statusText, + })); + } + const res = new IncomingMessage(response); - res.socket = socket; + this._response = res; + res.socket = socket as FakeSocket | UpgradeSocket | DirectTunnelSocket; res.once("end", () => { - this._finalizeSocket(socket, this._agent?.keepAlive 
=== true && !this.aborted); - if (response.connectionEnded) { - setTimeout(() => socket.end(), 0); - } + process.nextTick(() => { + this._finalizeSocket(socket, this._agent?.keepAlive === true && !this.aborted); + if (response.connectionEnded) { + queueMicrotask(() => socket.end?.()); + } + }); }); if (this._callback) { this._callback(res); } this._emit("response", res); + if (!this._callback && this._listenerCount("response") === 0) { + queueMicrotask(() => { + res.resume(); + }); + } } catch (err) { + this._clearTimeout(); this._emit("error", err); this._finalizeSocket(socket, false); } } private _execute(): void { + if (this._skipExecute) { + return; + } if (this._agent) { this._agent.addRequest(this, this._options); return; } - const socket = new FakeSocket({ + const finish = (socket?: RequestSocketLike): void => { + if (!socket) { + this._handleSocketError(new Error("Failed to create socket")); + this._emitClose(); + return; + } + this._assignSocket(socket, false); + }; + + const createConnection = this._options.createConnection; + if (typeof createConnection === "function") { + const maybeSocket = createConnection(this._options, (_err, socket) => { + finish(socket as unknown as RequestSocketLike | undefined); + }); + finish(maybeSocket as unknown as RequestSocketLike | undefined); + return; + } + + finish(new FakeSocket({ host: (this._options.hostname || this._options.host || "localhost") as string, port: Number(this._options.port) || 80, - }); - this._assignSocket(socket, false); + })); } private _buildUrl(): string { @@ -943,6 +1445,10 @@ export class ClientRequest { return this; } + addListener(event: string, listener: EventListener): this { + return this.on(event, listener); + } + once(event: string, listener: EventListener): this { const wrapper = (...args: unknown[]): void => { this.off(event, wrapper); @@ -972,12 +1478,110 @@ export class ClientRequest { return this; } + removeListener(event: string, listener: EventListener): this { + return 
this.off(event, listener); + } + + getHeader(name: string): string | string[] | undefined { + if (typeof name !== "string") { + throw createTypeErrorWithCode( + `The "name" argument must be of type string. Received ${formatReceivedType(name)}`, + "ERR_INVALID_ARG_TYPE", + ); + } + return this._headers[name.toLowerCase()]; + } + + getHeaders(): Record { + const headers = Object.create(null) as Record; + for (const [key, value] of Object.entries(this._headers)) { + headers[key] = Array.isArray(value) ? [...value] : value; + } + return headers; + } + + getHeaderNames(): string[] { + return Object.keys(this._headers); + } + + getRawHeaderNames(): string[] { + return Object.keys(this._headers).map((key) => this._rawHeaderNames.get(key) || key); + } + + hasHeader(name: string): boolean { + if (typeof name !== "string") { + throw createTypeErrorWithCode( + `The "name" argument must be of type string. Received ${formatReceivedType(name)}`, + "ERR_INVALID_ARG_TYPE", + ); + } + return Object.prototype.hasOwnProperty.call(this._headers, name.toLowerCase()); + } + + removeHeader(name: string): void { + if (typeof name !== "string") { + throw createTypeErrorWithCode( + `The "name" argument must be of type string. 
Received ${formatReceivedType(name)}`, + "ERR_INVALID_ARG_TYPE", + ); + } + const lowerName = name.toLowerCase(); + delete this._headers[lowerName]; + this._rawHeaderNames.delete(lowerName); + this._options.headers = { ...this._headers }; + } + private _emit(event: string, ...args: unknown[]): void { if (this._listeners[event]) { this._listeners[event].forEach((fn) => fn(...args)); } } + private _listenerCount(event: string): number { + return this._listeners[event]?.length || 0; + } + + private _setOutgoingHeaders(headers: nodeHttp.OutgoingHttpHeaders | readonly string[] | undefined): void { + this._headers = {}; + this._rawHeaderNames = new Map(); + if (!headers) { + this._options.headers = {}; + return; + } + + if (Array.isArray(headers)) { + for (let index = 0; index < headers.length; index += 2) { + const key = headers[index]; + const value = headers[index + 1]; + if (key !== undefined && value !== undefined) { + this._setHeaderValue(String(key), value); + } + } + return; + } + + Object.entries(headers).forEach(([key, value]) => { + if (value !== undefined) { + this._setHeaderValue(key, value); + } + }); + } + + private _setHeaderValue( + name: string, + value: string | number | readonly string[] | readonly number[], + ): void { + const actualName = validateHeaderName(name).toLowerCase(); + validateHeaderValue(actualName, value); + this._headers[actualName] = Array.isArray(value) + ? value.map((entry) => String(entry)) + : String(value); + if (!this._rawHeaderNames.has(actualName)) { + this._rawHeaderNames.set(actualName, name); + } + this._options.headers = { ...this._headers }; + } + write(data: string): boolean { const addedBytes = typeof Buffer !== "undefined" ? 
Buffer.byteLength(data) : data.length; if (this._bodyBytes + addedBytes > MAX_HTTP_BODY_BYTES) { @@ -995,27 +1599,230 @@ export class ClientRequest { } abort(): void { + if (this.aborted) { + return; + } this.aborted = true; - if (this.socket && !this.socket.destroyed) { - this.socket.destroy(); + if (!this._abortEmitted) { + this._abortEmitted = true; + queueMicrotask(() => { + this._emit("abort"); + }); } + this._loopbackAbort?.(); + this.destroy(); } - setTimeout(_timeout: number): this { - return this; - } + destroy(err?: Error): this { + if (this.destroyed) { + return this; + } + this.destroyed = true; + this._clearTimeout(); + this._unbindAbortSignal(); + this._loopbackAbort?.(); + this._loopbackAbort = undefined; + if (!this.socket && err && (err as { code?: string }).code === "ABORT_ERR") { + this._skipExecute = true; + } - setNoDelay(): this { - return this; - } + const responseStarted = this._response != null; + const destroyError = + err ?? + (!this.aborted && !responseStarted ? createConnResetError() : undefined); + this._destroyError = destroyError; - setSocketKeepAlive(): this { - return this; - } + if (this._response && !this._response.complete && !this._response.aborted) { + this._response._abort(destroyError ?? 
createConnResetError("aborted")); + } + + if (this.socket && !this.socket.destroyed) { + if (destroyError && !this._errorEmitted) { + this._errorEmitted = true; + queueMicrotask(() => { + this._emit("error", destroyError); + }); + } + this.socket.destroy(destroyError); + } else { + if (destroyError) { + this._errorEmitted = true; + queueMicrotask(() => { + this._emit("error", destroyError); + }); + } + queueMicrotask(() => { + this._emitClose(); + }); + } + return this; + } + + setTimeout(timeout: number, callback?: () => void): this { + if (callback) { + this.once("timeout", callback); + } + this.timeoutCb = () => { + this._emit("timeout"); + }; + this._clearTimeout(); + if (timeout === 0) { + return this; + } + if (!Number.isFinite(timeout) || timeout < 0) { + throw new TypeError(`The "timeout" argument must be of type number. Received ${String(timeout)}`); + } + this._options.timeout = timeout; + if (this.socket) { + this._applyTimeoutToSocket(this.socket); + } + return this; + } + + setNoDelay(): this { + return this; + } + + setSocketKeepAlive(): this { + return this; + } flushHeaders(): void { // no-op } + + private _emitClose(): void { + if (this._closeEmitted) { + return; + } + this._closeEmitted = true; + this._emit("close"); + } + + private _applyTimeoutToSocket(socket: RequestSocketLike): void { + const timeout = this._options.timeout; + if (typeof timeout !== "number" || timeout === 0) { + return; + } + if (!this.timeoutCb) { + this.timeoutCb = () => { + this._emit("timeout"); + }; + } + socket.off?.("timeout", this.timeoutCb); + socket.removeListener?.("timeout", this.timeoutCb); + socket.setTimeout?.(timeout, this.timeoutCb); + } + + private _validateTimeoutOption(): void { + const timeout = this._options.timeout; + if (timeout === undefined) { + return; + } + if (typeof timeout !== "number") { + const received = timeout === null + ? "null" + : typeof timeout === "string" + ? 
`type string ('${timeout}')` + : `type ${typeof timeout} (${JSON.stringify(timeout)})`; + const error = new TypeError(`The "timeout" argument must be of type number. Received ${received}`) as TypeError & { + code?: string; + }; + error.code = "ERR_INVALID_ARG_TYPE"; + throw error; + } + } + + private _bindAbortSignal(): void { + const signal = this._options.signal; + if (!signal) { + return; + } + this._signalAbortHandler = () => { + this.destroy(createAbortError()); + }; + if (signal.aborted) { + this.destroyed = true; + this._skipExecute = true; + queueMicrotask(() => { + this._emit("error", createAbortError()); + this._emitClose(); + }); + return; + } + if (typeof signal.addEventListener === "function") { + signal.addEventListener("abort", this._signalAbortHandler, { once: true }); + return; + } + + const signalWithOnAbort = signal as AbortSignal & { + onabort?: ((this: AbortSignal, event: Event) => void) | null; + __secureExecPrevOnAbort__?: ((this: AbortSignal, event: Event) => void) | null; + }; + signalWithOnAbort.__secureExecPrevOnAbort__ = signalWithOnAbort.onabort ?? 
null; + signalWithOnAbort.onabort = ((event: Event) => { + signalWithOnAbort.__secureExecPrevOnAbort__?.call(signal, event); + this._signalAbortHandler?.(); + }) as (this: AbortSignal, event: Event) => void; + this._startAbortSignalPoll(signal); + } + + private _unbindAbortSignal(): void { + const signal = this._options.signal; + if (!signal || !this._signalAbortHandler) { + return; + } + if (this._signalPollTimer) { + clearTimeout(this._signalPollTimer); + this._signalPollTimer = null; + } + if (typeof signal.removeEventListener === "function") { + signal.removeEventListener("abort", this._signalAbortHandler); + this._signalAbortHandler = undefined; + return; + } + + const signalWithOnAbort = signal as AbortSignal & { + onabort?: ((this: AbortSignal, event: Event) => void) | null; + __secureExecPrevOnAbort__?: ((this: AbortSignal, event: Event) => void) | null; + }; + if (signalWithOnAbort.onabort === this._signalAbortHandler) { + signalWithOnAbort.onabort = signalWithOnAbort.__secureExecPrevOnAbort__ ?? null; + } else if (signalWithOnAbort.__secureExecPrevOnAbort__ !== undefined) { + signalWithOnAbort.onabort = signalWithOnAbort.__secureExecPrevOnAbort__ ?? 
null; + } + delete signalWithOnAbort.__secureExecPrevOnAbort__; + this._signalAbortHandler = undefined; + } + + private _startAbortSignalPoll(signal: AbortSignal): void { + const poll = (): void => { + if (this.destroyed) { + this._signalPollTimer = null; + return; + } + if (signal.aborted) { + this._signalPollTimer = null; + this._signalAbortHandler?.(); + return; + } + this._signalPollTimer = setTimeout(poll, 5); + }; + + if (!this._signalPollTimer) { + this._signalPollTimer = setTimeout(poll, 5); + } + } + + private _clearTimeout(): void { + if (this.socket && this.timeoutCb) { + this.socket.off?.("timeout", this.timeoutCb); + this.socket.removeListener?.("timeout", this.timeoutCb); + } + if (this.socket?.setTimeout) { + this.socket.setTimeout(0); + } + } } // Minimal socket-like object emitted by ClientRequest 'socket' event @@ -1028,9 +1835,11 @@ class FakeSocket { destroyed = false; writable = true; readable = true; + timeout = 0; private _listeners: Record = {}; private _closed = false; private _closeScheduled = false; + private _timeoutTimer: ReturnType | null = null; _freeTimer: ReturnType | null = null; constructor(options?: { host?: string; port?: number }) { @@ -1038,7 +1847,22 @@ class FakeSocket { this.remotePort = options?.port || 80; } - setTimeout(_ms: number, _cb?: () => void): this { return this; } + setTimeout(ms: number, cb?: () => void): this { + this.timeout = ms; + if (cb) { + this.on("timeout", cb); + } + if (this._timeoutTimer) { + clearTimeout(this._timeoutTimer); + this._timeoutTimer = null; + } + if (ms > 0) { + this._timeoutTimer = setTimeout(() => { + this.emit("timeout"); + }, ms); + } + return this; + } setNoDelay(_noDelay?: boolean): this { return this; } setKeepAlive(_enable?: boolean, _delay?: number): this { return this; } @@ -1087,6 +1911,10 @@ class FakeSocket { return this._listeners[event]?.length || 0; } + listeners(event: string): EventListener[] { + return [...(this._listeners[event] || [])]; + } + write(_data: unknown): 
boolean { return true; } end(): this { if (this.destroyed || this._closed) return this; @@ -1106,6 +1934,10 @@ class FakeSocket { this._closed = true; this.writable = false; this.readable = false; + if (this._timeoutTimer) { + clearTimeout(this._timeoutTimer); + this._timeoutTimer = null; + } if (!this._closeScheduled) { this._closeScheduled = true; queueMicrotask(() => { @@ -1117,6 +1949,167 @@ class FakeSocket { } } +class DirectTunnelSocket { + remoteAddress: string; + remotePort: number; + localAddress = "127.0.0.1"; + localPort = 0; + connecting = false; + destroyed = false; + writable = true; + readable = true; + readyState = "open"; + bytesWritten = 0; + private _listeners: Record = {}; + private _encoding?: BufferEncoding; + private _peer: DirectTunnelSocket | null = null; + _readableState = { endEmitted: false }; + _writableState = { finished: false, errorEmitted: false }; + + constructor(options?: { host?: string; port?: number }) { + this.remoteAddress = options?.host || "127.0.0.1"; + this.remotePort = options?.port || 80; + } + + _attachPeer(peer: DirectTunnelSocket): void { + this._peer = peer; + } + + setTimeout(_ms: number, _cb?: () => void): this { return this; } + setNoDelay(_noDelay?: boolean): this { return this; } + setKeepAlive(_enable?: boolean, _delay?: number): this { return this; } + setEncoding(encoding: BufferEncoding): this { + this._encoding = encoding; + return this; + } + ref(): this { return this; } + unref(): this { return this; } + cork(): void {} + uncork(): void {} + pause(): this { return this; } + resume(): this { return this; } + address(): { address: string; family: string; port: number } { + return { address: this.localAddress, family: "IPv4", port: this.localPort }; + } + + on(event: string, listener: EventListener): this { + if (!this._listeners[event]) this._listeners[event] = []; + this._listeners[event].push(listener); + return this; + } + + once(event: string, listener: EventListener): this { + const wrapper = 
(...args: unknown[]): void => { + this.off(event, wrapper); + listener(...args); + }; + return this.on(event, wrapper); + } + + off(event: string, listener: EventListener): this { + const listeners = this._listeners[event]; + if (!listeners) return this; + const index = listeners.indexOf(listener); + if (index !== -1) listeners.splice(index, 1); + return this; + } + + removeListener(event: string, listener: EventListener): this { + return this.off(event, listener); + } + + removeAllListeners(event?: string): this { + if (event) { + delete this._listeners[event]; + } else { + this._listeners = {}; + } + return this; + } + + emit(event: string, ...args: unknown[]): boolean { + const listeners = this._listeners[event]; + if (!listeners || listeners.length === 0) return false; + listeners.slice().forEach((listener) => listener.call(this, ...args)); + return true; + } + + listenerCount(event: string): number { + return this._listeners[event]?.length || 0; + } + + write(data: unknown, encodingOrCb?: string | (() => void), cb?: (() => void)): boolean { + if (this.destroyed || !this._peer) return false; + const callback = typeof encodingOrCb === "function" ? 
encodingOrCb : cb; + const buffer = normalizeSocketChunk(data); + this.bytesWritten += buffer.length; + queueMicrotask(() => { + this._peer?._pushData(buffer); + }); + callback?.(); + return true; + } + + end(data?: unknown): this { + if (data !== undefined) { + this.write(data); + } + this.writable = false; + this._writableState.finished = true; + queueMicrotask(() => { + this._peer?._pushEnd(); + }); + this.emit("finish"); + return this; + } + + destroy(err?: Error): this { + if (this.destroyed) return this; + this.destroyed = true; + this.readable = false; + this.writable = false; + this._readableState.endEmitted = true; + this._writableState.finished = true; + if (err) { + this.emit("error", err); + } + queueMicrotask(() => { + this._peer?._pushEnd(); + }); + this.emit("close", false); + return this; + } + + _pushData(buffer: Buffer): void { + if (!this.readable || this.destroyed) { + return; + } + this.emit("data", this._encoding ? buffer.toString(this._encoding) : buffer); + } + + _pushEnd(): void { + if (this.destroyed) { + return; + } + this.readable = false; + this.writable = false; + this._readableState.endEmitted = true; + this._writableState.finished = true; + this.emit("end"); + this.emit("close", false); + } +} + +function normalizeSocketChunk(data: unknown): Buffer { + if (typeof Buffer !== "undefined" && Buffer.isBuffer(data)) { + return data; + } + if (data instanceof Uint8Array) { + return Buffer.from(data); + } + return Buffer.from(String(data)); +} + type QueuedAgentRequest = { request: ClientRequest; options: nodeHttp.RequestOptions; @@ -1257,6 +2250,12 @@ class Agent { }, cb?: (err: Error | null, socket?: FakeSocket) => void, ): FakeSocket { + if (typeof options.createConnection === "function") { + return options.createConnection( + options, + (cb ?? 
(() => undefined)) as (err: Error | null, socket: unknown) => void, + ) as unknown as FakeSocket; + } const socket = new FakeSocket({ host: String(options.hostname || options.host || "localhost"), port: Number(options.port) || 80, @@ -1293,7 +2292,7 @@ class Agent { options: nodeHttp.RequestOptions, keepSocketAlive: boolean, ): void { - this._removeSocket(this.sockets, name, socket); + const removedActive = this._removeSocket(this.sockets, name, socket); if (keepSocketAlive && !socket.destroyed) { const freeList = this.freeSockets[name] ?? (this.freeSockets[name] = []); if (freeList.length < this.maxFreeSockets) { @@ -1311,9 +2310,15 @@ class Agent { socket.emit("free"); this.emit("free", socket, options); } else { + if (removedActive) { + this.totalSocketCount = Math.max(0, this.totalSocketCount - 1); + } socket.destroy(); } } else if (!socket.destroyed) { + if (removedActive) { + this.totalSocketCount = Math.max(0, this.totalSocketCount - 1); + } socket.destroy(); } Promise.resolve().then(() => this._processPendingRequests()); @@ -1384,6 +2389,15 @@ class Agent { this._processPendingRequests(); return; } + if (request.destroyed) { + this.totalSocketCount += 1; + this._activateSocket(name, socket); + socket.once("close", () => { + this._removeSocketCompletely(name, socket); + }); + request._assignSocket(socket, false); + return; + } this.totalSocketCount += 1; this._activateSocket(name, socket); socket.once("close", () => { @@ -1417,6 +2431,11 @@ class Agent { const freeSocket = this._takeFreeSocket(name); if (freeSocket) { const entry = queue.shift()!; + if (entry.request.destroyed) { + this._activateSocket(name, freeSocket); + this._releaseSocket(name, freeSocket, entry.options, true); + continue; + } this._activateSocket(name, freeSocket); entry.request._assignSocket(freeSocket, true); continue; @@ -1425,6 +2444,9 @@ class Agent { break; } const entry = queue.shift()!; + if (entry.request.destroyed) { + continue; + } this._createSocketForRequest(name, 
entry.request, entry.options); } if (!queue || queue.length === 0) { @@ -1487,7 +2509,7 @@ interface SerializedServerListenResult { interface SerializedServerRequest { method: string; url: string; - headers: Record; + headers: Record; rawHeaders: string[]; bodyBase64?: string; } @@ -1495,10 +2517,22 @@ interface SerializedServerRequest { interface SerializedServerResponse { status: number; headers?: Array<[string, string]>; + rawHeaders?: string[]; + informational?: SerializedInformationalResponse[]; body?: string; bodyEncoding?: "utf8" | "base64"; + trailers?: Array<[string, string]>; + rawTrailers?: string[]; connectionEnded?: boolean; connectionReset?: boolean; + upgradeSocketId?: number; +} + +interface SerializedInformationalResponse { + status: number; + statusText?: string; + headers?: Array<[string, string]>; + rawHeaders?: string[]; } function debugBridgeNetwork(...args: unknown[]): void { @@ -1511,36 +2545,789 @@ let nextServerId = 1; // Server instances indexed by serverId — used by request/upgrade dispatch const serverInstances = new Map(); -function normalizeRequestHeaders( - headers: nodeHttp.OutgoingHttpHeaders | readonly string[] | undefined, -): Record { - if (!headers) return {}; - if (Array.isArray(headers)) { - const normalized: Record = {}; - for (let i = 0; i < headers.length; i += 2) { - const key = headers[i]; - const value = headers[i + 1]; - if (key !== undefined && value !== undefined) { - normalized[String(key).toLowerCase()] = String(value); - } - } - return normalized; - } +const HTTP_METHODS = [ + "ACL", + "BIND", + "CHECKOUT", + "CONNECT", + "COPY", + "DELETE", + "GET", + "HEAD", + "LINK", + "LOCK", + "M-SEARCH", + "MERGE", + "MKACTIVITY", + "MKCALENDAR", + "MKCOL", + "MOVE", + "NOTIFY", + "OPTIONS", + "PATCH", + "POST", + "PROPFIND", + "PROPPATCH", + "PURGE", + "PUT", + "QUERY", + "REBIND", + "REPORT", + "SEARCH", + "SOURCE", + "SUBSCRIBE", + "TRACE", + "UNBIND", + "UNLINK", + "UNLOCK", + "UNSUBSCRIBE", +]; + +type 
NormalizedHeaderValue = string | string[]; +type NormalizedHeaders = Record; +type StoredHeaderValue = string | number | Array; +type LoopbackRequestParseResult = + | { + kind: "incomplete"; + } + | { + kind: "bad-request"; + closeConnection: boolean; + } + | { + kind: "request"; + bytesConsumed: number; + request: SerializedServerRequest; + closeConnection: boolean; + upgradeHead?: Buffer; + }; - const normalized: Record = {}; - Object.entries(headers).forEach(([key, value]) => { +const INVALID_REQUEST_PATH_REGEXP = /[^\u0021-\u00ff]/; +const HTTP_TOKEN_EXTRA_CHARS = new Set(["!", "#", "$", "%", "&", "'", "*", "+", "-", ".", "^", "_", "`", "|", "~"]); + +function createTypeErrorWithCode(message: string, code: string): TypeError & { code: string } { + const error = new TypeError(message) as TypeError & { code: string }; + error.code = code; + return error; +} + +function createErrorWithCode(message: string, code: string): Error & { code: string } { + const error = new Error(message) as Error & { code: string }; + error.code = code; + return error; +} + +function formatReceivedType(value: unknown): string { + if (value === null) { + return "null"; + } + if (Array.isArray(value)) { + return "an instance of Array"; + } + const valueType = typeof value; + if (valueType === "function") { + const name = + typeof (value as { name?: unknown }).name === "string" && + (value as { name?: string }).name!.length > 0 + ? (value as { name?: string }).name! + : "anonymous"; + return `function ${name}`; + } + if (valueType === "object") { + const ctorName = + value && + typeof value === "object" && + typeof (value as { constructor?: { name?: string } }).constructor?.name === "string" + ? (value as { constructor?: { name?: string } }).constructor!.name! 
+ : "Object"; + return `an instance of ${ctorName}`; + } + if (valueType === "string") { + return `type string ('${String(value)}')`; + } + if (valueType === "symbol") { + return `type symbol (${String(value)})`; + } + return `type ${valueType} (${String(value)})`; +} + +function createInvalidArgTypeError(argumentName: string, expectedType: string, value: unknown): TypeError & { code: string } { + return createTypeErrorWithCode( + `The "${argumentName}" property must be of type ${expectedType}. Received ${formatReceivedType(value)}`, + "ERR_INVALID_ARG_TYPE", + ); +} + +function checkIsHttpToken(value: string): boolean { + if (value.length === 0) { + return false; + } + for (let index = 0; index < value.length; index += 1) { + const char = value[index]; + const code = value.charCodeAt(index); + const isAlphaNum = + (code >= 48 && code <= 57) || + (code >= 65 && code <= 90) || + (code >= 97 && code <= 122); + if (!isAlphaNum && !HTTP_TOKEN_EXTRA_CHARS.has(char)) { + return false; + } + } + return true; +} + +function checkInvalidHeaderChar(value: string): boolean { + for (let index = 0; index < value.length; index += 1) { + const code = value.charCodeAt(index); + if (code === 9) { + continue; + } + if (code < 32 || code === 127 || code > 255) { + return true; + } + } + return false; +} + +function validateHeaderName(name: unknown, label = "Header name"): string { + const actualName = String(name); + if (!checkIsHttpToken(actualName)) { + throw createTypeErrorWithCode( + `${label} must be a valid HTTP token [${JSON.stringify(actualName)}]`, + "ERR_INVALID_HTTP_TOKEN", + ); + } + return actualName; +} + +function validateHeaderValue(name: string, value: unknown): void { + if (value === undefined) { + throw createTypeErrorWithCode( + `Invalid value "undefined" for header "${name}"`, + "ERR_HTTP_INVALID_HEADER_VALUE", + ); + } + if (Array.isArray(value)) { + for (const entry of value) { + validateHeaderValue(name, entry); + } + return; + } + if 
(checkInvalidHeaderChar(String(value))) { + throw createTypeErrorWithCode( + `Invalid character in header content [${JSON.stringify(name)}]`, + "ERR_INVALID_CHAR", + ); + } +} + +function serializeHeaderValue(value: StoredHeaderValue): string | string[] { + if (Array.isArray(value)) { + return value.map((entry) => String(entry)); + } + return String(value); +} + +function joinHeaderValue(value: NormalizedHeaderValue): string { + return Array.isArray(value) ? value.join(", ") : value; +} + +function cloneStoredHeaderValue(value: StoredHeaderValue): StoredHeaderValue { + return Array.isArray(value) ? [...value] : value; +} + +function appendNormalizedHeader( + target: NormalizedHeaders, + key: string, + value: string, +): void { + if (key === "set-cookie") { + const existing = target[key]; + if (existing === undefined) { + target[key] = [value]; + } else if (Array.isArray(existing)) { + existing.push(value); + } else { + target[key] = [existing, value]; + } + return; + } + + const existing = target[key]; + target[key] = + existing === undefined + ? value + : `${joinHeaderValue(existing)}, ${value}`; +} + +function flattenRawHeaders(headers: NormalizedHeaders): string[] { + const rawHeaders: string[] = []; + for (const [key, value] of Object.entries(headers)) { + if (Array.isArray(value)) { + value.forEach((entry) => { + rawHeaders.push(key, entry); + }); + continue; + } + rawHeaders.push(key, value); + } + return rawHeaders; +} + +function validateRequestMethod(method: unknown): string | undefined { + if (method == null || method === "") { + return undefined; + } + if (typeof method !== "string") { + throw createInvalidArgTypeError("options.method", "string", method); + } + return validateHeaderName(method, "Method"); +} + +function validateRequestPath(path: unknown): string { + const resolvedPath = path == null || path === "" ? 
"/" : String(path); + if (INVALID_REQUEST_PATH_REGEXP.test(resolvedPath)) { + throw createTypeErrorWithCode( + "Request path contains unescaped characters", + "ERR_UNESCAPED_CHARACTERS", + ); + } + return resolvedPath; +} + +function buildHostHeader(options: nodeHttp.RequestOptions): string { + const host = String(options.hostname || options.host || "localhost"); + const defaultPort = + options.protocol === "https:" || Number(options.port) === 443 + ? 443 + : 80; + const port = options.port != null ? Number(options.port) : defaultPort; + return port === defaultPort ? host : `${host}:${port}`; +} + +function isFlatHeaderList( + headers: Record | Array<[string, string]> | readonly string[], +): headers is readonly string[] { + return Array.isArray(headers) && (headers.length === 0 || typeof headers[0] === "string"); +} + +function normalizeRequestHeaders( + headers: nodeHttp.OutgoingHttpHeaders | readonly string[] | undefined, +) : NormalizedHeaders { + if (!headers) return {}; + if (Array.isArray(headers)) { + const normalized: NormalizedHeaders = {}; + for (let i = 0; i < headers.length; i += 2) { + const key = headers[i]; + const value = headers[i + 1]; + if (key !== undefined && value !== undefined) { + const normalizedKey = validateHeaderName(key).toLowerCase(); + validateHeaderValue(normalizedKey, value); + appendNormalizedHeader(normalized, normalizedKey, String(value)); + } + } + return normalized; + } + + const normalized: NormalizedHeaders = {}; + Object.entries(headers).forEach(([key, value]) => { if (value === undefined) return; - normalized[key.toLowerCase()] = Array.isArray(value) - ? 
value.join(", ") - : String(value); + const normalizedKey = validateHeaderName(key).toLowerCase(); + validateHeaderValue(normalizedKey, value); + if (Array.isArray(value)) { + value.forEach((entry) => appendNormalizedHeader(normalized, normalizedKey, String(entry))); + return; + } + appendNormalizedHeader(normalized, normalizedKey, String(value)); }); return normalized; } -function flattenRawHeaders(headers: Record): string[] { - return Object.entries(headers).flatMap(([key, value]) => [key, value]); +function hasUpgradeRequestHeaders(headers: NormalizedHeaders): boolean { + const connectionHeader = joinHeaderValue(headers.connection || "").toLowerCase(); + return connectionHeader.includes("upgrade") && Boolean(headers.upgrade); +} + +function hasResponseBody(statusCode: number, method?: string): boolean { + if (method === "HEAD") { + return false; + } + if ((statusCode >= 100 && statusCode < 200) || statusCode === 204 || statusCode === 304) { + return false; + } + return true; +} + +function splitTransferEncodingTokens(value: string): string[] { + return value + .split(",") + .map((entry) => entry.trim().toLowerCase()) + .filter((entry) => entry.length > 0); +} + +function parseContentLengthHeader(value: NormalizedHeaderValue | undefined): number | null { + if (value === undefined) { + return 0; + } + + const entries = Array.isArray(value) ? value : [value]; + let parsed: number | null = null; + for (const entry of entries) { + if (!/^\d+$/.test(entry)) { + return null; + } + const nextValue = Number(entry); + if (!Number.isSafeInteger(nextValue) || nextValue < 0) { + return null; + } + if (parsed !== null && parsed !== nextValue) { + return null; + } + parsed = nextValue; + } + return parsed ?? 
0; +} + +function parseChunkedBody( + bodyBuffer: Buffer, +): { complete: false } | { complete: true; bytesConsumed: number; body: Buffer } | null { + let offset = 0; + const chunks: Buffer[] = []; + + while (true) { + const lineEnd = bodyBuffer.indexOf("\r\n", offset); + if (lineEnd === -1) { + return { complete: false }; + } + + const sizeLine = bodyBuffer.subarray(offset, lineEnd).toString("latin1"); + if (sizeLine.length === 0 || /[\r\n]/.test(sizeLine)) { + return null; + } + const [sizePart, extensionPart] = sizeLine.split(";", 2); + if (!/^[0-9A-Fa-f]+$/.test(sizePart)) { + return null; + } + if (extensionPart !== undefined && /[\r\n]/.test(extensionPart)) { + return null; + } + + const chunkSize = Number.parseInt(sizePart, 16); + if (!Number.isSafeInteger(chunkSize) || chunkSize < 0) { + return null; + } + + const chunkStart = lineEnd + 2; + const chunkEnd = chunkStart + chunkSize; + const chunkTerminatorEnd = chunkEnd + 2; + if (chunkTerminatorEnd > bodyBuffer.length) { + return { complete: false }; + } + if ( + bodyBuffer[chunkEnd] !== 13 || + bodyBuffer[chunkEnd + 1] !== 10 + ) { + return null; + } + + if (chunkSize > 0) { + chunks.push(bodyBuffer.subarray(chunkStart, chunkEnd)); + offset = chunkTerminatorEnd; + continue; + } + + const trailersEnd = bodyBuffer.indexOf("\r\n\r\n", chunkStart); + if (trailersEnd === -1) { + return { complete: false }; + } + + const trailerBlock = bodyBuffer.subarray(chunkStart, trailersEnd).toString("latin1"); + if (trailerBlock.length > 0) { + for (const trailerLine of trailerBlock.split("\r\n")) { + if (trailerLine.length === 0) { + continue; + } + if (trailerLine.startsWith(" ") || trailerLine.startsWith("\t")) { + return null; + } + if (trailerLine.indexOf(":") === -1) { + return null; + } + } + } + + return { + complete: true, + bytesConsumed: trailersEnd + 4, + body: chunks.length > 0 ? 
Buffer.concat(chunks) : Buffer.alloc(0), + }; + } +} + +function parseLoopbackRequestBuffer( + buffer: Buffer, + server: Server, +): LoopbackRequestParseResult { + let requestStart = 0; + while ( + requestStart + 1 < buffer.length && + buffer[requestStart] === 13 && + buffer[requestStart + 1] === 10 + ) { + requestStart += 2; + } + + const headerEnd = buffer.indexOf("\r\n\r\n", requestStart); + if (headerEnd === -1) { + return { kind: "incomplete" }; + } + + const headerBlock = buffer.subarray(requestStart, headerEnd).toString("latin1"); + const [requestLine, ...headerLines] = headerBlock.split("\r\n"); + const requestMatch = /^([A-Z]+)\s+(\S+)\s+HTTP\/(1)\.(0|1)$/.exec(requestLine); + if (!requestMatch) { + return { + kind: "bad-request", + closeConnection: true, + }; + } + + const headers: NormalizedHeaders = {}; + const rawHeaders: string[] = []; + let previousHeaderName: string | null = null; + + try { + for (const headerLine of headerLines) { + if (headerLine.length === 0) { + continue; + } + if (headerLine.startsWith(" ") || headerLine.startsWith("\t")) { + return { + kind: "bad-request", + closeConnection: true, + }; + } + + const separatorIndex = headerLine.indexOf(":"); + if (separatorIndex === -1) { + return { + kind: "bad-request", + closeConnection: true, + }; + } + + const rawName = headerLine.slice(0, separatorIndex).trim(); + const rawValue = headerLine.slice(separatorIndex + 1).trim(); + const normalizedName = validateHeaderName(rawName).toLowerCase(); + validateHeaderValue(normalizedName, rawValue); + appendNormalizedHeader(headers, normalizedName, rawValue); + rawHeaders.push(rawName, rawValue); + previousHeaderName = normalizedName; + } + } catch { + return { + kind: "bad-request", + closeConnection: true, + }; + } + + const requestMethod = requestMatch[1]; + const requestUrl = requestMatch[2]; + const httpMinorVersion = Number(requestMatch[4]); + const requestCloseHeader = joinHeaderValue(headers.connection || "").toLowerCase(); + let 
closeConnection = httpMinorVersion === 0 + ? !requestCloseHeader.includes("keep-alive") + : requestCloseHeader.includes("close"); + + if (hasUpgradeRequestHeaders(headers) && server.listenerCount("upgrade") > 0) { + return { + kind: "request", + bytesConsumed: buffer.length, + closeConnection: false, + request: { + method: requestMethod, + url: requestUrl, + headers, + rawHeaders, + bodyBase64: headerEnd + 4 < buffer.length + ? buffer.subarray(headerEnd + 4).toString("base64") + : undefined, + }, + upgradeHead: headerEnd + 4 < buffer.length + ? buffer.subarray(headerEnd + 4) + : Buffer.alloc(0), + }; + } + + const transferEncoding = headers["transfer-encoding"]; + const contentLength = headers["content-length"]; + let requestBody: Buffer = Buffer.alloc(0); + let bytesConsumed = headerEnd + 4; + + if (transferEncoding !== undefined) { + const tokens = splitTransferEncodingTokens(joinHeaderValue(transferEncoding)); + const chunkedCount = tokens.filter((entry) => entry === "chunked").length; + const hasChunked = chunkedCount > 0; + const chunkedIsFinal = hasChunked && tokens[tokens.length - 1] === "chunked"; + if (!hasChunked || chunkedCount !== 1 || !chunkedIsFinal || contentLength !== undefined) { + return { + kind: "bad-request", + closeConnection: true, + }; + } + + const parsedChunked = parseChunkedBody(buffer.subarray(headerEnd + 4)); + if (parsedChunked === null) { + return { + kind: "bad-request", + closeConnection: true, + }; + } + if (!parsedChunked.complete) { + return { kind: "incomplete" }; + } + + requestBody = parsedChunked.body; + bytesConsumed = headerEnd + 4 + parsedChunked.bytesConsumed; + } else if (contentLength !== undefined) { + const parsedContentLength = parseContentLengthHeader(contentLength); + if (parsedContentLength === null) { + return { + kind: "bad-request", + closeConnection: true, + }; + } + const bodyEnd = headerEnd + 4 + parsedContentLength; + if (bodyEnd > buffer.length) { + return { kind: "incomplete" }; + } + requestBody = 
buffer.subarray(headerEnd + 4, bodyEnd); + bytesConsumed = bodyEnd; + } + + return { + kind: "request", + bytesConsumed, + closeConnection, + request: { + method: requestMethod, + url: requestUrl, + headers, + rawHeaders, + bodyBase64: requestBody.length > 0 ? requestBody.toString("base64") : undefined, + }, + }; +} + +function serializeRawHeaderPairs( + rawHeaders: string[] | undefined, + fallbackHeaders: Array<[string, string]> | undefined, +): { + headers: NormalizedHeaders; + rawNameMap: Map; + order: string[]; +} { + const headers: NormalizedHeaders = {}; + const rawNameMap = new Map(); + const order: string[] = []; + + if (Array.isArray(rawHeaders) && rawHeaders.length > 0) { + for (let index = 0; index < rawHeaders.length; index += 2) { + const rawName = rawHeaders[index]; + const value = rawHeaders[index + 1]; + if (rawName === undefined || value === undefined) { + continue; + } + const normalizedName = rawName.toLowerCase(); + appendNormalizedHeader(headers, normalizedName, value); + if (!rawNameMap.has(normalizedName)) { + rawNameMap.set(normalizedName, rawName); + order.push(normalizedName); + } + } + return { headers, rawNameMap, order }; + } + + if (Array.isArray(fallbackHeaders)) { + for (const [name, value] of fallbackHeaders) { + const normalizedName = name.toLowerCase(); + appendNormalizedHeader(headers, normalizedName, value); + if (!rawNameMap.has(normalizedName)) { + rawNameMap.set(normalizedName, name); + order.push(normalizedName); + } + } + } + + return { headers, rawNameMap, order }; +} + +function finalizeRawHeaderPairs( + headers: NormalizedHeaders, + rawNameMap: Map, + order: string[], +): Array<[string, string]> { + const entries: Array<[string, string]> = []; + const seen = new Set(); + for (const key of order) { + const value = headers[key]; + if (value === undefined) { + continue; + } + const rawName = rawNameMap.get(key) || key; + const serialized = Array.isArray(value) + ? (key === "set-cookie" ? 
value : [value.join(", ")]) + : [value]; + for (const entry of serialized) { + entries.push([rawName, entry]); + } + seen.add(key); + } + + for (const [key, value] of Object.entries(headers)) { + if (seen.has(key)) { + continue; + } + const rawName = rawNameMap.get(key) || key; + const serialized = Array.isArray(value) + ? (key === "set-cookie" ? value : [value.join(", ")]) + : [value]; + for (const entry of serialized) { + entries.push([rawName, entry]); + } + } + + return entries; +} + +function createBadRequestResponseBuffer(): Buffer { + return Buffer.from("HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n", "latin1"); +} + +function serializeLoopbackResponse( + response: SerializedServerResponse, + request: SerializedServerRequest, + requestWantsClose: boolean, +): { payload: Buffer; closeConnection: boolean } { + const statusCode = response.status || 200; + const statusText = HTTP_STATUS_TEXT[statusCode] || "OK"; + const { + headers, + rawNameMap, + order, + } = serializeRawHeaderPairs(response.rawHeaders, response.headers); + const trailerInfo = serializeRawHeaderPairs(response.rawTrailers, response.trailers); + + const bodyBuffer = + response.body == null + ? Buffer.alloc(0) + : response.bodyEncoding === "base64" + ? Buffer.from(response.body, "base64") + : Buffer.from(response.body, "utf8"); + const bodyAllowed = hasResponseBody(statusCode, request.method); + const transferEncodingTokens = headers["transfer-encoding"] + ? 
splitTransferEncodingTokens(joinHeaderValue(headers["transfer-encoding"])) + : []; + const isChunked = transferEncodingTokens.includes("chunked"); + const hasExplicitContentLength = headers["content-length"] !== undefined; + let closeConnection = + requestWantsClose || + response.connectionEnded === true || + response.connectionReset === true; + + if (!bodyAllowed) { + if (isChunked) { + closeConnection = true; + } + delete headers["content-length"]; + } else if (!isChunked && !hasExplicitContentLength) { + headers["content-length"] = String(bodyBuffer.length); + rawNameMap.set("content-length", "Content-Length"); + order.push("content-length"); + } + + if (closeConnection) { + headers.connection = "close"; + if (!rawNameMap.has("connection")) { + rawNameMap.set("connection", "Connection"); + order.push("connection"); + } + } else if (headers.connection === undefined && request.headers.connection !== undefined) { + headers.connection = "keep-alive"; + rawNameMap.set("connection", "Connection"); + order.push("connection"); + } + + const serializedChunks: Buffer[] = []; + for (const informational of response.informational ?? 
[]) { + const infoHeaders = finalizeRawHeaderPairs( + serializeRawHeaderPairs(informational.rawHeaders, informational.headers).headers, + serializeRawHeaderPairs(informational.rawHeaders, informational.headers).rawNameMap, + serializeRawHeaderPairs(informational.rawHeaders, informational.headers).order, + ); + const headerLines = infoHeaders.map(([name, value]) => `${name}: ${value}\r\n`).join(""); + serializedChunks.push( + Buffer.from( + `HTTP/1.1 ${informational.status} ${informational.statusText || HTTP_STATUS_TEXT[informational.status] || ""}\r\n${headerLines}\r\n`, + "latin1", + ), + ); + } + + const finalHeaders = finalizeRawHeaderPairs(headers, rawNameMap, order); + const headerLines = finalHeaders.map(([name, value]) => `${name}: ${value}\r\n`).join(""); + serializedChunks.push( + Buffer.from(`HTTP/1.1 ${statusCode} ${statusText}\r\n${headerLines}\r\n`, "latin1"), + ); + + if (bodyAllowed) { + if (isChunked) { + if (bodyBuffer.length > 0) { + serializedChunks.push(Buffer.from(bodyBuffer.length.toString(16) + "\r\n", "latin1")); + serializedChunks.push(bodyBuffer); + serializedChunks.push(Buffer.from("\r\n", "latin1")); + } + serializedChunks.push(Buffer.from("0\r\n", "latin1")); + if (Object.keys(trailerInfo.headers).length > 0) { + const trailerPairs = finalizeRawHeaderPairs( + trailerInfo.headers, + trailerInfo.rawNameMap, + trailerInfo.order, + ); + for (const [name, value] of trailerPairs) { + serializedChunks.push(Buffer.from(`${name}: ${value}\r\n`, "latin1")); + } + } + serializedChunks.push(Buffer.from("\r\n", "latin1")); + } else if (bodyBuffer.length > 0) { + serializedChunks.push(bodyBuffer); + } + } + + return { + payload: serializedChunks.length === 1 ? 
serializedChunks[0] : Buffer.concat(serializedChunks), + closeConnection, + }; } +const HTTP_STATUS_TEXT: Record = { + 100: "Continue", + 101: "Switching Protocols", + 102: "Processing", + 103: "Early Hints", + 200: "OK", + 201: "Created", + 204: "No Content", + 301: "Moved Permanently", + 302: "Found", + 304: "Not Modified", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 500: "Internal Server Error", +}; + function isLoopbackRequestHost(hostname: string): boolean { const bare = hostname.startsWith("[") && hostname.endsWith("]") ? hostname.slice(1, -1) @@ -1550,6 +3337,16 @@ function isLoopbackRequestHost(hostname: string): boolean { function findLoopbackServerForRequest( options: nodeHttp.RequestOptions, +): Server | null { + if (String(options.method || "GET").toUpperCase() === "CONNECT") { + return null; + } + return findLoopbackServerByPort(options, true); +} + +function findLoopbackServerByPort( + options: nodeHttp.RequestOptions, + skipUpgradeHeaders = false, ): Server | null { const hostname = String(options.hostname || options.host || "localhost"); if (!isLoopbackRequestHost(hostname)) { @@ -1557,9 +3354,7 @@ function findLoopbackServerForRequest( } const normalizedHeaders = normalizeRequestHeaders(options.headers); - const connectionHeader = normalizedHeaders["connection"]?.toLowerCase(); - const upgradeHeader = normalizedHeaders["upgrade"]; - if (connectionHeader?.includes("upgrade") || upgradeHeader) { + if (skipUpgradeHeaders && hasUpgradeRequestHeaders(normalizedHeaders)) { return null; } @@ -1575,8 +3370,35 @@ function findLoopbackServerForRequest( return null; } +function findLoopbackHttp2CompatibilityServer( + options: nodeHttp.RequestOptions, +): Http2Server | null { + const hostname = String(options.hostname || options.host || "localhost"); + if (!isLoopbackRequestHost(hostname)) { + return null; + } + + const port = Number(options.port) || 443; + for (const server of http2Servers.values()) { + const address 
= server.address(); + if (!address || typeof address !== "object") { + continue; + } + if ( + address.port === port && + server.encrypted && + server.allowHTTP1 && + server.listenerCount("request") > 0 + ) { + return server; + } + } + + return null; +} + class ServerIncomingMessage { - headers: Record; + headers: Record; rawHeaders: string[]; method: string; url: string; @@ -1590,6 +3412,7 @@ class ServerIncomingMessage { httpVersionMajor = 1; httpVersionMinor = 1; complete = true; + aborted = false; // Readable stream state stub for frameworks that inspect internal state _readableState = { flowing: null, length: 0, ended: false, objectMode: false }; private _listeners: Record = {}; @@ -1624,6 +3447,12 @@ class ServerIncomingMessage { } if (this.rawHeaders.length === 0) { Object.entries(this.headers).forEach(([key, value]) => { + if (Array.isArray(value)) { + value.forEach((entry) => { + this.rawHeaders.push(key, entry); + }); + return; + } this.rawHeaders.push(key, value); }); } @@ -1683,6 +3512,17 @@ class ServerIncomingMessage { this.emit("close"); return this; } + + _abort(): void { + if (this.aborted) { + return; + } + this.aborted = true; + const error = createConnResetError("aborted"); + this.emit("aborted"); + this.emit("error", error); + this.emit("close"); + } } /** @@ -1695,7 +3535,9 @@ class ServerResponseBridge { headersSent = false; writable = true; writableFinished = false; - private _headers = new Map(); + outputSize = 0; + private _headers = new Map(); + private _trailers = new Map(); private _chunks: Uint8Array[] = []; private _chunksBytes = 0; private _listeners: Record = {}; @@ -1703,6 +3545,10 @@ class ServerResponseBridge { private _resolveClosed: (() => void) | null = null; private _connectionEnded = false; private _connectionReset = false; + private _rawHeaderNames = new Map(); + private _rawTrailerNames = new Map(); + private _informational: SerializedInformationalResponse[] = []; + private _pendingRawInfoBuffer = ""; constructor() { 
this._closedPromise = new Promise((resolve) => { @@ -1749,57 +3595,223 @@ class ServerResponseBridge { writeHead( statusCode: number, - headers?: Record | Array<[string, string]> + headers?: Record | Array<[string, string]> | readonly string[] ): this { - this.statusCode = statusCode; - if (headers) { - if (Array.isArray(headers)) { - headers.forEach(([key, value]) => this.setHeader(key, value)); - } else { - Object.entries(headers).forEach(([key, value]) => - this.setHeader(key, value) - ); - } - } - this.headersSent = true; + if (statusCode >= 100 && statusCode < 200 && statusCode !== 101) { + const informationalHeaders = new Map(); + const informationalRawHeaderNames = new Map(); + if (headers) { + if (isFlatHeaderList(headers)) { + for (let index = 0; index < headers.length; index += 2) { + const key = headers[index]; + const value = headers[index + 1]; + if (key === undefined || value === undefined) { + continue; + } + const actualName = validateHeaderName(key).toLowerCase(); + validateHeaderValue(actualName, value); + informationalHeaders.set(actualName, String(value)); + if (!informationalRawHeaderNames.has(actualName)) { + informationalRawHeaderNames.set(actualName, key); + } + } + } else if (Array.isArray(headers)) { + headers.forEach(([key, value]) => { + const actualName = validateHeaderName(key).toLowerCase(); + validateHeaderValue(actualName, value); + informationalHeaders.set(actualName, String(value)); + if (!informationalRawHeaderNames.has(actualName)) { + informationalRawHeaderNames.set(actualName, key); + } + }); + } else { + Object.entries(headers).forEach(([key, value]) => { + const actualName = validateHeaderName(key).toLowerCase(); + validateHeaderValue(actualName, value); + informationalHeaders.set(actualName, String(value)); + if (!informationalRawHeaderNames.has(actualName)) { + informationalRawHeaderNames.set(actualName, key); + } + }); + } + } + const normalizedHeaders = Array.from(informationalHeaders.entries()).flatMap(([key, value]) => 
{ + const serialized = serializeHeaderValue(value); + return Array.isArray(serialized) + ? serialized.map((entry) => [key, entry] as [string, string]) + : [[key, serialized] as [string, string]]; + }); + const rawHeaders = Array.from(informationalHeaders.entries()).flatMap(([key, value]) => { + const rawName = informationalRawHeaderNames.get(key) || key; + const serialized = serializeHeaderValue(value); + return Array.isArray(serialized) + ? serialized.flatMap((entry) => [rawName, entry]) + : [rawName, serialized]; + }); + this._informational.push({ + status: statusCode, + statusText: HTTP_STATUS_TEXT[statusCode], + headers: normalizedHeaders, + rawHeaders, + }); + return this; + } + this.statusCode = statusCode; + if (headers) { + if (isFlatHeaderList(headers)) { + for (let index = 0; index < headers.length; index += 2) { + const key = headers[index]; + const value = headers[index + 1]; + if (key !== undefined && value !== undefined) { + this.setHeader(key, value); + } + } + } else if (Array.isArray(headers)) { + headers.forEach(([key, value]) => this.setHeader(key, value)); + } else { + Object.entries(headers).forEach(([key, value]) => + this.setHeader(key, value) + ); + } + } + this.headersSent = true; + this.outputSize += 64; + return this; + } + + setHeader(name: string, value: string | number | readonly (string | number)[]): this { + if (this.headersSent) { + throw createErrorWithCode( + "Cannot set headers after they are sent to the client", + "ERR_HTTP_HEADERS_SENT", + ); + } + const lower = validateHeaderName(name).toLowerCase(); + validateHeaderValue(lower, value); + const storedValue: StoredHeaderValue = Array.isArray(value) + ? 
Array.from(value as readonly (string | number)[]) + : value as string | number; + this._headers.set(lower, storedValue); + if (!this._rawHeaderNames.has(lower)) { + this._rawHeaderNames.set(lower, name); + } return this; } - setHeader(name: string, value: string | number | string[]): this { - const normalized = Array.isArray(value) ? value.join(", ") : String(value); - this._headers.set(name.toLowerCase(), normalized); + setHeaders(headers: Headers | Map): this { + if (this.headersSent) { + throw createErrorWithCode( + "Cannot set headers after they are sent to the client", + "ERR_HTTP_HEADERS_SENT", + ); + } + if (!(headers instanceof Headers) && !(headers instanceof Map)) { + throw createTypeErrorWithCode( + `The "headers" argument must be an instance of Headers or Map. Received ${formatReceivedType(headers)}`, + "ERR_INVALID_ARG_TYPE", + ); + } + + if (headers instanceof Headers) { + const pending = Object.create(null) as Record; + headers.forEach((value, key) => { + appendNormalizedHeader(pending, key.toLowerCase(), value); + }); + Object.entries(pending).forEach(([key, value]) => { + this.setHeader(key, value); + }); + return this; + } + + headers.forEach((value, key) => { + this.setHeader(key, value); + }); return this; } - getHeader(name: string): string | undefined { - return this._headers.get(name.toLowerCase()); + getHeader(name: string): StoredHeaderValue | undefined { + if (typeof name !== "string") { + throw createTypeErrorWithCode( + `The "name" argument must be of type string. Received ${formatReceivedType(name)}`, + "ERR_INVALID_ARG_TYPE", + ); + } + const value = this._headers.get(name.toLowerCase()); + return value === undefined ? undefined : cloneStoredHeaderValue(value); } hasHeader(name: string): boolean { + if (typeof name !== "string") { + throw createTypeErrorWithCode( + `The "name" argument must be of type string. 
Received ${formatReceivedType(name)}`, + "ERR_INVALID_ARG_TYPE", + ); + } return this._headers.has(name.toLowerCase()); } removeHeader(name: string): void { - this._headers.delete(name.toLowerCase()); + if (typeof name !== "string") { + throw createTypeErrorWithCode( + `The "name" argument must be of type string. Received ${formatReceivedType(name)}`, + "ERR_INVALID_ARG_TYPE", + ); + } + const lower = name.toLowerCase(); + this._headers.delete(lower); + this._rawHeaderNames.delete(lower); } - write(chunk: string | Uint8Array | null): boolean { + write( + chunk: string | Uint8Array | null, + encodingOrCallback?: BufferEncoding | (() => void), + callback?: () => void, + ): boolean { if (chunk == null) return true; this.headersSent = true; - const buf = typeof chunk === "string" ? Buffer.from(chunk) : chunk; + const buf = + typeof chunk === "string" + ? Buffer.from(chunk, typeof encodingOrCallback === "string" ? encodingOrCallback : undefined) + : chunk; if (this._chunksBytes + buf.byteLength > MAX_HTTP_BODY_BYTES) { throw new Error("ERR_HTTP_BODY_TOO_LARGE: response body exceeds " + MAX_HTTP_BODY_BYTES + " byte limit"); } this._chunks.push(buf); this._chunksBytes += buf.byteLength; + this.outputSize += buf.byteLength; + const writeCallback = typeof encodingOrCallback === "function" ? encodingOrCallback : callback; + if (typeof writeCallback === "function") { + queueMicrotask(writeCallback); + } return true; } - end(chunk?: string | Uint8Array | null): this { + end( + chunkOrCallback?: string | Uint8Array | null | (() => void), + encodingOrCallback?: BufferEncoding | (() => void), + callback?: () => void, + ): this { + let chunk: string | Uint8Array | null | undefined; + let endCallback: (() => void) | undefined; + + if (typeof chunkOrCallback === "function") { + endCallback = chunkOrCallback; + } else { + chunk = chunkOrCallback; + endCallback = typeof encodingOrCallback === "function" ? 
encodingOrCallback : callback; + } + if (chunk != null) { - this.write(chunk); + if (typeof chunk === "string" && typeof encodingOrCallback === "string") { + this.write(Buffer.from(chunk, encodingOrCallback)); + } else { + this.write(chunk); + } } this._finalize(); + if (typeof endCallback === "function") { + queueMicrotask(endCallback); + } return this; } @@ -1807,9 +3819,15 @@ class ServerResponseBridge { return Array.from(this._headers.keys()); } - getHeaders(): Record { - const result: Record = {}; - for (const [key, value] of this._headers) result[key] = value; + getRawHeaderNames(): string[] { + return Array.from(this._headers.keys()).map((key) => this._rawHeaderNames.get(key) || key); + } + + getHeaders(): Record { + const result = Object.create(null) as Record; + for (const [key, value] of this._headers) { + result[key] = cloneStoredHeaderValue(value); + } return result; } @@ -1819,6 +3837,8 @@ class ServerResponseBridge { // Fake socket for frameworks that access res.socket/res.connection socket = { writable: true, + writableCorked: 0, + writableHighWaterMark: 16 * 1024, on: () => this.socket, once: () => this.socket, removeListener: () => this.socket, @@ -1829,21 +3849,65 @@ class ServerResponseBridge { end: () => { this._connectionEnded = true; }, - cork: () => {}, - uncork: () => {}, - write: () => true, + cork: () => { + this._writableState.corked += 1; + this.socket.writableCorked = this._writableState.corked; + }, + uncork: () => { + this._writableState.corked = Math.max(0, this._writableState.corked - 1); + this.socket.writableCorked = this._writableState.corked; + }, + write: (_chunk?: unknown, callback?: () => void) => { + if (typeof callback === "function") { + queueMicrotask(callback); + } + return true; + }, } as Record; connection = this.socket; // Node.js http.ServerResponse socket/stream compatibility stubs assignSocket(): void { /* no-op */ } detachSocket(): void { /* no-op */ } - writeContinue(): void { /* no-op */ } - writeProcessing(): 
void { /* no-op */ } - addTrailers(): void { /* no-op */ } - cork(): void { /* no-op */ } - uncork(): void { /* no-op */ } + writeContinue(): void { this.writeHead(100); } + writeProcessing(): void { this.writeHead(102); } + addTrailers(headers: Record | readonly string[]): void { + if (Array.isArray(headers)) { + for (let index = 0; index < headers.length; index += 2) { + const key = headers[index]; + const value = headers[index + 1]; + if (key === undefined || value === undefined) { + continue; + } + const actualName = validateHeaderName(key).toLowerCase(); + validateHeaderValue(actualName, value); + this._trailers.set(actualName, String(value)); + if (!this._rawTrailerNames.has(actualName)) { + this._rawTrailerNames.set(actualName, key); + } + } + return; + } + + Object.entries(headers).forEach(([key, value]) => { + const actualName = validateHeaderName(key).toLowerCase(); + validateHeaderValue(actualName, value); + this._trailers.set(actualName, String(value)); + if (!this._rawTrailerNames.has(actualName)) { + this._rawTrailerNames.set(actualName, key); + } + }); + } + cork(): void { + (this.socket.cork as () => void)(); + } + uncork(): void { + (this.socket.uncork as () => void)(); + } setTimeout(_msecs?: number): this { return this; } + get writableCorked(): number { + return Number((this.socket as { writableCorked?: number }).writableCorked || 0); + } flushHeaders(): void { this.headersSent = true; @@ -1864,27 +3928,116 @@ class ServerResponseBridge { serialize(): SerializedServerResponse { const bodyBuffer = this._chunks.length > 0 ? 
Buffer.concat(this._chunks) : Buffer.alloc(0); + const serializedHeaders = Array.from(this._headers.entries()).flatMap(([key, value]) => { + const serialized = serializeHeaderValue(value); + if (Array.isArray(serialized)) { + if (key === "set-cookie") { + return serialized.map((entry) => [key, entry] as [string, string]); + } + return [[key, serialized.join(", ")] as [string, string]]; + } + return [[key, serialized] as [string, string]]; + }); + const rawHeaders = Array.from(this._headers.entries()).flatMap(([key, value]) => { + const rawName = this._rawHeaderNames.get(key) || key; + const serialized = serializeHeaderValue(value); + if (Array.isArray(serialized)) { + if (key === "set-cookie") { + return serialized.flatMap((entry) => [rawName, entry]); + } + return [rawName, serialized.join(", ")]; + } + return [rawName, serialized]; + }); + const serializedTrailers = Array.from(this._trailers.entries()).flatMap(([key, value]) => { + const serialized = serializeHeaderValue(value); + return Array.isArray(serialized) + ? serialized.map((entry) => [key, entry] as [string, string]) + : [[key, serialized] as [string, string]]; + }); + const rawTrailers = Array.from(this._trailers.entries()).flatMap(([key, value]) => { + const rawName = this._rawTrailerNames.get(key) || key; + const serialized = serializeHeaderValue(value); + return Array.isArray(serialized) + ? serialized.flatMap((entry) => [rawName, entry]) + : [rawName, serialized]; + }); return { status: this.statusCode, - headers: Array.from(this._headers.entries()), + headers: serializedHeaders, + rawHeaders, + informational: this._informational.length > 0 ? [...this._informational] : undefined, body: bodyBuffer.toString("base64"), bodyEncoding: "base64", + trailers: serializedTrailers.length > 0 ? serializedTrailers : undefined, + rawTrailers: rawTrailers.length > 0 ? 
rawTrailers : undefined, connectionEnded: this._connectionEnded, connectionReset: this._connectionReset, }; } + _writeRaw(chunk: string, callback?: () => void): boolean { + this._pendingRawInfoBuffer += String(chunk); + this._flushPendingRawInformational(); + if (typeof callback === "function") { + queueMicrotask(callback); + } + return true; + } + private _finalize(): void { if (this.writableFinished) { return; } this.writableFinished = true; this.writable = false; + this._writableState.ended = true; + this._writableState.finished = true; this._emit("finish"); this._emit("close"); this._resolveClosed?.(); this._resolveClosed = null; } + + private _flushPendingRawInformational(): void { + let separatorIndex = this._pendingRawInfoBuffer.indexOf("\r\n\r\n"); + while (separatorIndex !== -1) { + const rawFrame = this._pendingRawInfoBuffer.slice(0, separatorIndex); + this._pendingRawInfoBuffer = this._pendingRawInfoBuffer.slice(separatorIndex + 4); + + const [statusLine, ...headerLines] = rawFrame.split("\r\n"); + const statusMatch = /^HTTP\/1\.[01]\s+(\d{3})(?:\s+(.*))?$/.exec(statusLine); + if (!statusMatch) { + separatorIndex = this._pendingRawInfoBuffer.indexOf("\r\n\r\n"); + continue; + } + + const status = Number(statusMatch[1]); + if (status >= 100 && status < 200 && status !== 101) { + const headers: Array<[string, string]> = []; + const rawHeaders: string[] = []; + for (const headerLine of headerLines) { + const separator = headerLine.indexOf(":"); + if (separator === -1) { + continue; + } + const key = headerLine.slice(0, separator).trim(); + const value = headerLine.slice(separator + 1).trim(); + headers.push([key.toLowerCase(), value]); + rawHeaders.push(key, value); + } + + this._informational.push({ + status, + statusText: statusMatch[2] || HTTP_STATUS_TEXT[status] || undefined, + headers, + rawHeaders, + }); + } + + separatorIndex = this._pendingRawInfoBuffer.indexOf("\r\n\r\n"); + } + } } /** @@ -2018,7 +4171,7 @@ class Server { this._listenPromise = 
this._start(port, hostname) .then(() => { this._emit("listening"); - callback?.(); + callback?.call(this); }) .catch((error) => { this._emit("error", error); @@ -2115,6 +4268,10 @@ class Server { return this; } + listenerCount(event: string): number { + return this._listeners[event]?.length || 0; + } + // Node.js Server timeout properties (no-op in sandbox) keepAliveTimeout = 5000; requestTimeout = 300000; @@ -2159,19 +4316,121 @@ async function dispatchServerRequest( const request = JSON.parse(requestJson) as SerializedServerRequest; const incoming = new ServerIncomingMessage(request); const outgoing = new ServerResponseBridge(); + incoming.socket = outgoing.socket; + incoming.connection = outgoing.socket; + const pendingImmediates: Promise[] = []; + const pendingTimers: Promise[] = []; + const trackedTimers = new Map, () => void>(); + let consumedTimerCount = 0; + let consumedImmediateCount = 0; try { try { - // Call listener synchronously — frameworks register event handlers here - const listenerResult = listener(incoming, outgoing); + const originalSetImmediate = globalThis.setImmediate; + const originalSetTimeout = globalThis.setTimeout; + const originalClearTimeout = globalThis.clearTimeout; + if (typeof originalSetImmediate === "function") { + globalThis.setImmediate = (( + callback: (...args: unknown[]) => unknown, + ...args: unknown[] + ) => { + const pending = new Promise((resolve) => { + queueMicrotask(() => { + try { + callback(...args); + } finally { + resolve(); + } + }); + }); + pendingImmediates.push(pending); + return 0 as unknown as ReturnType; + }) as typeof setImmediate; + } + if (typeof originalSetTimeout === "function") { + globalThis.setTimeout = (( + callback: (...args: unknown[]) => unknown, + delay?: number, + ...args: unknown[] + ) => { + if (typeof callback !== "function") { + return originalSetTimeout(callback as TimerHandler, delay, ...args); + } + + const normalizedDelay = + typeof delay === "number" && Number.isFinite(delay) + ? 
Math.max(0, delay) + : 0; + + if (normalizedDelay > 1_000) { + return originalSetTimeout(callback, normalizedDelay, ...args); + } - // Emit readable stream events so body-parsing middleware (e.g. express.json()) can proceed - if (incoming.rawBody && incoming.rawBody.length > 0) { - incoming.emit("data", incoming.rawBody); + let resolvePending!: () => void; + const pending = new Promise((resolve) => { + resolvePending = resolve; + }); + let handle: ReturnType; + handle = originalSetTimeout(() => { + trackedTimers.delete(handle); + try { + callback(...args); + } finally { + resolvePending(); + } + }, normalizedDelay); + trackedTimers.set(handle, resolvePending); + pendingTimers.push(pending); + return handle; + }) as typeof setTimeout; + } + if (typeof originalClearTimeout === "function") { + globalThis.clearTimeout = ((handle?: ReturnType) => { + if (handle != null) { + const resolvePending = trackedTimers.get(handle); + if (resolvePending) { + trackedTimers.delete(handle); + resolvePending(); + } + } + return originalClearTimeout(handle); + }) as typeof clearTimeout; } - incoming.emit("end"); - await Promise.resolve(listenerResult); + try { + // Call listener synchronously — frameworks register event handlers here + const listenerResult = listener(incoming, outgoing); + + // Emit readable stream events so body-parsing middleware (e.g. 
express.json()) can proceed + if (incoming.rawBody && incoming.rawBody.length > 0) { + incoming.emit("data", incoming.rawBody); + } + incoming.emit("end"); + + await Promise.resolve(listenerResult); + while ( + consumedTimerCount < pendingTimers.length || + consumedImmediateCount < pendingImmediates.length + ) { + const pending = [ + ...pendingTimers.slice(consumedTimerCount), + ...pendingImmediates.slice(consumedImmediateCount), + ]; + consumedTimerCount = pendingTimers.length; + consumedImmediateCount = pendingImmediates.length; + await Promise.allSettled(pending); + } + } finally { + if (typeof originalSetImmediate === "function") { + globalThis.setImmediate = originalSetImmediate; + } + if (typeof originalSetTimeout === "function") { + globalThis.setTimeout = originalSetTimeout; + } + if (typeof originalClearTimeout === "function") { + globalThis.clearTimeout = originalClearTimeout; + } + } } catch (err) { outgoing.statusCode = 500; try { @@ -2187,129 +4446,616 @@ async function dispatchServerRequest( } await outgoing.waitForClose(); - // Let same-turn deferred socket teardown (e.g. setImmediate(() => res.connection.end())) - // update the serialized connection flags before the client receives the response. 
- await new Promise((resolve) => setTimeout(resolve, 0)); + await Promise.allSettled([...pendingTimers, ...pendingImmediates]); return JSON.stringify(outgoing.serialize()); } finally { server._endRequestDispatch(); } } -// Upgrade socket for bidirectional data relay through the host bridge -const upgradeSocketInstances = new Map(); - -class UpgradeSocket { - remoteAddress: string; - remotePort: number; - localAddress = "127.0.0.1"; - localPort = 0; - connecting = false; - destroyed = false; - writable = true; - readable = true; - readyState = "open"; - bytesWritten = 0; - private _listeners: Record = {}; - private _socketId: number; - - // Readable stream state stub for ws compatibility (socketOnClose checks _readableState.endEmitted) - _readableState = { endEmitted: false }; - _writableState = { finished: false, errorEmitted: false }; - - constructor(socketId: number, options?: { host?: string; port?: number }) { - this._socketId = socketId; - this.remoteAddress = options?.host || "127.0.0.1"; - this.remotePort = options?.port || 80; - } - - setTimeout(_ms: number, _cb?: () => void): this { return this; } - setNoDelay(_noDelay?: boolean): this { return this; } - setKeepAlive(_enable?: boolean, _delay?: number): this { return this; } - ref(): this { return this; } - unref(): this { return this; } - cork(): void {} - uncork(): void {} - pause(): this { return this; } - resume(): this { return this; } - address(): { address: string; family: string; port: number } { - return { address: this.localAddress, family: "IPv4", port: this.localPort }; - } - - on(event: string, listener: EventListener): this { - if (!this._listeners[event]) this._listeners[event] = []; - this._listeners[event].push(listener); - return this; +async function dispatchHttp2CompatibilityRequest( + serverId: number, + requestId: number, +): Promise { + const pending = pendingHttp2CompatRequests.get(requestId); + if (!pending || pending.serverId !== serverId || typeof _networkHttp2ServerRespondRaw 
=== "undefined") { + return; } + pendingHttp2CompatRequests.delete(requestId); - addListener(event: string, listener: EventListener): this { - return this.on(event, listener); + const server = http2Servers.get(serverId); + if (!server) { + _networkHttp2ServerRespondRaw.applySync(undefined, [ + serverId, + requestId, + JSON.stringify({ + status: 500, + headers: [["content-type", "text/plain"]], + body: "Unknown HTTP/2 server", + bodyEncoding: "utf8", + }), + ]); + return; } - once(event: string, listener: EventListener): this { - const wrapper = (...args: unknown[]): void => { - this.off(event, wrapper); - listener(...args); - }; - return this.on(event, wrapper); - } + const request = JSON.parse(pending.requestJson) as SerializedServerRequest; + const incoming = new ServerIncomingMessage(request); + const outgoing = new ServerResponseBridge(); + incoming.socket = outgoing.socket; + incoming.connection = outgoing.socket; - off(event: string, listener: EventListener): this { - if (this._listeners[event]) { - const idx = this._listeners[event].indexOf(listener); - if (idx !== -1) this._listeners[event].splice(idx, 1); + try { + server.emit("request", incoming, outgoing); + if (incoming.rawBody && incoming.rawBody.length > 0) { + incoming.emit("data", incoming.rawBody); } - return this; + incoming.emit("end"); + if (!outgoing.writableFinished) { + outgoing.end(); + } + await outgoing.waitForClose(); + _networkHttp2ServerRespondRaw.applySync(undefined, [ + serverId, + requestId, + JSON.stringify(outgoing.serialize()), + ]); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + _networkHttp2ServerRespondRaw.applySync(undefined, [ + serverId, + requestId, + JSON.stringify({ + status: 500, + headers: [["content-type", "text/plain"]], + body: `Error: ${message}`, + bodyEncoding: "utf8", + }), + ]); } +} - removeListener(event: string, listener: EventListener): this { - return this.off(event, listener); +async function dispatchLoopbackServerRequest( + serverOrId: number | Server, + requestInput: string | SerializedServerRequest, +): Promise<{ + responseJson: string; + abortRequest: () => void; +}> { + const server = + typeof serverOrId === "number" + ? serverInstances.get(serverOrId) + : serverOrId; + if (!server) { + throw new Error( + `Unknown HTTP server: ${typeof serverOrId === "number" ? serverOrId : ""}`, + ); } - removeAllListeners(event?: string): this { - if (event) { - delete this._listeners[event]; - } else { - this._listeners = {}; - } - return this; - } + const request = + typeof requestInput === "string" + ? JSON.parse(requestInput) as SerializedServerRequest + : requestInput; + const incoming = new ServerIncomingMessage(request); + const outgoing = new ServerResponseBridge(); + incoming.socket = outgoing.socket; + incoming.connection = outgoing.socket; + const pendingImmediates: Promise[] = []; + const pendingTimers: Promise[] = []; + const trackedTimers = new Map, () => void>(); + let consumedTimerCount = 0; + let consumedImmediateCount = 0; + server._beginRequestDispatch(); - emit(event: string, ...args: unknown[]): boolean { - const handlers = this._listeners[event]; - if (handlers) handlers.slice().forEach((fn) => fn.call(this, ...args)); - return handlers !== undefined && handlers.length > 0; - } + try { + try { + const originalSetImmediate = globalThis.setImmediate; + const originalSetTimeout = globalThis.setTimeout; + const originalClearTimeout = globalThis.clearTimeout; + if (typeof originalSetImmediate === "function") { + globalThis.setImmediate = (( + callback: (...args: unknown[]) => 
unknown, + ...args: unknown[] + ) => { + const pending = new Promise((resolve) => { + queueMicrotask(() => { + try { + callback(...args); + } finally { + resolve(); + } + }); + }); + pendingImmediates.push(pending); + return 0 as unknown as ReturnType; + }) as typeof setImmediate; + } + if (typeof originalSetTimeout === "function") { + globalThis.setTimeout = (( + callback: (...args: unknown[]) => unknown, + delay?: number, + ...args: unknown[] + ) => { + if (typeof callback !== "function") { + return originalSetTimeout(callback as TimerHandler, delay, ...args); + } - listenerCount(event: string): number { - return this._listeners[event]?.length || 0; - } + const normalizedDelay = + typeof delay === "number" && Number.isFinite(delay) + ? Math.max(0, delay) + : 0; - // Allow arbitrary property assignment (used by ws for Symbol properties) - [key: string | symbol]: unknown; + if (normalizedDelay > 1_000) { + return originalSetTimeout(callback, normalizedDelay, ...args); + } - write(data: unknown, encodingOrCb?: string | (() => void), cb?: (() => void)): boolean { - if (this.destroyed) return false; - const callback = typeof encodingOrCb === "function" ? encodingOrCb : cb; - if (typeof _upgradeSocketWriteRaw !== "undefined") { - let base64: string; - if (typeof Buffer !== "undefined" && Buffer.isBuffer(data)) { - base64 = data.toString("base64"); - } else if (typeof data === "string") { - base64 = typeof Buffer !== "undefined" ? Buffer.from(data).toString("base64") : btoa(data); - } else if (data instanceof Uint8Array) { - base64 = typeof Buffer !== "undefined" ? Buffer.from(data).toString("base64") : btoa(String.fromCharCode(...data)); - } else { - base64 = typeof Buffer !== "undefined" ? 
Buffer.from(String(data)).toString("base64") : btoa(String(data)); + let resolvePending!: () => void; + const pending = new Promise((resolve) => { + resolvePending = resolve; + }); + let handle: ReturnType; + handle = originalSetTimeout(() => { + trackedTimers.delete(handle); + try { + callback(...args); + } finally { + resolvePending(); + } + }, normalizedDelay); + trackedTimers.set(handle, resolvePending); + pendingTimers.push(pending); + return handle; + }) as typeof setTimeout; + } + if (typeof originalClearTimeout === "function") { + globalThis.clearTimeout = ((handle?: ReturnType) => { + if (handle != null) { + const resolvePending = trackedTimers.get(handle); + if (resolvePending) { + trackedTimers.delete(handle); + resolvePending(); + } + } + return originalClearTimeout(handle); + }) as typeof clearTimeout; } - this.bytesWritten += base64.length; - _upgradeSocketWriteRaw.applySync(undefined, [this._socketId, base64]); - } - if (callback) callback(); - return true; - } - end(data?: unknown): this { + try { + const listenerResult = server._requestListener(incoming, outgoing); + + if (incoming.rawBody && incoming.rawBody.length > 0) { + incoming.emit("data", incoming.rawBody); + } + incoming.emit("end"); + + await Promise.resolve(listenerResult); + while ( + consumedTimerCount < pendingTimers.length || + consumedImmediateCount < pendingImmediates.length + ) { + const pending = [ + ...pendingTimers.slice(consumedTimerCount), + ...pendingImmediates.slice(consumedImmediateCount), + ]; + consumedTimerCount = pendingTimers.length; + consumedImmediateCount = pendingImmediates.length; + await Promise.allSettled(pending); + } + } finally { + if (typeof originalSetImmediate === "function") { + globalThis.setImmediate = originalSetImmediate; + } + if (typeof originalSetTimeout === "function") { + globalThis.setTimeout = originalSetTimeout; + } + if (typeof originalClearTimeout === "function") { + globalThis.clearTimeout = originalClearTimeout; + } + } + } catch (err) { 
+ outgoing.statusCode = 500; + try { + outgoing.end(err instanceof Error ? `Error: ${err.message}` : "Error"); + } catch { + if (!outgoing.writableFinished) outgoing.end(); + } + } + + if (!outgoing.writableFinished) { + outgoing.end(); + } + + await outgoing.waitForClose(); + await Promise.allSettled([...pendingTimers, ...pendingImmediates]); + let aborted = false; + return { + responseJson: JSON.stringify(outgoing.serialize()), + abortRequest: () => { + if (aborted) { + return; + } + aborted = true; + incoming._abort(); + }, + }; + } finally { + server._endRequestDispatch(); + } +} + +async function dispatchLoopbackConnectRequest( + server: Server, + options: nodeHttp.RequestOptions, +): Promise<{ + response: IncomingMessage; + socket: DirectTunnelSocket; + head: Buffer; +}> { + return await new Promise((resolve, reject) => { + const request = new ServerIncomingMessage({ + method: "CONNECT", + url: String(options.path || "/"), + headers: normalizeRequestHeaders(options.headers), + rawHeaders: flattenRawHeaders(normalizeRequestHeaders(options.headers)), + }); + const clientSocket = new DirectTunnelSocket({ + host: String(options.hostname || options.host || "127.0.0.1"), + port: Number(options.port) || 80, + }); + const serverSocket = new DirectTunnelSocket({ + host: "127.0.0.1", + port: 0, + }); + clientSocket._attachPeer(serverSocket); + serverSocket._attachPeer(clientSocket); + + const originalWrite = serverSocket.write.bind(serverSocket); + const originalEnd = serverSocket.end.bind(serverSocket); + let handshakeBuffer = Buffer.alloc(0); + let handshakeResolved = false; + + const maybeResolveHandshake = (): void => { + if (handshakeResolved) { + return; + } + + const separator = handshakeBuffer.indexOf("\r\n\r\n"); + if (separator === -1) { + return; + } + + const headerBuffer = handshakeBuffer.subarray(0, separator); + const head = handshakeBuffer.subarray(separator + 4); + const [statusLine, ...headerLines] = headerBuffer.toString("latin1").split("\r\n"); + 
const statusMatch = /^HTTP\/1\.[01]\s+(\d{3})(?:\s+(.*))?$/.exec(statusLine); + if (!statusMatch) { + reject(new Error(`Invalid CONNECT response: ${statusLine}`)); + return; + } + + handshakeResolved = true; + const headers: Record = {}; + const rawHeaders: string[] = []; + for (const headerLine of headerLines) { + const separatorIndex = headerLine.indexOf(":"); + if (separatorIndex === -1) { + continue; + } + const key = headerLine.slice(0, separatorIndex).trim(); + const value = headerLine.slice(separatorIndex + 1).trim(); + headers[key.toLowerCase()] = value; + rawHeaders.push(key, value); + } + + resolve({ + response: new IncomingMessage({ + headers, + rawHeaders, + status: Number(statusMatch[1]), + statusText: statusMatch[2] || HTTP_STATUS_TEXT[Number(statusMatch[1])], + }), + socket: clientSocket, + head, + }); + }; + + serverSocket.write = ((data: unknown, encodingOrCb?: string | (() => void), cb?: (() => void)) => { + if (handshakeResolved) { + return originalWrite(data, encodingOrCb as string, cb); + } + const callback = typeof encodingOrCb === "function" ? encodingOrCb : cb; + handshakeBuffer = Buffer.concat([handshakeBuffer, normalizeSocketChunk(data)]); + maybeResolveHandshake(); + callback?.(); + return true; + }) as typeof serverSocket.write; + + serverSocket.end = ((data?: unknown) => { + if (data !== undefined) { + serverSocket.write(data); + } + if (!handshakeResolved) { + maybeResolveHandshake(); + } + return originalEnd(); + }) as typeof serverSocket.end; + + try { + server._emit("connect", request, serverSocket, Buffer.alloc(0)); + } catch (error) { + reject(error instanceof Error ? 
error : new Error(String(error))); + return; + } + + queueMicrotask(() => { + if (!handshakeResolved) { + reject(new Error("Loopback CONNECT handler did not establish a tunnel")); + } + }); + }); +} + +async function dispatchLoopbackUpgradeRequest( + server: Server, + options: nodeHttp.RequestOptions, + requestBody?: string, +): Promise<{ + response: IncomingMessage; + socket: DirectTunnelSocket; + head: Buffer; +}> { + return await new Promise((resolve, reject) => { + const normalizedHeaders = normalizeRequestHeaders(options.headers); + const request = new ServerIncomingMessage({ + method: String(options.method || "GET").toUpperCase(), + url: String(options.path || "/"), + headers: normalizedHeaders, + rawHeaders: flattenRawHeaders(normalizedHeaders), + bodyBase64: requestBody + ? Buffer.from(requestBody).toString("base64") + : undefined, + }); + const clientSocket = new DirectTunnelSocket({ + host: String(options.hostname || options.host || "127.0.0.1"), + port: Number(options.port) || 80, + }); + const serverSocket = new DirectTunnelSocket({ + host: "127.0.0.1", + port: 0, + }); + clientSocket._attachPeer(serverSocket); + serverSocket._attachPeer(clientSocket); + + const originalWrite = serverSocket.write.bind(serverSocket); + const originalEnd = serverSocket.end.bind(serverSocket); + let handshakeBuffer = Buffer.alloc(0); + let handshakeResolved = false; + + const maybeResolveHandshake = (): void => { + if (handshakeResolved) { + return; + } + + const separator = handshakeBuffer.indexOf("\r\n\r\n"); + if (separator === -1) { + return; + } + + const headerBuffer = handshakeBuffer.subarray(0, separator); + const head = handshakeBuffer.subarray(separator + 4); + const [statusLine, ...headerLines] = headerBuffer.toString("latin1").split("\r\n"); + const statusMatch = /^HTTP\/1\.[01]\s+(\d{3})(?:\s+(.*))?$/.exec(statusLine); + if (!statusMatch) { + reject(new Error(`Invalid upgrade response: ${statusLine}`)); + return; + } + + handshakeResolved = true; + const 
headers: Record = {}; + const rawHeaders: string[] = []; + for (const headerLine of headerLines) { + const separatorIndex = headerLine.indexOf(":"); + if (separatorIndex === -1) { + continue; + } + const key = headerLine.slice(0, separatorIndex).trim(); + const value = headerLine.slice(separatorIndex + 1).trim(); + headers[key.toLowerCase()] = value; + rawHeaders.push(key, value); + } + + resolve({ + response: new IncomingMessage({ + headers, + rawHeaders, + status: Number(statusMatch[1]), + statusText: statusMatch[2] || HTTP_STATUS_TEXT[Number(statusMatch[1])], + }), + socket: clientSocket, + head, + }); + }; + + serverSocket.write = ((data: unknown, encodingOrCb?: string | (() => void), cb?: (() => void)) => { + if (handshakeResolved) { + return originalWrite(data, encodingOrCb as string, cb); + } + const callback = typeof encodingOrCb === "function" ? encodingOrCb : cb; + handshakeBuffer = Buffer.concat([handshakeBuffer, normalizeSocketChunk(data)]); + maybeResolveHandshake(); + callback?.(); + return true; + }) as typeof serverSocket.write; + + serverSocket.end = ((data?: unknown) => { + if (data !== undefined) { + serverSocket.write(data); + } + if (!handshakeResolved) { + maybeResolveHandshake(); + } + return originalEnd(); + }) as typeof serverSocket.end; + + try { + server._emit( + "upgrade", + request, + serverSocket, + request.rawBody || Buffer.alloc(0), + ); + } catch (error) { + reject(error instanceof Error ? 
error : new Error(String(error))); + return; + } + + queueMicrotask(() => { + if (!handshakeResolved) { + reject(new Error("Loopback upgrade handler did not establish a protocol switch")); + } + }); + }); +} + +function dispatchSocketRequest( + event: "upgrade" | "connect", + serverId: number, + requestJson: string, + headBase64: string, + socketId: number, +): void { + const server = serverInstances.get(serverId); + if (!server) { + throw new Error(`Unknown HTTP server for ${event}: ${serverId}`); + } + + const request = JSON.parse(requestJson) as SerializedServerRequest; + const incoming = new ServerIncomingMessage(request); + const head = typeof Buffer !== "undefined" ? Buffer.from(headBase64, "base64") : new Uint8Array(0); + const hostHeader = incoming.headers["host"]; + + const socket = new UpgradeSocket(socketId, { + host: ( + Array.isArray(hostHeader) ? hostHeader[0] : hostHeader + )?.split(":")[0] || "127.0.0.1", + }); + upgradeSocketInstances.set(socketId, socket); + server._emit(event, incoming, socket, head); +} + +// Upgrade socket for bidirectional data relay through the host bridge +const upgradeSocketInstances = new Map(); + +class UpgradeSocket { + remoteAddress: string; + remotePort: number; + localAddress = "127.0.0.1"; + localPort = 0; + connecting = false; + destroyed = false; + writable = true; + readable = true; + readyState = "open"; + bytesWritten = 0; + private _listeners: Record = {}; + private _socketId: number; + + // Readable stream state stub for ws compatibility (socketOnClose checks _readableState.endEmitted) + _readableState = { endEmitted: false }; + _writableState = { finished: false, errorEmitted: false }; + + constructor(socketId: number, options?: { host?: string; port?: number }) { + this._socketId = socketId; + this.remoteAddress = options?.host || "127.0.0.1"; + this.remotePort = options?.port || 80; + } + + setTimeout(_ms: number, _cb?: () => void): this { return this; } + setNoDelay(_noDelay?: boolean): this { return this; 
} + setKeepAlive(_enable?: boolean, _delay?: number): this { return this; } + ref(): this { return this; } + unref(): this { return this; } + cork(): void {} + uncork(): void {} + pause(): this { return this; } + resume(): this { return this; } + address(): { address: string; family: string; port: number } { + return { address: this.localAddress, family: "IPv4", port: this.localPort }; + } + + on(event: string, listener: EventListener): this { + if (!this._listeners[event]) this._listeners[event] = []; + this._listeners[event].push(listener); + return this; + } + + addListener(event: string, listener: EventListener): this { + return this.on(event, listener); + } + + once(event: string, listener: EventListener): this { + const wrapper = (...args: unknown[]): void => { + this.off(event, wrapper); + listener(...args); + }; + return this.on(event, wrapper); + } + + off(event: string, listener: EventListener): this { + if (this._listeners[event]) { + const idx = this._listeners[event].indexOf(listener); + if (idx !== -1) this._listeners[event].splice(idx, 1); + } + return this; + } + + removeListener(event: string, listener: EventListener): this { + return this.off(event, listener); + } + + removeAllListeners(event?: string): this { + if (event) { + delete this._listeners[event]; + } else { + this._listeners = {}; + } + return this; + } + + emit(event: string, ...args: unknown[]): boolean { + const handlers = this._listeners[event]; + if (handlers) handlers.slice().forEach((fn) => fn.call(this, ...args)); + return handlers !== undefined && handlers.length > 0; + } + + listenerCount(event: string): number { + return this._listeners[event]?.length || 0; + } + + // Allow arbitrary property assignment (used by ws for Symbol properties) + [key: string | symbol]: unknown; + + write(data: unknown, encodingOrCb?: string | (() => void), cb?: (() => void)): boolean { + if (this.destroyed) return false; + const callback = typeof encodingOrCb === "function" ? 
encodingOrCb : cb; + if (typeof _upgradeSocketWriteRaw !== "undefined") { + let base64: string; + if (typeof Buffer !== "undefined" && Buffer.isBuffer(data)) { + base64 = data.toString("base64"); + } else if (typeof data === "string") { + base64 = typeof Buffer !== "undefined" ? Buffer.from(data).toString("base64") : btoa(data); + } else if (data instanceof Uint8Array) { + base64 = typeof Buffer !== "undefined" ? Buffer.from(data).toString("base64") : btoa(String.fromCharCode(...data)); + } else { + base64 = typeof Buffer !== "undefined" ? Buffer.from(String(data)).toString("base64") : btoa(String(data)); + } + this.bytesWritten += base64.length; + _upgradeSocketWriteRaw.applySync(undefined, [this._socketId, base64]); + } + if (callback) callback(); + return true; + } + + end(data?: unknown): this { if (data) this.write(data); if (typeof _upgradeSocketEndRaw !== "undefined" && !this.destroyed) { _upgradeSocketEndRaw.applySync(undefined, [this._socketId]); @@ -2358,22 +5104,17 @@ function dispatchUpgradeRequest( headBase64: string, socketId: number ): void { - const server = serverInstances.get(serverId); - if (!server) { - throw new Error(`Unknown HTTP server for upgrade: ${serverId}`); - } - - const request = JSON.parse(requestJson) as SerializedServerRequest; - const incoming = new ServerIncomingMessage(request); - const head = typeof Buffer !== "undefined" ? Buffer.from(headBase64, "base64") : new Uint8Array(0); + dispatchSocketRequest("upgrade", serverId, requestJson, headBase64, socketId); +} - const socket = new UpgradeSocket(socketId, { - host: incoming.headers["host"]?.split(":")[0] || "127.0.0.1", - }); - upgradeSocketInstances.set(socketId, socket); - - // Emit 'upgrade' on the server — ws.WebSocketServer listens for this - server._emit("upgrade", incoming, socket, head); +/** Route an incoming HTTP CONNECT to the server's 'connect' event listeners. 
*/ +function dispatchConnectRequest( + serverId: number, + requestJson: string, + headBase64: string, + socketId: number +): void { + dispatchSocketRequest("connect", serverId, requestJson, headBase64, socketId); } /** Push data from host to an upgrade socket. */ @@ -2403,18 +5144,28 @@ function ServerResponseCallable(this: any): void { this.headersSent = false; this.writable = true; this.writableFinished = false; + this.outputSize = 0; this._headers = new Map(); + this._trailers = new Map(); + this._rawHeaderNames = new Map(); + this._rawTrailerNames = new Map(); + this._informational = []; + this._pendingRawInfoBuffer = ""; this._chunks = [] as Uint8Array[]; this._chunksBytes = 0; this._listeners = {} as Record; this._closedPromise = new Promise((resolve) => { this._resolveClosed = resolve; }); + this._connectionEnded = false; + this._connectionReset = false; // Writable stream state stub this._writableState = { length: 0, ended: false, finished: false, objectMode: false, corked: 0 }; // Fake socket for frameworks/inject libraries that access res.socket const fakeSocket = { writable: true, + writableCorked: 0, + writableHighWaterMark: 16 * 1024, on() { return fakeSocket; }, once() { return fakeSocket; }, removeListener() { return fakeSocket; }, @@ -2445,8 +5196,16 @@ function createHttpModule(protocol: string): Record { } return { - request(options: string | URL | nodeHttp.RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest { + request( + options: string | URL | nodeHttp.RequestOptions, + optionsOrCallback?: nodeHttp.RequestOptions | ((res: IncomingMessage) => void), + maybeCallback?: (res: IncomingMessage) => void, + ): ClientRequest { let opts: nodeHttp.RequestOptions; + const callback = + typeof optionsOrCallback === "function" + ? 
optionsOrCallback + : maybeCallback; if (typeof options === "string") { const url = new URL(options); opts = { @@ -2454,6 +5213,7 @@ function createHttpModule(protocol: string): Record { hostname: url.hostname, port: url.port, path: url.pathname + url.search, + ...(typeof optionsOrCallback === "object" && optionsOrCallback ? optionsOrCallback : {}), }; } else if (options instanceof URL) { opts = { @@ -2461,15 +5221,27 @@ function createHttpModule(protocol: string): Record { hostname: options.hostname, port: options.port, path: options.pathname + options.search, + ...(typeof optionsOrCallback === "object" && optionsOrCallback ? optionsOrCallback : {}), }; } else { - opts = options; + opts = { + ...options, + ...(typeof optionsOrCallback === "object" && optionsOrCallback ? optionsOrCallback : {}), + }; } return new ClientRequest(ensureProtocol(opts), callback as (res: IncomingMessage) => void); }, - get(options: string | URL | nodeHttp.RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest { + get( + options: string | URL | nodeHttp.RequestOptions, + optionsOrCallback?: nodeHttp.RequestOptions | ((res: IncomingMessage) => void), + maybeCallback?: (res: IncomingMessage) => void, + ): ClientRequest { let opts: nodeHttp.RequestOptions; + const callback = + typeof optionsOrCallback === "function" + ? optionsOrCallback + : maybeCallback; if (typeof options === "string") { const url = new URL(options); opts = { @@ -2478,6 +5250,7 @@ function createHttpModule(protocol: string): Record { port: url.port, path: url.pathname + url.search, method: "GET", + ...(typeof optionsOrCallback === "object" && optionsOrCallback ? optionsOrCallback : {}), }; } else if (options instanceof URL) { opts = { @@ -2486,9 +5259,14 @@ function createHttpModule(protocol: string): Record { port: options.port, path: options.pathname + options.search, method: "GET", + ...(typeof optionsOrCallback === "object" && optionsOrCallback ? 
optionsOrCallback : {}), }; } else { - opts = { ...options, method: "GET" }; + opts = { + ...options, + ...(typeof optionsOrCallback === "object" && optionsOrCallback ? optionsOrCallback : {}), + method: "GET", + }; } const req = new ClientRequest(ensureProtocol(opts), callback as (res: IncomingMessage) => void); req.end(); @@ -2515,539 +5293,5456 @@ function createHttpModule(protocol: string): Record { ServerResponse: ServerResponseCallable as unknown as typeof nodeHttp.ServerResponse, IncomingMessage: IncomingMessage as unknown as typeof nodeHttp.IncomingMessage, ClientRequest: ClientRequest as unknown as typeof nodeHttp.ClientRequest, + validateHeaderName, + validateHeaderValue, + _checkIsHttpToken: checkIsHttpToken, + _checkInvalidHeaderChar: checkInvalidHeaderChar, + METHODS: [...HTTP_METHODS], + STATUS_CODES: HTTP_STATUS_TEXT, + }; +} - METHODS: ["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"], - STATUS_CODES: { - 200: "OK", - 201: "Created", - 204: "No Content", - 301: "Moved Permanently", - 302: "Found", - 304: "Not Modified", - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 500: "Internal Server Error", - }, +async function dispatchLoopbackHttp2CompatibilityRequest( + server: Http2Server, + requestInput: string | SerializedServerRequest, +): Promise<{ + responseJson: string; + abortRequest: () => void; +}> { + const request = + typeof requestInput === "string" + ? 
JSON.parse(requestInput) as SerializedServerRequest + : requestInput; + const incoming = new ServerIncomingMessage(request); + const outgoing = new ServerResponseBridge(); + incoming.socket = outgoing.socket; + incoming.connection = outgoing.socket; + + server.emit("request", incoming, outgoing); + if (incoming.rawBody && incoming.rawBody.length > 0) { + incoming.emit("data", incoming.rawBody); + } + incoming.emit("end"); + if (!outgoing.writableFinished) { + outgoing.end(); + } + await outgoing.waitForClose(); + + return { + responseJson: JSON.stringify(outgoing.serialize()), + abortRequest: () => incoming._abort(), }; } export const http = createHttpModule("http"); export const https = createHttpModule("https"); -export const http2 = { - Http2ServerRequest: class Http2ServerRequest {}, - Http2ServerResponse: class Http2ServerResponse {}, - constants: { - HTTP2_HEADER_METHOD: ":method", - HTTP2_HEADER_PATH: ":path", - HTTP2_HEADER_SCHEME: ":scheme", - HTTP2_HEADER_AUTHORITY: ":authority", - HTTP2_HEADER_STATUS: ":status", - HTTP2_HEADER_CONTENT_TYPE: "content-type", - HTTP2_HEADER_CONTENT_LENGTH: "content-length", - HTTP2_HEADER_ACCEPT: "accept", - HTTP2_HEADER_ACCEPT_ENCODING: "accept-encoding", - HTTP2_METHOD_GET: "GET", - HTTP2_METHOD_POST: "POST", - HTTP2_METHOD_PUT: "PUT", - HTTP2_METHOD_DELETE: "DELETE", - NGHTTP2_NO_ERROR: 0, - NGHTTP2_PROTOCOL_ERROR: 1, - NGHTTP2_INTERNAL_ERROR: 2, - NGHTTP2_REFUSED_STREAM: 7, - NGHTTP2_CANCEL: 8, - } as Record, - createServer(): never { - throw new Error("http2.createServer is not supported in sandbox"); - }, - createSecureServer(): never { - throw new Error("http2.createSecureServer is not supported in sandbox"); - }, +const HTTP2_K_SOCKET = Symbol.for("secure-exec.http2.kSocket"); +const HTTP2_OPTIONS = Symbol("options"); +type Http2HeaderValue = string | string[] | number; +type Http2HeadersRecord = Record; +type Http2SettingsRecord = Record>; +type Http2SessionRuntimeState = { + effectiveLocalWindowSize?: number; + 
localWindowSize?: number; + remoteWindowSize?: number; + nextStreamID?: number; + outboundQueueSize?: number; + deflateDynamicTableSize?: number; + inflateDynamicTableSize?: number; +}; +type Http2EventListener = (...args: unknown[]) => void; + +type SerializedHttp2SocketState = { + encrypted?: boolean; + allowHalfOpen?: boolean; + localAddress?: string; + localPort?: number; + localFamily?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + servername?: string; + alpnProtocol?: string | false; }; -// Export modules and make them available as globals for require() -exposeCustomGlobal("_httpModule", http); -exposeCustomGlobal("_httpsModule", https); -exposeCustomGlobal("_http2Module", http2); -exposeCustomGlobal("_dnsModule", dns); -function onHttpServerRequest( - eventType: string, - payload?: { - serverId?: number; - requestId?: number; - request?: string; - } | null, -): void { - debugBridgeNetwork("http stream event", eventType, payload); - if (eventType !== "http_request") { - return; +type SerializedHttp2SessionState = { + encrypted?: boolean; + alpnProtocol?: string | false; + originSet?: string[]; + localSettings?: Http2SettingsRecord; + remoteSettings?: Http2SettingsRecord; + state?: Http2SessionRuntimeState; + socket?: SerializedHttp2SocketState; +}; + +const http2Servers = new Map(); +const http2Sessions = new Map(); +const http2Streams = new Map(); +const pendingHttp2ClientStreamEvents = new Map>(); +const scheduledHttp2ClientStreamFlushes = new Set(); +const queuedHttp2DispatchEvents: Array<{ + kind: string; + id: number; + data?: string; + extra?: string; + extraNumber?: string | number; + extraHeaders?: string; + flags?: string | number; +}> = []; +const pendingHttp2CompatRequests = new Map(); +let scheduledHttp2DispatchDrain = false; +let nextHttp2ServerId = 1; + +class Http2EventEmitter { + private _listeners: Record = {}; + private _onceListeners: Record = {}; + on(event: string, listener: Http2EventListener): this 
{ + if (!this._listeners[event]) this._listeners[event] = []; + this._listeners[event].push(listener); + return this; } - if (!payload || payload.serverId === undefined || payload.requestId === undefined || typeof payload.request !== "string") { - return; + addListener(event: string, listener: Http2EventListener): this { + return this.on(event, listener); } - if (typeof _networkHttpServerRespondRaw === "undefined") { - debugBridgeNetwork("http stream missing respond bridge"); - return; + once(event: string, listener: Http2EventListener): this { + if (!this._onceListeners[event]) this._onceListeners[event] = []; + this._onceListeners[event].push(listener); + return this; + } + removeListener(event: string, listener: Http2EventListener): this { + const remove = (target?: Http2EventListener[]) => { + if (!target) return; + const index = target.indexOf(listener); + if (index !== -1) target.splice(index, 1); + }; + remove(this._listeners[event]); + remove(this._onceListeners[event]); + return this; + } + off(event: string, listener: Http2EventListener): this { + return this.removeListener(event, listener); + } + listenerCount(event: string): number { + return (this._listeners[event]?.length ?? 0) + (this._onceListeners[event]?.length ?? 
0); + } + setMaxListeners(_value: number): this { + return this; + } + emit(event: string, ...args: unknown[]): boolean { + let handled = false; + const listeners = this._listeners[event]; + if (listeners) { + for (const listener of [...listeners]) { + listener(...args); + handled = true; + } + } + const onceListeners = this._onceListeners[event]; + if (onceListeners) { + this._onceListeners[event] = []; + for (const listener of [...onceListeners]) { + listener(...args); + handled = true; + } + } + return handled; } +} - void dispatchServerRequest(payload.serverId, payload.request) - .then((responseJson) => { - debugBridgeNetwork("http stream response", payload.serverId, payload.requestId); - _networkHttpServerRespondRaw.applySync(undefined, [ - payload.serverId!, - payload.requestId!, - responseJson, - ]); - }) - .catch((err) => { - const message = err instanceof Error ? err.message : String(err); - debugBridgeNetwork("http stream error", payload.serverId, payload.requestId, message); - _networkHttpServerRespondRaw.applySync(undefined, [ - payload.serverId!, - payload.requestId!, - JSON.stringify({ - status: 500, - headers: [["content-type", "text/plain"]], - body: `Error: ${message}`, - bodyEncoding: "utf8", - }), - ]); - }); +class Http2SocketProxy extends Http2EventEmitter { + allowHalfOpen = false; + encrypted = false; + localAddress = "127.0.0.1"; + localPort = 0; + localFamily = "IPv4"; + remoteAddress = "127.0.0.1"; + remotePort = 0; + remoteFamily = "IPv4"; + servername?: string; + alpnProtocol: string | false = false; + destroyed = false; + private _onDestroy?: () => void; + constructor( + state?: SerializedHttp2SocketState, + onDestroy?: () => void, + ) { + super(); + this._onDestroy = onDestroy; + this._applyState(state); + } + _applyState(state?: SerializedHttp2SocketState): void { + if (!state) return; + this.allowHalfOpen = state.allowHalfOpen === true; + this.encrypted = state.encrypted === true; + this.localAddress = state.localAddress ?? 
this.localAddress; + this.localPort = state.localPort ?? this.localPort; + this.localFamily = state.localFamily ?? this.localFamily; + this.remoteAddress = state.remoteAddress ?? this.remoteAddress; + this.remotePort = state.remotePort ?? this.remotePort; + this.remoteFamily = state.remoteFamily ?? this.remoteFamily; + this.servername = state.servername; + this.alpnProtocol = state.alpnProtocol ?? this.alpnProtocol; + } + end(): this { + this.destroyed = true; + this.emit("close"); + return this; + } + destroy(): this { + if (this.destroyed) { + return this; + } + this.destroyed = true; + this._onDestroy?.(); + this.emit("close"); + return this; + } } -exposeCustomGlobal("_httpServerDispatch", onHttpServerRequest); -exposeCustomGlobal("_httpServerUpgradeDispatch", dispatchUpgradeRequest); -exposeCustomGlobal("_upgradeSocketData", onUpgradeSocketData); -exposeCustomGlobal("_upgradeSocketEnd", onUpgradeSocketEnd); +function createHttp2ArgTypeError(argumentName: string, expected: string, value: unknown): TypeError & { code: string } { + return createTypeErrorWithCode( + `The "${argumentName}" argument must be of type ${expected}. Received ${formatReceivedType(value)}`, + "ERR_INVALID_ARG_TYPE", + ); +} -// Harden fetch API globals (non-writable, non-configurable) -exposeCustomGlobal("fetch", fetch); -exposeCustomGlobal("Headers", Headers); -exposeCustomGlobal("Request", Request); -exposeCustomGlobal("Response", Response); -if (typeof (globalThis as Record).Blob === "undefined") { - // Minimal Blob stub used by server frameworks for instanceof checks. - exposeCustomGlobal("Blob", class BlobStub {}); +function createHttp2Error(code: string, message: string): Error & { code: string } { + return createErrorWithCode(message, code); } -if (typeof (globalThis as Record).FormData === "undefined") { - // Minimal FormData stub — server frameworks check `instanceof FormData`. 
- class FormDataStub { - private _entries: [string, string][] = []; - append(name: string, value: string): void { - this._entries.push([name, value]); - } - get(name: string): string | null { - const entry = this._entries.find(([k]) => k === name); - return entry ? entry[1] : null; + +function createHttp2SettingRangeError(setting: string, value: unknown): RangeError & { code: string } { + const error = new RangeError( + `Invalid value for setting "${setting}": ${String(value)}`, + ) as RangeError & { code: string }; + error.code = "ERR_HTTP2_INVALID_SETTING_VALUE"; + return error; +} + +function createHttp2SettingTypeError(setting: string, value: unknown): TypeError & { code: string } { + const error = new TypeError( + `Invalid value for setting "${setting}": ${String(value)}`, + ) as TypeError & { code: string }; + error.code = "ERR_HTTP2_INVALID_SETTING_VALUE"; + return error; +} + +const DEFAULT_HTTP2_SETTINGS: Http2SettingsRecord = { + headerTableSize: 4096, + enablePush: true, + initialWindowSize: 65535, + maxFrameSize: 16384, + maxConcurrentStreams: 4294967295, + maxHeaderListSize: 65535, + maxHeaderSize: 65535, + enableConnectProtocol: false, +}; + +const DEFAULT_HTTP2_SESSION_STATE: Http2SessionRuntimeState = { + effectiveLocalWindowSize: 65535, + localWindowSize: 65535, + remoteWindowSize: 65535, + nextStreamID: 1, + outboundQueueSize: 1, + deflateDynamicTableSize: 0, + inflateDynamicTableSize: 0, +}; + +function cloneHttp2Settings(settings?: Http2SettingsRecord | null): Http2SettingsRecord { + const cloned: Http2SettingsRecord = {}; + for (const [key, value] of Object.entries(settings ?? 
{})) { + if (key === "customSettings" && value && typeof value === "object") { + const customSettings: Record = {}; + for (const [customKey, customValue] of Object.entries(value as Record)) { + customSettings[Number(customKey)] = Number(customValue); + } + cloned.customSettings = customSettings; + continue; } - getAll(name: string): string[] { - return this._entries.filter(([k]) => k === name).map(([, v]) => v); + cloned[key] = value as boolean | number; + } + return cloned; +} + +function cloneHttp2SessionRuntimeState( + state?: Http2SessionRuntimeState | null, +): Http2SessionRuntimeState { + return { + ...DEFAULT_HTTP2_SESSION_STATE, + ...(state ?? {}), + }; +} + +function parseHttp2SessionRuntimeState( + state?: unknown, +): Http2SessionRuntimeState | undefined { + if (!state || typeof state !== "object") { + return undefined; + } + const record = state as Record; + const parsed: Http2SessionRuntimeState = {}; + const numericKeys = [ + "effectiveLocalWindowSize", + "localWindowSize", + "remoteWindowSize", + "nextStreamID", + "outboundQueueSize", + "deflateDynamicTableSize", + "inflateDynamicTableSize", + ] as const; + for (const key of numericKeys) { + if (typeof record[key] === "number") { + parsed[key] = record[key] as number; } - has(name: string): boolean { - return this._entries.some(([k]) => k === name); + } + return parsed; +} + +function validateHttp2Settings(settings: unknown, argumentName = "settings"): Http2SettingsRecord { + if (!settings || typeof settings !== "object" || Array.isArray(settings)) { + throw createHttp2ArgTypeError(argumentName, "object", settings); + } + const record = settings as Record; + const normalized: Http2SettingsRecord = {}; + const numberRanges: Record = { + headerTableSize: [0, 4294967295], + initialWindowSize: [0, 4294967295], + maxFrameSize: [16384, 16777215], + maxConcurrentStreams: [0, 4294967295], + maxHeaderListSize: [0, 4294967295], + maxHeaderSize: [0, 4294967295], + }; + for (const [key, value] of 
Object.entries(record)) { + if (value === undefined) { + continue; } - delete(name: string): void { - this._entries = this._entries.filter(([k]) => k !== name); + if (key === "enablePush" || key === "enableConnectProtocol") { + if (typeof value !== "boolean") { + throw createHttp2SettingTypeError(key, value); + } + normalized[key] = value; + continue; } - entries(): IterableIterator<[string, string]> { - return this._entries[Symbol.iterator](); + if (key === "customSettings") { + if (!value || typeof value !== "object" || Array.isArray(value)) { + throw createHttp2SettingRangeError(key, value); + } + const customSettings: Record = {}; + for (const [customKey, customValue] of Object.entries(value as Record)) { + const numericKey = Number(customKey); + if (!Number.isInteger(numericKey) || numericKey < 0 || numericKey > 0xffff) { + throw createHttp2SettingRangeError(key, value); + } + if ( + typeof customValue !== "number" || + !Number.isInteger(customValue) || + customValue < 0 || + customValue > 4294967295 + ) { + throw createHttp2SettingRangeError(key, value); + } + customSettings[numericKey] = customValue; + } + normalized.customSettings = customSettings; + continue; } - [Symbol.iterator](): IterableIterator<[string, string]> { - return this.entries(); + if (key in numberRanges) { + const [min, max] = numberRanges[key]!; + if ( + typeof value !== "number" || + !Number.isInteger(value) || + value < min || + value > max + ) { + throw createHttp2SettingRangeError(key, value); + } + normalized[key] = value; + continue; } + normalized[key] = value as boolean | number | Record; } - exposeCustomGlobal("FormData", FormDataStub); + return normalized; } -// =================================================================== -// net module — TCP socket support bridged to the host -// =================================================================== +function serializeHttp2Headers(headers?: Http2HeadersRecord): string { + return JSON.stringify(headers ?? 
{}); +} -type NetEventListener = (...args: unknown[]) => void; +function parseHttp2Headers(headersJson?: string): Http2HeadersRecord { + if (!headersJson) { + return {}; + } + try { + const parsed = JSON.parse(headersJson) as Http2HeadersRecord; + return parsed && typeof parsed === "object" ? parsed : {}; + } catch { + return {}; + } +} -const NET_SOCKET_REGISTRY_PREFIX = "__secureExecNetSocket:"; +function parseHttp2SessionState(data?: string): SerializedHttp2SessionState | null { + if (!data) { + return null; + } + try { + const parsed = JSON.parse(data) as SerializedHttp2SessionState; + return parsed && typeof parsed === "object" ? parsed : null; + } catch { + return null; + } +} -function getRegisteredNetSocket(socketId: number): NetSocket | undefined { - return (globalThis as Record)[`${NET_SOCKET_REGISTRY_PREFIX}${socketId}`] as NetSocket | undefined; +function parseHttp2SocketState(data?: string): SerializedHttp2SocketState | null { + if (!data) { + return null; + } + try { + const parsed = JSON.parse(data) as SerializedHttp2SocketState; + return parsed && typeof parsed === "object" ? parsed : null; + } catch { + return null; + } } -function registerNetSocket(socketId: number, socket: NetSocket): void { - (globalThis as Record)[`${NET_SOCKET_REGISTRY_PREFIX}${socketId}`] = socket; +function parseHttp2ErrorPayload(data?: string): Error { + if (!data) { + return new Error("Unknown HTTP/2 bridge error"); + } + try { + const parsed = JSON.parse(data) as { message?: string; name?: string; code?: string }; + const error = new Error(parsed.message ?? 
"Unknown HTTP/2 bridge error") as Error & { code?: string }; + if (parsed.name) error.name = parsed.name; + if (parsed.code) error.code = parsed.code; + return error; + } catch { + return new Error(data); + } } -function unregisterNetSocket(socketId: number): void { - delete (globalThis as Record)[`${NET_SOCKET_REGISTRY_PREFIX}${socketId}`]; +function normalizeHttp2Headers(headers?: Http2HeadersRecord): Http2HeadersRecord { + const normalized: Http2HeadersRecord = {}; + if (!headers || typeof headers !== "object") { + return normalized; + } + for (const [key, value] of Object.entries(headers)) { + normalized[String(key)] = value; + } + return normalized; } -// Dispatch callback invoked by the host when socket events arrive -function netSocketDispatch(socketId: number, event: string, data?: string): void { - const socket = getRegisteredNetSocket(socketId); - if (!socket) return; +function validateHttp2RequestOptions(options?: Record): void { + if (!options) { + return; + } + const validators: Record = { + endStream: "boolean", + weight: "number", + parent: "number", + exclusive: "boolean", + silent: "boolean", + }; + for (const [key, expectedType] of Object.entries(validators)) { + if (!(key in options) || options[key] === undefined) { + continue; + } + const value = options[key]; + if (expectedType === "boolean" && typeof value !== "boolean") { + throw createHttp2ArgTypeError(key, "boolean", value); + } + if (expectedType === "number" && typeof value !== "number") { + throw createHttp2ArgTypeError(key, "number", value); + } + } +} - switch (event) { - case "connect": - socket._connected = true; - socket.connecting = false; +function validateHttp2ConnectOptions(options?: Record): void { + if (!options || !options.settings || typeof options.settings !== "object") { + return; + } + const settings = options.settings as Record; + if ("maxFrameSize" in settings) { + const value = settings.maxFrameSize; + if (typeof value !== "number" || !Number.isInteger(value) || value 
< 16384 || value > 16777215) { + throw createHttp2SettingRangeError("maxFrameSize", value); + } + } +} + +function applyHttp2SessionState( + session: Http2Session, + state?: SerializedHttp2SessionState | null, +): void { + if (!state) { + return; + } + session.encrypted = state.encrypted === true; + session.alpnProtocol = state.alpnProtocol ?? (session.encrypted ? "h2" : "h2c"); + session.originSet = Array.isArray(state.originSet) && state.originSet.length > 0 + ? [...state.originSet] + : session.encrypted + ? [] + : undefined; + if (state.localSettings && typeof state.localSettings === "object") { + session.localSettings = cloneHttp2Settings(state.localSettings); + } + if (state.remoteSettings && typeof state.remoteSettings === "object") { + session.remoteSettings = cloneHttp2Settings(state.remoteSettings); + } + if (state.state && typeof state.state === "object") { + session._applyRuntimeState(parseHttp2SessionRuntimeState(state.state)); + } + session.socket._applyState(state.socket); +} + +function normalizeHttp2Authority( + authority: unknown, + options?: Record, +): URL { + if (authority instanceof URL) { + return authority; + } + if (typeof authority === "string") { + return new URL(authority); + } + if (authority && typeof authority === "object") { + const record = authority as Record; + const protocol = + typeof (options?.protocol ?? record.protocol) === "string" + ? String(options?.protocol ?? record.protocol) + : "http:"; + const hostname = + typeof (options?.host ?? record.host ?? options?.hostname ?? record.hostname) === "string" + ? String(options?.host ?? record.host ?? options?.hostname ?? record.hostname) + : "localhost"; + const portValue = options?.port ?? record.port; + const port = portValue === undefined ? "" : String(portValue); + return new URL(`${protocol}//${hostname}${port ? 
`:${port}` : ""}`); + } + return new URL("http://localhost"); +} + +function normalizeHttp2ConnectArgs( + authorityOrOptions: unknown, + optionsOrListener?: Record | ((session: Http2Session) => void), + maybeListener?: (session: Http2Session) => void, +): { + authority: URL; + options: Record; + listener?: (session: Http2Session) => void; +} { + const listener = + typeof optionsOrListener === "function" + ? optionsOrListener + : typeof maybeListener === "function" + ? maybeListener + : undefined; + const options = + typeof optionsOrListener === "function" + ? {} + : (optionsOrListener ?? {}); + return { + authority: normalizeHttp2Authority(authorityOrOptions, options), + options, + listener, + }; +} + +function resolveHttp2SocketId(socket: unknown): number | undefined { + if (!socket || typeof socket !== "object") { + return undefined; + } + const value = (socket as { _socketId?: unknown })._socketId; + return typeof value === "number" && Number.isFinite(value) ? value : undefined; +} + +class ClientHttp2Stream extends Http2EventEmitter { + private _streamId: number; + private _encoding?: BufferEncoding; + private _utf8Remainder?: Buffer; + private _isPushStream: boolean; + private _session?: Http2Session; + private _receivedResponse = false; + private _needsDrain = false; + private _pendingWritableBytes = 0; + private _drainScheduled = false; + private readonly _writableHighWaterMark = 16 * 1024; + rstCode = 0; + readable = true; + writable = true; + writableEnded = false; + writableFinished = false; + destroyed = false; + _writableState = { ended: false, finished: false, objectMode: false, corked: 0, length: 0 }; + constructor(streamId: number, session?: Http2Session, isPushStream = false) { + super(); + this._streamId = streamId; + this._session = session; + this._isPushStream = isPushStream; + if (!isPushStream) { + queueMicrotask(() => { + this.emit("ready"); + }); + } + } + setEncoding(encoding: string): this { + this._encoding = encoding as BufferEncoding; + 
this._utf8Remainder = + this._encoding === "utf8" || this._encoding === "utf-8" + ? Buffer.alloc(0) + : undefined; + return this; + } + close(): this { + this.end(); + return this; + } + destroy(error?: Error): this { + if (this.destroyed) { + return this; + } + this.destroyed = true; + if (error) { + this.emit("error", error); + } + this.end(); + return this; + } + private _scheduleDrain(): void { + if (!this._needsDrain || this._drainScheduled) { + return; + } + this._drainScheduled = true; + queueMicrotask(() => { + this._drainScheduled = false; + if (!this._needsDrain) { + return; + } + this._needsDrain = false; + this._pendingWritableBytes = 0; + this.emit("drain"); + }); + } + write(data: unknown, encodingOrCallback?: BufferEncoding | (() => void), callback?: () => void): boolean { + if (typeof _networkHttp2StreamWriteRaw === "undefined") { + throw new Error("http2 session stream write bridge is not available"); + } + const buffer = Buffer.isBuffer(data) + ? data + : typeof data === "string" + ? Buffer.from(data, typeof encodingOrCallback === "string" ? encodingOrCallback : "utf8") + : Buffer.from(data as Uint8Array); + const wrote = _networkHttp2StreamWriteRaw.applySync(undefined, [this._streamId, buffer.toString("base64")]); + this._pendingWritableBytes += buffer.byteLength; + const shouldBackpressure = wrote === false || this._pendingWritableBytes >= this._writableHighWaterMark; + if (shouldBackpressure) { + this._needsDrain = true; + } + const cb = typeof encodingOrCallback === "function" ? encodingOrCallback : callback; + cb?.(); + return !shouldBackpressure; + } + end(data?: unknown): this { + if (typeof _networkHttp2StreamEndRaw === "undefined") { + throw new Error("http2 session stream end bridge is not available"); + } + let encoded: string | null = null; + if (data !== undefined) { + const buffer = Buffer.isBuffer(data) + ? data + : typeof data === "string" + ? 
Buffer.from(data) + : Buffer.from(data as Uint8Array); + encoded = buffer.toString("base64"); + } + _networkHttp2StreamEndRaw.applySync(undefined, [this._streamId, encoded]); + this.writableEnded = true; + this._writableState.ended = true; + queueMicrotask(() => { + this.writable = false; + this.writableFinished = true; + this._writableState.finished = true; + this.emit("finish"); + }); + return this; + } + resume(): this { + return this; + } + _emitPush(headers: Http2HeadersRecord, flags?: number): void { + if (process.env.SECURE_EXEC_DEBUG_HTTP2_BRIDGE === "1") { + console.error("[secure-exec http2 isolate] push", this._streamId); + } + this.emit("push", headers, flags ?? 0); + } + _hasReceivedResponse(): boolean { + return this._receivedResponse; + } + _belongsTo(session: Http2Session): boolean { + return this._session === session; + } + _emitResponseHeaders(headers: Http2HeadersRecord): void { + this._receivedResponse = true; + if (process.env.SECURE_EXEC_DEBUG_HTTP2_BRIDGE === "1") { + console.error("[secure-exec http2 isolate] response headers", this._streamId, this._isPushStream); + } + if (!this._isPushStream) { + this.emit("response", headers); + } + } + _emitDataChunk(dataBase64?: string): void { + if (!dataBase64) { + return; + } + const chunkBuffer = Buffer.from(dataBase64, "base64"); + if (this._utf8Remainder !== undefined) { + const buffer = + this._utf8Remainder.length > 0 + ? Buffer.concat([this._utf8Remainder, chunkBuffer]) + : chunkBuffer; + const completeLength = getCompleteUtf8PrefixLength(buffer); + const chunk = buffer.subarray(0, completeLength).toString("utf8"); + this._utf8Remainder = + completeLength < buffer.length ? 
buffer.subarray(completeLength) : Buffer.alloc(0); + if (chunk.length > 0) { + this.emit("data", chunk); + } + } else if (this._encoding) { + this.emit("data", chunkBuffer.toString(this._encoding)); + } else { + this.emit("data", chunkBuffer); + } + this._scheduleDrain(); + } + _emitEnd(): void { + if (this._utf8Remainder && this._utf8Remainder.length > 0) { + const trailing = this._utf8Remainder.toString("utf8"); + this._utf8Remainder = Buffer.alloc(0); + if (trailing.length > 0) { + this.emit("data", trailing); + } + } + this.readable = false; + this.emit("end"); + this._scheduleDrain(); + } + _emitClose(rstCode?: number): void { + if (typeof rstCode === "number") { + this.rstCode = rstCode; + } + this.destroyed = true; + this.readable = false; + this.writable = false; + this._scheduleDrain(); + this.emit("close"); + } +} + +function getCompleteUtf8PrefixLength(buffer: Buffer): number { + if (buffer.length === 0) { + return 0; + } + let continuationCount = 0; + for (let index = buffer.length - 1; index >= 0 && continuationCount < 3; index -= 1) { + if ((buffer[index] & 0xc0) !== 0x80) { + const trailingBytes = buffer.length - index; + const lead = buffer[index]; + const expectedBytes = + (lead & 0x80) === 0 + ? 1 + : (lead & 0xe0) === 0xc0 + ? 2 + : (lead & 0xf0) === 0xe0 + ? 3 + : (lead & 0xf8) === 0xf0 + ? 4 + : 1; + return trailingBytes < expectedBytes ? index : buffer.length; + } + continuationCount += 1; + } + return continuationCount > 0 ? 
buffer.length - continuationCount : buffer.length; +} + +class ServerHttp2Stream extends Http2EventEmitter { + private _streamId: number; + private _responded = false; + private _requestHeaders?: Http2HeadersRecord; + private _isPushStream: boolean; + session: Http2Session; + rstCode = 0; + readable = true; + writable = true; + destroyed = false; + _readableState: { + flowing: boolean | null; + ended: boolean; + highWaterMark: number; + }; + _writableState: { ended: boolean }; + constructor( + streamId: number, + session: Http2Session, + requestHeaders?: Http2HeadersRecord, + isPushStream = false, + ) { + super(); + this._streamId = streamId; + this.session = session; + this._requestHeaders = requestHeaders; + this._isPushStream = isPushStream; + this._readableState = { + flowing: null, + ended: false, + highWaterMark: 16 * 1024, + }; + this._writableState = { + ended: requestHeaders?.[":method"] === "HEAD", + }; + } + respond(headers?: Http2HeadersRecord): void { + if (typeof _networkHttp2StreamRespondRaw === "undefined") { + throw new Error("http2 server stream respond bridge is not available"); + } + this._responded = true; + _networkHttp2StreamRespondRaw.applySync(undefined, [this._streamId, serializeHttp2Headers(headers)]); + } + pushStream( + headers: Http2HeadersRecord, + optionsOrCallback?: Record | ((error: Error | null, stream?: ServerHttp2Stream, headers?: Http2HeadersRecord) => void), + maybeCallback?: (error: Error | null, stream?: ServerHttp2Stream, headers?: Http2HeadersRecord) => void, + ): void { + if (this._isPushStream) { + throw createHttp2Error( + "ERR_HTTP2_NESTED_PUSH", + "A push stream cannot initiate another push stream.", + ); + } + const callback = + typeof optionsOrCallback === "function" + ? 
// NOTE(review): recovered from a whitespace-mangled paste; this span continues
// `ServerHttp2Stream.pushStream()` from the previous chunk — the first line
// completes the callback-selection ternary begun there.
          optionsOrCallback
        : maybeCallback;
    if (typeof callback !== "function") {
      throw createHttp2ArgTypeError("callback", "function", callback);
    }
    if (typeof _networkHttp2StreamPushStreamRaw === "undefined") {
      throw new Error("http2 server stream push bridge is not available");
    }
    // An object first argument is the options bag; otherwise use defaults.
    const options =
      optionsOrCallback && typeof optionsOrCallback === "object" && !Array.isArray(optionsOrCallback)
        ? optionsOrCallback
        : {};
    // Synchronous round-trip: ask the host to reserve the push stream.
    const resultJson = _networkHttp2StreamPushStreamRaw.applySyncPromise(
      undefined,
      [
        this._streamId,
        serializeHttp2Headers(normalizeHttp2Headers(headers)),
        JSON.stringify(options ?? {}),
      ],
    );
    const result = JSON.parse(resultJson) as {
      error?: string;
      streamId?: number;
      headers?: string;
    };
    // Callback fires on a microtask to match Node's asynchronous ordering.
    queueMicrotask(() => {
      if (result.error) {
        callback(parseHttp2ErrorPayload(result.error));
        return;
      }
      const pushStream = new ServerHttp2Stream(
        Number(result.streamId),
        this.session,
        parseHttp2Headers(result.headers),
        true,
      );
      http2Streams.set(Number(result.streamId), pushStream);
      callback(null, pushStream, parseHttp2Headers(result.headers));
    });
  }
  // Write one body chunk; returns the host's backpressure verdict.
  write(data: unknown): boolean {
    if (this._writableState.ended) {
      // Node semantics: writing after end() surfaces an async "error" event,
      // it does not throw synchronously.
      queueMicrotask(() => {
        this.emit("error", createHttp2Error("ERR_STREAM_WRITE_AFTER_END", "write after end"));
      });
      return false;
    }
    if (typeof _networkHttp2StreamWriteRaw === "undefined") {
      throw new Error("http2 server stream write bridge is not available");
    }
    const buffer = Buffer.isBuffer(data)
      ? data
      : typeof data === "string"
        ? Buffer.from(data)
        : Buffer.from(data as Uint8Array);
    return _networkHttp2StreamWriteRaw.applySync(undefined, [this._streamId, buffer.toString("base64")]);
  }
  // Finish the response; sends a default 200 if respond() was never called.
  end(data?: unknown): void {
    if (!this._responded) {
      this.respond({ ":status": 200 });
    }
    if (typeof _networkHttp2StreamEndRaw === "undefined") {
      throw new Error("http2 server stream end bridge is not available");
    }
    this._writableState.ended = true;
    let encoded: string | null = null;
    if (data !== undefined) {
      const buffer = Buffer.isBuffer(data)
        ? data
        : typeof data === "string"
          ? Buffer.from(data)
          : Buffer.from(data as Uint8Array);
      encoded = buffer.toString("base64");
    }
    _networkHttp2StreamEndRaw.applySync(undefined, [this._streamId, encoded]);
    this._writableState.ended = true;
  }
  // Flow control proxies directly to the host-side stream.
  pause(): this {
    this._readableState.flowing = false;
    _networkHttp2StreamPauseRaw?.applySync(undefined, [this._streamId]);
    return this;
  }
  resume(): this {
    this._readableState.flowing = true;
    _networkHttp2StreamResumeRaw?.applySync(undefined, [this._streamId]);
    return this;
  }
  // Serve a file: tries the in-sandbox VFS first, then falls back to the
  // host-side helper. NOTE(review): the VFS path closes the whole session
  // after one response — looks intentional for the sandbox; verify callers.
  respondWithFile(
    path: string,
    headers?: Record,
    options?: Record
  ): void {
    try {
      const bodyBase64 = _fs.readFileBinary.applySyncPromise(undefined, [path]);
      const body = Buffer.from(bodyBase64, "base64");
      this._responded = true;
      this.respond({
        ":status": 200,
        ...(headers ?? {}),
      });
      this.end(body);
      queueMicrotask(() => {
        this.session.close();
      });
      return;
    } catch {
      // Fall back to the host http2 helper when the path is not available through the VFS bridge.
    }
    if (typeof _networkHttp2StreamRespondWithFileRaw === "undefined") {
      throw new Error("http2 server stream respondWithFile bridge is not available");
    }
    this._responded = true;
    _networkHttp2StreamRespondWithFileRaw.applySync(
      undefined,
      [
        this._streamId,
        path,
        JSON.stringify(headers ?? {}),
        JSON.stringify(options ?? {}),
      ],
    );
  }
  // Resolve a numeric fd (or a handle exposing .fd) to a path, then delegate.
  respondWithFD(
    fdOrHandle: number | { fd?: unknown },
    headers?: Record,
    options?: Record
  ): void {
    const fd =
      typeof fdOrHandle === "number"
        ? fdOrHandle
        : typeof fdOrHandle?.fd === "number"
          ? fdOrHandle.fd
          : NaN;
    const path = Number.isFinite(fd) ? _fdGetPath.applySync(undefined, [fd]) : null;
    if (!path) {
      throw new Error("Invalid file descriptor for respondWithFD");
    }
    this.respondWithFile(path, headers, options);
  }
  // Host-dispatched request-body chunk (base64-encoded).
  _emitData(dataBase64?: string): void {
    if (!dataBase64) {
      return;
    }
    this.emit("data", Buffer.from(dataBase64, "base64"));
  }
  _emitEnd(): void {
    this._readableState.ended = true;
    this.emit("end");
  }
  _emitDrain(): void {
    this.emit("drain");
  }
  _emitClose(rstCode?: number): void {
    if (typeof rstCode === "number") {
      this.rstCode = rstCode;
    }
    this.destroyed = true;
    this.emit("close");
  }
}

// Minimal IncomingMessage-like wrapper handed to "request" listeners.
// NOTE(review): the class continues in the next chunk; the last line below
// ends mid-ternary, completed there.
class Http2ServerRequest extends Http2EventEmitter {
  headers: Http2HeadersRecord;
  method: string;
  url: string;
  connection: Http2SocketProxy;
  socket: Http2SocketProxy;
  stream: ServerHttp2Stream;
  destroyed = false;
  readable = true;
  _readableState = { flowing: null as boolean | null, length: 0, ended: false, objectMode: false };
  constructor(headers: Http2HeadersRecord, socket: Http2SocketProxy, stream: ServerHttp2Stream) {
    super();
    this.headers = headers;
    this.method = typeof headers[":method"] === "string" ? String(headers[":method"]) : "GET";
    this.url = typeof headers[":path"] === "string" ?
// NOTE(review): continuation of `Http2ServerRequest`; the first line completes
// the `this.url` ternary begun in the previous chunk.
      String(headers[":path"]) : "/";
    this.connection = socket;
    this.socket = socket;
    this.stream = stream;
  }
  // Attaching a "data" listener implicitly starts flowing, as in Node streams.
  on(event: string, listener: Http2EventListener): this {
    super.on(event, listener);
    if (event === "data" && this._readableState.flowing !== false) {
      this.resume();
    }
    return this;
  }
  once(event: string, listener: Http2EventListener): this {
    super.once(event, listener);
    if (event === "data" && this._readableState.flowing !== false) {
      this.resume();
    }
    return this;
  }
  // Flow control proxies straight through to the underlying server stream.
  resume(): this {
    this._readableState.flowing = true;
    this.stream.resume();
    return this;
  }
  pause(): this {
    this._readableState.flowing = false;
    this.stream.pause();
    return this;
  }
  // Minimal pipe(): forwards chunks and honours destination backpressure via
  // its "drain" event when the destination exposes once().
  pipe(dest: {
    write: (chunk: Buffer) => boolean;
    end: () => void;
    once?: (event: string, listener: () => void) => unknown;
  }): typeof dest {
    this.on("data", (chunk) => {
      const wrote = dest.write(chunk as Buffer);
      if (wrote === false && typeof dest.once === "function") {
        this.pause();
        dest.once("drain", () => this.resume());
      }
    });
    this.on("end", () => dest.end());
    this.resume();
    return dest;
  }
  // API-shape stubs: data delivery is push-based here, so read() yields nothing.
  unpipe(): this { return this; }
  read(): null { return null; }
  isPaused(): boolean { return this._readableState.flowing === false; }
  setEncoding(): this { return this; }
  // Host-dispatched bridge entry points.
  _emitData(chunk: Buffer): void {
    this._readableState.length += chunk.byteLength;
    this.emit("data", chunk);
  }
  _emitEnd(): void {
    this._readableState.ended = true;
    this.emit("end");
    this.emit("close");
  }
  _emitError(error: Error): void {
    this.emit("error", error);
  }
  destroy(err?: Error): this {
    this.destroyed = true;
    if (err) {
      this.emit("error", err);
    }
    this.emit("close");
    return this;
  }
}

// Minimal ServerResponse-like wrapper over a ServerHttp2Stream.
// NOTE(review): continues in the next chunk — the last line ends mid-ternary.
class Http2ServerResponse extends Http2EventEmitter {
  private _stream: ServerHttp2Stream;
  private _headers: Http2HeadersRecord = {};
  private _statusCode = 200;
  headersSent = false;
  writable = true;
  writableEnded = false;
  writableFinished = false;
  socket: Http2SocketProxy;
  connection: Http2SocketProxy;
  stream: ServerHttp2Stream;
  _writableState = { ended: false, finished: false, objectMode: false, corked: 0, length: 0 };
  constructor(stream: ServerHttp2Stream) {
    super();
    this._stream = stream;
    this.stream = stream;
    this.socket = stream.session.socket;
    this.connection = this.socket;
  }
  // Merge headers and send them immediately via the stream.
  // NOTE(review): no guard against a second writeHead() — Node throws
  // ERR_HTTP2_HEADERS_SENT; here a second respond() reaches the host. Verify.
  writeHead(statusCode: number, headers?: Http2HeadersRecord): this {
    this._statusCode = statusCode;
    this._headers = {
      ...this._headers,
      ...(headers ?? {}),
      ":status": statusCode,
    };
    this._stream.respond(this._headers);
    this.headersSent = true;
    return this;
  }
  setHeader(name: string, value: Http2HeaderValue): this {
    this._headers[name] = value;
    return this;
  }
  getHeader(name: string): Http2HeaderValue | undefined {
    return this._headers[name];
  }
  hasHeader(name: string): boolean {
    return Object.prototype.hasOwnProperty.call(this._headers, name);
  }
  removeHeader(name: string): void {
    delete this._headers[name];
  }
  // Implicitly flushes headers on first write, mirroring Node.
  write(data: unknown, encodingOrCallback?: BufferEncoding | (() => void), callback?: () => void): boolean {
    if (!(":status" in this._headers)) {
      this._headers[":status"] = this._statusCode;
      this._stream.respond(this._headers);
      this.headersSent = true;
    }
    const wrote = this._stream.write(
      typeof data === "string" && typeof encodingOrCallback === "string"
        ? Buffer.from(data, encodingOrCallback)
        : data,
    );
    const cb = typeof encodingOrCallback === "function" ?
// NOTE(review): continuation of `Http2ServerResponse.write()`; the first line
// completes the callback-selection ternary begun in the previous chunk.
      encodingOrCallback : callback;
    cb?.();
    return wrote;
  }
  end(data?: unknown): this {
    // Flush headers implicitly if nothing was sent yet.
    if (!(":status" in this._headers)) {
      this._headers[":status"] = this._statusCode;
      this._stream.respond(this._headers);
      this.headersSent = true;
    }
    this.writableEnded = true;
    this._writableState.ended = true;
    this._stream.end(data);
    // "finish"/"close" are deferred a microtask for Node-like async ordering.
    queueMicrotask(() => {
      this.writable = false;
      this.writableFinished = true;
      this._writableState.finished = true;
      this.emit("finish");
      this.emit("close");
    });
    return this;
  }
  destroy(err?: Error): this {
    if (err) {
      this.emit("error", err);
    }
    this.writable = false;
    this.writableEnded = true;
    this.writableFinished = true;
    this.emit("close");
    return this;
  }
}

// Client/server HTTP/2 session proxy. All frame traffic is relayed through
// host bridge functions; events come back through http2Dispatch().
class Http2Session extends Http2EventEmitter {
  encrypted = false;
  alpnProtocol: string | false = false;
  originSet?: string[];
  localSettings: Http2SettingsRecord = cloneHttp2Settings(DEFAULT_HTTP2_SETTINGS);
  remoteSettings: Http2SettingsRecord = cloneHttp2Settings(DEFAULT_HTTP2_SETTINGS);
  pendingSettingsAck = false;
  socket: Http2SocketProxy;
  state: Http2SessionRuntimeState = cloneHttp2SessionRuntimeState(DEFAULT_HTTP2_SESSION_STATE);
  private _sessionId: number;
  private _waitStarted = false;
  private _pendingSettingsAckCount = 0;
  private _awaitingInitialSettingsAck = false;
  private _settingsCallbacks: Array<() => void> = [];
  constructor(sessionId: number, socketState?: SerializedHttp2SocketState) {
    super();
    this._sessionId = sessionId;
    // Destroying the socket proxy tears the session down too.
    this.socket = new Http2SocketProxy(socketState, () => this.destroy());
    (this as Record)[HTTP2_K_SOCKET] = this.socket;
  }
  // Keep the host-side event loop alive while this session has work pending.
  _retain(): void {
    if (this._waitStarted || typeof _networkHttp2SessionWaitRaw === "undefined") {
      return;
    }
    this._waitStarted = true;
    void _networkHttp2SessionWaitRaw.apply(undefined, [this._sessionId], {
      result: { promise: true },
    }).catch((error) => {
      this.emit("error", error instanceof Error ? error : new Error(String(error)));
    });
  }
  _release(): void {
    this._waitStarted = false;
  }
  // The initial SETTINGS exchange counts as one outstanding ack.
  _beginInitialSettingsAck(): void {
    this._awaitingInitialSettingsAck = true;
    this._pendingSettingsAckCount += 1;
    this.pendingSettingsAck = true;
  }
  _applyLocalSettings(settings: Http2SettingsRecord): void {
    this.localSettings = cloneHttp2Settings(settings);
    if (this._awaitingInitialSettingsAck) {
      this._awaitingInitialSettingsAck = false;
      this._pendingSettingsAckCount = Math.max(0, this._pendingSettingsAckCount - 1);
      this.pendingSettingsAck = this._pendingSettingsAckCount > 0;
    }
    this.emit("localSettings", this.localSettings);
  }
  _applyRemoteSettings(settings: Http2SettingsRecord): void {
    this.remoteSettings = cloneHttp2Settings(settings);
    this.emit("remoteSettings", this.remoteSettings);
  }
  _applyRuntimeState(state?: Http2SessionRuntimeState): void {
    this.state = cloneHttp2SessionRuntimeState(state);
  }
  // Peer acknowledged one SETTINGS frame: resolve the oldest queued callback.
  _ackSettings(): void {
    this._pendingSettingsAckCount = Math.max(0, this._pendingSettingsAckCount - 1);
    this.pendingSettingsAck = this._pendingSettingsAckCount > 0;
    const callback = this._settingsCallbacks.shift();
    callback?.();
  }
  // Open a client stream; the host allocates the stream id synchronously.
  request(headers?: Http2HeadersRecord, options?: Record): ClientHttp2Stream {
    if (typeof _networkHttp2SessionRequestRaw === "undefined") {
      throw new Error("http2 session request bridge is not available");
    }
    validateHttp2RequestOptions(options);
    const streamId = _networkHttp2SessionRequestRaw.applySync(
      undefined,
      [
        this._sessionId,
        serializeHttp2Headers(normalizeHttp2Headers(headers)),
        JSON.stringify(options ?? {}),
      ],
    );
    const stream = new ClientHttp2Stream(streamId, this);
    http2Streams.set(streamId, stream);
    return stream;
  }
  // Submit a SETTINGS frame; callback fires when the peer acks it.
  settings(settings: Record, callback?: () => void): void {
    if (callback !== undefined && typeof callback !== "function") {
      throw createHttp2ArgTypeError("callback", "function", callback);
    }
    if (typeof _networkHttp2SessionSettingsRaw === "undefined") {
      throw new Error("http2 session settings bridge is not available");
    }
    const normalized = validateHttp2Settings(settings);
    _networkHttp2SessionSettingsRaw.applySync(
      undefined,
      [this._sessionId, JSON.stringify(normalized)],
    );
    this._pendingSettingsAckCount += 1;
    this.pendingSettingsAck = true;
    if (callback) {
      this._settingsCallbacks.push(callback);
    }
  }
  // Mirrors Node's argument validation (ERR_INVALID_ARG_TYPE / ERR_OUT_OF_RANGE).
  setLocalWindowSize(windowSize: unknown): void {
    if (typeof windowSize !== "number" || Number.isNaN(windowSize)) {
      throw createHttp2ArgTypeError("windowSize", "number", windowSize);
    }
    if (!Number.isInteger(windowSize) || windowSize < 0 || windowSize > 2147483647) {
      const error = new RangeError(
        `The value of "windowSize" is out of range. It must be >= 0 && <= 2147483647. Received ${windowSize}`,
      ) as RangeError & { code: string };
      error.code = "ERR_OUT_OF_RANGE";
      throw error;
    }
    if (typeof _networkHttp2SessionSetLocalWindowSizeRaw === "undefined") {
      throw new Error("http2 session setLocalWindowSize bridge is not available");
    }
    const result = _networkHttp2SessionSetLocalWindowSizeRaw.applySync(
      undefined,
      [this._sessionId, windowSize],
    );
    this._applyRuntimeState(parseHttp2SessionState(result)?.state);
  }
  // Send GOAWAY; opaque data is forwarded base64-encoded.
  // NOTE(review): continues in the next chunk — the last line ends mid-ternary.
  goaway(code = 0, lastStreamID = 0, opaqueData?: unknown): void {
    const payload =
      opaqueData === undefined
        ? null
        : Buffer.isBuffer(opaqueData)
          ? opaqueData.toString("base64")
          : typeof opaqueData === "string" ?
// NOTE(review): continuation of `Http2Session.goaway()`; the first lines
// complete the opaque-data encoding ternary begun in the previous chunk.
            Buffer.from(opaqueData).toString("base64")
          : Buffer.from(opaqueData as Uint8Array).toString("base64");
    _networkHttp2SessionGoawayRaw?.applySync(undefined, [this._sessionId, code, lastStreamID, payload]);
  }
  // Graceful close. Client streams that never received a response are failed
  // with ERR_HTTP2_GOAWAY_SESSION first (matching Node), in which case the
  // session is destroyed rather than closed.
  close(): void {
    const pendingStreams = Array.from(http2Streams.entries()).filter(
      ([, stream]) =>
        typeof (stream as { _belongsTo?: unknown })._belongsTo === "function" &&
        (stream as ClientHttp2Stream)._belongsTo(this) &&
        !(stream as ClientHttp2Stream)._hasReceivedResponse(),
    ) as Array<[number, ClientHttp2Stream]>;
    if (pendingStreams.length > 0) {
      const error = createHttp2Error(
        "ERR_HTTP2_GOAWAY_SESSION",
        "The HTTP/2 session is closing before the stream could be established.",
      );
      queueMicrotask(() => {
        for (const [streamId, stream] of pendingStreams) {
          // Skip ids that were replaced since the snapshot was taken.
          if (http2Streams.get(streamId) !== stream) {
            continue;
          }
          stream.emit("error", error);
          stream.emit("close");
          http2Streams.delete(streamId);
        }
      });
      if (typeof _networkHttp2SessionDestroyRaw !== "undefined") {
        _networkHttp2SessionDestroyRaw.applySync(undefined, [this._sessionId]);
        return;
      }
    }
    _networkHttp2SessionCloseRaw?.applySync(undefined, [this._sessionId]);
    // Fallback "close" emission in case the host never dispatches one.
    // NOTE(review): the 50ms delay is a heuristic; host dispatch normally wins.
    setTimeout(() => {
      if (!http2Sessions.has(this._sessionId)) {
        return;
      }
      this._release();
      this.emit("close");
      http2Sessions.delete(this._sessionId);
      _unregisterHandle?.(`http2:session:${this._sessionId}`);
    }, 50);
  }
  destroy(): void {
    if (typeof _networkHttp2SessionDestroyRaw !== "undefined") {
      _networkHttp2SessionDestroyRaw.applySync(undefined, [this._sessionId]);
      return;
    }
    this.close();
  }
}

// HTTP/2 server proxy (plain or TLS); actual listening happens host-side.
class Http2Server extends Http2EventEmitter {
  readonly allowHalfOpen: boolean;
  readonly allowHTTP1: boolean;
  readonly encrypted: boolean;
  readonly _serverId: number;
  listening = false;
  private _address: { address: string; family: string; port: number } | null = null;
  private _options: Record;
  private _timeoutMs = 0;
  private _waitStarted = false;
  constructor(
    options: Record | undefined,
    listener: ((req: Http2ServerRequest, res: Http2ServerResponse) => void) | undefined,
    encrypted: boolean,
  ) {
    super();
    this.allowHalfOpen = options?.allowHalfOpen === true;
    this.allowHTTP1 = options?.allowHTTP1 === true;
    this.encrypted = encrypted;
    const initialSettings =
      options?.settings && typeof options.settings === "object" && !Array.isArray(options.settings)
        ? cloneHttp2Settings(options.settings as Http2SettingsRecord)
        : {};
    this._options = {
      ...(options ?? {}),
      settings: initialSettings,
    };
    this._serverId = nextHttp2ServerId++;
    // Options snapshot exposed under the HTTP2_OPTIONS symbol for introspection.
    (this as Record)[HTTP2_OPTIONS] = {
      settings: cloneHttp2Settings(initialSettings),
      unknownProtocolTimeout: 10000,
      ...(encrypted ? { ALPNProtocols: ["h2"] } : {}),
    };
    if (listener) {
      this.on("request", listener as unknown as Http2EventListener);
    }
    http2Servers.set(this._serverId, this);
  }
  address(): { address: string; family: string; port: number } | null {
    return this._address;
  }
  // Keep the host event loop alive while listening.
  _retain(): void {
    if (this._waitStarted || typeof _networkHttp2ServerWaitRaw === "undefined") {
      return;
    }
    this._waitStarted = true;
    void _networkHttp2ServerWaitRaw.apply(undefined, [this._serverId], {
      result: { promise: true },
    }).catch((error) => {
      this.emit("error", error instanceof Error ? error : new Error(String(error)));
    });
  }
  _release(): void {
    this._waitStarted = false;
  }
  setTimeout(timeout: number, callback?: () => void): this {
    this._timeoutMs = normalizeSocketTimeout(timeout);
    if (callback) {
      this.on("timeout", callback);
    }
    return this;
  }
  // Merge validated settings into both the private options and the snapshot.
  updateSettings(settings: Record): this {
    const normalized = validateHttp2Settings(settings);
    const mergedSettings = {
      ...cloneHttp2Settings(this._options.settings as Http2SettingsRecord),
      ...cloneHttp2Settings(normalized),
    };
    this._options = {
      ...this._options,
      settings: mergedSettings,
    };
    const optionsState = (this as Record)[HTTP2_OPTIONS] as {
      settings: Http2SettingsRecord;
    };
    optionsState.settings = cloneHttp2Settings(mergedSettings);
    return this;
  }
  // Start listening via a synchronous host round-trip.
  // NOTE(review): continues in the next chunk — the last line ends mid-ternary.
  listen(
    portOrOptions?: number | string | null | { port?: unknown; host?: unknown; backlog?: unknown; path?: unknown },
    hostOrCallback?: string | NetServerEventListener,
    backlogOrCallback?: number | NetServerEventListener,
    callback?: NetServerEventListener,
  ): this {
    if (typeof _networkHttp2ServerListenRaw === "undefined") {
      throw new Error(`http2.${this.encrypted ? "createSecureServer" : "createServer"} is not supported in sandbox`);
    }
    const options = normalizeListenArgs(portOrOptions, hostOrCallback, backlogOrCallback, callback);
    if (options.callback) {
      this.once("listening", options.callback);
    }
    const payload = {
      serverId: this._serverId,
      secure: this.encrypted,
      port: options.port,
      host: options.host,
      backlog: options.backlog,
      allowHalfOpen: this.allowHalfOpen,
      allowHTTP1: this._options.allowHTTP1 === true,
      timeout: this._timeoutMs,
      settings: this._options.settings,
      remoteCustomSettings: this._options.remoteCustomSettings,
      tls: this.encrypted
        ? buildSerializedTlsOptions(
            {
              ...this._options,
              ...((portOrOptions && typeof portOrOptions === "object" ?
// NOTE(review): continuation of `Http2Server.listen()`; the first lines
// complete the TLS-options spread ternary begun in the previous chunk.
                portOrOptions
              : {}) as Record),
            },
            { isServer: true },
          )
        : undefined,
    };
    const result = JSON.parse(
      _networkHttp2ServerListenRaw.applySyncPromise(undefined, [JSON.stringify(payload)]),
    ) as { address?: { address: string; family: string; port: number } | null };
    this._address = result.address ?? null;
    this.listening = true;
    this._retain();
    _registerHandle?.(`http2:server:${this._serverId}`, "http2 server");
    this.emit("listening");
    return this;
  }
  close(callback?: () => void): this {
    if (callback) {
      this.once("close", callback);
    }
    if (!this.listening) {
      this._release();
      queueMicrotask(() => this.emit("close"));
      return this;
    }
    void _networkHttp2ServerCloseRaw?.apply(undefined, [this._serverId], {
      result: { promise: true },
    });
    // Fallback "close" in case the host never dispatches serverClose.
    setTimeout(() => {
      if (!this.listening) {
        return;
      }
      this.listening = false;
      this._release();
      this.emit("close");
      http2Servers.delete(this._serverId);
      _unregisterHandle?.(`http2:server:${this._serverId}`);
    }, 50);
    return this;
  }
}

// Shared factory behind http2.createServer / http2.createSecureServer.
function createHttp2Server(
  secure: boolean,
  optionsOrListener?: Record | ((req: Http2ServerRequest, res: Http2ServerResponse) => void),
  maybeListener?: (req: Http2ServerRequest, res: Http2ServerResponse) => void,
): Http2Server {
  const listener =
    typeof optionsOrListener === "function"
      ? optionsOrListener
      : maybeListener;
  const options =
    optionsOrListener && typeof optionsOrListener === "object" && !Array.isArray(optionsOrListener)
      ? optionsOrListener
      : undefined;
  return new Http2Server(options, listener, secure);
}

// http2.connect(): open a client session through the host bridge.
function connectHttp2(
  authorityOrOptions: unknown,
  optionsOrListener?: Record | ((session: Http2Session) => void),
  maybeListener?: (session: Http2Session) => void,
): Http2Session {
  if (typeof _networkHttp2SessionConnectRaw === "undefined") {
    throw new Error("http2.connect is not supported in sandbox");
  }
  const { authority, options, listener } = normalizeHttp2ConnectArgs(
    authorityOrOptions,
    optionsOrListener,
    maybeListener,
  );
  if (authority.protocol !== "http:" && authority.protocol !== "https:") {
    throw createHttp2Error(
      "ERR_HTTP2_UNSUPPORTED_PROTOCOL",
      `protocol "${authority.protocol}" is unsupported.`,
    );
  }
  validateHttp2ConnectOptions(options);
  // A user-supplied createConnection() yields an existing sandbox socket id.
  const socketId = options.createConnection
    ? resolveHttp2SocketId((options.createConnection as () => unknown)())
    : undefined;
  const response = JSON.parse(
    _networkHttp2SessionConnectRaw.applySyncPromise(
      undefined,
      [
        JSON.stringify({
          authority: authority.toString(),
          protocol: authority.protocol,
          host: options.host ?? options.hostname ?? authority.hostname,
          port: options.port ?? authority.port,
          localAddress: options.localAddress,
          family: options.family,
          socketId,
          settings: options.settings,
          remoteCustomSettings: options.remoteCustomSettings,
          tls:
            authority.protocol === "https:"
              ? buildSerializedTlsOptions(options, { servername: typeof options.servername === "string" ? options.servername : authority.hostname })
              : undefined,
        }),
      ],
    ),
  ) as { sessionId: number; state?: string };
  const initialState = parseHttp2SessionState(response.state);
  const session = new Http2Session(
    response.sessionId,
    initialState?.socket ?? undefined,
  );
  applyHttp2SessionState(session, initialState);
  session._beginInitialSettingsAck();
  session._retain();
  if (listener) {
    session.once("connect", () => listener(session));
  }
  http2Sessions.set(response.sessionId, session);
  _registerHandle?.(`http2:session:${response.sessionId}`, "http2 session");
  if (authority.protocol === "https:") {
    // Ensure at least one listener exists so secureConnect dispatch is consumed.
    session.socket.once("secureConnect", () => {});
  }
  return session;
}

// Look up (or lazily create) the isolate-side proxy for a host session id.
function getOrCreateHttp2Session(
  sessionId: number,
  state?: SerializedHttp2SessionState | null,
): Http2Session {
  let session = http2Sessions.get(sessionId);
  if (!session) {
    session = new Http2Session(sessionId, state?.socket ?? undefined);
    http2Sessions.set(sessionId, session);
  }
  applyHttp2SessionState(session, state);
  return session;
}

// Buffer client-stream events that can arrive before the stream object has
// been registered (the host may dispatch ahead of construction).
// NOTE(review): continues in the next chunk — the last line ends mid-expression.
function queuePendingHttp2ClientStreamEvent(
  streamId: number,
  event: {
    kind: "push" | "responseHeaders" | "data" | "end" | "close" | "error";
    data?: string;
    extraNumber?: number;
  },
): void {
  const pending = pendingHttp2ClientStreamEvents.get(streamId) ??
// NOTE(review): the first line completes the `??` default begun in the
// previous chunk (queuePendingHttp2ClientStreamEvent).
    [];
  pending.push(event);
  pendingHttp2ClientStreamEvents.set(streamId, pending);
}

// Coalesce flushes of buffered client-stream events; prefers setImmediate so
// a macrotask boundary separates dispatch from delivery.
function schedulePendingHttp2ClientStreamEventsFlush(streamId: number): void {
  if (scheduledHttp2ClientStreamFlushes.has(streamId)) {
    return;
  }
  scheduledHttp2ClientStreamFlushes.add(streamId);
  const flush = () => {
    scheduledHttp2ClientStreamFlushes.delete(streamId);
    flushPendingHttp2ClientStreamEvents(streamId);
  };
  const scheduleImmediate = (globalThis as { setImmediate?: (callback: () => void) => void }).setImmediate;
  if (typeof scheduleImmediate === "function") {
    scheduleImmediate(flush);
    return;
  }
  setTimeout(flush, 0);
}

// Replay buffered events onto a now-registered ClientHttp2Stream, in arrival
// order. Only a "close" event removes the stream from the registry.
function flushPendingHttp2ClientStreamEvents(streamId: number): void {
  const stream = http2Streams.get(streamId);
  // Server streams share the registry; duck-type to accept client streams only.
  if (!stream || typeof (stream as { _emitResponseHeaders?: unknown })._emitResponseHeaders !== "function") {
    return;
  }
  const pending = pendingHttp2ClientStreamEvents.get(streamId);
  if (!pending || pending.length === 0) {
    return;
  }
  pendingHttp2ClientStreamEvents.delete(streamId);
  for (const event of pending) {
    if (event.kind === "push") {
      (stream as ClientHttp2Stream)._emitPush(parseHttp2Headers(event.data), event.extraNumber);
      continue;
    }
    if (event.kind === "responseHeaders") {
      (stream as ClientHttp2Stream)._emitResponseHeaders(parseHttp2Headers(event.data));
      continue;
    }
    if (event.kind === "data") {
      (stream as ClientHttp2Stream)._emitDataChunk(event.data);
      continue;
    }
    if (event.kind === "end") {
      (stream as ClientHttp2Stream)._emitEnd();
      continue;
    }
    if (event.kind === "error") {
      stream.emit("error", parseHttp2ErrorPayload(event.data));
      continue;
    }
    // Remaining kind is "close": tear down and unregister the stream.
    if (typeof (stream as { _emitClose?: unknown })._emitClose === "function") {
      (stream as ClientHttp2Stream)._emitClose(event.extraNumber);
    } else {
      stream.emit("close");
    }
    http2Streams.delete(streamId);
  }
}

// Central demultiplexer for every http2 event the host dispatches into the
// isolate. `kind` selects the target (session/stream/server); the remaining
// arguments are kind-specific payloads (mostly JSON or base64 strings).
function http2Dispatch(
  kind: string,
  id: number,
  data?: string,
  extra?: string,
  extraNumber?: string | number,
  extraHeaders?: string,
  flags?: string | number,
): void {
  // --- session-level events -------------------------------------------------
  if (kind === "sessionConnect") {
    const session = http2Sessions.get(id);
    if (!session) return;
    const state = parseHttp2SessionState(data);
    applyHttp2SessionState(session, state);
    if (session.encrypted) {
      session.socket.emit("secureConnect");
    }
    session.emit("connect");
    return;
  }
  if (kind === "sessionClose") {
    const session = http2Sessions.get(id);
    if (!session) return;
    session._release();
    session.emit("close");
    http2Sessions.delete(id);
    _unregisterHandle?.(`http2:session:${id}`);
    return;
  }
  if (kind === "sessionError") {
    const session = http2Sessions.get(id);
    if (!session) return;
    session.emit("error", parseHttp2ErrorPayload(data));
    return;
  }
  if (kind === "sessionLocalSettings") {
    const session = http2Sessions.get(id);
    if (!session) return;
    session._applyLocalSettings(parseHttp2Headers(data) as unknown as Http2SettingsRecord);
    return;
  }
  if (kind === "sessionRemoteSettings") {
    const session = http2Sessions.get(id);
    if (!session) return;
    session._applyRemoteSettings(parseHttp2Headers(data) as unknown as Http2SettingsRecord);
    return;
  }
  if (kind === "sessionSettingsAck") {
    const session = http2Sessions.get(id);
    if (!session) return;
    session._ackSettings();
    return;
  }
  if (kind === "sessionGoaway") {
    const session = http2Sessions.get(id);
    if (!session) return;
    session.emit(
      "goaway",
      Number(extraNumber ?? 0),
      Number(flags ?? 0),
      data ? Buffer.from(data, "base64") : Buffer.alloc(0),
    );
    return;
  }
  // --- client-stream events (buffered, then flushed onto the stream) -------
  if (kind === "clientPushStream") {
    const session = http2Sessions.get(id);
    if (!session) return;
    const streamId = Number(data);
    const stream = new ClientHttp2Stream(streamId, session, true);
    http2Streams.set(streamId, stream);
    session.emit("stream", stream, parseHttp2Headers(extraHeaders), Number(flags ?? 0));
    schedulePendingHttp2ClientStreamEventsFlush(streamId);
    return;
  }
  if (kind === "clientPushHeaders") {
    queuePendingHttp2ClientStreamEvent(id, {
      kind: "push",
      data,
      extraNumber: Number(extraNumber ?? 0),
    });
    schedulePendingHttp2ClientStreamEventsFlush(id);
    return;
  }
  if (kind === "clientResponseHeaders") {
    queuePendingHttp2ClientStreamEvent(id, {
      kind: "responseHeaders",
      data,
    });
    schedulePendingHttp2ClientStreamEventsFlush(id);
    return;
  }
  if (kind === "clientData") {
    queuePendingHttp2ClientStreamEvent(id, {
      kind: "data",
      data,
    });
    schedulePendingHttp2ClientStreamEventsFlush(id);
    return;
  }
  if (kind === "clientEnd") {
    queuePendingHttp2ClientStreamEvent(id, {
      kind: "end",
    });
    schedulePendingHttp2ClientStreamEventsFlush(id);
    return;
  }
  if (kind === "clientClose") {
    queuePendingHttp2ClientStreamEvent(id, {
      kind: "close",
      extraNumber: Number(extraNumber ?? 0),
    });
    schedulePendingHttp2ClientStreamEventsFlush(id);
    return;
  }
  if (kind === "clientError") {
    queuePendingHttp2ClientStreamEvent(id, {
      kind: "error",
      data,
    });
    schedulePendingHttp2ClientStreamEventsFlush(id);
    return;
  }
  // --- server-side events ---------------------------------------------------
  if (kind === "serverStream") {
    const server = http2Servers.get(id);
    if (!server) return;
    const sessionState = parseHttp2SessionState(extra);
    const sessionId = Number(extraNumber);
    const session = getOrCreateHttp2Session(sessionId, sessionState);
    const streamId = Number(data);
    const headers = parseHttp2Headers(extraHeaders);
    const numericFlags = Number(flags ?? 0);
    const stream = new ServerHttp2Stream(streamId, session, headers);
    http2Streams.set(streamId, stream);
    server.emit("stream", stream, headers, numericFlags);
    // Compat layer: synthesize req/res objects for "request" listeners and
    // wire stream events through to them.
    if (server.listenerCount("request") > 0) {
      const request = new Http2ServerRequest(headers, session.socket, stream);
      const response = new Http2ServerResponse(stream);
      stream.on("data", (chunk) => {
        request._emitData(chunk as Buffer);
      });
      stream.on("end", () => {
        request._emitEnd();
      });
      stream.on("error", (error) => {
        request._emitError(error as Error);
      });
      stream.on("drain", () => {
        response.emit("drain");
      });
      server.emit("request", request, response);
    }
    return;
  }
  if (kind === "serverStreamData") {
    const stream = http2Streams.get(id);
    if (!stream || typeof (stream as { _emitData?: unknown })._emitData !== "function") return;
    (stream as ServerHttp2Stream)._emitData(data);
    return;
  }
  if (kind === "serverStreamEnd") {
    const stream = http2Streams.get(id);
    if (!stream || typeof (stream as { _emitEnd?: unknown })._emitEnd !== "function") return;
    (stream as ServerHttp2Stream)._emitEnd();
    return;
  }
  if (kind === "serverStreamDrain") {
    const stream = http2Streams.get(id);
    if (!stream || typeof (stream as { _emitDrain?: unknown })._emitDrain !== "function") return;
    (stream as ServerHttp2Stream)._emitDrain();
    return;
  }
  if (kind === "serverStreamError") {
    const stream = http2Streams.get(id);
    if (!stream) return;
    stream.emit("error", parseHttp2ErrorPayload(data));
    return;
  }
  if (kind === "serverStreamClose") {
    const stream = http2Streams.get(id);
    if (!stream || typeof (stream as { _emitClose?: unknown })._emitClose !== "function") return;
    (stream as ServerHttp2Stream)._emitClose(Number(extraNumber ?? 0));
    http2Streams.delete(id);
    return;
  }
  if (kind === "serverSession") {
    const server = http2Servers.get(id);
    if (!server) return;
    const sessionId = Number(extraNumber);
    const session = getOrCreateHttp2Session(sessionId, parseHttp2SessionState(data));
    server.emit("session", session);
    return;
  }
  if (kind === "serverTimeout") {
    http2Servers.get(id)?.emit("timeout");
    return;
  }
  if (kind === "serverConnection") {
    http2Servers.get(id)?.emit("connection", new Http2SocketProxy(parseHttp2SocketState(data) ?? undefined));
    return;
  }
  if (kind === "serverSecureConnection") {
    http2Servers.get(id)?.emit("secureConnection", new Http2SocketProxy(parseHttp2SocketState(data) ?? undefined));
    return;
  }
  if (kind === "serverClose") {
    const server = http2Servers.get(id);
    if (!server) return;
    server.listening = false;
    server._release();
    server.emit("close");
    http2Servers.delete(id);
    _unregisterHandle?.(`http2:server:${id}`);
    return;
  }
  if (kind === "serverCompatRequest") {
    pendingHttp2CompatRequests.set(Number(extraNumber), {
      serverId: id,
      requestJson: data ?? "{}",
    });
    void dispatchHttp2CompatibilityRequest(id, Number(extraNumber));
  }
}

// Drain queued dispatch events on a microtask, preserving arrival order.
function scheduleQueuedHttp2DispatchDrain(): void {
  if (scheduledHttp2DispatchDrain) {
    return;
  }
  scheduledHttp2DispatchDrain = true;
  const drain = () => {
    scheduledHttp2DispatchDrain = false;
    while (queuedHttp2DispatchEvents.length > 0) {
      const event = queuedHttp2DispatchEvents.shift();
      if (!event) {
        continue;
      }
      http2Dispatch(
        event.kind,
        event.id,
        event.data,
        event.extra,
        event.extraNumber,
        event.extraHeaders,
        event.flags,
      );
    }
  };
  queueMicrotask(drain);
}

// Raw bridge callback: validates/normalizes the payload shape before handing
// it to http2Dispatch. NOTE(review): this function continues past the end of
// this chunk — the last line below ends mid-ternary.
function onHttp2Dispatch(_eventType: string, payload?: unknown): void {
  if (!payload || typeof payload !== "object") {
    return;
  }
  const event = payload as {
    kind?: unknown;
    id?: unknown;
    data?: unknown;
    extra?: unknown;
    extraNumber?: unknown;
    extraHeaders?: unknown;
    flags?: unknown;
  };
  if (typeof event.kind !== "string" || typeof event.id !== "number") {
    return;
  }
  if (process.env.SECURE_EXEC_DEBUG_HTTP2_BRIDGE === "1") {
    console.error("[secure-exec http2 isolate dispatch]", event.kind, event.id);
  }
  const kind = event.kind;
  const id = event.id;
  const data = typeof event.data === "string" ? event.data : undefined;
  const extra = typeof event.extra === "string" ? event.extra : undefined;
  const normalizedExtraNumber =
    typeof event.extraNumber === "string" || typeof event.extraNumber === "number"
      ? event.extraNumber
      : undefined;
  const extraHeaders = typeof event.extraHeaders === "string" ? event.extraHeaders : undefined;
  const flags =
    typeof event.flags === "string" || typeof event.flags === "number" ?
event.flags + : undefined; + queuedHttp2DispatchEvents.push({ + kind, + id, + data, + extra, + extraNumber: normalizedExtraNumber, + extraHeaders, + flags, + }); + scheduleQueuedHttp2DispatchDrain(); +} + +export const http2 = { + Http2ServerRequest, + Http2ServerResponse, + constants: { + HTTP2_HEADER_METHOD: ":method", + HTTP2_HEADER_PATH: ":path", + HTTP2_HEADER_SCHEME: ":scheme", + HTTP2_HEADER_AUTHORITY: ":authority", + HTTP2_HEADER_STATUS: ":status", + HTTP2_HEADER_CONTENT_TYPE: "content-type", + HTTP2_HEADER_CONTENT_LENGTH: "content-length", + HTTP2_HEADER_ACCEPT: "accept", + HTTP2_HEADER_ACCEPT_ENCODING: "accept-encoding", + HTTP2_METHOD_GET: "GET", + HTTP2_METHOD_POST: "POST", + HTTP2_METHOD_PUT: "PUT", + HTTP2_METHOD_DELETE: "DELETE", + NGHTTP2_NO_ERROR: 0, + NGHTTP2_PROTOCOL_ERROR: 1, + NGHTTP2_INTERNAL_ERROR: 2, + NGHTTP2_FRAME_SIZE_ERROR: 6, + NGHTTP2_FLOW_CONTROL_ERROR: 3, + NGHTTP2_REFUSED_STREAM: 7, + NGHTTP2_CANCEL: 8, + DEFAULT_SETTINGS_MAX_HEADER_LIST_SIZE: 65535, + } as Record, + getDefaultSettings(): Http2SettingsRecord { + return cloneHttp2Settings(DEFAULT_HTTP2_SETTINGS); + }, + connect: connectHttp2, + createServer: createHttp2Server.bind(undefined, false), + createSecureServer: createHttp2Server.bind(undefined, true), +}; + +// Export modules and make them available as globals for require() +exposeCustomGlobal("_httpModule", http); +exposeCustomGlobal("_httpsModule", https); +exposeCustomGlobal("_http2Module", http2); +exposeCustomGlobal("_dnsModule", dns); +function onHttpServerRequest( + eventType: string, + payload?: { + serverId?: number; + requestId?: number; + request?: string; + } | null, +): void { + debugBridgeNetwork("http stream event", eventType, payload); + if (eventType !== "http_request") { + return; + } + if (!payload || payload.serverId === undefined || payload.requestId === undefined || typeof payload.request !== "string") { + return; + } + if (typeof _networkHttpServerRespondRaw === "undefined") { + 
debugBridgeNetwork("http stream missing respond bridge"); + return; + } + + void dispatchServerRequest(payload.serverId, payload.request) + .then((responseJson) => { + debugBridgeNetwork("http stream response", payload.serverId, payload.requestId); + _networkHttpServerRespondRaw.applySync(undefined, [ + payload.serverId!, + payload.requestId!, + responseJson, + ]); + }) + .catch((err) => { + const message = err instanceof Error ? err.message : String(err); + debugBridgeNetwork("http stream error", payload.serverId, payload.requestId, message); + _networkHttpServerRespondRaw.applySync(undefined, [ + payload.serverId!, + payload.requestId!, + JSON.stringify({ + status: 500, + headers: [["content-type", "text/plain"]], + body: `Error: ${message}`, + bodyEncoding: "utf8", + }), + ]); + }); +} + +exposeCustomGlobal("_httpServerDispatch", onHttpServerRequest); +exposeCustomGlobal("_httpServerUpgradeDispatch", dispatchUpgradeRequest); +exposeCustomGlobal("_httpServerConnectDispatch", dispatchConnectRequest); +exposeCustomGlobal("_http2Dispatch", onHttp2Dispatch); +exposeCustomGlobal("_upgradeSocketData", onUpgradeSocketData); +exposeCustomGlobal("_upgradeSocketEnd", onUpgradeSocketEnd); + +// Harden fetch API globals (non-writable, non-configurable) +exposeCustomGlobal("fetch", fetch); +exposeCustomGlobal("Headers", Headers); +exposeCustomGlobal("Request", Request); +exposeCustomGlobal("Response", Response); +if (typeof (globalThis as Record).Blob === "undefined") { + // Minimal Blob stub used by server frameworks for instanceof checks. + exposeCustomGlobal("Blob", class BlobStub {}); +} +if (typeof (globalThis as Record).FormData === "undefined") { + // Minimal FormData stub — server frameworks check `instanceof FormData`. 
+ class FormDataStub { + private _entries: [string, string][] = []; + append(name: string, value: string): void { + this._entries.push([name, value]); + } + get(name: string): string | null { + const entry = this._entries.find(([k]) => k === name); + return entry ? entry[1] : null; + } + getAll(name: string): string[] { + return this._entries.filter(([k]) => k === name).map(([, v]) => v); + } + has(name: string): boolean { + return this._entries.some(([k]) => k === name); + } + delete(name: string): void { + this._entries = this._entries.filter(([k]) => k !== name); + } + entries(): IterableIterator<[string, string]> { + return this._entries[Symbol.iterator](); + } + [Symbol.iterator](): IterableIterator<[string, string]> { + return this.entries(); + } + } + exposeCustomGlobal("FormData", FormDataStub); +} + +// =================================================================== +// net module — TCP socket support bridged to the host +// =================================================================== + +type NetEventListener = (...args: unknown[]) => void; + +const NET_SOCKET_REGISTRY_PREFIX = "__secureExecNetSocket:"; +const NET_SERVER_HANDLE_PREFIX = "net-server:"; + +type NetSocketInfo = { + localAddress: string; + localPort: number; + localFamily: string; + localPath?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + remotePath?: string; +}; + +type SerializedTlsDataValue = + | { + kind: "buffer"; + data: string; + } + | { + kind: "string"; + data: string; + }; + +type SerializedTlsMaterial = SerializedTlsDataValue | SerializedTlsDataValue[]; + +type SerializedTlsBridgeOptions = { + isServer?: boolean; + servername?: string; + rejectUnauthorized?: boolean; + requestCert?: boolean; + session?: string; + key?: SerializedTlsMaterial; + cert?: SerializedTlsMaterial; + ca?: SerializedTlsMaterial; + passphrase?: string; + ciphers?: string; + ALPNProtocols?: string[]; + minVersion?: string; + maxVersion?: string; +}; + +type 
SerializedTlsClientHello = { + servername?: string; + ALPNProtocols?: string[]; +}; + +type TlsSecureContextWrapper = { + __secureExecTlsContext: SerializedTlsBridgeOptions; + context: Record; +}; + +type SerializedTlsState = { + authorized?: boolean; + authorizationError?: string; + alpnProtocol?: string | false; + servername?: string; + protocol?: string | null; + sessionReused?: boolean; + cipher?: { + name?: string; + standardName?: string; + version?: string; + } | null; +}; + +type SerializedTlsBridgeValue = + | null + | boolean + | number + | string + | { + type: "undefined"; + } + | { + type: "buffer"; + data: string; + } + | { + type: "array"; + value: SerializedTlsBridgeValue[]; + } + | { + type: "object"; + id: number; + value: Record; + } + | { + type: "ref"; + id: number; + }; + +type SerializedTlsError = { + message: string; + name?: string; + code?: string; + stack?: string; + authorized?: boolean; + authorizationError?: string; +}; + +type NetSocketHandle = { + setNoDelay?: (enable?: boolean) => unknown; + setKeepAlive?: (enable?: boolean, initialDelay?: number) => unknown; + readStart?: () => unknown; + ref?: () => unknown; + unref?: () => unknown; + socketId?: number; +}; + +type AcceptedNetClientHandle = NetSocketHandle & { + socketId: number; + info: NetSocketInfo; +}; + +function getRegisteredNetSocket(socketId: number): NetSocket | undefined { + return (globalThis as Record)[`${NET_SOCKET_REGISTRY_PREFIX}${socketId}`] as NetSocket | undefined; +} + +function registerNetSocket(socketId: number, socket: NetSocket): void { + (globalThis as Record)[`${NET_SOCKET_REGISTRY_PREFIX}${socketId}`] = socket; +} + +function unregisterNetSocket(socketId: number): void { + delete (globalThis as Record)[`${NET_SOCKET_REGISTRY_PREFIX}${socketId}`]; +} + +function isTruthySocketOption(value: unknown): boolean { + return value === undefined ? 
true : Boolean(value); +} + +function normalizeKeepAliveDelay(initialDelay?: number): number { + if (typeof initialDelay !== "number" || !Number.isFinite(initialDelay)) { + return 0; + } + return Math.max(0, Math.floor(initialDelay / 1000)); +} + +function createTimeoutArgTypeError(argumentName: string, value: unknown): TypeError & { code: string } { + return createTypeErrorWithCode( + `The "${argumentName}" argument must be of type number. Received ${formatReceivedType(value)}`, + "ERR_INVALID_ARG_TYPE", + ); +} + +function createFunctionArgTypeError(argumentName: string, value: unknown): TypeError & { code: string } { + return createTypeErrorWithCode( + `The "${argumentName}" argument must be of type function. Received ${formatReceivedType(value)}`, + "ERR_INVALID_ARG_TYPE", + ); +} + +function createTimeoutRangeError(value: number): RangeError & { code: string } { + const error = new RangeError( + `The value of "timeout" is out of range. It must be a non-negative finite number. Received ${String(value)}`, + ) as RangeError & { code: string }; + error.code = "ERR_OUT_OF_RANGE"; + return error; +} + +function createListenArgValueError(message: string): TypeError & { code: string } { + return createTypeErrorWithCode(message, "ERR_INVALID_ARG_VALUE"); +} + +function createSocketBadPortError(value: unknown): RangeError & { code: string } { + const error = new RangeError( + `options.port should be >= 0 and < 65536. 
Received ${formatReceivedType(value)}.`, + ) as RangeError & { code: string }; + error.code = "ERR_SOCKET_BAD_PORT"; + return error; +} + +function isValidTcpPort(value: number): boolean { + return Number.isInteger(value) && value >= 0 && value < 65536; +} + +function isDecimalIntegerString(value: string): boolean { + return /^[0-9]+$/.test(value); +} + +function normalizeListenPortValue(value: unknown): number { + if (value === undefined || value === null) { + return 0; + } + if (typeof value === "string" && value.length > 0) { + const parsed = Number(value); + if (isValidTcpPort(parsed)) { + return parsed; + } + throw createSocketBadPortError(value); + } + if (typeof value === "number") { + if (isValidTcpPort(value)) { + return value; + } + throw createSocketBadPortError(value); + } + throw createListenArgValueError( + `The argument 'options' is invalid. Received ${String(value)}`, + ); +} + +type ParsedListenOptions = { + port?: number; + host?: string; + path?: string; + backlog: number; + readableAll: boolean; + writableAll: boolean; + callback?: NetServerEventListener; +}; + +function normalizeListenArgs( + portOrOptions?: number | string | null | { port?: unknown; host?: unknown; backlog?: unknown; path?: unknown }, + hostOrCallback?: string | NetServerEventListener, + backlogOrCallback?: number | NetServerEventListener, + callback?: NetServerEventListener, +): ParsedListenOptions { + const defaultOptions = { + port: 0, + host: "127.0.0.1", + backlog: 511, + readableAll: false, + writableAll: false, + }; + + if (typeof portOrOptions === "function") { + return { + ...defaultOptions, + callback: portOrOptions, + }; + } + + if (portOrOptions !== null && typeof portOrOptions === "object") { + const options = portOrOptions as { + port?: unknown; + host?: unknown; + backlog?: unknown; + path?: unknown; + readableAll?: unknown; + writableAll?: unknown; + }; + const hasPort = Object.prototype.hasOwnProperty.call(options, "port"); + const hasPath = 
Object.prototype.hasOwnProperty.call(options, "path"); + if (!hasPort && !hasPath) { + throw createListenArgValueError( + `The argument 'options' must have the property "port" or "path". Received ${String(portOrOptions)}`, + ); + } + if (hasPort && hasPath) { + throw createListenArgValueError( + `The argument 'options' is invalid. Received ${String(portOrOptions)}`, + ); + } + + if ( + hasPort && + options.port !== undefined && + options.port !== null && + typeof options.port !== "number" && + typeof options.port !== "string" + ) { + throw createListenArgValueError( + `The argument 'options' is invalid. Received ${String(portOrOptions)}`, + ); + } + + if (hasPath) { + if (typeof options.path !== "string" || options.path.length === 0) { + throw createListenArgValueError( + `The argument 'options' is invalid. Received ${String(portOrOptions)}`, + ); + } + return { + path: options.path, + backlog: + typeof options.backlog === "number" && Number.isFinite(options.backlog) + ? options.backlog + : defaultOptions.backlog, + readableAll: options.readableAll === true, + writableAll: options.writableAll === true, + callback: + typeof hostOrCallback === "function" + ? hostOrCallback + : typeof backlogOrCallback === "function" + ? backlogOrCallback + : callback, + }; + } + + return { + port: normalizeListenPortValue(options.port), + host: + typeof options.host === "string" && options.host.length > 0 + ? options.host + : defaultOptions.host, + backlog: + typeof options.backlog === "number" && Number.isFinite(options.backlog) + ? options.backlog + : defaultOptions.backlog, + readableAll: false, + writableAll: false, + callback: + typeof hostOrCallback === "function" + ? hostOrCallback + : typeof backlogOrCallback === "function" + ? 
backlogOrCallback + : callback, + }; + } + + if ( + portOrOptions !== undefined && + portOrOptions !== null && + typeof portOrOptions !== "number" && + typeof portOrOptions !== "string" + ) { + throw createListenArgValueError( + `The argument 'options' is invalid. Received ${String(portOrOptions)}`, + ); + } + + if (typeof portOrOptions === "string" && portOrOptions.length > 0 && !isDecimalIntegerString(portOrOptions)) { + return { + path: portOrOptions, + backlog: defaultOptions.backlog, + readableAll: false, + writableAll: false, + callback: + typeof hostOrCallback === "function" + ? hostOrCallback + : typeof backlogOrCallback === "function" + ? backlogOrCallback + : callback, + }; + } + + return { + port: normalizeListenPortValue(portOrOptions), + host: typeof hostOrCallback === "string" ? hostOrCallback : defaultOptions.host, + backlog: typeof backlogOrCallback === "number" ? backlogOrCallback : defaultOptions.backlog, + readableAll: false, + writableAll: false, + callback: + typeof hostOrCallback === "function" + ? hostOrCallback + : typeof backlogOrCallback === "function" + ? backlogOrCallback + : callback, + }; +} + +type ParsedConnectOptions = { + host?: string; + port?: number; + path?: string; + keepAlive?: unknown; + keepAliveInitialDelay?: number; + callback?: () => void; +}; + +function normalizeConnectArgs( + portOrOptions: + | number + | string + | { + host?: string; + port?: number; + path?: string; + keepAlive?: unknown; + keepAliveInitialDelay?: number; + }, + hostOrCallback?: string | (() => void), + callback?: () => void, +): ParsedConnectOptions { + if (portOrOptions !== null && typeof portOrOptions === "object") { + return { + host: + typeof portOrOptions.host === "string" && portOrOptions.host.length > 0 + ? portOrOptions.host + : undefined, + port: portOrOptions.port, + path: + typeof portOrOptions.path === "string" && portOrOptions.path.length > 0 + ? 
portOrOptions.path + : undefined, + keepAlive: portOrOptions.keepAlive, + keepAliveInitialDelay: portOrOptions.keepAliveInitialDelay, + callback: typeof hostOrCallback === "function" ? hostOrCallback : callback, + }; + } + + if (typeof portOrOptions === "string" && !isDecimalIntegerString(portOrOptions)) { + return { + path: portOrOptions, + callback: typeof hostOrCallback === "function" ? hostOrCallback : callback, + }; + } + + return { + port: typeof portOrOptions === "number" ? portOrOptions : Number(portOrOptions), + host: typeof hostOrCallback === "string" ? hostOrCallback : "127.0.0.1", + callback: typeof hostOrCallback === "function" ? hostOrCallback : callback, + }; +} + +function isValidIPv4Segment(segment: string): boolean { + if (!/^[0-9]{1,3}$/.test(segment)) { + return false; + } + if (segment.length > 1 && segment.startsWith("0")) { + return false; + } + const value = Number(segment); + return Number.isInteger(value) && value >= 0 && value <= 255; +} + +function isIPv4String(input: string): boolean { + const segments = input.split("."); + return segments.length === 4 && segments.every((segment) => isValidIPv4Segment(segment)); +} + +function isValidIPv6Zone(zone: string): boolean { + return zone.length > 0 && /^[0-9A-Za-z_.-]+$/.test(zone); +} + +function countIPv6Parts(part: string): number | null { + if (part.length === 0) { + return 0; + } + const segments = part.split(":"); + let count = 0; + for (const segment of segments) { + if (segment.length === 0) { + return null; + } + if (segment.includes(".")) { + if (segment !== segments[segments.length - 1] || !isIPv4String(segment)) { + return null; + } + count += 2; + continue; + } + if (!/^[0-9A-Fa-f]{1,4}$/.test(segment)) { + return null; + } + count += 1; + } + return count; +} + +function isIPv6String(input: string): boolean { + if (input.length === 0) { + return false; + } + + let address = input; + const zoneIndex = address.indexOf("%"); + if (zoneIndex !== -1) { + if (address.indexOf("%", 
zoneIndex + 1) !== -1) { + return false; + } + const zone = address.slice(zoneIndex + 1); + if (!isValidIPv6Zone(zone)) { + return false; + } + address = address.slice(0, zoneIndex); + } + + const doubleColonIndex = address.indexOf("::"); + if (doubleColonIndex !== -1) { + if (address.indexOf("::", doubleColonIndex + 2) !== -1) { + return false; + } + const [left, right] = address.split("::"); + if (left.includes(".")) { + return false; + } + const leftCount = countIPv6Parts(left); + const rightCount = countIPv6Parts(right); + if (leftCount === null || rightCount === null) { + return false; + } + return leftCount + rightCount < 8; + } + + const count = countIPv6Parts(address); + return count === 8; +} + +function coerceIpInput(input: unknown): string { + if (input === null || input === undefined) { + return ""; + } + return String(input); +} + +function classifyIpAddress(input: unknown): 0 | 4 | 6 { + const value = coerceIpInput(input); + if (isIPv4String(value)) { + return 4; + } + if (isIPv6String(value)) { + return 6; + } + return 0; +} + +function normalizeSocketTimeout(timeout: unknown): number { + if (typeof timeout !== "number") { + throw createTimeoutArgTypeError("timeout", timeout); + } + if (!Number.isFinite(timeout) || timeout < 0) { + throw createTimeoutRangeError(timeout); + } + return timeout; +} + +function parseNetSocketInfo(data?: string): NetSocketInfo | null { + if (!data) { + return null; + } + try { + const parsed = JSON.parse(data) as NetSocketInfo; + return parsed && typeof parsed === "object" ? parsed : null; + } catch { + return null; + } +} + +function serializeTlsValue(value: unknown): SerializedTlsMaterial | undefined { + if (value === undefined || value === null) { + return undefined; + } + if (Array.isArray(value)) { + const entries = value + .map((entry) => serializeTlsValue(entry)) + .flatMap((entry) => Array.isArray(entry) ? entry : entry ? [entry] : []); + return entries.length > 0 ? 
entries : undefined; + } + if (typeof value === "string") { + return { kind: "string", data: value }; + } + if (Buffer.isBuffer(value) || value instanceof Uint8Array) { + return { kind: "buffer", data: Buffer.from(value).toString("base64") }; + } + return undefined; +} + +function isTlsSecureContextWrapper(value: unknown): value is TlsSecureContextWrapper { + return !!value && + typeof value === "object" && + "__secureExecTlsContext" in (value as Record); +} + +function buildSerializedTlsOptions( + options: Record | undefined, + extra?: Partial, +): SerializedTlsBridgeOptions { + const contextOptions = isTlsSecureContextWrapper(options?.secureContext) + ? options.secureContext.__secureExecTlsContext + : undefined; + const serialized: SerializedTlsBridgeOptions = { + ...(contextOptions ?? {}), + ...extra, + }; + const key = serializeTlsValue(options?.key); + const cert = serializeTlsValue(options?.cert); + const ca = serializeTlsValue(options?.ca); + if (key !== undefined) serialized.key = key; + if (cert !== undefined) serialized.cert = cert; + if (ca !== undefined) serialized.ca = ca; + if (typeof options?.passphrase === "string") serialized.passphrase = options.passphrase; + if (typeof options?.ciphers === "string") serialized.ciphers = options.ciphers; + if (Buffer.isBuffer(options?.session) || options?.session instanceof Uint8Array) { + serialized.session = Buffer.from(options.session).toString("base64"); + } + if (Array.isArray(options?.ALPNProtocols)) { + const protocols = options.ALPNProtocols + .filter((value): value is string => typeof value === "string"); + if (protocols.length > 0) { + serialized.ALPNProtocols = protocols; + } + } + if (typeof options?.minVersion === "string") serialized.minVersion = options.minVersion; + if (typeof options?.maxVersion === "string") serialized.maxVersion = options.maxVersion; + if (typeof options?.servername === "string") serialized.servername = options.servername; + if (typeof options?.rejectUnauthorized === "boolean") 
{ + serialized.rejectUnauthorized = options.rejectUnauthorized; + } + if (typeof options?.requestCert === "boolean") { + serialized.requestCert = options.requestCert; + } + return serialized; +} + +function parseTlsState(payload?: string): SerializedTlsState | null { + if (!payload) { + return null; + } + try { + return JSON.parse(payload) as SerializedTlsState; + } catch { + return null; + } +} + +function parseTlsClientHello(payload?: string): SerializedTlsClientHello | null { + if (!payload) { + return null; + } + try { + return JSON.parse(payload) as SerializedTlsClientHello; + } catch { + return null; + } +} + +function createBridgedTlsError(payload?: string): Error { + if (!payload) { + return new Error("socket error"); + } + try { + const parsed = JSON.parse(payload) as SerializedTlsError; + const error = new Error(parsed.message); + if (parsed.name) error.name = parsed.name; + if (parsed.code) { + (error as Error & { code?: string }).code = parsed.code; + } + if (parsed.stack) error.stack = parsed.stack; + return error; + } catch { + return new Error(payload); + } +} + +function deserializeTlsBridgeValue( + value: SerializedTlsBridgeValue, + refs = new Map>(), +): unknown { + if ( + value === null || + typeof value === "boolean" || + typeof value === "number" || + typeof value === "string" + ) { + return value; + } + if (value.type === "undefined") { + return undefined; + } + if (value.type === "buffer") { + return Buffer.from(value.data, "base64"); + } + if (value.type === "array") { + return value.value.map((entry) => deserializeTlsBridgeValue(entry, refs)); + } + if (value.type === "ref") { + return refs.get(value.id); + } + const target: Record = {}; + refs.set(value.id, target); + for (const [key, entry] of Object.entries(value.value)) { + target[key] = deserializeTlsBridgeValue(entry, refs); + } + return target; +} + +function queryTlsSocket( + socketId: number, + query: string, + detailed?: boolean, +): unknown { + if (typeof _netSocketTlsQueryRaw 
=== "undefined") { + return undefined; + } + const payload = _netSocketTlsQueryRaw.applySync( + undefined, + detailed === undefined ? [socketId, query] : [socketId, query, detailed], + ); + return deserializeTlsBridgeValue(JSON.parse(payload) as SerializedTlsBridgeValue); +} + +function createConnectedSocketHandle(socketId: number): NetSocketHandle { + return { + socketId, + setNoDelay(enable?: boolean) { + _netSocketSetNoDelayRaw?.applySync(undefined, [socketId, enable !== false]); + return this; + }, + setKeepAlive(enable?: boolean, initialDelay?: number) { + _netSocketSetKeepAliveRaw?.applySync(undefined, [ + socketId, + enable !== false, + normalizeKeepAliveDelay(initialDelay), + ]); + return this; + }, + ref() { + return this; + }, + unref() { + return this; + }, + }; +} + +function createAcceptedClientHandle( + socketId: number, + info: NetSocketInfo, +): AcceptedNetClientHandle { + return { + socketId, + info, + setNoDelay(enable?: boolean) { + _netSocketSetNoDelayRaw?.applySync(undefined, [socketId, enable !== false]); + return this; + }, + setKeepAlive(enable?: boolean, initialDelay?: number) { + _netSocketSetKeepAliveRaw?.applySync(undefined, [ + socketId, + enable !== false, + normalizeKeepAliveDelay(initialDelay), + ]); + return this; + }, + ref() { + return this; + }, + unref() { + return this; + }, + }; +} + +const NET_BRIDGE_TIMEOUT_SENTINEL = "__secure_exec_net_timeout__"; +const NET_BRIDGE_POLL_DELAY_MS = 10; + +// Dispatch callback invoked by the host when socket events arrive +function netSocketDispatch(socketId: number, event: string, data?: string): void { + if (socketId === 0 && event.startsWith("http2:")) { + debugBridgeNetwork("http2 dispatch via netSocket", event); + try { + const payload = data ? JSON.parse(data) as { + id?: number; + data?: string; + extra?: string; + extraNumber?: string | number; + extraHeaders?: string; + flags?: string | number; + } : {}; + http2Dispatch( + event.slice("http2:".length), + Number(payload.id ?? 
0), + payload.data, + payload.extra, + payload.extraNumber, + payload.extraHeaders, + payload.flags, + ); + } catch { + // Ignore malformed bridged HTTP/2 dispatch payloads. + } + return; + } + const socket = getRegisteredNetSocket(socketId); + if (!socket) return; + + switch (event) { + case "connect": { + socket._applySocketInfo(parseNetSocketInfo(data)); + socket._connected = true; + socket.connecting = false; + socket._touchTimeout(); socket._emitNet("connect"); socket._emitNet("ready"); break; + } case "secureConnect": - socket._emitNet("secureConnect"); + case "secure": { + const state = parseTlsState(data); + socket.encrypted = true; + if (state) { + socket.authorized = state.authorized === true; + socket.authorizationError = state.authorizationError; + socket.alpnProtocol = state.alpnProtocol ?? false; + socket.servername = state.servername ?? socket.servername; + socket._tlsProtocol = state.protocol ?? null; + socket._tlsSessionReused = state.sessionReused === true; + socket._tlsCipher = state.cipher ?? null; + } + socket._emitNet(event); break; + } case "data": { const buf = typeof Buffer !== "undefined" ? Buffer.from(data!, "base64") : new Uint8Array(0); + socket._touchTimeout(); socket._emitNet("data", buf); break; } - case "end": - socket._emitNet("end"); - break; - case "error": - socket._emitNet("error", new Error(data ?? "socket error")); - break; - case "close": - unregisterNetSocket(socketId); - socket._connected = false; - socket.connecting = false; - socket._emitNet("close"); - break; + case "end": + socket._emitNet("end"); + break; + case "session": { + const session = typeof Buffer !== "undefined" + ? Buffer.from(data ?? 
"", "base64") + : new Uint8Array(0); + socket._tlsSession = Buffer.from(session); + socket._emitNet("session", session); + break; + } + case "error": + if (data) { + try { + const parsed = JSON.parse(data) as SerializedTlsError; + socket.authorized = parsed.authorized === true; + socket.authorizationError = parsed.authorizationError; + } catch { + // Ignore non-JSON payloads. + } + } + socket._emitNet("error", createBridgedTlsError(data)); + break; + case "close": + unregisterNetSocket(socketId); + socket._connected = false; + socket.connecting = false; + socket._clearTimeoutTimer(); + socket._emitNet("close"); + break; + } +} + +exposeCustomGlobal("_netSocketDispatch", netSocketDispatch); + +class NetSocket { + private _listeners: Record = {}; + private _onceListeners: Record = {}; + private _socketId = 0; + private _loopbackServer: Server | null = null; + private _loopbackBuffer: Buffer = Buffer.alloc(0); + private _loopbackDispatchRunning = false; + private _loopbackReadableEnded = false; + private _loopbackEventQueue: Promise = Promise.resolve(); + private _encoding?: BufferEncoding; + private _noDelayState = false; + private _keepAliveState = false; + private _keepAliveDelaySeconds = 0; + private _refed = true; + private _bridgeReadLoopRunning = false; + private _bridgeReadPollTimer: ReturnType | null = null; + private _timeoutMs = 0; + private _timeoutTimer: ReturnType | null = null; + private _tlsUpgrading = false; + _connected = false; + connecting = false; + destroyed = false; + writable = true; + readable = true; + readableLength = 0; + writableLength = 0; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + localAddress = "0.0.0.0"; + localPort = 0; + localFamily = "IPv4"; + localPath?: string; + remotePath?: string; + bytesRead = 0; + bytesWritten = 0; + bufferSize = 0; + pending = true; + allowHalfOpen = false; + encrypted = false; + authorized = false; + authorizationError?: string; + servername?: string; + alpnProtocol: string 
| false = false; + writableHighWaterMark = 16 * 1024; + server?: NetServer; + _tlsCipher: SerializedTlsState["cipher"] = null; + _tlsProtocol: string | null = null; + _tlsSession: Buffer | null = null; + _tlsSessionReused = false; + // Readable stream state stub for library compatibility + _readableState = { endEmitted: false }; + _handle: NetSocketHandle | null = null; + + constructor(options?: { allowHalfOpen?: boolean; handle?: NetSocketHandle | null }) { + if (options?.allowHalfOpen) this.allowHalfOpen = true; + if (options?.handle) this._handle = options.handle; + } + + connect( + portOrOptions: + | number + | string + | { + host?: string; + port?: number; + path?: string; + keepAlive?: unknown; + keepAliveInitialDelay?: number; + }, + hostOrCallback?: string | (() => void), + callback?: () => void, + ): this { + if (typeof _netSocketConnectRaw === "undefined") { + throw new Error("net.Socket is not supported in sandbox (bridge not available)"); + } + + const { + host = "127.0.0.1", + port = 0, + path, + keepAlive, + keepAliveInitialDelay, + callback: cb, + } = normalizeConnectArgs(portOrOptions, hostOrCallback, callback); + + if (cb) this.once("connect", cb); + + this.connecting = true; + this.remoteAddress = path ?? host; + this.remotePort = path ? undefined : port; + this.remotePath = path; + this.pending = false; + + const loopbackServer = + !path && isLoopbackRequestHost(host) + ? findLoopbackHttpServerByPort(port) + : null; + if (loopbackServer) { + this._loopbackServer = loopbackServer; + this._connected = true; + this.connecting = false; + queueMicrotask(() => { + this._touchTimeout(); + this._emitNet("connect"); + this._emitNet("ready"); + }); + return this; + } + + this._socketId = _netSocketConnectRaw.applySync( + undefined, + [JSON.stringify(path ? 
{ path } : { host, port })], + ) as number; + this._handle = createConnectedSocketHandle(this._socketId); + registerNetSocket(this._socketId, this); + void this._waitForConnect(); + + // Note: do NOT use _registerHandle for net sockets — _waitForActiveHandles() + // blocks dispatch callbacks. Libraries use their own async patterns (Promises, + // callbacks) which keep the execution alive via the script result promise. + + if (keepAlive) { + this.once("connect", () => { + this.setKeepAlive(true, keepAliveInitialDelay); + }); + } + + return this; + } + + write(data: unknown, encodingOrCallback?: string | (() => void), callback?: () => void): boolean { + let buf: Buffer; + if (Buffer.isBuffer(data)) { + buf = data; + } else if (typeof data === "string") { + const enc = typeof encodingOrCallback === "string" ? encodingOrCallback : "utf-8"; + buf = Buffer.from(data, enc as BufferEncoding); + } else { + buf = Buffer.from(data as Uint8Array); + } + + if (this._loopbackServer) { + this.bytesWritten += buf.length; + this._loopbackBuffer = Buffer.concat([this._loopbackBuffer, buf]); + this._touchTimeout(); + this._dispatchLoopbackHttpRequest(); + const cb = typeof encodingOrCallback === "function" ? encodingOrCallback : callback; + if (cb) cb(); + return true; + } + + if (typeof _netSocketWriteRaw === "undefined") return false; + if (this.destroyed || !this._socketId) return false; + + const base64 = buf.toString("base64"); + this.bytesWritten += buf.length; + _netSocketWriteRaw.applySync(undefined, [this._socketId, base64]); + this._touchTimeout(); + + const cb = typeof encodingOrCallback === "function" ? 
encodingOrCallback : callback; + if (cb) cb(); + return true; + } + + end(dataOrCallback?: unknown, encodingOrCallback?: string | (() => void), callback?: () => void): this { + if (typeof dataOrCallback === "function") { + this.once("finish", dataOrCallback as () => void); + } else if (dataOrCallback != null) { + this.write(dataOrCallback, encodingOrCallback, callback); + } + if (this._loopbackServer) { + if (!this._loopbackReadableEnded) { + queueMicrotask(() => { + this._closeLoopbackReadable(); + }); + } + return this; + } + if (typeof _netSocketEndRaw !== "undefined" && this._socketId && !this.destroyed) { + _netSocketEndRaw.applySync(undefined, [this._socketId]); + this._touchTimeout(); + } + return this; + } + + destroy(error?: Error): this { + if (this.destroyed) return this; + this.destroyed = true; + this.writable = false; + this.readable = false; + this._clearTimeoutTimer(); + if (this._bridgeReadPollTimer) { + clearTimeout(this._bridgeReadPollTimer); + this._bridgeReadPollTimer = null; + } + if (this._loopbackServer) { + this._loopbackServer = null; + if (error) { + this._emitNet("error", error); + } + this._emitNet("close"); + return this; + } + if (typeof _netSocketDestroyRaw !== "undefined" && this._socketId) { + _netSocketDestroyRaw.applySync(undefined, [this._socketId]); + unregisterNetSocket(this._socketId); + } + if (error) { + this._emitNet("error", error); + } + this._emitNet("close"); + return this; + } + + _applySocketInfo(info: NetSocketInfo | null): void { + if (!info) { + return; + } + this.localAddress = info.localAddress; + this.localPort = info.localPort; + this.localFamily = info.localFamily; + this.localPath = info.localPath; + this.remoteAddress = info.remoteAddress ?? this.remoteAddress; + this.remotePort = info.remotePort ?? this.remotePort; + this.remoteFamily = info.remoteFamily ?? this.remoteFamily; + this.remotePath = info.remotePath ?? 
this.remotePath; + } + + _applyAcceptedKeepAlive(initialDelay?: number): void { + this._keepAliveState = true; + this._keepAliveDelaySeconds = normalizeKeepAliveDelay(initialDelay); + } + + static fromAcceptedHandle( + handle: AcceptedNetClientHandle, + options?: { allowHalfOpen?: boolean }, + ): NetSocket { + const socket = new NetSocket({ allowHalfOpen: options?.allowHalfOpen }); + socket._socketId = handle.socketId; + socket._handle = createConnectedSocketHandle(handle.socketId); + socket._applySocketInfo(handle.info); + socket._connected = true; + socket.connecting = false; + socket.pending = false; + registerNetSocket(handle.socketId, socket); + queueMicrotask(() => { + if (!socket.destroyed && !socket._tlsUpgrading) { + void socket._pumpBridgeReads(); + } + }); + return socket; + } + + setKeepAlive(enable?: boolean, initialDelay?: number): this { + const nextEnable = isTruthySocketOption(enable); + const nextDelaySeconds = normalizeKeepAliveDelay(initialDelay); + if ( + nextEnable === this._keepAliveState && + (!nextEnable || nextDelaySeconds === this._keepAliveDelaySeconds) + ) { + return this; + } + this._keepAliveState = nextEnable; + this._keepAliveDelaySeconds = nextEnable ? 
nextDelaySeconds : 0; + this._handle?.setKeepAlive?.(nextEnable, nextDelaySeconds); + return this; + } + + setNoDelay(noDelay?: boolean): this { + const nextState = isTruthySocketOption(noDelay); + if (nextState === this._noDelayState) { + return this; + } + this._noDelayState = nextState; + this._handle?.setNoDelay?.(nextState); + return this; + } + setTimeout(timeout: number, callback?: () => void): this { + const nextTimeout = normalizeSocketTimeout(timeout); + if (callback !== undefined && typeof callback !== "function") { + throw createFunctionArgTypeError("callback", callback); + } + if (callback) { + this.once("timeout", callback); + } + this._timeoutMs = nextTimeout; + if (nextTimeout === 0) { + this._clearTimeoutTimer(); + return this; + } + this._touchTimeout(); + return this; + } + ref(): this { + this._refed = true; + this._handle?.ref?.(); + if (this._timeoutTimer && typeof this._timeoutTimer.ref === "function") { + this._timeoutTimer.ref(); + } + if ( + !this.destroyed && + this._connected && + !this._loopbackServer && + !this._bridgeReadLoopRunning + ) { + void this._pumpBridgeReads(); + } + return this; + } + unref(): this { + this._refed = false; + this._handle?.unref?.(); + if (this._timeoutTimer && typeof this._timeoutTimer.unref === "function") { + this._timeoutTimer.unref(); + } + if (this._bridgeReadPollTimer) { + clearTimeout(this._bridgeReadPollTimer); + this._bridgeReadPollTimer = null; + } + return this; + } + pause(): this { return this; } + resume(): this { return this; } + address(): { port: number; family: string; address: string } { + return { port: this.localPort, family: this.localFamily, address: this.localAddress }; + } + getCipher(): SerializedTlsState["cipher"] { + return (queryTlsSocket(this._socketId, "getCipher") as SerializedTlsState["cipher"] | undefined) ?? 
this._tlsCipher; + } + getSession(): Buffer | null { + const session = queryTlsSocket(this._socketId, "getSession"); + if (Buffer.isBuffer(session)) { + this._tlsSession = Buffer.from(session); + return Buffer.from(session); + } + return this._tlsSession ? Buffer.from(this._tlsSession) : null; + } + isSessionReused(): boolean { + const reused = queryTlsSocket(this._socketId, "isSessionReused"); + return typeof reused === "boolean" ? reused : this._tlsSessionReused; + } + getPeerCertificate(detailed?: boolean): Record { + const cert = queryTlsSocket(this._socketId, "getPeerCertificate", detailed === true); + return cert && typeof cert === "object" ? cert as Record : {}; + } + getCertificate(): Record { + const cert = queryTlsSocket(this._socketId, "getCertificate"); + return cert && typeof cert === "object" ? cert as Record : {}; + } + getProtocol(): string | null { + const protocol = queryTlsSocket(this._socketId, "getProtocol"); + return typeof protocol === "string" ? protocol : this._tlsProtocol; + } + setEncoding(encoding: string): this { + this._encoding = encoding as BufferEncoding; + return this; + } + pipe(destination: T): T { return destination; } + + on(event: string, listener: NetEventListener): this { + if (!this._listeners[event]) this._listeners[event] = []; + this._listeners[event].push(listener); + return this; + } + + addListener(event: string, listener: NetEventListener): this { + return this.on(event, listener); + } + + once(event: string, listener: NetEventListener): this { + if (!this._onceListeners[event]) this._onceListeners[event] = []; + this._onceListeners[event].push(listener); + return this; + } + + removeListener(event: string, listener: NetEventListener): this { + const listeners = this._listeners[event]; + if (listeners) { + const idx = listeners.indexOf(listener); + if (idx >= 0) listeners.splice(idx, 1); + } + const onceListeners = this._onceListeners[event]; + if (onceListeners) { + const idx = onceListeners.indexOf(listener); + if 
(idx >= 0) onceListeners.splice(idx, 1); + } + return this; + } + + off(event: string, listener: NetEventListener): this { + return this.removeListener(event, listener); + } + + removeAllListeners(event?: string): this { + if (event) { + delete this._listeners[event]; + delete this._onceListeners[event]; + } else { + this._listeners = {}; + this._onceListeners = {}; + } + return this; + } + + listeners(event: string): NetEventListener[] { + return [...(this._listeners[event] ?? []), ...(this._onceListeners[event] ?? [])]; + } + + listenerCount(event: string): number { + return (this._listeners[event]?.length ?? 0) + (this._onceListeners[event]?.length ?? 0); + } + + setMaxListeners(_n: number): this { return this; } + getMaxListeners(): number { return 10; } + prependListener(event: string, listener: NetEventListener): this { + if (!this._listeners[event]) this._listeners[event] = []; + this._listeners[event].unshift(listener); + return this; + } + prependOnceListener(event: string, listener: NetEventListener): this { + if (!this._onceListeners[event]) this._onceListeners[event] = []; + this._onceListeners[event].unshift(listener); + return this; + } + eventNames(): string[] { + return [...new Set([...Object.keys(this._listeners), ...Object.keys(this._onceListeners)])]; + } + rawListeners(event: string): NetEventListener[] { + return this.listeners(event); + } + emit(event: string, ...args: unknown[]): boolean { + return this._emitNet(event, ...args); + } + + _emitNet(event: string, ...args: unknown[]): boolean { + if (event === "data" && this._encoding && args[0] && Buffer.isBuffer(args[0])) { + args[0] = (args[0] as Buffer).toString(this._encoding); + } + let handled = false; + const listeners = this._listeners[event]; + if (listeners) { + for (const fn of [...listeners]) { + fn(...args); + handled = true; + } + } + const onceListeners = this._onceListeners[event]; + if (onceListeners) { + const fns = [...onceListeners]; + this._onceListeners[event] = []; + for 
(const fn of fns) { + fn(...args); + handled = true; + } + } + return handled; + } + + private async _waitForConnect(): Promise { + if (typeof _netSocketWaitConnectRaw === "undefined" || this._socketId === 0) { + return; + } + try { + const infoJson = await _netSocketWaitConnectRaw.apply( + undefined, + [this._socketId], + { result: { promise: true } }, + ); + if (this.destroyed) { + return; + } + this._applySocketInfo(parseNetSocketInfo(infoJson)); + this._connected = true; + this.connecting = false; + this._touchTimeout(); + this._emitNet("connect"); + this._emitNet("ready"); + if (!this._tlsUpgrading) { + await this._pumpBridgeReads(); + } + } catch (error) { + if (this.destroyed) { + return; + } + const err = error instanceof Error ? error : new Error(String(error)); + this._emitNet("error", err); + this.destroy(); + } + } + + private async _pumpBridgeReads(): Promise { + if ( + this._bridgeReadLoopRunning || + typeof _netSocketReadRaw === "undefined" || + this._socketId === 0 + ) { + return; + } + this._bridgeReadLoopRunning = true; + try { + while (!this.destroyed) { + const chunkBase64 = _netSocketReadRaw.applySync(undefined, [this._socketId]); + if (this.destroyed) { + return; + } + if (chunkBase64 === NET_BRIDGE_TIMEOUT_SENTINEL) { + if (!this._refed) { + return; + } + this._bridgeReadPollTimer = setTimeout(() => { + this._bridgeReadPollTimer = null; + void this._pumpBridgeReads(); + }, NET_BRIDGE_POLL_DELAY_MS); + return; + } + if (chunkBase64 === null) { + this.readable = false; + this._readableState.endEmitted = true; + this._emitNet("end"); + if (!this.destroyed) { + unregisterNetSocket(this._socketId); + this._emitNet("close"); + } + return; + } + const payload = Buffer.from(chunkBase64, "base64"); + this.bytesRead += payload.length; + this._touchTimeout(); + this._emitNet("data", payload); + } + } finally { + this._bridgeReadLoopRunning = false; + } + } + + private _dispatchLoopbackHttpRequest(): void { + if (!this._loopbackServer || 
this._loopbackDispatchRunning || this.destroyed) { + return; + } + this._loopbackDispatchRunning = true; + void this._processLoopbackHttpRequests().finally(() => { + this._loopbackDispatchRunning = false; + }); + } + + private async _processLoopbackHttpRequests(): Promise { + let closeAfterDrain = false; + + while (this._loopbackServer && !this.destroyed) { + const parsed = parseLoopbackRequestBuffer(this._loopbackBuffer, this._loopbackServer); + if (parsed.kind === "incomplete") { + if (closeAfterDrain) { + this._closeLoopbackReadable(); + } + return; + } + + if (parsed.kind === "bad-request") { + this._pushLoopbackData(createBadRequestResponseBuffer()); + if (parsed.closeConnection) { + this._closeLoopbackReadable(); + } + this._loopbackBuffer = Buffer.alloc(0); + return; + } + + this._loopbackBuffer = this._loopbackBuffer.subarray(parsed.bytesConsumed); + + if (parsed.upgradeHead) { + this._dispatchLoopbackUpgrade(parsed.request, parsed.upgradeHead); + return; + } + + const { + responseJson, + } = await dispatchLoopbackServerRequest(this._loopbackServer, parsed.request); + const response = JSON.parse(responseJson) as SerializedServerResponse; + const serialized = serializeLoopbackResponse(response, parsed.request, parsed.closeConnection); + if (!closeAfterDrain && serialized.payload.length > 0) { + this._pushLoopbackData(serialized.payload); + } + + if (serialized.closeConnection) { + closeAfterDrain = true; + if (this._loopbackBuffer.length === 0) { + this._closeLoopbackReadable(); + return; + } + } + } + } + + private _pushLoopbackData(data: Buffer): void { + if (data.length === 0 || this._loopbackReadableEnded) { + return; + } + const payload = Buffer.from(data); + this._queueLoopbackEvent(() => { + if (this.destroyed) { + return; + } + this.bytesRead += payload.length; + this._touchTimeout(); + this._emitNet("data", payload); + }); + } + + private _closeLoopbackReadable(): void { + if (this._loopbackReadableEnded) { + return; + } + 
this._loopbackReadableEnded = true; + this.readable = false; + this.writable = false; + this._readableState.endEmitted = true; + this._clearTimeoutTimer(); + this._queueLoopbackEvent(() => { + this._emitNet("end"); + this._emitNet("close"); + }); + } + + private _queueLoopbackEvent(callback: () => void): void { + this._loopbackEventQueue = this._loopbackEventQueue.then( + () => new Promise((resolve) => { + queueMicrotask(() => { + try { + callback(); + } finally { + resolve(); + } + }); + }), + ); + } + + private _dispatchLoopbackUpgrade( + request: SerializedServerRequest, + head: Buffer, + ): void { + if (!this._loopbackServer) { + return; + } + + try { + this._loopbackServer._emit( + "upgrade", + new ServerIncomingMessage(request), + new DirectTunnelSocket({ + host: this.remoteAddress, + port: this.remotePort, + }), + head, + ); + } catch (error) { + const rethrow = + error instanceof Error + ? error + : new Error(String(error)); + let handled = false; + let exitCodeFromHandler: number | null = null; + if (typeof process !== "undefined" && typeof process.emit === "function") { + const processEmitter = process as typeof process & { + emit(event: string, ...args: unknown[]): boolean; + }; + try { + handled = processEmitter.emit("uncaughtException", rethrow, "uncaughtException"); + } catch (emitError) { + if ( + emitError && + typeof emitError === "object" && + (emitError as { name?: string }).name === "ProcessExitError" + ) { + handled = true; + const exitCode = Number((emitError as { code?: unknown }).code); + exitCodeFromHandler = Number.isFinite(exitCode) ? 
exitCode : 0; + } else { + throw emitError; + } + } + } + if (handled) { + if (exitCodeFromHandler !== null) { + process.exitCode = exitCodeFromHandler; + } + this._loopbackServer?.close(); + this.destroy(); + return; + } + throw rethrow; + } + } + + // Upgrade this socket to TLS + _upgradeTls(options?: SerializedTlsBridgeOptions): void { + if (typeof _netSocketUpgradeTlsRaw === "undefined") { + throw new Error("tls.connect is not supported in sandbox (bridge not available)"); + } + this._tlsUpgrading = true; + _netSocketUpgradeTlsRaw.applySync(undefined, [this._socketId, JSON.stringify(options ?? {})]); + } + + _touchTimeout(): void { + if (this._timeoutMs === 0 || this.destroyed) { + return; + } + this._clearTimeoutTimer(); + this._timeoutTimer = setTimeout(() => { + this._timeoutTimer = null; + if (this.destroyed) { + return; + } + this._emitNet("timeout"); + }, this._timeoutMs); + if (!this._refed && typeof this._timeoutTimer.unref === "function") { + this._timeoutTimer.unref(); + } + } + + _clearTimeoutTimer(): void { + if (this._timeoutTimer) { + clearTimeout(this._timeoutTimer); + this._timeoutTimer = null; + } + } +} + +function netConnect( + portOrOptions: + | number + | string + | { + host?: string; + port?: number; + path?: string; + keepAlive?: unknown; + keepAliveInitialDelay?: number; + }, + hostOrCallback?: string | (() => void), + callback?: () => void, +): NetSocket { + const socket = new NetSocket(); + socket.connect(portOrOptions, hostOrCallback, callback); + return socket; +} + +type NetServerEventListener = (...args: unknown[]) => void; + +class NetServer { + private _listeners: Record = {}; + private _onceListeners: Record = {}; + private _serverId = 0; + private _address: { address: string; family: string; port: number } | string | null = null; + private _acceptLoopActive = false; + private _acceptLoopRunning = false; + private _acceptPollTimer: ReturnType | null = null; + private _handleRefId: string | null = null; + private _connections = 
new Set(); + private _refed = true; + listening = false; + keepAlive = false; + keepAliveInitialDelay = 0; + allowHalfOpen = false; + maxConnections?: number; + _handle: { + onconnection: (err: Error | null, clientHandle?: AcceptedNetClientHandle) => void; + }; + + constructor( + optionsOrListener?: { + allowHalfOpen?: boolean; + keepAlive?: boolean; + keepAliveInitialDelay?: number; + } | NetServerEventListener, + maybeListener?: NetServerEventListener, + ) { + if (typeof optionsOrListener === "function") { + this.on("connection", optionsOrListener); + } else { + this.allowHalfOpen = optionsOrListener?.allowHalfOpen === true; + this.keepAlive = optionsOrListener?.keepAlive === true; + this.keepAliveInitialDelay = optionsOrListener?.keepAliveInitialDelay ?? 0; + if (maybeListener) { + this.on("connection", maybeListener); + } + } + this._handle = { + onconnection: (err: Error | null, clientHandle?: AcceptedNetClientHandle) => { + if (err) { + this._emit("error", err); + return; + } + if (!clientHandle) { + return; + } + if ( + typeof this.maxConnections === "number" && + this.maxConnections >= 0 && + this._connections.size >= this.maxConnections + ) { + this._emit("drop", { + localAddress: clientHandle.info.localAddress, + localPort: clientHandle.info.localPort, + localFamily: clientHandle.info.localFamily, + remoteAddress: clientHandle.info.remoteAddress, + remotePort: clientHandle.info.remotePort, + remoteFamily: clientHandle.info.remoteFamily, + }); + _netSocketDestroyRaw?.applySync(undefined, [clientHandle.socketId]); + return; + } + if (this.keepAlive) { + clientHandle.setKeepAlive?.(true, this.keepAliveInitialDelay); + } + const socket = NetSocket.fromAcceptedHandle(clientHandle, { + allowHalfOpen: this.allowHalfOpen, + }); + socket.server = this; + this._connections.add(socket); + socket.once("close", () => { + this._connections.delete(socket); + }); + if (this.keepAlive) { + socket._applyAcceptedKeepAlive(this.keepAliveInitialDelay); + } + 
this._emit("connection", socket); + }, + }; + } + + listen( + portOrOptions?: number | string | null | { port?: unknown; host?: unknown; backlog?: unknown; path?: unknown }, + hostOrCallback?: string | NetServerEventListener, + backlogOrCallback?: number | NetServerEventListener, + callback?: NetServerEventListener, + ): this { + if (typeof _netServerListenRaw === "undefined" || typeof _netServerAcceptRaw === "undefined") { + throw new Error("net.createServer is not supported in sandbox"); + } + + const { port, host, path, backlog, readableAll, writableAll, callback: cb } = normalizeListenArgs( + portOrOptions, + hostOrCallback, + backlogOrCallback, + callback, + ); + + if (cb) { + this.once("listening", cb); + } + + try { + const resultJson = _netServerListenRaw.applySyncPromise( + undefined, + [JSON.stringify({ port, host, path, backlog, readableAll, writableAll })], + ); + const result = JSON.parse(resultJson) as { + serverId: number; + address: NetSocketInfo; + }; + this._serverId = result.serverId; + this._address = result.address.localPath + ? 
result.address.localPath + : { + address: result.address.localAddress, + family: result.address.localFamily, + port: result.address.localPort, + }; + this.listening = true; + this._syncHandleRef(); + this._acceptLoopActive = true; + queueMicrotask(() => { + if (!this.listening || this._serverId === 0) { + return; + } + this._emit("listening"); + void this._pumpAccepts(); + }); + } catch (error) { + queueMicrotask(() => { + this._emit("error", error); + }); + } + + return this; + } + + close(callback?: NetServerEventListener): this { + if (callback) { + this.once("close", callback); + } + if (!this.listening || typeof _netServerCloseRaw === "undefined") { + queueMicrotask(() => { + this._emit("close"); + }); + return this; + } + this.listening = false; + this._acceptLoopActive = false; + if (this._acceptPollTimer) { + clearTimeout(this._acceptPollTimer); + this._acceptPollTimer = null; + } + this._syncHandleRef(); + const serverId = this._serverId; + this._serverId = 0; + void (async () => { + try { + await _netServerCloseRaw.apply(undefined, [serverId], { + result: { promise: true }, + }); + } finally { + this._address = null; + this._emit("close"); + } + })(); + return this; + } + + address(): { address: string; family: string; port: number } | string | null { + return this._address; + } + + getConnections(callback: (error: Error | null, count: number) => void): this { + if (typeof callback !== "function") { + throw createFunctionArgTypeError("callback", callback); + } + queueMicrotask(() => { + callback(null, this._connections.size); + }); + return this; + } + + ref(): this { + this._refed = true; + this._syncHandleRef(); + if (this.listening && this._acceptLoopActive && !this._acceptLoopRunning) { + void this._pumpAccepts(); + } + return this; + } + + unref(): this { + this._refed = false; + if (this._acceptPollTimer) { + clearTimeout(this._acceptPollTimer); + this._acceptPollTimer = null; + } + this._syncHandleRef(); + return this; + } + + on(event: string, 
listener: NetServerEventListener): this { + if (!this._listeners[event]) this._listeners[event] = []; + this._listeners[event].push(listener); + return this; + } + + once(event: string, listener: NetServerEventListener): this { + if (!this._onceListeners[event]) this._onceListeners[event] = []; + this._onceListeners[event].push(listener); + return this; + } + + emit(event: string, ...args: unknown[]): boolean { + return this._emit(event, ...args); + } + + private _emit(event: string, ...args: unknown[]): boolean { + let handled = false; + const listeners = this._listeners[event]; + if (listeners) { + for (const fn of [...listeners]) { + fn(...args); + handled = true; + } + } + const onceListeners = this._onceListeners[event]; + if (onceListeners) { + this._onceListeners[event] = []; + for (const fn of [...onceListeners]) { + fn(...args); + handled = true; + } + } + return handled; + } + + private _syncHandleRef(): void { + if (!this.listening || this._serverId === 0 || !this._refed) { + if (this._handleRefId && typeof _unregisterHandle === "function") { + _unregisterHandle(this._handleRefId); + } + this._handleRefId = null; + return; + } + + const nextHandleId = `${NET_SERVER_HANDLE_PREFIX}${this._serverId}`; + if (this._handleRefId === nextHandleId) { + return; + } + if (this._handleRefId && typeof _unregisterHandle === "function") { + _unregisterHandle(this._handleRefId); + } + this._handleRefId = nextHandleId; + if (typeof _registerHandle === "function") { + _registerHandle(this._handleRefId, "net server"); + } + } + + private async _pumpAccepts(): Promise { + if (typeof _netServerAcceptRaw === "undefined" || this._acceptLoopRunning) { + return; + } + this._acceptLoopRunning = true; + try { + while (this._acceptLoopActive && this._serverId !== 0) { + const payload = _netServerAcceptRaw.applySync(undefined, [this._serverId]); + if (payload === NET_BRIDGE_TIMEOUT_SENTINEL) { + if (!this._refed) { + return; + } + this._acceptPollTimer = setTimeout(() => { + 
this._acceptPollTimer = null; + void this._pumpAccepts(); + }, NET_BRIDGE_POLL_DELAY_MS); + return; + } + if (!payload) { + return; + } + try { + const accepted = JSON.parse(payload) as { + socketId: number; + info: NetSocketInfo; + }; + const clientHandle = createAcceptedClientHandle(accepted.socketId, accepted.info); + this._handle.onconnection(null, clientHandle); + } catch (error) { + this._emit("error", error); + } + } + } finally { + this._acceptLoopRunning = false; + } + } +} + +function NetServerCallable( + this: NetServer | undefined, + optionsOrListener?: { + allowHalfOpen?: boolean; + keepAlive?: boolean; + keepAliveInitialDelay?: number; + } | NetServerEventListener, + maybeListener?: NetServerEventListener, +): NetServer { + return new NetServer(optionsOrListener, maybeListener); +} + +function findLoopbackHttpServerByPort(port: number): Server | null { + for (const server of serverInstances.values()) { + if (!server.listening) { + continue; + } + const address = server.address(); + if (address && typeof address === "object" && address.port === port) { + return server; + } + } + return null; +} + +const netModule = { + Socket: NetSocket, + Server: NetServerCallable as unknown as typeof import("node:net").Server, + connect: netConnect, + createConnection: netConnect, + createServer( + optionsOrListener?: { + allowHalfOpen?: boolean; + keepAlive?: boolean; + keepAliveInitialDelay?: number; + } | NetServerEventListener, + maybeListener?: NetServerEventListener, + ): NetServer { + return new NetServer(optionsOrListener, maybeListener); + }, + isIP(input: string): number { + return classifyIpAddress(input); + }, + isIPv4(input: string): boolean { return classifyIpAddress(input) === 4; }, + isIPv6(input: string): boolean { return classifyIpAddress(input) === 6; }, +}; + +// =================================================================== +// tls module — TLS socket support via upgrade bridge +// 
=================================================================== + +type TlsConnectOptions = { + host?: string; + port?: number; + socket?: NetSocket; + rejectUnauthorized?: boolean; + servername?: string; + session?: Buffer | Uint8Array; + ALPNProtocols?: string[]; + secureContext?: TlsSecureContextWrapper; + key?: unknown; + cert?: unknown; + ca?: unknown; + ciphers?: string; + minVersion?: string; + maxVersion?: string; + passphrase?: string; +}; + +type TlsServerOptions = { + allowHalfOpen?: boolean; + keepAlive?: boolean; + keepAliveInitialDelay?: number; + rejectUnauthorized?: boolean; + requestCert?: boolean; + SNICallback?: ( + servername: string, + callback: (error: Error | null, context: unknown) => void, + ) => void; + ALPNProtocols?: string[]; + ALPNCallback?: (info: { + servername?: string; + protocols: string[]; + }) => string | undefined; + secureContext?: TlsSecureContextWrapper; + key?: unknown; + cert?: unknown; + ca?: unknown; + ciphers?: string; + minVersion?: string; + maxVersion?: string; + passphrase?: string; +}; + +function createSecureContextWrapper(options?: Record): TlsSecureContextWrapper { + return { + __secureExecTlsContext: buildSerializedTlsOptions(options), + context: {}, + }; +} + +function tlsConnect( + portOrOptions: number | TlsConnectOptions, + hostOrCallback?: string | (() => void), + callback?: () => void, +): NetSocket { + let socket: NetSocket; + let options: TlsConnectOptions = {}; + let cb: (() => void) | undefined; + + if (typeof portOrOptions === "object") { + options = { ...portOrOptions }; + cb = typeof hostOrCallback === "function" ? hostOrCallback : callback; + + if (portOrOptions.socket) { + // Upgrade existing socket to TLS + socket = portOrOptions.socket; + } else { + // Create new TCP socket then upgrade + socket = new NetSocket(); + socket.connect({ host: portOrOptions.host ?? "127.0.0.1", port: portOrOptions.port }); + } + } else { + const host = typeof hostOrCallback === "string" ? 
hostOrCallback : "127.0.0.1"; + cb = typeof hostOrCallback === "function" ? hostOrCallback : callback; + options = { host }; + socket = new NetSocket(); + socket.connect(portOrOptions, host); + } + + if (cb) socket.once("secureConnect", cb); + + const upgradeOptions = buildSerializedTlsOptions( + options as unknown as Record, + { + isServer: false, + servername: options.servername ?? options.host ?? "127.0.0.1", + }, + ); + socket.servername = upgradeOptions.servername; + + // If already connected, upgrade immediately; otherwise wait for connect + if (socket._connected) { + socket._upgradeTls(upgradeOptions); + } else { + socket.once("connect", () => { + socket._upgradeTls(upgradeOptions); + }); + } + + return socket; +} + +function matchesServername(pattern: string, servername: string): boolean { + if (!pattern.startsWith("*.")) { + return pattern === servername; + } + const suffix = pattern.slice(1); + if (!servername.endsWith(suffix)) { + return false; + } + const prefix = servername.slice(0, -suffix.length); + return prefix.length > 0 && !prefix.includes("."); +} + +class TLSServer { + private _listeners: Record = {}; + private _onceListeners: Record = {}; + private _server: NetServer; + private _tlsOptions: SerializedTlsBridgeOptions; + private _sniCallback?: + | (( + servername: string, + callback: (error: Error | null, context: unknown) => void, + ) => void) + | undefined; + private _alpnCallback?: + | ((info: { servername?: string; protocols: string[] }) => string | undefined) + | undefined; + private _contexts: Array<{ + servername: string; + context: TlsSecureContextWrapper; + }> = []; + + constructor( + optionsOrListener?: TlsServerOptions | NetServerEventListener, + maybeListener?: NetServerEventListener, + ) { + const options = + typeof optionsOrListener === "function" || optionsOrListener === undefined + ? undefined + : optionsOrListener; + const listener = + typeof optionsOrListener === "function" ? 
optionsOrListener : maybeListener; + + if (options?.ALPNCallback && options?.ALPNProtocols) { + const error = new Error( + "The ALPNCallback and ALPNProtocols TLS options are mutually exclusive", + ) as Error & { code?: string }; + error.code = "ERR_TLS_ALPN_CALLBACK_WITH_PROTOCOLS"; + throw error; + } + + this._tlsOptions = buildSerializedTlsOptions( + options as unknown as Record | undefined, + { isServer: true }, + ); + this._sniCallback = options?.SNICallback; + this._alpnCallback = options?.ALPNCallback; + + this._server = new NetServer( + options + ? { + allowHalfOpen: options.allowHalfOpen, + keepAlive: options.keepAlive, + keepAliveInitialDelay: options.keepAliveInitialDelay, + } + : undefined, + ((socket: unknown) => { + const tlsSocket = socket as NetSocket; + tlsSocket.server = this as unknown as NetServer; + void this._handleSecureSocket(tlsSocket); + }) as NetServerEventListener, + ); + + if (listener) { + this.on("secureConnection", listener); + } + + this._server.on("listening", (...args) => this._emit("listening", ...args)); + this._server.on("close", (...args) => this._emit("close", ...args)); + this._server.on("error", (...args) => this._emit("error", ...args)); + this._server.on("drop", (...args) => this._emit("drop", ...args)); + } + + listen( + portOrOptions?: number | string | null | { port?: unknown; host?: unknown; backlog?: unknown; path?: unknown }, + hostOrCallback?: string | NetServerEventListener, + backlogOrCallback?: number | NetServerEventListener, + callback?: NetServerEventListener, + ): this { + this._server.listen(portOrOptions, hostOrCallback, backlogOrCallback, callback); + return this; + } + + close(callback?: NetServerEventListener): this { + if (callback) { + this.once("close", callback); + } + this._server.close(); + return this; + } + + address(): { address: string; family: string; port: number } | string | null { + return this._server.address(); + } + + getConnections(callback: (error: Error | null, count: number) => 
void): this { + this._server.getConnections(callback); + return this; + } + + ref(): this { + this._server.ref(); + return this; + } + + unref(): this { + this._server.unref(); + return this; + } + + addContext(servername: string, context: unknown): this { + const wrapper = isTlsSecureContextWrapper(context) + ? context + : createSecureContextWrapper( + context && typeof context === "object" + ? context as Record + : undefined, + ); + this._contexts.push({ servername, context: wrapper }); + return this; + } + + on(event: string, listener: NetServerEventListener): this { + if (!this._listeners[event]) this._listeners[event] = []; + this._listeners[event].push(listener); + return this; + } + + once(event: string, listener: NetServerEventListener): this { + if (!this._onceListeners[event]) this._onceListeners[event] = []; + this._onceListeners[event].push(listener); + return this; + } + + emit(event: string, ...args: unknown[]): boolean { + return this._emit(event, ...args); + } + + private _emit(event: string, ...args: unknown[]): boolean { + let handled = false; + const listeners = this._listeners[event]; + if (listeners) { + for (const fn of [...listeners]) { + fn(...args); + handled = true; + } + } + const onceListeners = this._onceListeners[event]; + if (onceListeners) { + this._onceListeners[event] = []; + for (const fn of [...onceListeners]) { + fn(...args); + handled = true; + } + } + return handled; + } + + private async _handleSecureSocket(socket: NetSocket): Promise { + const clientHello = this._getClientHello(socket); + const requestedServername = clientHello?.servername; + if (requestedServername) { + socket.servername = requestedServername; + } + + try { + const upgradeOptions = await this._resolveTlsOptions( + requestedServername, + clientHello?.ALPNProtocols ?? 
[], + ); + if (!upgradeOptions) { + this._emitTlsClientError(socket, "Invalid SNI context"); + return; + } + + socket._upgradeTls(upgradeOptions); + socket.once("secure", () => { + this._emit("secureConnection", socket); + this._emit("connection", socket); + }); + socket.on("error", (error: unknown) => { + this._emit("tlsClientError", error, socket); + }); + } catch (error) { + const err = error instanceof Error ? error : new Error(String(error)); + this._emitTlsClientError(socket, err.message, err); + if ((err as { uncaught?: boolean }).uncaught) { + (process as typeof process & { + emit?: (event: string, ...args: unknown[]) => boolean; + }).emit?.("uncaughtException", err, "uncaughtException"); + } + } + } + + private _getClientHello(socket: NetSocket): SerializedTlsClientHello | null { + if (typeof _netSocketGetTlsClientHelloRaw === "undefined") { + return null; + } + const socketId = (socket as unknown as { _socketId?: number })._socketId; + if (typeof socketId !== "number" || socketId === 0) { + return null; + } + return parseTlsClientHello( + _netSocketGetTlsClientHelloRaw.applySync(undefined, [socketId]), + ); + } + + private async _resolveTlsOptions( + servername: string | undefined, + clientProtocols: string[], + ): Promise { + let selectedContext: TlsSecureContextWrapper | null = null; + let invalidContext = false; + + if (servername && this._sniCallback) { + selectedContext = await new Promise((resolve, reject) => { + this._sniCallback?.(servername, (error, context) => { + if (error) { + reject(error); + return; + } + if (context == null) { + resolve(null); + return; + } + if (isTlsSecureContextWrapper(context)) { + resolve(context); + return; + } + if (context && typeof context === "object" && Object.keys(context as object).length > 0) { + resolve(createSecureContextWrapper(context as Record)); + return; + } + invalidContext = true; + resolve(null); + }); + }); + if (invalidContext) { + return null; + } + } else if (servername) { + selectedContext = 
this._findContext(servername); + } + + const resolvedOptions: SerializedTlsBridgeOptions = { + ...this._tlsOptions, + ...(selectedContext?.__secureExecTlsContext ?? {}), + isServer: true, + }; + + if (this._alpnCallback) { + const selectedProtocol = this._alpnCallback({ + servername, + protocols: clientProtocols, + }); + if (selectedProtocol === undefined) { + const error = new Error("ALPN callback rejected the client protocol list") as Error & { + code?: string; + }; + error.code = "ERR_SSL_TLSV1_ALERT_NO_APPLICATION_PROTOCOL"; + throw error; + } + if (!clientProtocols.includes(selectedProtocol)) { + const error = new Error( + "The ALPNCallback callback returned an invalid protocol", + ) as Error & { code?: string; uncaught?: boolean }; + error.code = "ERR_TLS_ALPN_CALLBACK_INVALID_RESULT"; + error.uncaught = true; + throw error; + } + resolvedOptions.ALPNProtocols = [selectedProtocol]; + } + + return resolvedOptions; + } + + private _findContext(servername: string): TlsSecureContextWrapper | null { + for (let index = this._contexts.length - 1; index >= 0; index -= 1) { + const entry = this._contexts[index]; + if (matchesServername(entry.servername, servername)) { + return entry.context; + } + } + return null; + } + + private _emitTlsClientError( + socket: NetSocket, + message: string, + existingError?: Error, + ): void { + const error = existingError ?? 
new Error(message); + socket.servername ??= this._getClientHello(socket)?.servername; + this._emit("tlsClientError", error, socket); + socket.destroy(); } } -exposeCustomGlobal("_netSocketDispatch", netSocketDispatch); +function TLSServerCallable( + this: TLSServer | undefined, + optionsOrListener?: TlsServerOptions | NetServerEventListener, + maybeListener?: NetServerEventListener, +): TLSServer { + return new TLSServer(optionsOrListener, maybeListener); +} -class NetSocket { - private _listeners: Record = {}; - private _onceListeners: Record = {}; - private _socketId = 0; - _connected = false; - connecting = false; - destroyed = false; - writable = true; - readable = true; - readableLength = 0; - writableLength = 0; - remoteAddress?: string; - remotePort?: number; - remoteFamily?: string; - localAddress = "0.0.0.0"; - localPort = 0; - localFamily = "IPv4"; - bytesRead = 0; - bytesWritten = 0; - bufferSize = 0; - pending = true; - allowHalfOpen = false; - // Readable stream state stub for library compatibility - _readableState = { endEmitted: false }; +const tlsModule = { + connect: tlsConnect, + TLSSocket: NetSocket, // Alias — TLSSocket is just a NetSocket after upgrade + Server: TLSServerCallable as unknown as typeof import("node:tls").Server, + createServer( + optionsOrListener?: TlsServerOptions | NetServerEventListener, + maybeListener?: NetServerEventListener, + ): TLSServer { + return new TLSServer(optionsOrListener, maybeListener); + }, + createSecureContext(options?: Record): TlsSecureContextWrapper { + return createSecureContextWrapper(options); + }, + getCiphers(): string[] { + if (typeof _tlsGetCiphersRaw === "undefined") { + throw new Error("tls.getCiphers is not supported in sandbox"); + } + try { + return JSON.parse(_tlsGetCiphersRaw.applySync(undefined, [])) as string[]; + } catch { + return []; + } + }, + DEFAULT_MIN_VERSION: "TLSv1.2", + DEFAULT_MAX_VERSION: "TLSv1.3", +}; - constructor(options?: { allowHalfOpen?: boolean }) { - if 
(options?.allowHalfOpen) this.allowHalfOpen = true; +type DgramEventListener = (...args: unknown[]) => void; +type DgramSocketType = "udp4" | "udp6"; +type DgramRemoteInfo = { + address: string; + family: string; + port: number; + size: number; +}; + +type DgramSocketAddress = { + address: string; + family: string; + port: number; +}; + +type SerializedDgramMessage = { + data: string; + rinfo: DgramRemoteInfo; +}; + +const DGRAM_HANDLE_PREFIX = "dgram-socket:"; + +function createBadDgramSocketTypeError(): TypeError & { code: string } { + return createTypeErrorWithCode( + "Bad socket type specified. Valid types are: udp4, udp6", + "ERR_SOCKET_BAD_TYPE", + ); +} + +function createDgramAlreadyBoundError(): Error & { code: string } { + const error = new Error("Socket is already bound") as Error & { code: string }; + error.code = "ERR_SOCKET_ALREADY_BOUND"; + return error; +} + +function createDgramAddressError(): Error { + return new Error("getsockname EBADF"); +} + +function createDgramArgTypeError( + argumentName: string, + expectedType: string, + value: unknown, +): TypeError & { code: string } { + return createTypeErrorWithCode( + `The "${argumentName}" argument must be of type ${expectedType}. 
Received ${formatReceivedType(value)}`, + "ERR_INVALID_ARG_TYPE", + ); +} + +function createDgramMissingArgError(argumentName: string): TypeError & { code: string } { + return createTypeErrorWithCode( + `The "${argumentName}" argument must be specified`, + "ERR_MISSING_ARGS", + ); +} + +function createDgramNotRunningError(): Error & { code: string } { + return createErrorWithCode("Not running", "ERR_SOCKET_DGRAM_NOT_RUNNING"); +} + +function getDgramErrno(code: "EBADF" | "EINVAL" | "EADDRNOTAVAIL" | "ENOPROTOOPT"): number { + switch (code) { + case "EBADF": + return -9; + case "EINVAL": + return -22; + case "EADDRNOTAVAIL": + return -99; + case "ENOPROTOOPT": + return -92; } +} - connect(portOrOptions: number | { host?: string; port: number }, hostOrCallback?: string | (() => void), callback?: () => void): this { - if (typeof _netSocketConnectRaw === "undefined") { - throw new Error("net.Socket is not supported in sandbox (bridge not available)"); +function createDgramSyscallError( + syscall: string, + code: "EBADF" | "EINVAL" | "EADDRNOTAVAIL" | "ENOPROTOOPT", +): Error & { code: string; errno: number; syscall: string } { + const error = new Error(`${syscall} ${code}`) as Error & { + code: string; + errno: number; + syscall: string; + }; + error.code = code; + error.errno = getDgramErrno(code); + error.syscall = syscall; + return error; +} + +function createDgramTtlArgTypeError(value: unknown): TypeError & { code: string } { + return createTypeErrorWithCode( + `The "ttl" argument must be of type number. 
Received ${formatReceivedType(value)}`, + "ERR_INVALID_ARG_TYPE", + ); +} + +function createDgramBufferSizeTypeError(): TypeError & { code: string } { + return createTypeErrorWithCode( + "Buffer size must be a positive integer", + "ERR_SOCKET_BAD_BUFFER_SIZE", + ); +} + +function createDgramBufferSizeSystemError( + which: "recv" | "send", + code: "EBADF" | "EINVAL", +): Error & { + code: string; + info: { errno: number; code: string; message: string; syscall: string }; + errno: number; + syscall: string; +} { + const syscall = `uv_${which}_buffer_size`; + const info = { + errno: code === "EBADF" ? -9 : -22, + code, + message: code === "EBADF" ? "bad file descriptor" : "invalid argument", + syscall, + }; + const error = new Error( + `Could not get or set buffer size: ${syscall} returned ${code} (${info.message})`, + ) as Error & { + code: string; + info: { errno: number; code: string; message: string; syscall: string }; + errno: number; + syscall: string; + }; + error.name = "SystemError [ERR_SOCKET_BUFFER_SIZE]"; + error.code = "ERR_SOCKET_BUFFER_SIZE"; + error.info = info; + let errno = info.errno; + let syscallValue = syscall; + Object.defineProperty(error, "errno", { + enumerable: true, + configurable: true, + get() { + return errno; + }, + set(value: number) { + errno = value; + }, + }); + Object.defineProperty(error, "syscall", { + enumerable: true, + configurable: true, + get() { + return syscallValue; + }, + set(value: string) { + syscallValue = value; + }, + }); + return error; +} + +function getPlatformDgramBufferSize(size: number): number { + if (size <= 0) { + return size; + } + return process.platform === "linux" ? 
size * 2 : size; +} + +function normalizeDgramTtlValue( + value: unknown, + syscall: "setTTL" | "setMulticastTTL", +): number { + if (typeof value !== "number") { + throw createDgramTtlArgTypeError(value); + } + if (!Number.isInteger(value) || value <= 0 || value >= 256) { + throw createDgramSyscallError(syscall, "EINVAL"); + } + return value; +} + +function isIPv4MulticastAddress(address: string): boolean { + if (!isIPv4String(address)) { + return false; + } + const first = Number(address.split(".")[0]); + return first >= 224 && first <= 239; +} + +function isIPv4UnicastAddress(address: string): boolean { + return isIPv4String(address) && !isIPv4MulticastAddress(address) && address !== "255.255.255.255"; +} + +function isIPv6MulticastAddress(address: string): boolean { + const zoneIndex = address.indexOf("%"); + const normalized = zoneIndex === -1 ? address : address.slice(0, zoneIndex); + return isIPv6String(address) && normalized.toLowerCase().startsWith("ff"); +} + +function validateDgramMulticastAddress( + type: DgramSocketType, + syscall: "addMembership" | "dropMembership" | "addSourceSpecificMembership" | "dropSourceSpecificMembership", + address: unknown, +): string { + if (typeof address !== "string") { + throw createDgramArgTypeError( + syscall === "addSourceSpecificMembership" || syscall === "dropSourceSpecificMembership" + ? "groupAddress" + : "multicastAddress", + "string", + address, + ); + } + const isValid = type === "udp6" ? isIPv6MulticastAddress(address) : isIPv4MulticastAddress(address); + if (!isValid) { + throw createDgramSyscallError(syscall, "EINVAL"); + } + return address; +} + +function validateDgramSourceAddress( + type: DgramSocketType, + syscall: "addSourceSpecificMembership" | "dropSourceSpecificMembership", + address: unknown, +): string { + if (typeof address !== "string") { + throw createDgramArgTypeError("sourceAddress", "string", address); + } + const isValid = type === "udp6" ? 
isIPv6String(address) && !isIPv6MulticastAddress(address) : isIPv4UnicastAddress(address); + if (!isValid) { + throw createDgramSyscallError(syscall, "EINVAL"); + } + return address; +} + +function normalizeDgramSocketType(value: unknown): DgramSocketType { + if (value === "udp4" || value === "udp6") { + return value; + } + throw createBadDgramSocketTypeError(); +} + +function normalizeDgramSocketOptions( + options: unknown, +): { type: DgramSocketType; recvBufferSize?: number; sendBufferSize?: number } { + if (typeof options === "string") { + return { type: normalizeDgramSocketType(options) }; + } + if (!options || typeof options !== "object" || Array.isArray(options)) { + throw createBadDgramSocketTypeError(); + } + const typedOptions = options as { + type?: unknown; + recvBufferSize?: unknown; + sendBufferSize?: unknown; + }; + const result: { type: DgramSocketType; recvBufferSize?: number; sendBufferSize?: number } = { + type: normalizeDgramSocketType(typedOptions.type), + }; + if (typedOptions.recvBufferSize !== undefined) { + if (typeof typedOptions.recvBufferSize !== "number") { + throw createInvalidArgTypeError( + "options.recvBufferSize", + "number", + typedOptions.recvBufferSize, + ); + } + result.recvBufferSize = typedOptions.recvBufferSize; + } + if (typedOptions.sendBufferSize !== undefined) { + if (typeof typedOptions.sendBufferSize !== "number") { + throw createInvalidArgTypeError( + "options.sendBufferSize", + "number", + typedOptions.sendBufferSize, + ); } + result.sendBufferSize = typedOptions.sendBufferSize; + } + return result; +} + +function normalizeDgramAddressValue( + address: unknown, + type: DgramSocketType, + defaultAddress: string, +): string { + if (address === undefined || address === null || address === "") { + return defaultAddress; + } + if (typeof address !== "string") { + throw createDgramArgTypeError("address", "string", address); + } + if (address === "localhost") { + return type === "udp6" ? 
"::1" : "127.0.0.1"; + } + return address; +} + +function normalizeDgramPortValue(port: unknown): number { + if (typeof port !== "number") { + throw createDgramArgTypeError("port", "number", port); + } + if (!isValidTcpPort(port)) { + throw createSocketBadPortError(port); + } + return port; +} + +function createDgramMessageBuffer(value: unknown): Buffer { + if (typeof value === "string") { + return Buffer.from(value); + } + if (Buffer.isBuffer(value)) { + return Buffer.from(value); + } + if (ArrayBuffer.isView(value)) { + return Buffer.from(value.buffer, value.byteOffset, value.byteLength); + } + throw createDgramArgTypeError("msg", "string or Buffer or Uint8Array or DataView", value); +} - let host: string; - let port: number; - let cb: (() => void) | undefined; +function createDgramMessageListBuffer(value: unknown): Buffer { + if (Array.isArray(value)) { + return Buffer.concat(value.map((entry) => createDgramMessageBuffer(entry))); + } + return createDgramMessageBuffer(value); +} - if (typeof portOrOptions === "object") { - host = portOrOptions.host ?? "127.0.0.1"; - port = portOrOptions.port; - cb = typeof hostOrCallback === "function" ? hostOrCallback : callback; +function normalizeDgramBindArgs( + args: unknown[], + type: DgramSocketType, +): { port: number; address: string; callback?: () => void } { + let port: unknown; + let address: unknown; + let callback: unknown; + + if (typeof args[0] === "function") { + callback = args[0]; + } else if (args[0] && typeof args[0] === "object" && !Array.isArray(args[0])) { + const options = args[0] as { port?: unknown; address?: unknown }; + port = options.port; + address = options.address; + callback = args[1]; + } else { + port = args[0]; + if (typeof args[1] === "function") { + callback = args[1]; } else { - port = portOrOptions; - host = typeof hostOrCallback === "string" ? hostOrCallback : "127.0.0.1"; - cb = typeof hostOrCallback === "function" ? 
hostOrCallback : callback; + address = args[1]; + callback = args[2]; } + } - if (cb) this.once("connect", cb); + if (callback !== undefined && typeof callback !== "function") { + throw createFunctionArgTypeError("callback", callback); + } - this.connecting = true; - this.remoteAddress = host; - this.remotePort = port; - this.pending = false; + return { + port: port === undefined ? 0 : normalizeDgramPortValue(port), + address: normalizeDgramAddressValue( + address, + type, + type === "udp6" ? "::" : "0.0.0.0", + ), + callback: callback as (() => void) | undefined, + }; +} - this._socketId = _netSocketConnectRaw.applySync(undefined, [host, port]) as number; - registerNetSocket(this._socketId, this); +function normalizeDgramSendArgs( + args: unknown[], + type: DgramSocketType, +): { data: Buffer; port: number; address: string; callback?: (err: Error | null, bytes?: number) => void } { + if (args.length === 0) { + throw createDgramArgTypeError("msg", "string or Buffer or Uint8Array or DataView", undefined); + } + const message = args[0]; + const hasOffsetLength = + typeof args[1] === "number" && + typeof args[2] === "number" && + args.length >= 4; + + if (hasOffsetLength) { + const source = createDgramMessageBuffer(message); + const offset = args[1] as number; + const length = args[2] as number; + const callback = typeof args[4] === "function" ? args[4] : args[5]; + if (callback !== undefined && typeof callback !== "function") { + throw createFunctionArgTypeError("callback", callback); + } + return { + data: Buffer.from(source.subarray(offset, offset + length)), + port: normalizeDgramPortValue(args[3]), + address: normalizeDgramAddressValue( + typeof args[4] === "function" ? undefined : args[4], + type, + type === "udp6" ? "::1" : "127.0.0.1", + ), + callback: callback as ((err: Error | null, bytes?: number) => void) | undefined, + }; + } - // Note: do NOT use _registerHandle for net sockets — _waitForActiveHandles() - // blocks dispatch callbacks. 
Libraries use their own async patterns (Promises, - // callbacks) which keep the execution alive via the script result promise. + const callback = typeof args[2] === "function" ? args[2] : args[3]; + if (callback !== undefined && typeof callback !== "function") { + throw createFunctionArgTypeError("callback", callback); + } + return { + data: createDgramMessageListBuffer(message), + port: normalizeDgramPortValue(args[1]), + address: normalizeDgramAddressValue( + typeof args[2] === "function" ? undefined : args[2], + type, + type === "udp6" ? "::1" : "127.0.0.1", + ), + callback: callback as ((err: Error | null, bytes?: number) => void) | undefined, + }; +} + +class DgramSocket { + private readonly _type: DgramSocketType; + private readonly _socketId: number; + private _listeners: Record = {}; + private _onceListeners: Record = {}; + private _bindPromise: Promise | null = null; + private _receiveLoopRunning = false; + private _receivePollTimer: ReturnType | null = null; + private _refed = true; + private _closed = false; + private _bound = false; + private _handleRefId: string | null = null; + private _recvBufferSize?: number; + private _sendBufferSize?: number; + private _memberships = new Set(); + private _multicastInterface?: string; + private _broadcast = false; + private _multicastLoopback = 1; + private _multicastTtl = 1; + private _ttl = 64; + + constructor( + optionsOrType: unknown, + listener?: DgramEventListener, + ) { + if (typeof _dgramSocketCreateRaw === "undefined") { + throw new Error("dgram.createSocket is not supported in sandbox"); + } + const options = normalizeDgramSocketOptions(optionsOrType); + this._type = options.type; + this._socketId = _dgramSocketCreateRaw.applySync(undefined, [this._type]); + if (listener) { + this.on("message", listener); + } + if (options.recvBufferSize !== undefined) { + this._setBufferSize("recv", options.recvBufferSize, false); + } + if (options.sendBufferSize !== undefined) { + this._setBufferSize("send", 
options.sendBufferSize, false); + } + } + bind(...args: unknown[]): this { + const { port, address, callback } = normalizeDgramBindArgs(args, this._type); + void this._bindInternal(port, address, callback); return this; } - write(data: unknown, encodingOrCallback?: string | (() => void), callback?: () => void): boolean { - if (typeof _netSocketWriteRaw === "undefined") return false; - if (this.destroyed || !this._socketId) return false; + send(...args: unknown[]): void { + const { data, port, address, callback } = normalizeDgramSendArgs(args, this._type); + void this._sendInternal(data, port, address, callback); + } - let buf: Buffer; - if (Buffer.isBuffer(data)) { - buf = data; - } else if (typeof data === "string") { - const enc = typeof encodingOrCallback === "string" ? encodingOrCallback : "utf-8"; - buf = Buffer.from(data, enc as BufferEncoding); - } else { - buf = Buffer.from(data as Uint8Array); + sendto(...args: unknown[]): void { + this.send(...args); + } + + address(): DgramSocketAddress { + if (typeof _dgramSocketAddressRaw === "undefined") { + throw createDgramAddressError(); + } + try { + return JSON.parse( + _dgramSocketAddressRaw.applySync(undefined, [this._socketId]), + ) as DgramSocketAddress; + } catch { + throw createDgramAddressError(); } + } - const base64 = buf.toString("base64"); - this.bytesWritten += buf.length; - _netSocketWriteRaw.applySync(undefined, [this._socketId, base64]); + close(callback?: () => void): this { + if (callback !== undefined && typeof callback !== "function") { + throw createFunctionArgTypeError("callback", callback); + } + if (callback) { + this.once("close", callback); + } + if (this._closed) { + return this; + } + this._closed = true; + this._bound = false; + this._clearReceivePollTimer(); + this._syncHandleRef(); + if (typeof _dgramSocketCloseRaw === "undefined") { + queueMicrotask(() => { + this._emit("close"); + }); + return this; + } + try { + _dgramSocketCloseRaw.applySyncPromise(undefined, [this._socketId]); + 
} finally { + queueMicrotask(() => { + this._emit("close"); + }); + } + return this; + } + + ref(): this { + this._refed = true; + this._syncHandleRef(); + if (this._receivePollTimer && typeof this._receivePollTimer.ref === "function") { + this._receivePollTimer.ref(); + } + if (this._bound && !this._closed && !this._receiveLoopRunning) { + void this._pumpMessages(); + } + return this; + } + + unref(): this { + this._refed = false; + this._syncHandleRef(); + if (this._receivePollTimer && typeof this._receivePollTimer.unref === "function") { + this._receivePollTimer.unref(); + } + return this; + } + + setRecvBufferSize(size: number): void { + this._setBufferSize("recv", size); + } + + setSendBufferSize(size: number): void { + this._setBufferSize("send", size); + } + + getRecvBufferSize(): number { + return this._getBufferSize("recv"); + } + + getSendBufferSize(): number { + return this._getBufferSize("send"); + } + + setBroadcast(flag: unknown): void { + this._ensureBoundForSocketOption("setBroadcast"); + this._broadcast = Boolean(flag); + } + + setTTL(ttl: unknown): number { + this._ensureBoundForSocketOption("setTTL"); + this._ttl = normalizeDgramTtlValue(ttl, "setTTL"); + return this._ttl; + } + + setMulticastTTL(ttl: unknown): number { + this._ensureBoundForSocketOption("setMulticastTTL"); + this._multicastTtl = normalizeDgramTtlValue(ttl, "setMulticastTTL"); + return this._multicastTtl; + } + + setMulticastLoopback(flag: unknown): number { + this._ensureBoundForSocketOption("setMulticastLoopback"); + this._multicastLoopback = Number(flag); + return this._multicastLoopback; + } - const cb = typeof encodingOrCallback === "function" ? 
encodingOrCallback : callback; - if (cb) cb(); - return true; + addMembership(multicastAddress?: unknown, multicastInterface?: unknown): void { + if (multicastAddress === undefined) { + throw createDgramMissingArgError("multicastAddress"); + } + if (this._closed) { + throw createDgramNotRunningError(); + } + const groupAddress = validateDgramMulticastAddress( + this._type, + "addMembership", + multicastAddress, + ); + if (multicastInterface !== undefined && typeof multicastInterface !== "string") { + throw createDgramArgTypeError("multicastInterface", "string", multicastInterface); + } + this._memberships.add(`${groupAddress}|${multicastInterface ?? ""}`); } - end(dataOrCallback?: unknown, encodingOrCallback?: string | (() => void), callback?: () => void): this { - if (typeof dataOrCallback === "function") { - this.once("finish", dataOrCallback as () => void); - } else if (dataOrCallback != null) { - this.write(dataOrCallback, encodingOrCallback, callback); + dropMembership(multicastAddress?: unknown, multicastInterface?: unknown): void { + if (multicastAddress === undefined) { + throw createDgramMissingArgError("multicastAddress"); } - if (typeof _netSocketEndRaw !== "undefined" && this._socketId && !this.destroyed) { - _netSocketEndRaw.applySync(undefined, [this._socketId]); + if (this._closed) { + throw createDgramNotRunningError(); } - return this; + const groupAddress = validateDgramMulticastAddress( + this._type, + "dropMembership", + multicastAddress, + ); + if (multicastInterface !== undefined && typeof multicastInterface !== "string") { + throw createDgramArgTypeError("multicastInterface", "string", multicastInterface); + } + const membershipKey = `${groupAddress}|${multicastInterface ?? 
""}`; + if (!this._memberships.has(membershipKey)) { + throw createDgramSyscallError("dropMembership", "EADDRNOTAVAIL"); + } + this._memberships.delete(membershipKey); } - destroy(error?: Error): this { - if (this.destroyed) return this; - this.destroyed = true; - this.writable = false; - this.readable = false; - if (typeof _netSocketDestroyRaw !== "undefined" && this._socketId) { - _netSocketDestroyRaw.applySync(undefined, [this._socketId]); - unregisterNetSocket(this._socketId); + addSourceSpecificMembership( + sourceAddress?: unknown, + groupAddress?: unknown, + multicastInterface?: unknown, + ): void { + if (this._closed) { + throw createDgramNotRunningError(); } - if (error) { - this._emitNet("error", error); + if (typeof sourceAddress !== "string") { + throw createDgramArgTypeError("sourceAddress", "string", sourceAddress); } - this._emitNet("close"); - return this; + if (typeof groupAddress !== "string") { + throw createDgramArgTypeError("groupAddress", "string", groupAddress); + } + const validatedSource = validateDgramSourceAddress( + this._type, + "addSourceSpecificMembership", + sourceAddress, + ); + const validatedGroup = validateDgramMulticastAddress( + this._type, + "addSourceSpecificMembership", + groupAddress, + ); + if (multicastInterface !== undefined && typeof multicastInterface !== "string") { + throw createDgramArgTypeError("multicastInterface", "string", multicastInterface); + } + this._memberships.add(`${validatedSource}>${validatedGroup}|${multicastInterface ?? 
""}`); } - setKeepAlive(_enable?: boolean, _initialDelay?: number): this { return this; } - setNoDelay(_noDelay?: boolean): this { return this; } - setTimeout(timeout: number, callback?: () => void): this { - if (callback) this.once("timeout", callback); - if (timeout === 0) return this; - // Timeout not enforced — bridge manages socket lifecycle - return this; + dropSourceSpecificMembership( + sourceAddress?: unknown, + groupAddress?: unknown, + multicastInterface?: unknown, + ): void { + if (this._closed) { + throw createDgramNotRunningError(); + } + if (typeof sourceAddress !== "string") { + throw createDgramArgTypeError("sourceAddress", "string", sourceAddress); + } + if (typeof groupAddress !== "string") { + throw createDgramArgTypeError("groupAddress", "string", groupAddress); + } + const validatedSource = validateDgramSourceAddress( + this._type, + "dropSourceSpecificMembership", + sourceAddress, + ); + const validatedGroup = validateDgramMulticastAddress( + this._type, + "dropSourceSpecificMembership", + groupAddress, + ); + if (multicastInterface !== undefined && typeof multicastInterface !== "string") { + throw createDgramArgTypeError("multicastInterface", "string", multicastInterface); + } + const membershipKey = `${validatedSource}>${validatedGroup}|${multicastInterface ?? 
""}`; + if (!this._memberships.has(membershipKey)) { + throw createDgramSyscallError("dropSourceSpecificMembership", "EADDRNOTAVAIL"); + } + this._memberships.delete(membershipKey); } - ref(): this { return this; } - unref(): this { return this; } - pause(): this { return this; } - resume(): this { return this; } - address(): { port: number; family: string; address: string } { - return { port: this.localPort, family: this.localFamily, address: this.localAddress }; + + setMulticastInterface(interfaceAddress: unknown): void { + if (typeof interfaceAddress !== "string") { + throw createDgramArgTypeError("interfaceAddress", "string", interfaceAddress); + } + if (this._closed) { + throw createDgramNotRunningError(); + } + this._ensureBoundForSocketOption("setMulticastInterface"); + if (this._type === "udp4") { + if (interfaceAddress === "0.0.0.0") { + this._multicastInterface = interfaceAddress; + return; + } + if (!isIPv4String(interfaceAddress)) { + throw createDgramSyscallError("setMulticastInterface", "ENOPROTOOPT"); + } + if (!isIPv4UnicastAddress(interfaceAddress)) { + throw createDgramSyscallError("setMulticastInterface", "EADDRNOTAVAIL"); + } + this._multicastInterface = interfaceAddress; + return; + } + if (interfaceAddress === "" || interfaceAddress === "undefined" || !isIPv6String(interfaceAddress)) { + throw createDgramSyscallError("setMulticastInterface", "EINVAL"); + } + this._multicastInterface = interfaceAddress; } - setEncoding(_encoding: string): this { return this; } - pipe(destination: T): T { return destination; } - on(event: string, listener: NetEventListener): this { + on(event: string, listener: DgramEventListener): this { if (!this._listeners[event]) this._listeners[event] = []; this._listeners[event].push(listener); return this; } - addListener(event: string, listener: NetEventListener): this { + addListener(event: string, listener: DgramEventListener): this { return this.on(event, listener); } - once(event: string, listener: NetEventListener): 
this { + once(event: string, listener: DgramEventListener): this { if (!this._onceListeners[event]) this._onceListeners[event] = []; this._onceListeners[event].push(listener); return this; } - removeListener(event: string, listener: NetEventListener): this { + removeListener(event: string, listener: DgramEventListener): this { const listeners = this._listeners[event]; if (listeners) { - const idx = listeners.indexOf(listener); - if (idx >= 0) listeners.splice(idx, 1); + const index = listeners.indexOf(listener); + if (index >= 0) listeners.splice(index, 1); } const onceListeners = this._onceListeners[event]; if (onceListeners) { - const idx = onceListeners.indexOf(listener); - if (idx >= 0) onceListeners.splice(idx, 1); + const index = onceListeners.indexOf(listener); + if (index >= 0) onceListeners.splice(index, 1); } return this; } - off(event: string, listener: NetEventListener): this { + off(event: string, listener: DgramEventListener): this { return this.removeListener(event, listener); } - removeAllListeners(event?: string): this { - if (event) { - delete this._listeners[event]; - delete this._onceListeners[event]; - } else { - this._listeners = {}; - this._onceListeners = {}; + emit(event: string, ...args: unknown[]): boolean { + return this._emit(event, ...args); + } + + private async _bindInternal( + port: number, + address: string, + callback?: () => void, + ): Promise { + if (this._closed) { + return; } - return this; + if (this._bound || this._bindPromise) { + throw createDgramAlreadyBoundError(); + } + if (typeof _dgramSocketBindRaw === "undefined") { + throw new Error("dgram.bind is not supported in sandbox"); + } + + this._bindPromise = (async () => { + try { + const resultJson = _dgramSocketBindRaw.applySyncPromise(undefined, [ + this._socketId, + JSON.stringify({ port, address }), + ]); + JSON.parse(resultJson) as DgramSocketAddress; + this._bound = true; + this._applyInitialBufferSizes(); + this._syncHandleRef(); + queueMicrotask(() => { + if 
(this._closed) { + return; + } + this._emit("listening"); + callback?.call(this); + void this._pumpMessages(); + }); + } catch (error) { + queueMicrotask(() => { + this._emit("error", error); + }); + throw error; + } finally { + this._bindPromise = null; + } + })(); + + return this._bindPromise; } - listeners(event: string): NetEventListener[] { - return [...(this._listeners[event] ?? []), ...(this._onceListeners[event] ?? [])]; + private async _ensureBound(): Promise { + if (this._bound) { + return; + } + if (this._bindPromise) { + await this._bindPromise; + return; + } + await this._bindInternal(0, this._type === "udp6" ? "::" : "0.0.0.0"); } - listenerCount(event: string): number { - return (this._listeners[event]?.length ?? 0) + (this._onceListeners[event]?.length ?? 0); + private async _sendInternal( + data: Buffer, + port: number, + address: string, + callback?: (err: Error | null, bytes?: number) => void, + ): Promise { + try { + await this._ensureBound(); + if (this._closed || typeof _dgramSocketSendRaw === "undefined") { + return; + } + const bytes = _dgramSocketSendRaw.applySyncPromise(undefined, [ + this._socketId, + JSON.stringify({ + data: data.toString("base64"), + port, + address, + }), + ]); + if (callback) { + queueMicrotask(() => { + callback(null, bytes); + }); + } + } catch (error) { + if (callback) { + queueMicrotask(() => { + callback(error as Error); + }); + return; + } + queueMicrotask(() => { + this._emit("error", error); + }); + } } - setMaxListeners(_n: number): this { return this; } - getMaxListeners(): number { return 10; } - prependListener(event: string, listener: NetEventListener): this { - if (!this._listeners[event]) this._listeners[event] = []; - this._listeners[event].unshift(listener); - return this; + private async _pumpMessages(): Promise { + if (this._receiveLoopRunning || this._closed || !this._bound) { + return; + } + if (typeof _dgramSocketRecvRaw === "undefined") { + return; + } + + this._receiveLoopRunning = true; + try 
{ + while (!this._closed && this._bound) { + const payload = _dgramSocketRecvRaw.applySync(undefined, [this._socketId]); + if (payload === NET_BRIDGE_TIMEOUT_SENTINEL) { + this._receivePollTimer = setTimeout(() => { + this._receivePollTimer = null; + void this._pumpMessages(); + }, NET_BRIDGE_POLL_DELAY_MS); + if (!this._refed && typeof this._receivePollTimer.unref === "function") { + this._receivePollTimer.unref(); + } + return; + } + if (!payload) { + return; + } + const message = JSON.parse(payload) as SerializedDgramMessage; + this._emit( + "message", + Buffer.from(message.data, "base64"), + message.rinfo, + ); + } + } catch (error) { + this._emit("error", error); + } finally { + this._receiveLoopRunning = false; + } } - prependOnceListener(event: string, listener: NetEventListener): this { - if (!this._onceListeners[event]) this._onceListeners[event] = []; - this._onceListeners[event].unshift(listener); - return this; + + private _clearReceivePollTimer(): void { + if (this._receivePollTimer) { + clearTimeout(this._receivePollTimer); + this._receivePollTimer = null; + } } - eventNames(): string[] { - return [...new Set([...Object.keys(this._listeners), ...Object.keys(this._onceListeners)])]; + + private _ensureBoundForSocketOption( + syscall: "setBroadcast" | "setTTL" | "setMulticastTTL" | "setMulticastLoopback" | "setMulticastInterface", + ): void { + if (!this._bound || this._closed) { + throw createDgramSyscallError(syscall, "EBADF"); + } } - rawListeners(event: string): NetEventListener[] { - return this.listeners(event); + + private _setBufferSize(which: "recv" | "send", size: number, requireRunning = true): void { + if (!Number.isInteger(size) || size <= 0 || !Number.isFinite(size)) { + throw createDgramBufferSizeTypeError(); + } + if (size > 0x7fffffff) { + throw createDgramBufferSizeSystemError(which, "EINVAL"); + } + if (requireRunning && (!this._bound || this._closed)) { + throw createDgramBufferSizeSystemError(which, "EBADF"); + } + if (typeof 
_dgramSocketSetBufferSizeRaw !== "undefined" && this._bound && !this._closed) { + _dgramSocketSetBufferSizeRaw.applySync(undefined, [this._socketId, which, size]); + } + if (which === "recv") { + this._recvBufferSize = size; + return; + } + this._sendBufferSize = size; } - emit(event: string, ...args: unknown[]): boolean { - return this._emitNet(event, ...args); + + private _getBufferSize(which: "recv" | "send"): number { + if (!this._bound || this._closed) { + throw createDgramBufferSizeSystemError(which, "EBADF"); + } + const fallback = which === "recv" ? this._recvBufferSize ?? 0 : this._sendBufferSize ?? 0; + if (typeof _dgramSocketGetBufferSizeRaw === "undefined") { + return getPlatformDgramBufferSize(fallback); + } + const rawSize = _dgramSocketGetBufferSizeRaw.applySync(undefined, [this._socketId, which]); + return getPlatformDgramBufferSize(rawSize > 0 ? rawSize : fallback); } - _emitNet(event: string, ...args: unknown[]): boolean { + private _applyInitialBufferSizes(): void { + if (this._recvBufferSize !== undefined) { + this._setBufferSize("recv", this._recvBufferSize); + } + if (this._sendBufferSize !== undefined) { + this._setBufferSize("send", this._sendBufferSize); + } + } + + private _syncHandleRef(): void { + if (!this._bound || this._closed || !this._refed) { + if (this._handleRefId && typeof _unregisterHandle === "function") { + _unregisterHandle(this._handleRefId); + } + this._handleRefId = null; + return; + } + + const nextHandleId = `${DGRAM_HANDLE_PREFIX}${this._socketId}`; + if (this._handleRefId === nextHandleId) { + return; + } + if (this._handleRefId && typeof _unregisterHandle === "function") { + _unregisterHandle(this._handleRefId); + } + this._handleRefId = nextHandleId; + if (typeof _registerHandle === "function") { + _registerHandle(this._handleRefId, "dgram socket"); + } + } + + private _emit(event: string, ...args: unknown[]): boolean { let handled = false; const listeners = this._listeners[event]; if (listeners) { - for (const fn 
of [...listeners]) { - try { fn(...args); } catch { /* ignore */ } + for (const listener of [...listeners]) { + listener(...args); handled = true; } } const onceListeners = this._onceListeners[event]; if (onceListeners) { - const fns = [...onceListeners]; this._onceListeners[event] = []; - for (const fn of fns) { - try { fn(...args); } catch { /* ignore */ } + for (const listener of [...onceListeners]) { + listener(...args); handled = true; } } return handled; } - - // Upgrade this socket to TLS - _upgradeTls(options?: { rejectUnauthorized?: boolean; servername?: string }): void { - if (typeof _netSocketUpgradeTlsRaw === "undefined") { - throw new Error("tls.connect is not supported in sandbox (bridge not available)"); - } - _netSocketUpgradeTlsRaw.applySync(undefined, [this._socketId, JSON.stringify(options ?? {})]); - } -} - -function netConnect(portOrOptions: number | { host?: string; port: number }, hostOrCallback?: string | (() => void), callback?: () => void): NetSocket { - const socket = new NetSocket(); - socket.connect(portOrOptions, hostOrCallback as string, callback); - return socket; -} - -const netModule = { - Socket: NetSocket, - connect: netConnect, - createConnection: netConnect, - createServer(): never { - throw new Error("net.createServer is not supported in sandbox"); - }, - isIP(input: string): number { - if (/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/.test(input)) return 4; - if (input.includes(":")) return 6; - return 0; - }, - isIPv4(input: string): boolean { return netModule.isIP(input) === 4; }, - isIPv6(input: string): boolean { return netModule.isIP(input) === 6; }, -}; - -// =================================================================== -// tls module — TLS socket support via upgrade bridge -// =================================================================== - -function tlsConnect( - portOrOptions: number | { host?: string; port: number; socket?: NetSocket; rejectUnauthorized?: boolean; servername?: string }, - hostOrCallback?: string 
| (() => void), - callback?: () => void, -): NetSocket { - let socket: NetSocket; - let options: { rejectUnauthorized?: boolean; servername?: string; host?: string; port?: number } = {}; - let cb: (() => void) | undefined; - - if (typeof portOrOptions === "object") { - options = { ...portOrOptions }; - cb = typeof hostOrCallback === "function" ? hostOrCallback : callback; - - if (portOrOptions.socket) { - // Upgrade existing socket to TLS - socket = portOrOptions.socket; - } else { - // Create new TCP socket then upgrade - socket = new NetSocket(); - socket.connect({ host: portOrOptions.host ?? "127.0.0.1", port: portOrOptions.port }); - } - } else { - const host = typeof hostOrCallback === "string" ? hostOrCallback : "127.0.0.1"; - cb = typeof hostOrCallback === "function" ? hostOrCallback : callback; - options = { host }; - socket = new NetSocket(); - socket.connect(portOrOptions, host); - } - - if (cb) socket.once("secureConnect", cb); - - // If already connected, upgrade immediately; otherwise wait for connect - if (socket._connected) { - socket._upgradeTls({ - rejectUnauthorized: options.rejectUnauthorized, - servername: options.servername ?? options.host, - }); - } else { - socket.once("connect", () => { - socket._upgradeTls({ - rejectUnauthorized: options.rejectUnauthorized, - servername: options.servername ?? 
options.host, - }); - }); - } - - return socket; } -const tlsModule = { - connect: tlsConnect, - TLSSocket: NetSocket, // Alias — TLSSocket is just a NetSocket after upgrade - createServer(): never { - throw new Error("tls.createServer is not supported in sandbox"); - }, - createSecureContext(): Record { - return {}; // Stub for libraries that call this +const dgramModule = { + Socket: DgramSocket as unknown as typeof nodeDgram.Socket, + createSocket( + optionsOrType: unknown, + callback?: DgramEventListener, + ): DgramSocket { + return new DgramSocket(optionsOrType, callback); }, - DEFAULT_MIN_VERSION: "TLSv1.2", - DEFAULT_MAX_VERSION: "TLSv1.3", }; exposeCustomGlobal("_netModule", netModule); exposeCustomGlobal("_tlsModule", tlsModule); +exposeCustomGlobal("_dgramModule", dgramModule); export default { fetch, @@ -3062,4 +10757,5 @@ export default { ClientRequest, net: netModule, tls: tlsModule, + dgram: dgramModule, }; diff --git a/packages/nodejs/src/bridge/polyfills.ts b/packages/nodejs/src/bridge/polyfills.ts index 99d2ceb9..feae24f3 100644 --- a/packages/nodejs/src/bridge/polyfills.ts +++ b/packages/nodejs/src/bridge/polyfills.ts @@ -1,7 +1,82 @@ // Early polyfills - this file must be imported FIRST before any other modules // that might use TextEncoder/TextDecoder (like whatwg-url) -import { TextEncoder, TextDecoder } from "text-encoding-utf-8"; +import { + TextDecoder as PolyfillTextDecoder, +} from "text-encoding-utf-8"; + +function encodeUtf8ScalarValue(codePoint: number, bytes: number[]): void { + if (codePoint <= 0x7f) { + bytes.push(codePoint); + return; + } + if (codePoint <= 0x7ff) { + bytes.push(0xc0 | (codePoint >> 6), 0x80 | (codePoint & 0x3f)); + return; + } + if (codePoint <= 0xffff) { + bytes.push( + 0xe0 | (codePoint >> 12), + 0x80 | ((codePoint >> 6) & 0x3f), + 0x80 | (codePoint & 0x3f), + ); + return; + } + bytes.push( + 0xf0 | (codePoint >> 18), + 0x80 | ((codePoint >> 12) & 0x3f), + 0x80 | ((codePoint >> 6) & 0x3f), + 0x80 | (codePoint & 
0x3f), + ); +} + +function encodeUtf8(input = ""): Uint8Array { + const value = String(input); + const bytes: number[] = []; + for (let index = 0; index < value.length; index += 1) { + const codeUnit = value.charCodeAt(index); + if (codeUnit >= 0xd800 && codeUnit <= 0xdbff) { + const nextIndex = index + 1; + if (nextIndex < value.length) { + const nextCodeUnit = value.charCodeAt(nextIndex); + if (nextCodeUnit >= 0xdc00 && nextCodeUnit <= 0xdfff) { + const codePoint = + 0x10000 + ((codeUnit - 0xd800) << 10) + (nextCodeUnit - 0xdc00); + encodeUtf8ScalarValue(codePoint, bytes); + index = nextIndex; + continue; + } + } + encodeUtf8ScalarValue(0xfffd, bytes); + continue; + } + if (codeUnit >= 0xdc00 && codeUnit <= 0xdfff) { + encodeUtf8ScalarValue(0xfffd, bytes); + continue; + } + encodeUtf8ScalarValue(codeUnit, bytes); + } + return new Uint8Array(bytes); +} + +class PatchedTextEncoder { + encode(input = ""): Uint8Array { + return encodeUtf8(input); + } + + get encoding(): string { + return "utf-8"; + } +} + +const TextEncoder = + typeof globalThis.TextEncoder === "function" + ? globalThis.TextEncoder + : (PatchedTextEncoder as typeof globalThis.TextEncoder); +const TextDecoder = + typeof globalThis.TextDecoder === "function" + ? 
globalThis.TextDecoder + : PolyfillTextDecoder; // Install on globalThis so other modules can use them if (typeof globalThis.TextEncoder === "undefined") { diff --git a/packages/nodejs/src/bridge/process.ts b/packages/nodejs/src/bridge/process.ts index 485d9bb3..ad4a4824 100644 --- a/packages/nodejs/src/bridge/process.ts +++ b/packages/nodejs/src/bridge/process.ts @@ -6,8 +6,11 @@ import type * as nodeProcess from "process"; // Re-export TextEncoder/TextDecoder from polyfills (polyfills.ts is imported first in index.ts) import { TextEncoder, TextDecoder } from "./polyfills.js"; -// Use whatwg-url for spec-compliant URL implementation -import { URL as WhatwgURL, URLSearchParams as WhatwgURLSearchParams } from "whatwg-url"; +import { + URL, + URLSearchParams, + installWhatwgUrlGlobals, +} from "./whatwg-url.js"; // Use buffer package for spec-compliant Buffer implementation import { Buffer as BufferPolyfill } from "buffer"; @@ -170,6 +173,34 @@ if (typeof bufferProto.utf8Slice !== "function") { } } +const bufferCtorMutable = BufferPolyfill as typeof BufferPolyfill & { + allocUnsafe?: typeof BufferPolyfill.allocUnsafe & { _secureExecPatched?: boolean }; +}; +if ( + typeof bufferCtorMutable.allocUnsafe === "function" && + !bufferCtorMutable.allocUnsafe._secureExecPatched +) { + const originalAllocUnsafe = bufferCtorMutable.allocUnsafe; + bufferCtorMutable.allocUnsafe = function patchedAllocUnsafe( + this: typeof BufferPolyfill, + size: number, + ): Buffer { + try { + return originalAllocUnsafe.call(this, size); + } catch (error) { + if ( + error instanceof RangeError && + typeof size === "number" && + size > BUFFER_MAX_LENGTH + ) { + throw new Error("Array buffer allocation failed"); + } + throw error; + } + } as typeof BufferPolyfill.allocUnsafe & { _secureExecPatched?: boolean }; + bufferCtorMutable.allocUnsafe._secureExecPatched = true; +} + // Exit code tracking let _exitCode = 0; let _exited = false; @@ -1148,11 +1179,8 @@ export function clearImmediate(id: 
TimerHandle | number | undefined): void { clearTimeout(id); } -// URL and URLSearchParams - use whatwg-url for spec-compliant implementation -export const URL = WhatwgURL; -export const URLSearchParams = WhatwgURLSearchParams; - // TextEncoder and TextDecoder - re-export from polyfills +export { URL, URLSearchParams }; export { TextEncoder, TextDecoder }; // Buffer - use buffer package polyfill @@ -1767,14 +1795,8 @@ export function setupGlobals(): void { g.queueMicrotask = _queueMicrotask; } - // URL - if (typeof g.URL === "undefined") { - g.URL = URL; - } - - if (typeof g.URLSearchParams === "undefined") { - g.URLSearchParams = URLSearchParams; - } + // URL globals must override bootstrap fallbacks and stay non-enumerable. + installWhatwgUrlGlobals(g as typeof globalThis); // TextEncoder/TextDecoder if (typeof g.TextEncoder === "undefined") { diff --git a/packages/nodejs/src/bridge/whatwg-url-ambient.d.ts b/packages/nodejs/src/bridge/whatwg-url-ambient.d.ts new file mode 100644 index 00000000..af730303 --- /dev/null +++ b/packages/nodejs/src/bridge/whatwg-url-ambient.d.ts @@ -0,0 +1,4 @@ +declare module "whatwg-url" { + export const URL: typeof globalThis.URL; + export const URLSearchParams: typeof globalThis.URLSearchParams; +} diff --git a/packages/nodejs/src/bridge/whatwg-url.ts b/packages/nodejs/src/bridge/whatwg-url.ts new file mode 100644 index 00000000..00a0231b --- /dev/null +++ b/packages/nodejs/src/bridge/whatwg-url.ts @@ -0,0 +1,897 @@ +// @ts-ignore whatwg-url ships without bundled TypeScript declarations in this repo. 
+import { + URL as WhatwgURL, + URLSearchParams as WhatwgURLSearchParams, +} from "whatwg-url"; + +type WhatwgURLInstance = InstanceType; +type WhatwgURLSearchParamsInstance = InstanceType; + +const inspectCustomSymbol = Symbol.for("nodejs.util.inspect.custom"); +const toStringTagSymbol = Symbol.toStringTag; +const ERR_INVALID_THIS = "ERR_INVALID_THIS"; +const ERR_MISSING_ARGS = "ERR_MISSING_ARGS"; +const ERR_INVALID_URL = "ERR_INVALID_URL"; +const ERR_ARG_NOT_ITERABLE = "ERR_ARG_NOT_ITERABLE"; +const ERR_INVALID_TUPLE = "ERR_INVALID_TUPLE"; +const URL_SEARCH_PARAMS_TYPE = "URLSearchParams"; +const kLinkedSearchParams = Symbol("secureExecLinkedURLSearchParams"); +const kBlobUrlStore = Symbol.for("secureExec.blobUrlStore"); +const kBlobUrlCounter = Symbol.for("secureExec.blobUrlCounter"); +const SEARCH_PARAM_METHOD_NAMES = ["append", "delete", "get", "getAll", "has"]; +const SEARCH_PARAM_PAIR_METHOD_NAMES = ["append", "set"]; +const URL_SCHEME_TYPES: Record = { + "http:": 0, + "https:": 2, + "ws:": 4, + "wss:": 5, + "file:": 6, + "ftp:": 8, +}; + +type SearchParamsLinkedInit = { + [kLinkedSearchParams]: () => WhatwgURLSearchParamsInstance; +}; + +const searchParamsBrand = new WeakSet(); +const searchParamsState = new WeakMap< + URLSearchParams, + { getImpl: () => WhatwgURLSearchParamsInstance } +>(); +const searchParamsIteratorBrand = new WeakSet(); +const searchParamsIteratorState = new WeakMap< + URLSearchParamsIterator, + { values: unknown[]; index: number } +>(); + +function createNodeTypeError(message: string, code: string): TypeError & { code: string } { + const error = new TypeError(message) as TypeError & { code: string }; + error.code = code; + return error; +} + +function createInvalidUrlError(): TypeError & { code: string } { + const error = new TypeError("Invalid URL") as TypeError & { code: string }; + error.code = ERR_INVALID_URL; + return error; +} + +function createUrlReceiverTypeError(): TypeError { + return new TypeError("Receiver must be an 
instance of class URL"); +} + +function createMissingArgsError(message: string): TypeError & { code: string } { + return createNodeTypeError(message, ERR_MISSING_ARGS); +} + +function createIterableTypeError(): TypeError & { code: string } { + return createNodeTypeError("Query pairs must be iterable", ERR_ARG_NOT_ITERABLE); +} + +function createTupleTypeError(): TypeError & { code: string } { + return createNodeTypeError( + "Each query pair must be an iterable [name, value] tuple", + ERR_INVALID_TUPLE, + ); +} + +function createSymbolStringError(): TypeError { + return new TypeError("Cannot convert a Symbol value to a string"); +} + +function toNodeString(value: unknown): string { + if (typeof value === "symbol") { + throw createSymbolStringError(); + } + return String(value); +} + +function toWellFormedString(value: string): string { + let result = ""; + for (let index = 0; index < value.length; index += 1) { + const codeUnit = value.charCodeAt(index); + if (codeUnit >= 0xd800 && codeUnit <= 0xdbff) { + const nextIndex = index + 1; + if (nextIndex < value.length) { + const nextCodeUnit = value.charCodeAt(nextIndex); + if (nextCodeUnit >= 0xdc00 && nextCodeUnit <= 0xdfff) { + result += value[index] + value[nextIndex]; + index = nextIndex; + continue; + } + } + result += "\uFFFD"; + continue; + } + if (codeUnit >= 0xdc00 && codeUnit <= 0xdfff) { + result += "\uFFFD"; + continue; + } + result += value[index]; + } + return result; +} + +function toNodeUSVString(value: unknown): string { + return toWellFormedString(toNodeString(value)); +} + +function assertUrlSearchParamsReceiver(receiver: unknown): asserts receiver is URLSearchParams { + if (!searchParamsBrand.has(receiver as URLSearchParams)) { + throw createNodeTypeError( + 'Value of "this" must be of type URLSearchParams', + ERR_INVALID_THIS, + ); + } +} + +function assertUrlSearchParamsIteratorReceiver( + receiver: unknown, +): asserts receiver is URLSearchParamsIterator { + if 
(!searchParamsIteratorBrand.has(receiver as URLSearchParamsIterator)) { + throw createNodeTypeError( + 'Value of "this" must be of type URLSearchParamsIterator', + ERR_INVALID_THIS, + ); + } +} + +function getUrlSearchParamsImpl(receiver: URLSearchParams): WhatwgURLSearchParamsInstance { + const state = searchParamsState.get(receiver); + if (!state) { + throw createNodeTypeError( + 'Value of "this" must be of type URLSearchParams', + ERR_INVALID_THIS, + ); + } + return state.getImpl(); +} + +function countSearchParams(params: WhatwgURLSearchParamsInstance): number { + let count = 0; + for (const _entry of params) { + count++; + } + return count; +} + +function normalizeSearchParamsInit( + init: unknown, +): string | Array<[string, string]> | SearchParamsLinkedInit | undefined { + if ( + init && + typeof init === "object" && + kLinkedSearchParams in (init as Record) + ) { + return init as SearchParamsLinkedInit; + } + + if (init == null) { + return undefined; + } + + if (typeof init === "string") { + return toNodeUSVString(init); + } + + if (typeof init === "object" || typeof init === "function") { + const iterator = (init as { [Symbol.iterator]?: unknown })[Symbol.iterator]; + if (iterator !== undefined) { + if (typeof iterator !== "function") { + throw createIterableTypeError(); + } + + const pairs: Array<[string, string]> = []; + for (const pair of init as Iterable) { + if (pair == null) { + throw createTupleTypeError(); + } + + const pairIterator = (pair as { [Symbol.iterator]?: unknown })[Symbol.iterator]; + if (typeof pairIterator !== "function") { + throw createTupleTypeError(); + } + + const values = Array.from(pair as Iterable); + if (values.length !== 2) { + throw createTupleTypeError(); + } + + pairs.push([toNodeUSVString(values[0]), toNodeUSVString(values[1])]); + } + return pairs; + } + + const pairs: Array<[string, string]> = []; + for (const key of Reflect.ownKeys(init as object)) { + if (!Object.prototype.propertyIsEnumerable.call(init, key)) { + 
continue; + } + pairs.push([ + toNodeUSVString(key), + toNodeUSVString((init as Record)[key]), + ]); + } + return pairs; + } + + return toNodeUSVString(init); +} + +function createStandaloneSearchParams( + init?: string | Array<[string, string]>, +): WhatwgURLSearchParamsInstance { + if (typeof init === "string") { + return new WhatwgURLSearchParams(init); + } + return init === undefined + ? new WhatwgURLSearchParams() + : new WhatwgURLSearchParams(init); +} + +function createCollectionBody( + items: string[], + options: { breakLength?: number } | undefined, + emptyBody: "{}" | "{ }", +): string { + if (items.length === 0) { + return emptyBody; + } + + const oneLine = `{ ${items.join(", ")} }`; + const breakLength = options?.breakLength ?? Infinity; + if (oneLine.length <= breakLength) { + return oneLine; + } + return `{\n ${items.join(",\n ")} }`; +} + +function createUrlContext(url: URL) { + const href = url.href; + const protocolEnd = href.indexOf(":") + 1; + const authIndex = href.indexOf("@"); + const pathnameStart = href.indexOf("/", protocolEnd + 2); + const searchStart = href.indexOf("?"); + const hashStart = href.indexOf("#"); + const usernameEnd = + url.username.length > 0 + ? href.indexOf(":", protocolEnd + 2) + : protocolEnd + 2; + const hostStart = authIndex === -1 ? protocolEnd + 2 : authIndex; + const hostEnd = + pathnameStart === -1 + ? href.length + : pathnameStart - (url.port.length > 0 ? url.port.length + 1 : 0); + const port = url.port.length > 0 ? Number(url.port) : null; + + return { + href, + protocol_end: protocolEnd, + username_end: usernameEnd, + host_start: hostStart, + host_end: hostEnd, + pathname_start: pathnameStart === -1 ? href.length : pathnameStart, + search_start: searchStart === -1 ? href.length : searchStart, + hash_start: hashStart === -1 ? href.length : hashStart, + port, + scheme_type: URL_SCHEME_TYPES[url.protocol] ?? 
1, + hasPort: url.port.length > 0, + hasSearch: url.search.length > 0, + hasHash: url.hash.length > 0, + }; +} + +function formatUrlContext( + url: URL, + inspect: ((value: unknown, options?: unknown) => string) | undefined, + options: unknown, +): string { + const context = createUrlContext(url); + const formatValue = + typeof inspect === "function" + ? (value: unknown) => inspect(value, options) + : (value: unknown) => JSON.stringify(value); + const portValue = context.port === null ? "null" : String(context.port); + + return [ + "URLContext {", + ` href: ${formatValue(context.href)},`, + ` protocol_end: ${context.protocol_end},`, + ` username_end: ${context.username_end},`, + ` host_start: ${context.host_start},`, + ` host_end: ${context.host_end},`, + ` pathname_start: ${context.pathname_start},`, + ` search_start: ${context.search_start},`, + ` hash_start: ${context.hash_start},`, + ` port: ${portValue},`, + ` scheme_type: ${context.scheme_type},`, + " [hasPort]: [Getter],", + " [hasSearch]: [Getter],", + " [hasHash]: [Getter]", + " }", + ].join("\n"); +} + +function getBlobUrlStore(): Map { + const globalRecord = globalThis as Record; + const existing = globalRecord[kBlobUrlStore]; + if (existing instanceof Map) { + return existing; + } + const store = new Map(); + globalRecord[kBlobUrlStore] = store; + return store; +} + +function nextBlobUrlId(): number { + const globalRecord = globalThis as Record; + const nextId = typeof globalRecord[kBlobUrlCounter] === "number" ? 
globalRecord[kBlobUrlCounter] : 1; + globalRecord[kBlobUrlCounter] = (nextId as number) + 1; + return nextId as number; +} + +export class URLSearchParamsIterator { + constructor(values: unknown[]) { + searchParamsIteratorBrand.add(this); + searchParamsIteratorState.set(this, { values, index: 0 }); + } + + next(): IteratorResult { + assertUrlSearchParamsIteratorReceiver(this); + const state = searchParamsIteratorState.get(this)!; + if (state.index >= state.values.length) { + return { value: undefined, done: true }; + } + const value = state.values[state.index]; + state.index += 1; + return { value, done: false }; + } + + [inspectCustomSymbol]( + depth: number, + options?: { breakLength?: number }, + inspect?: (value: unknown, options?: unknown) => string, + ): string { + assertUrlSearchParamsIteratorReceiver(this); + if (depth < 0) { + return "[Object]"; + } + const state = searchParamsIteratorState.get(this)!; + const formatValue = + typeof inspect === "function" + ? (value: unknown) => inspect(value, options) + : (value: unknown) => JSON.stringify(value); + const remaining = state.values.slice(state.index).map((value) => formatValue(value)); + return `URLSearchParams Iterator ${createCollectionBody(remaining, options, "{ }")}`; + } + + get [toStringTagSymbol](): string { + if (this !== URLSearchParamsIterator.prototype) { + assertUrlSearchParamsIteratorReceiver(this); + } + return "URLSearchParams Iterator"; + } +} + +Object.defineProperties(URLSearchParamsIterator.prototype, { + next: { + value: URLSearchParamsIterator.prototype.next, + writable: true, + configurable: true, + enumerable: true, + }, + [Symbol.iterator]: { + value: function iterator(this: URLSearchParamsIterator) { + assertUrlSearchParamsIteratorReceiver(this); + return this; + }, + writable: true, + configurable: true, + enumerable: false, + }, + [inspectCustomSymbol]: { + value: URLSearchParamsIterator.prototype[inspectCustomSymbol], + writable: true, + configurable: true, + enumerable: false, + 
}, + [toStringTagSymbol]: { + get: Object.getOwnPropertyDescriptor(URLSearchParamsIterator.prototype, toStringTagSymbol)?.get, + configurable: true, + enumerable: false, + }, +}); + +Object.defineProperty( + Object.getOwnPropertyDescriptor(URLSearchParamsIterator.prototype, Symbol.iterator)?.value, + "name", + { + value: "entries", + configurable: true, + }, +); + +export class URLSearchParams { + constructor(init?: unknown) { + searchParamsBrand.add(this); + const normalized = normalizeSearchParamsInit(init); + if ( + normalized && + typeof normalized === "object" && + kLinkedSearchParams in normalized + ) { + searchParamsState.set(this, { + getImpl: (normalized as SearchParamsLinkedInit)[kLinkedSearchParams], + }); + return; + } + const impl = createStandaloneSearchParams( + normalized as string | Array<[string, string]> | undefined, + ); + searchParamsState.set(this, { getImpl: () => impl }); + } + + append(name?: unknown, value?: unknown): void { + assertUrlSearchParamsReceiver(this); + if (arguments.length < 2) { + throw createMissingArgsError('The "name" and "value" arguments must be specified'); + } + getUrlSearchParamsImpl(this).append(toNodeUSVString(name), toNodeUSVString(value)); + } + + delete(name?: unknown): void { + assertUrlSearchParamsReceiver(this); + if (arguments.length < 1) { + throw createMissingArgsError('The "name" argument must be specified'); + } + getUrlSearchParamsImpl(this).delete(toNodeUSVString(name)); + } + + get(name?: unknown): string | null { + assertUrlSearchParamsReceiver(this); + if (arguments.length < 1) { + throw createMissingArgsError('The "name" argument must be specified'); + } + return getUrlSearchParamsImpl(this).get(toNodeUSVString(name)); + } + + getAll(name?: unknown): string[] { + assertUrlSearchParamsReceiver(this); + if (arguments.length < 1) { + throw createMissingArgsError('The "name" argument must be specified'); + } + return getUrlSearchParamsImpl(this).getAll(toNodeUSVString(name)); + } + + has(name?: 
unknown): boolean { + assertUrlSearchParamsReceiver(this); + if (arguments.length < 1) { + throw createMissingArgsError('The "name" argument must be specified'); + } + return getUrlSearchParamsImpl(this).has(toNodeUSVString(name)); + } + + set(name?: unknown, value?: unknown): void { + assertUrlSearchParamsReceiver(this); + if (arguments.length < 2) { + throw createMissingArgsError('The "name" and "value" arguments must be specified'); + } + getUrlSearchParamsImpl(this).set(toNodeUSVString(name), toNodeUSVString(value)); + } + + sort(): void { + assertUrlSearchParamsReceiver(this); + getUrlSearchParamsImpl(this).sort(); + } + + entries(): URLSearchParamsIterator { + assertUrlSearchParamsReceiver(this); + return new URLSearchParamsIterator(Array.from(getUrlSearchParamsImpl(this))); + } + + keys(): URLSearchParamsIterator { + assertUrlSearchParamsReceiver(this); + return new URLSearchParamsIterator(Array.from(getUrlSearchParamsImpl(this).keys())); + } + + values(): URLSearchParamsIterator { + assertUrlSearchParamsReceiver(this); + return new URLSearchParamsIterator(Array.from(getUrlSearchParamsImpl(this).values())); + } + + forEach( + callback?: (value: string, key: string, obj: URLSearchParams) => void, + thisArg?: unknown, + ): void { + assertUrlSearchParamsReceiver(this); + if (typeof callback !== "function") { + throw createNodeTypeError( + 'The "callback" argument must be of type function. Received ' + + (callback === undefined ? 
"undefined" : typeof callback), + "ERR_INVALID_ARG_TYPE", + ); + } + + for (const [key, value] of getUrlSearchParamsImpl(this)) { + callback.call(thisArg, value, key, this); + } + } + + toString(): string { + assertUrlSearchParamsReceiver(this); + return getUrlSearchParamsImpl(this).toString(); + } + + get size(): number { + assertUrlSearchParamsReceiver(this); + return countSearchParams(getUrlSearchParamsImpl(this)); + } + + [inspectCustomSymbol]( + depth: number, + options?: { breakLength?: number }, + inspect?: (value: unknown, options?: unknown) => string, + ): string { + assertUrlSearchParamsReceiver(this); + if (depth < 0) { + return "[Object]"; + } + const formatValue = + typeof inspect === "function" + ? (value: unknown) => inspect(value, options) + : (value: unknown) => JSON.stringify(value); + const items = Array.from( + getUrlSearchParamsImpl(this) as Iterable<[string, string]>, + ).map( + ([key, value]) => `${formatValue(key)} => ${formatValue(value)}`, + ); + return `URLSearchParams ${createCollectionBody(items, options, "{}")}`; + } + + get [toStringTagSymbol](): string { + if (this !== URLSearchParams.prototype) { + assertUrlSearchParamsReceiver(this); + } + return URL_SEARCH_PARAMS_TYPE; + } +} + +for (const name of SEARCH_PARAM_METHOD_NAMES) { + Object.defineProperty(URLSearchParams.prototype, name, { + value: (URLSearchParams.prototype as unknown as Record)[name], + writable: true, + configurable: true, + enumerable: true, + }); +} + +for (const name of SEARCH_PARAM_PAIR_METHOD_NAMES) { + Object.defineProperty(URLSearchParams.prototype, name, { + value: (URLSearchParams.prototype as unknown as Record)[name], + writable: true, + configurable: true, + enumerable: true, + }); +} + +for (const name of ["sort", "entries", "forEach", "keys", "values", "toString"] as const) { + Object.defineProperty(URLSearchParams.prototype, name, { + value: URLSearchParams.prototype[name], + writable: true, + configurable: true, + enumerable: true, + }); +} + 
+Object.defineProperties(URLSearchParams.prototype, { + size: { + get: Object.getOwnPropertyDescriptor(URLSearchParams.prototype, "size")?.get, + configurable: true, + enumerable: true, + }, + [Symbol.iterator]: { + value: URLSearchParams.prototype.entries, + writable: true, + configurable: true, + enumerable: false, + }, + [inspectCustomSymbol]: { + value: URLSearchParams.prototype[inspectCustomSymbol], + writable: true, + configurable: true, + enumerable: false, + }, + [toStringTagSymbol]: { + get: Object.getOwnPropertyDescriptor(URLSearchParams.prototype, toStringTagSymbol)?.get, + configurable: true, + enumerable: false, + }, +}); + +export class URL { + #impl: WhatwgURLInstance; + #searchParams: URLSearchParams | undefined; + + constructor(input?: unknown, base?: unknown) { + if (arguments.length < 1) { + throw createMissingArgsError('The "url" argument must be specified'); + } + + try { + this.#impl = + arguments.length >= 2 + ? new WhatwgURL(toNodeUSVString(input), toNodeUSVString(base)) + : new WhatwgURL(toNodeUSVString(input)); + } catch { + throw createInvalidUrlError(); + } + } + + static canParse(input?: unknown, base?: unknown): boolean { + if (arguments.length < 1) { + throw createMissingArgsError('The "url" argument must be specified'); + } + + try { + if (arguments.length >= 2) { + new URL(input, base); + } else { + new URL(input); + } + return true; + } catch { + return false; + } + } + + static createObjectURL(obj?: unknown): string { + if ( + typeof Blob === "undefined" || + !(obj instanceof Blob) + ) { + throw createNodeTypeError( + 'The "obj" argument must be an instance of Blob. Received ' + + (obj === null ? 
"null" : typeof obj), + "ERR_INVALID_ARG_TYPE", + ); + } + const id = `blob:nodedata:${nextBlobUrlId()}`; + getBlobUrlStore().set(id, obj); + return id; + } + + static revokeObjectURL(url?: unknown): void { + if (arguments.length < 1) { + throw createMissingArgsError('The "url" argument must be specified'); + } + if (typeof url !== "string") { + return; + } + getBlobUrlStore().delete(url); + } + + get href(): string { + if (!(this instanceof URL)) { + throw createUrlReceiverTypeError(); + } + return this.#impl.href; + } + + set href(value: unknown) { + this.#impl.href = toNodeUSVString(value); + } + + get origin(): string { + return this.#impl.origin; + } + + get protocol(): string { + return this.#impl.protocol; + } + + set protocol(value: unknown) { + this.#impl.protocol = toNodeUSVString(value); + } + + get username(): string { + return this.#impl.username; + } + + set username(value: unknown) { + this.#impl.username = toNodeUSVString(value); + } + + get password(): string { + return this.#impl.password; + } + + set password(value: unknown) { + this.#impl.password = toNodeUSVString(value); + } + + get host(): string { + return this.#impl.host; + } + + set host(value: unknown) { + this.#impl.host = toNodeUSVString(value); + } + + get hostname(): string { + return this.#impl.hostname; + } + + set hostname(value: unknown) { + this.#impl.hostname = toNodeUSVString(value); + } + + get port(): string { + return this.#impl.port; + } + + set port(value: unknown) { + this.#impl.port = toNodeUSVString(value); + } + + get pathname(): string { + return this.#impl.pathname; + } + + set pathname(value: unknown) { + this.#impl.pathname = toNodeUSVString(value); + } + + get search(): string { + if (!(this instanceof URL)) { + throw createUrlReceiverTypeError(); + } + return this.#impl.search; + } + + set search(value: unknown) { + this.#impl.search = toNodeUSVString(value); + } + + get searchParams(): URLSearchParams { + if (!this.#searchParams) { + this.#searchParams = new 
URLSearchParams({ + [kLinkedSearchParams]: () => this.#impl.searchParams, + }); + } + return this.#searchParams; + } + + get hash(): string { + return this.#impl.hash; + } + + set hash(value: unknown) { + this.#impl.hash = toNodeUSVString(value); + } + + toString(): string { + if (!(this instanceof URL)) { + throw createUrlReceiverTypeError(); + } + return this.#impl.href; + } + + toJSON(): string { + if (!(this instanceof URL)) { + throw createUrlReceiverTypeError(); + } + return this.#impl.href; + } + + [inspectCustomSymbol]( + depth: number, + options?: { showHidden?: boolean }, + inspect?: (value: unknown, options?: unknown) => string, + ): string { + const inspectName = this.constructor === URL ? "URL" : this.constructor.name; + if (depth < 0) { + return `${inspectName} {}`; + } + + const formatValue = + typeof inspect === "function" + ? (value: unknown) => inspect(value, options) + : (value: unknown) => JSON.stringify(value); + const lines = [ + `${inspectName} {`, + ` href: ${formatValue(this.href)},`, + ` origin: ${formatValue(this.origin)},`, + ` protocol: ${formatValue(this.protocol)},`, + ` username: ${formatValue(this.username)},`, + ` password: ${formatValue(this.password)},`, + ` host: ${formatValue(this.host)},`, + ` hostname: ${formatValue(this.hostname)},`, + ` port: ${formatValue(this.port)},`, + ` pathname: ${formatValue(this.pathname)},`, + ` search: ${formatValue(this.search)},`, + ` searchParams: ${this.searchParams[inspectCustomSymbol](depth - 1, undefined, inspect)},`, + ` hash: ${formatValue(this.hash)}`, + ]; + + if (options?.showHidden) { + lines[lines.length - 1] += ","; + lines.push(` [Symbol(context)]: ${formatUrlContext(this, inspect, options)}`); + } + + lines.push("}"); + return lines.join("\n"); + } + + get [toStringTagSymbol](): string { + return "URL"; + } +} + +for (const name of ["toString", "toJSON"] as const) { + Object.defineProperty(URL.prototype, name, { + value: URL.prototype[name], + writable: true, + configurable: true, 
+ enumerable: true, + }); +} + +for (const name of [ + "href", + "protocol", + "username", + "password", + "host", + "hostname", + "port", + "pathname", + "search", + "hash", + "origin", + "searchParams", +] as const) { + const descriptor = Object.getOwnPropertyDescriptor(URL.prototype, name); + if (!descriptor) { + continue; + } + descriptor.enumerable = true; + Object.defineProperty(URL.prototype, name, descriptor); +} + +Object.defineProperties(URL.prototype, { + [inspectCustomSymbol]: { + value: URL.prototype[inspectCustomSymbol], + writable: true, + configurable: true, + enumerable: false, + }, + [toStringTagSymbol]: { + get: Object.getOwnPropertyDescriptor(URL.prototype, toStringTagSymbol)?.get, + configurable: true, + enumerable: false, + }, +}); + +for (const name of ["canParse", "createObjectURL", "revokeObjectURL"] as const) { + Object.defineProperty(URL, name, { + value: URL[name], + writable: true, + configurable: true, + enumerable: true, + }); +} + +export function installWhatwgUrlGlobals(target: typeof globalThis = globalThis): void { + Object.defineProperty(target, "URL", { + value: URL, + writable: true, + configurable: true, + enumerable: false, + }); + Object.defineProperty(target, "URLSearchParams", { + value: URLSearchParams, + writable: true, + configurable: true, + enumerable: false, + }); +} diff --git a/packages/nodejs/src/builtin-modules.ts b/packages/nodejs/src/builtin-modules.ts index 5f8ad251..1ff1241a 100644 --- a/packages/nodejs/src/builtin-modules.ts +++ b/packages/nodejs/src/builtin-modules.ts @@ -198,6 +198,8 @@ export const BUILTIN_NAMED_EXPORTS: Record = { "IncomingMessage", "ServerResponse", "Agent", + "validateHeaderName", + "validateHeaderValue", "METHODS", "STATUS_CODES", ], diff --git a/packages/nodejs/src/default-network-adapter.ts b/packages/nodejs/src/default-network-adapter.ts index a7cc7c25..3832b9a2 100644 --- a/packages/nodejs/src/default-network-adapter.ts +++ b/packages/nodejs/src/default-network-adapter.ts @@ 
-244,7 +244,10 @@ export function createDefaultNetworkAdapter( async httpRequest(url, requestOptions) { await assertNotPrivateHost(url, allowLoopbackPort); - return new Promise((resolve, reject) => { + type HttpRequestResult = Awaited> & { + rawHeaders?: string[]; + }; + return new Promise((resolve, reject) => { const urlObj = new URL(url); const isHttps = urlObj.protocol === "https:"; const transport = isHttps ? https : http; @@ -291,6 +294,7 @@ export function createDefaultNetworkAdapter( url.endsWith(".tgz"); const headers: Record = {}; + const rawHeaders = [...res.rawHeaders]; Object.entries(res.headers).forEach(([key, value]) => { if (typeof value === "string") headers[key] = value; else if (Array.isArray(value)) headers[key] = value.join(", "); @@ -310,6 +314,7 @@ export function createDefaultNetworkAdapter( status: res.statusCode || 200, statusText: res.statusMessage || "OK", headers, + rawHeaders, url, ...(hasTrailers ? { trailers } : {}), }; @@ -326,6 +331,7 @@ export function createDefaultNetworkAdapter( req.on("upgrade", (res, socket, head) => { const headers: Record = {}; + const rawHeaders = [...res.rawHeaders]; Object.entries(res.headers).forEach(([key, value]) => { if (typeof value === "string") headers[key] = value; else if (Array.isArray(value)) headers[key] = value.join(", "); @@ -350,6 +356,41 @@ export function createDefaultNetworkAdapter( status: res.statusCode || 101, statusText: res.statusMessage || "Switching Protocols", headers, + rawHeaders, + body: head.toString("base64"), + url, + upgradeSocketId: socketId, + }); + }); + + req.on("connect", (res, socket, head) => { + const headers: Record = {}; + const rawHeaders = [...res.rawHeaders]; + Object.entries(res.headers).forEach(([key, value]) => { + if (typeof value === "string") headers[key] = value; + else if (Array.isArray(value)) headers[key] = value.join(", "); + }); + + const socketId = nextUpgradeSocketId++; + upgradeSockets.set(socketId, socket); + + socket.on("data", (chunk) => { + 
if (onUpgradeSocketData) { + onUpgradeSocketData(socketId, chunk.toString("base64")); + } + }); + socket.on("close", () => { + if (onUpgradeSocketEnd) { + onUpgradeSocketEnd(socketId); + } + upgradeSockets.delete(socketId); + }); + + resolve({ + status: res.statusCode || 200, + statusText: res.statusMessage || "Connection established", + headers, + rawHeaders, body: head.toString("base64"), url, upgradeSocketId: socketId, diff --git a/packages/nodejs/src/execution-driver.ts b/packages/nodejs/src/execution-driver.ts index fa8f5e89..a42ff923 100644 --- a/packages/nodejs/src/execution-driver.ts +++ b/packages/nodejs/src/execution-driver.ts @@ -193,8 +193,31 @@ if (typeof atob === 'undefined') { }; } if (typeof TextEncoder === 'undefined') { + const _encodeUtf8 = (str = '') => { + const bytes = []; + for (let i = 0; i < str.length; i++) { + const codeUnit = str.charCodeAt(i); + let codePoint = codeUnit; + if (codeUnit >= 0xD800 && codeUnit <= 0xDBFF) { + const next = i + 1 < str.length ? str.charCodeAt(i + 1) : 0; + if (next >= 0xDC00 && next <= 0xDFFF) { + codePoint = 0x10000 + ((codeUnit - 0xD800) << 10) + (next - 0xDC00); + i++; + } else { + codePoint = 0xFFFD; + } + } else if (codeUnit >= 0xDC00 && codeUnit <= 0xDFFF) { + codePoint = 0xFFFD; + } + if (codePoint < 0x80) bytes.push(codePoint); + else if (codePoint < 0x800) bytes.push(0xC0 | (codePoint >> 6), 0x80 | (codePoint & 63)); + else if (codePoint < 0x10000) bytes.push(0xE0 | (codePoint >> 12), 0x80 | ((codePoint >> 6) & 63), 0x80 | (codePoint & 63)); + else bytes.push(0xF0 | (codePoint >> 18), 0x80 | ((codePoint >> 12) & 63), 0x80 | ((codePoint >> 6) & 63), 0x80 | (codePoint & 63)); + } + return new Uint8Array(bytes); + }; globalThis.TextEncoder = class TextEncoder { - encode(str) { const a = []; for (let i = 0; i < str.length; i++) { const c = str.charCodeAt(i); if (c < 128) a.push(c); else if (c < 2048) { a.push(192|(c>>6), 128|(c&63)); } else { a.push(224|(c>>12), 128|((c>>6)&63), 128|(c&63)); } } return 
new Uint8Array(a); } + encode(str = '') { return _encodeUtf8(String(str)); } get encoding() { return 'utf-8'; } }; } @@ -225,10 +248,92 @@ if (typeof structuredClone === 'undefined') { if (typeof performance === 'undefined') { globalThis.performance = { now: () => Date.now(), timeOrigin: Date.now() }; } -if (typeof AbortController === 'undefined') { - class AbortSignal { constructor() { this.aborted = false; this.reason = undefined; } } +if ( + typeof AbortController === 'undefined' || + typeof AbortSignal === 'undefined' || + typeof AbortSignal.prototype?.addEventListener !== 'function' || + typeof AbortSignal.prototype?.removeEventListener !== 'function' +) { + const abortSignalState = new WeakMap(); + function getAbortSignalState(signal) { + const state = abortSignalState.get(signal); + if (!state) throw new Error('Invalid AbortSignal'); + return state; + } + class AbortSignal { + constructor() { + this.onabort = null; + abortSignalState.set(this, { + aborted: false, + reason: undefined, + listeners: [], + }); + } + get aborted() { + return getAbortSignalState(this).aborted; + } + get reason() { + return getAbortSignalState(this).reason; + } + get _listeners() { + return getAbortSignalState(this).listeners.slice(); + } + getEventListeners(type) { + if (type !== 'abort') return []; + return getAbortSignalState(this).listeners.slice(); + } + addEventListener(type, listener) { + if (type !== 'abort' || typeof listener !== 'function') return; + getAbortSignalState(this).listeners.push(listener); + } + removeEventListener(type, listener) { + if (type !== 'abort' || typeof listener !== 'function') return; + const listeners = getAbortSignalState(this).listeners; + const index = listeners.indexOf(listener); + if (index !== -1) { + listeners.splice(index, 1); + } + } + dispatchEvent(event) { + if (!event || event.type !== 'abort') return false; + if (typeof this.onabort === 'function') { + try { + this.onabort.call(this, event); + } catch {} + } + const listeners = 
getAbortSignalState(this).listeners.slice(); + for (const listener of listeners) { + try { + listener.call(this, event); + } catch {} + } + return true; + } + } globalThis.AbortSignal = AbortSignal; - globalThis.AbortController = class AbortController { constructor() { this.signal = new AbortSignal(); } abort(reason) { this.signal.aborted = true; this.signal.reason = reason; } }; + globalThis.AbortController = class AbortController { + constructor() { + this.signal = new AbortSignal(); + } + abort(reason) { + const state = getAbortSignalState(this.signal); + if (state.aborted) return; + state.aborted = true; + state.reason = reason; + this.signal.dispatchEvent({ type: 'abort' }); + } + }; +} +if ( + typeof globalThis.AbortSignal === 'function' && + typeof globalThis.AbortController === 'function' && + typeof globalThis.AbortSignal.abort !== 'function' +) { + globalThis.AbortSignal.abort = function abort(reason) { + const controller = new globalThis.AbortController(); + controller.abort(reason); + return controller.signal; + }; } if (typeof navigator === 'undefined') { globalThis.navigator = { userAgent: 'secure-exec-v8' }; @@ -697,6 +802,7 @@ export class NodeExecutionDriver implements RuntimeDriver { }, { // Dispatch handlers routed through _loadPolyfill for V8 runtime compat ...cryptoResult.handlers, + ...networkBridgeResult.handlers, ...netSocketResult.handlers, ...buildModuleResolutionBridgeHandlers({ sandboxToHostPath: (p) => { diff --git a/packages/playground/backend/server.ts b/packages/playground/backend/server.ts index 35fad54e..9f2ec893 100644 --- a/packages/playground/backend/server.ts +++ b/packages/playground/backend/server.ts @@ -21,14 +21,10 @@ import { fileURLToPath } from "node:url"; const DEFAULT_PORT = Number(process.env.PORT ?? 
"4173"); const playgroundDir = resolve(fileURLToPath(new URL("..", import.meta.url))); const secureExecDir = resolve(playgroundDir, "../secure-exec"); -const secureExecCoreDir = resolve(playgroundDir, "../secure-exec-core"); -const secureExecBrowserDir = resolve(playgroundDir, "../secure-exec-browser"); /* Map URL prefixes to filesystem directories outside playgroundDir */ const PATH_ALIASES: Array<{ prefix: string; dir: string }> = [ { prefix: "/secure-exec/", dir: secureExecDir }, - { prefix: "/secure-exec-core/", dir: secureExecCoreDir }, - { prefix: "/secure-exec-browser/", dir: secureExecBrowserDir }, ]; const mimeTypes = new Map([ diff --git a/packages/playground/frontend/index.html b/packages/playground/frontend/index.html index b6f704be..9838da0e 100644 --- a/packages/playground/frontend/index.html +++ b/packages/playground/frontend/index.html @@ -9,15 +9,6 @@ -