deps: update undici to 7.16.0

PR-URL: https://github.com/nodejs/node/pull/59830
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Richard Lau <richard.lau@ibm.com>
Reviewed-By: Matthew Aitken <maitken033380023@gmail.com>
Reviewed-By: Rafael Gonzaga <rafael.nunu@hotmail.com>
Reviewed-By: Trivikram Kamat <trivikr.dev@gmail.com>
Node.js GitHub Bot authored on 2025-09-11 18:21:54 +01:00; committed by GitHub
parent 220baad8ba
commit ac131bdc01
78 changed files with 5464 additions and 3982 deletions

deps/undici/src/.gitmodules (vendored, new file, 6 lines added)

@@ -0,0 +1,6 @@
[submodule "test/web-platform-tests/wpt"]
path = test/web-platform-tests/wpt
url = https://github.com/web-platform-tests/wpt.git
[submodule "test/fixtures/cache-tests"]
path = test/fixtures/cache-tests
url = https://github.com/http-tests/cache-tests


@@ -94,34 +94,50 @@ Create a commit which includes all of the updated files in lib/llhttp.
`undici` runs a subset of the [`web-platform-tests`](https://github.com/web-platform-tests/wpt).
### Requirements:
- [Node core utils](https://github.com/nodejs/node-core-utils) setup with credentials.
The following commands update every test; typically you only need to update the tests in a specific directory.
### Steps:
```bash
git node wpt resources
git node wpt interfaces
git node wpt common
git node wpt fetch
git node wpt xhr
git node wpt websockets
git node wpt mimesniff
git node wpt storage
git node wpt service-workers
git node wpt eventsource
git submodule update --init --recursive
```
#### Run the tests
### Run the tests
Run the tests to ensure that any new failures are marked as such.
You can mark tests as failing in their corresponding [status](./test/wpt/status) file.
Before running the tests for the first time, you must set up the testing environment.
```bash
cd test/web-platform-tests
node wpt-runner.mjs setup
```
To run all tests:
```bash
npm run test:wpt
```
To run a subset of tests:
```bash
cd test/web-platform-tests
node wpt-runner.mjs run [filter] [filterb]
```
To run a single file:
```bash
cd test/web-platform-tests
node wpt-runner.mjs run /path/to/test
```
### Debugging
Verbose logging can be enabled by setting the [`NODE_DEBUG`](https://nodejs.org/api/cli.html#node_debugmodule) flag:
```bash
npx cross-env NODE_DEBUG=UNDICI_WPT node --run test:wpt
```
(`npx cross-env` can be omitted on Linux and Mac)
<a id="lint"></a>
### Lint


@@ -1,6 +1,6 @@
# undici
[![Node CI](https://github.com/nodejs/undici/actions/workflows/nodejs.yml/badge.svg)](https://github.com/nodejs/undici/actions/workflows/nodejs.yml) [![neostandard javascript style](https://img.shields.io/badge/neo-standard-7fffff?style=flat\&labelColor=ff80ff)](https://github.com/neostandard/neostandard) [![npm version](https://badge.fury.io/js/undici.svg)](https://badge.fury.io/js/undici) [![codecov](https://codecov.io/gh/nodejs/undici/branch/main/graph/badge.svg?token=yZL6LtXkOA)](https://codecov.io/gh/nodejs/undici)
[![Node CI](https://github.com/nodejs/undici/actions/workflows/ci.yml/badge.svg)](https://github.com/nodejs/undici/actions/workflows/nodejs.yml) [![neostandard javascript style](https://img.shields.io/badge/neo-standard-7fffff?style=flat\&labelColor=ff80ff)](https://github.com/neostandard/neostandard) [![npm version](https://badge.fury.io/js/undici.svg)](https://badge.fury.io/js/undici) [![codecov](https://codecov.io/gh/nodejs/undici/branch/main/graph/badge.svg?token=yZL6LtXkOA)](https://codecov.io/gh/nodejs/undici)
An HTTP/1.1 client, written from scratch for Node.js.


@@ -2,7 +2,7 @@
const WASM_BUILDER_CONTAINER = 'ghcr.io/nodejs/wasm-builder@sha256:975f391d907e42a75b8c72eb77c782181e941608687d4d8694c3e9df415a0970' // v0.0.9
const { execSync } = require('node:child_process')
const { execSync, execFileSync } = require('node:child_process')
const { writeFileSync, readFileSync } = require('node:fs')
const { join, resolve } = require('node:path')
@@ -69,10 +69,10 @@ if (process.argv[2] === '--docker') {
}
const hasApk = (function () {
try { execSync('command -v apk'); return true } catch (error) { return false }
try { execSync('command -v apk'); return true } catch { return false }
})()
const hasOptimizer = (function () {
try { execSync(`${WASM_OPT} --version`); return true } catch (error) { return false }
try { execSync(`${WASM_OPT} --version`); return true } catch { return false }
})()
if (hasApk) {
// Gather information about the tools used for the build
@@ -104,7 +104,19 @@ ${join(WASM_SRC, 'src')}/*.c \
${WASM_LDLIBS}`, { stdio: 'inherit' })
if (hasOptimizer) {
execSync(`${WASM_OPT} ${WASM_OPT_FLAGS} --enable-simd -o ${join(WASM_OUT, 'llhttp_simd.wasm')} ${join(WASM_OUT, 'llhttp_simd.wasm')}`, { stdio: 'inherit' })
// Split WASM_OPT_FLAGS into an array, if not empty
const wasmOptFlagsArray = WASM_OPT_FLAGS ? WASM_OPT_FLAGS.split(/\s+/).filter(Boolean) : []
execFileSync(
WASM_OPT,
[
...wasmOptFlagsArray,
'--enable-simd',
'-o',
join(WASM_OUT, 'llhttp_simd.wasm'),
join(WASM_OUT, 'llhttp_simd.wasm')
],
{ stdio: 'inherit' }
)
}
writeWasmChunk('llhttp_simd.wasm', 'llhttp_simd-wasm.js')


@@ -1,357 +0,0 @@
#ifndef INCLUDE_LLHTTP_API_H_
#define INCLUDE_LLHTTP_API_H_
#ifdef __cplusplus
extern "C" {
#endif
#include <stddef.h>
#if defined(__wasm__)
#define LLHTTP_EXPORT __attribute__((visibility("default")))
#elif defined(_WIN32)
#define LLHTTP_EXPORT __declspec(dllexport)
#else
#define LLHTTP_EXPORT
#endif
typedef llhttp__internal_t llhttp_t;
typedef struct llhttp_settings_s llhttp_settings_t;
typedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length);
typedef int (*llhttp_cb)(llhttp_t*);
struct llhttp_settings_s {
/* Possible return values 0, -1, `HPE_PAUSED` */
llhttp_cb on_message_begin;
/* Possible return values 0, -1, HPE_USER */
llhttp_data_cb on_url;
llhttp_data_cb on_status;
llhttp_data_cb on_method;
llhttp_data_cb on_version;
llhttp_data_cb on_header_field;
llhttp_data_cb on_header_value;
llhttp_data_cb on_chunk_extension_name;
llhttp_data_cb on_chunk_extension_value;
/* Possible return values:
* 0 - Proceed normally
* 1 - Assume that request/response has no body, and proceed to parsing the
* next message
* 2 - Assume absence of body (as above) and make `llhttp_execute()` return
* `HPE_PAUSED_UPGRADE`
* -1 - Error
* `HPE_PAUSED`
*/
llhttp_cb on_headers_complete;
/* Possible return values 0, -1, HPE_USER */
llhttp_data_cb on_body;
/* Possible return values 0, -1, `HPE_PAUSED` */
llhttp_cb on_message_complete;
llhttp_cb on_url_complete;
llhttp_cb on_status_complete;
llhttp_cb on_method_complete;
llhttp_cb on_version_complete;
llhttp_cb on_header_field_complete;
llhttp_cb on_header_value_complete;
llhttp_cb on_chunk_extension_name_complete;
llhttp_cb on_chunk_extension_value_complete;
/* When on_chunk_header is called, the current chunk length is stored
* in parser->content_length.
* Possible return values 0, -1, `HPE_PAUSED`
*/
llhttp_cb on_chunk_header;
llhttp_cb on_chunk_complete;
llhttp_cb on_reset;
};
/* Initialize the parser with specific type and user settings.
*
* NOTE: lifetime of `settings` has to be at least the same as the lifetime of
* the `parser` here. In practice, `settings` has to be either a static
* variable or be allocated with `malloc`, `new`, etc.
*/
LLHTTP_EXPORT
void llhttp_init(llhttp_t* parser, llhttp_type_t type,
const llhttp_settings_t* settings);
LLHTTP_EXPORT
llhttp_t* llhttp_alloc(llhttp_type_t type);
LLHTTP_EXPORT
void llhttp_free(llhttp_t* parser);
LLHTTP_EXPORT
uint8_t llhttp_get_type(llhttp_t* parser);
LLHTTP_EXPORT
uint8_t llhttp_get_http_major(llhttp_t* parser);
LLHTTP_EXPORT
uint8_t llhttp_get_http_minor(llhttp_t* parser);
LLHTTP_EXPORT
uint8_t llhttp_get_method(llhttp_t* parser);
LLHTTP_EXPORT
int llhttp_get_status_code(llhttp_t* parser);
LLHTTP_EXPORT
uint8_t llhttp_get_upgrade(llhttp_t* parser);
/* Reset an already initialized parser back to the start state, preserving the
* existing parser type, callback settings, user data, and lenient flags.
*/
LLHTTP_EXPORT
void llhttp_reset(llhttp_t* parser);
/* Initialize the settings object */
LLHTTP_EXPORT
void llhttp_settings_init(llhttp_settings_t* settings);
/* Parse full or partial request/response, invoking user callbacks along the
* way.
*
* If any of `llhttp_data_cb` returns errno not equal to `HPE_OK` - the parsing
* interrupts, and such errno is returned from `llhttp_execute()`. If
* `HPE_PAUSED` was used as an errno, the execution can be resumed with
* `llhttp_resume()` call.
*
* In a special case of CONNECT/Upgrade request/response `HPE_PAUSED_UPGRADE`
* is returned after fully parsing the request/response. If the user wishes to
* continue parsing, they need to invoke `llhttp_resume_after_upgrade()`.
*
* NOTE: if this function ever returns a non-pause type error, it will continue
* to return the same error upon each successive call up until `llhttp_init()`
* is called.
*/
LLHTTP_EXPORT
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len);
/* This method should be called when the other side has no further bytes to
* send (e.g. shutdown of readable side of the TCP connection.)
*
* Requests without `Content-Length` and other messages might require treating
* all incoming bytes as the part of the body, up to the last byte of the
* connection. This method will invoke `on_message_complete()` callback if the
* request was terminated safely. Otherwise an error code would be returned.
*/
LLHTTP_EXPORT
llhttp_errno_t llhttp_finish(llhttp_t* parser);
/* Returns `1` if the incoming message is parsed until the last byte, and has
* to be completed by calling `llhttp_finish()` on EOF
*/
LLHTTP_EXPORT
int llhttp_message_needs_eof(const llhttp_t* parser);
/* Returns `1` if there might be any other messages following the last that was
* successfully parsed.
*/
LLHTTP_EXPORT
int llhttp_should_keep_alive(const llhttp_t* parser);
/* Make further calls of `llhttp_execute()` return `HPE_PAUSED` and set
* appropriate error reason.
*
* Important: do not call this from user callbacks! User callbacks must return
* `HPE_PAUSED` if pausing is required.
*/
LLHTTP_EXPORT
void llhttp_pause(llhttp_t* parser);
/* Might be called to resume the execution after the pause in user's callback.
* See `llhttp_execute()` above for details.
*
* Call this only if `llhttp_execute()` returns `HPE_PAUSED`.
*/
LLHTTP_EXPORT
void llhttp_resume(llhttp_t* parser);
/* Might be called to resume the execution after the pause in user's callback.
* See `llhttp_execute()` above for details.
*
* Call this only if `llhttp_execute()` returns `HPE_PAUSED_UPGRADE`
*/
LLHTTP_EXPORT
void llhttp_resume_after_upgrade(llhttp_t* parser);
/* Returns the latest return error */
LLHTTP_EXPORT
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser);
/* Returns the verbal explanation of the latest returned error.
*
* Note: User callback should set error reason when returning the error. See
* `llhttp_set_error_reason()` for details.
*/
LLHTTP_EXPORT
const char* llhttp_get_error_reason(const llhttp_t* parser);
/* Assign verbal description to the returned error. Must be called in user
* callbacks right before returning the errno.
*
* Note: `HPE_USER` error code might be useful in user callbacks.
*/
LLHTTP_EXPORT
void llhttp_set_error_reason(llhttp_t* parser, const char* reason);
/* Returns the pointer to the last parsed byte before the returned error. The
* pointer is relative to the `data` argument of `llhttp_execute()`.
*
* Note: this method might be useful for counting the number of parsed bytes.
*/
LLHTTP_EXPORT
const char* llhttp_get_error_pos(const llhttp_t* parser);
/* Returns textual name of error code */
LLHTTP_EXPORT
const char* llhttp_errno_name(llhttp_errno_t err);
/* Returns textual name of HTTP method */
LLHTTP_EXPORT
const char* llhttp_method_name(llhttp_method_t method);
/* Returns textual name of HTTP status */
LLHTTP_EXPORT
const char* llhttp_status_name(llhttp_status_t status);
/* Enables/disables lenient header value parsing (disabled by default).
*
* Lenient parsing disables header value token checks, extending llhttp's
* protocol support to highly non-compliant clients/servers. No
* `HPE_INVALID_HEADER_TOKEN` will be raised for incorrect header values when
* lenient parsing is "on".
*
* **Enabling this flag can pose a security issue since you will be exposed to
* request smuggling attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled);
/* Enables/disables lenient handling of conflicting `Transfer-Encoding` and
* `Content-Length` headers (disabled by default).
*
* Normally `llhttp` would error when `Transfer-Encoding` is present in
* conjunction with `Content-Length`. This error is important to prevent HTTP
* request smuggling, but may be less desirable for a small number of cases
* involving legacy servers.
*
* **Enabling this flag can pose a security issue since you will be exposed to
* request smuggling attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled);
/* Enables/disables lenient handling of `Connection: close` and HTTP/1.0
* requests/responses.
*
* Normally `llhttp` would error on (in strict mode) or discard (in loose mode)
* the HTTP request/response after the request/response with `Connection: close`
* and `Content-Length`. This is important to prevent cache poisoning attacks,
* but might interact badly with outdated and insecure clients. With this flag
* the extra request/response will be parsed normally.
*
* **Enabling this flag can pose a security issue since you will be exposed to
* poisoning attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_keep_alive(llhttp_t* parser, int enabled);
/* Enables/disables lenient handling of `Transfer-Encoding` header.
*
* Normally `llhttp` would error when a `Transfer-Encoding` has `chunked` value
* and another value after it (either in a single header or in multiple
* headers whose value are internally joined using `, `).
* This is mandated by the spec to reliably determine request body size and thus
* avoid request smuggling.
* With this flag the extra value will be parsed normally.
*
* **Enabling this flag can pose a security issue since you will be exposed to
* request smuggling attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_transfer_encoding(llhttp_t* parser, int enabled);
/* Enables/disables lenient handling of HTTP version.
*
* Normally `llhttp` would error when the HTTP version in the request or status line
* is not `0.9`, `1.0`, `1.1` or `2.0`.
* With this flag the invalid value will be parsed normally.
*
* **Enabling this flag can pose a security issue since you will allow unsupported
* HTTP versions. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_version(llhttp_t* parser, int enabled);
/* Enables/disables lenient handling of additional data received after a message ends
* and keep-alive is disabled.
*
* Normally `llhttp` would error when additional unexpected data is received if the message
* contains the `Connection` header with `close` value.
* With this flag the extra data will be discarded without throwing an error.
*
* **Enabling this flag can pose a security issue since you will be exposed to
* poisoning attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_data_after_close(llhttp_t* parser, int enabled);
/* Enables/disables lenient handling of incomplete CRLF sequences.
*
* Normally `llhttp` would error when a CR is not followed by LF when terminating the
* request line, the status line, the headers or a chunk header.
* With this flag only a CR is required to terminate such sections.
*
* **Enabling this flag can pose a security issue since you will be exposed to
* request smuggling attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_optional_lf_after_cr(llhttp_t* parser, int enabled);
/*
* Enables/disables lenient handling of line separators.
*
* Normally `llhttp` would error when a LF is not preceded by CR when terminating the
* request line, the status line, the headers, a chunk header or a chunk data.
* With this flag only a LF is required to terminate such sections.
*
* **Enabling this flag can pose a security issue since you will be exposed to
* request smuggling attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled);
/* Enables/disables lenient handling of chunks not separated via CRLF.
*
* Normally `llhttp` would error when the CRLF after chunk data is missing
* before a new chunk starts.
* With this flag the new chunk can start immediately after the previous one.
*
* **Enabling this flag can pose a security issue since you will be exposed to
* request smuggling attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_optional_crlf_after_chunk(llhttp_t* parser, int enabled);
/* Enables/disables lenient handling of spaces after chunk size.
*
* Normally `llhttp` would error when a chunk size is followed by one or more
* spaces instead of a CRLF or `;`.
* With this flag this check is disabled.
*
* **Enabling this flag can pose a security issue since you will be exposed to
* request smuggling attacks. USE WITH CAUTION!**
*/
LLHTTP_EXPORT
void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled);
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* INCLUDE_LLHTTP_API_H_ */


@@ -3,7 +3,7 @@
#define INCLUDE_LLHTTP_H_
#define LLHTTP_VERSION_MAJOR 9
#define LLHTTP_VERSION_MINOR 2
#define LLHTTP_VERSION_MINOR 3
#define LLHTTP_VERSION_PATCH 0
#ifndef INCLUDE_LLHTTP_ITSELF_H_
@@ -90,7 +90,8 @@ enum llhttp_errno {
HPE_CB_HEADER_VALUE_COMPLETE = 29,
HPE_CB_CHUNK_EXTENSION_NAME_COMPLETE = 34,
HPE_CB_CHUNK_EXTENSION_VALUE_COMPLETE = 35,
HPE_CB_RESET = 31
HPE_CB_RESET = 31,
HPE_CB_PROTOCOL_COMPLETE = 38
};
typedef enum llhttp_errno llhttp_errno_t;
@@ -326,6 +327,7 @@ typedef enum llhttp_status llhttp_status_t;
XX(34, CB_CHUNK_EXTENSION_NAME_COMPLETE, CB_CHUNK_EXTENSION_NAME_COMPLETE) \
XX(35, CB_CHUNK_EXTENSION_VALUE_COMPLETE, CB_CHUNK_EXTENSION_VALUE_COMPLETE) \
XX(31, CB_RESET, CB_RESET) \
XX(38, CB_PROTOCOL_COMPLETE, CB_PROTOCOL_COMPLETE) \
#define HTTP_METHOD_MAP(XX) \
@@ -567,6 +569,7 @@ struct llhttp_settings_s {
llhttp_cb on_message_begin;
/* Possible return values 0, -1, HPE_USER */
llhttp_data_cb on_protocol;
llhttp_data_cb on_url;
llhttp_data_cb on_status;
llhttp_data_cb on_method;
@@ -592,6 +595,7 @@ struct llhttp_settings_s {
/* Possible return values 0, -1, `HPE_PAUSED` */
llhttp_cb on_message_complete;
llhttp_cb on_protocol_complete;
llhttp_cb on_url_complete;
llhttp_cb on_status_complete;
llhttp_cb on_method_complete;


@@ -57,29 +57,14 @@ static int wasm_on_headers_complete_wrap(llhttp_t* p) {
}
const llhttp_settings_t wasm_settings = {
wasm_on_message_begin,
wasm_on_url,
wasm_on_status,
NULL,
NULL,
wasm_on_header_field,
wasm_on_header_value,
NULL,
NULL,
wasm_on_headers_complete_wrap,
wasm_on_body,
wasm_on_message_complete,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
.on_message_begin = wasm_on_message_begin,
.on_url = wasm_on_url,
.on_status = wasm_on_status,
.on_header_field = wasm_on_header_field,
.on_header_value = wasm_on_header_value,
.on_headers_complete = wasm_on_headers_complete_wrap,
.on_body = wasm_on_body,
.on_message_complete = wasm_on_message_complete,
};
@@ -341,6 +326,20 @@ int llhttp__on_message_begin(llhttp_t* s, const char* p, const char* endp) {
}
int llhttp__on_protocol(llhttp_t* s, const char* p, const char* endp) {
int err;
SPAN_CALLBACK_MAYBE(s, on_protocol, p, endp - p);
return err;
}
int llhttp__on_protocol_complete(llhttp_t* s, const char* p, const char* endp) {
int err;
CALLBACK_MAYBE(s, on_protocol_complete);
return err;
}
int llhttp__on_url(llhttp_t* s, const char* p, const char* endp) {
int err;
SPAN_CALLBACK_MAYBE(s, on_url, p, endp - p);

File diff suppressed because it is too large.


@@ -19,6 +19,7 @@ Returns: `Agent`
Extends: [`PoolOptions`](/docs/docs/api/Pool.md#parameter-pooloptions)
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)`
* **maxOrigins** `number` (optional) - Default: `Infinity` - Limits the total number of origins that can receive requests at a time, throwing a `MaxOriginsReachedError` when a dispatch would exceed the limit. If `Infinity`, no limit is enforced. (See the sketch below.)
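
A minimal usage sketch. The origins below are placeholders, and `MaxOriginsReachedError` is assumed to be exposed via the `errors` export, as the error-module changes later in this diff suggest:

```js
const { Agent, errors } = require('undici')

// Allow connection pools for at most two distinct origins at a time.
const agent = new Agent({ maxOrigins: 2 })

async function main () {
  await agent.request({ origin: 'http://a.example', method: 'GET', path: '/' })
  await agent.request({ origin: 'http://b.example', method: 'GET', path: '/' })
  try {
    // A third distinct origin while two are still active is rejected.
    await agent.request({ origin: 'http://c.example', method: 'GET', path: '/' })
  } catch (err) {
    if (err instanceof errors.MaxOriginsReachedError) {
      // Back off, or wait for an existing pool to close before retrying.
    }
  }
}

main().catch(console.error)
```
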
## Instance Properties


@@ -1094,6 +1094,65 @@ await client.request({
});
```
##### `decompress`
⚠️ The decompress interceptor is experimental and subject to change.
The `decompress` interceptor automatically decompresses response bodies compressed with gzip, deflate, brotli, or zstd. It removes the `content-encoding` and `content-length` headers from decompressed responses and supports multiple stacked encodings, as permitted by RFC 9110.
**Options**
- `skipErrorResponses` - Whether to skip decompression for error responses (status codes >= 400). Default: `true`.
- `skipStatusCodes` - Array of status codes to skip decompression for. Default: `[204, 304]`.
**Example - Basic Decompress Interceptor**
```js
const { Client, interceptors } = require("undici");
const { decompress } = interceptors;
const client = new Client("http://example.com").compose(
decompress()
);
// Automatically decompresses gzip/deflate/brotli/zstd responses
const response = await client.request({
method: "GET",
path: "/"
});
```
**Example - Custom Options**
```js
const { Client, interceptors } = require("undici");
const { decompress } = interceptors;
const client = new Client("http://example.com").compose(
decompress({
skipErrorResponses: false, // Decompress 5xx responses
skipStatusCodes: [204, 304, 201] // Skip these status codes
})
);
```
**Supported Encodings**
- `gzip` / `x-gzip` - GZIP compression
- `deflate` / `x-compress` - DEFLATE compression
- `br` - Brotli compression
- `zstd` - Zstandard compression
- Multiple encodings (e.g., `gzip, deflate`) are supported per RFC-9110
**Behavior**
- Skips decompression for status codes < 200 or >= 400 (configurable)
- Skips decompression for 204 No Content and 304 Not Modified by default
- Removes `content-encoding` and `content-length` headers when decompressing
- Passes through unsupported encodings unchanged
- Handles encoding names case-insensitively
- Supports streaming decompression without buffering
##### `Cache Interceptor`
The `cache` interceptor implements client-side response caching as described in


@@ -14,7 +14,6 @@ import { errors } from 'undici'
| `HeadersTimeoutError` | `UND_ERR_HEADERS_TIMEOUT` | socket is destroyed due to headers timeout. |
| `HeadersOverflowError` | `UND_ERR_HEADERS_OVERFLOW` | socket is destroyed due to headers' max size being exceeded. |
| `BodyTimeoutError` | `UND_ERR_BODY_TIMEOUT` | socket is destroyed due to body timeout. |
| `ResponseStatusCodeError` | `UND_ERR_RESPONSE_STATUS_CODE` | an error is thrown when `throwOnError` is `true` for status codes >= 400. |
| `InvalidArgumentError` | `UND_ERR_INVALID_ARG` | passed an invalid argument. |
| `InvalidReturnValueError` | `UND_ERR_INVALID_RETURN_VALUE` | returned an invalid value. |
| `RequestAbortedError` | `UND_ERR_ABORTED` | the request has been aborted by the user |
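
As a usage sketch (not part of this diff): the classes in this table are exported under `errors`, and each instance carries a stable `code`, so callers can branch on either the class or the code:

```js
import { request, errors } from 'undici'

try {
  // Placeholder endpoint for illustration only.
  await request('http://localhost:9/')
} catch (err) {
  if (err instanceof errors.UndiciError) {
    // undici-specific failures carry a stable name and code,
    // e.g. 'HeadersTimeoutError' / 'UND_ERR_HEADERS_TIMEOUT'.
    console.error(err.name, err.code)
  } else {
    throw err
  }
}
```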


@@ -1,14 +1,15 @@
'use strict'
const neo = require('neostandard')
const { installedExports } = require('./lib/global')
module.exports = [
...neo({
ignores: [
'lib/llhttp',
'test/fixtures/wpt',
'test/fixtures/cache-tests',
'undici-fetch.js'
'undici-fetch.js',
'test/web-platform-tests/wpt'
],
noJsx: true,
ts: true
@@ -22,7 +23,15 @@ module.exports = [
exports: 'never',
functions: 'never'
}],
'@typescript-eslint/no-redeclare': 'off'
'@typescript-eslint/no-redeclare': 'off',
'no-restricted-globals': ['error',
...installedExports.map(name => {
return {
name,
message: `Use undici-own ${name} instead of the global.`
}
})
]
}
}
]


@@ -4,8 +4,8 @@ const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
const EnvHttpProxyAgent = require('./lib/dispatcher/env-http-proxy-agent')
const fetchImpl = require('./lib/web/fetch').fetch
module.exports.fetch = function fetch (resource, init = undefined) {
return fetchImpl(resource, init).catch((err) => {
module.exports.fetch = function fetch (init, options = undefined) {
return fetchImpl(init, options).catch(err => {
if (err && typeof err === 'object') {
Error.captureStackTrace(err)
}


@@ -46,7 +46,8 @@ module.exports.interceptors = {
retry: require('./lib/interceptor/retry'),
dump: require('./lib/interceptor/dump'),
dns: require('./lib/interceptor/dns'),
cache: require('./lib/interceptor/cache')
cache: require('./lib/interceptor/cache'),
decompress: require('./lib/interceptor/decompress')
}
module.exports.cacheStores = {
@@ -116,16 +117,14 @@ module.exports.setGlobalDispatcher = setGlobalDispatcher
module.exports.getGlobalDispatcher = getGlobalDispatcher
const fetchImpl = require('./lib/web/fetch').fetch
module.exports.fetch = async function fetch (init, options = undefined) {
try {
return await fetchImpl(init, options)
} catch (err) {
module.exports.fetch = function fetch (init, options = undefined) {
return fetchImpl(init, options).catch(err => {
if (err && typeof err === 'object') {
Error.captureStackTrace(err)
}
throw err
}
})
}
module.exports.Headers = require('./lib/web/fetch/headers').Headers
module.exports.Response = require('./lib/web/fetch/response').Response
@@ -140,8 +139,6 @@ module.exports.getGlobalOrigin = getGlobalOrigin
const { CacheStorage } = require('./lib/web/cache/cachestorage')
const { kConstruct } = require('./lib/core/symbols')
// Cache & CacheStorage are tightly coupled with fetch. Even if it may run
// in an older version of Node, it doesn't have any use without fetch.
module.exports.caches = new CacheStorage(kConstruct)
const { deleteCookie, getCookies, getSetCookies, setCookie, parseCookie } = require('./lib/web/cookies')


@@ -118,6 +118,7 @@ class RequestHandler extends AsyncResource {
this.callback = null
this.res = res
if (callback !== null) {
try {
this.runInAsyncScope(callback, null, null, {
statusCode,
headers,
@@ -126,6 +127,19 @@
body: res,
context
})
} catch (err) {
// If the callback throws synchronously, we need to handle it
// Remove reference to res to allow res being garbage collected
this.res = null
// Destroy the response stream
util.destroy(res.on('error', noop), err)
// Use queueMicrotask to re-throw the error so it reaches uncaughtException
queueMicrotask(() => {
throw err
})
}
}
}


@@ -262,24 +262,26 @@ class BodyReadable extends Readable {
* @param {AbortSignal} [opts.signal] An AbortSignal to cancel the dump.
* @returns {Promise<null>}
*/
async dump (opts) {
dump (opts) {
const signal = opts?.signal
if (signal != null && (typeof signal !== 'object' || !('aborted' in signal))) {
throw new InvalidArgumentError('signal must be an AbortSignal')
return Promise.reject(new InvalidArgumentError('signal must be an AbortSignal'))
}
const limit = opts?.limit && Number.isFinite(opts.limit)
? opts.limit
: 128 * 1024
signal?.throwIfAborted()
if (this._readableState.closeEmitted) {
return null
if (signal?.aborted) {
return Promise.reject(signal.reason ?? new AbortError())
}
return await new Promise((resolve, reject) => {
if (this._readableState.closeEmitted) {
return Promise.resolve(null)
}
return new Promise((resolve, reject) => {
if (
(this[kContentLength] && (this[kContentLength] > limit)) ||
this[kBytesRead] > limit


@@ -1,95 +0,0 @@
'use strict'
const assert = require('node:assert')
const {
ResponseStatusCodeError
} = require('../core/errors')
const { chunksDecode } = require('./readable')
const CHUNK_LIMIT = 128 * 1024
async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
assert(body)
let chunks = []
let length = 0
try {
for await (const chunk of body) {
chunks.push(chunk)
length += chunk.length
if (length > CHUNK_LIMIT) {
chunks = []
length = 0
break
}
}
} catch {
chunks = []
length = 0
// Do nothing....
}
const message = `Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`
if (statusCode === 204 || !contentType || !length) {
queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers)))
return
}
const stackTraceLimit = Error.stackTraceLimit
Error.stackTraceLimit = 0
let payload
try {
if (isContentTypeApplicationJson(contentType)) {
payload = JSON.parse(chunksDecode(chunks, length))
} else if (isContentTypeText(contentType)) {
payload = chunksDecode(chunks, length)
}
} catch {
// process in a callback to avoid throwing in the microtask queue
} finally {
Error.stackTraceLimit = stackTraceLimit
}
queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers, payload)))
}
const isContentTypeApplicationJson = (contentType) => {
return (
contentType.length > 15 &&
contentType[11] === '/' &&
contentType[0] === 'a' &&
contentType[1] === 'p' &&
contentType[2] === 'p' &&
contentType[3] === 'l' &&
contentType[4] === 'i' &&
contentType[5] === 'c' &&
contentType[6] === 'a' &&
contentType[7] === 't' &&
contentType[8] === 'i' &&
contentType[9] === 'o' &&
contentType[10] === 'n' &&
contentType[12] === 'j' &&
contentType[13] === 's' &&
contentType[14] === 'o' &&
contentType[15] === 'n'
)
}
const isContentTypeText = (contentType) => {
return (
contentType.length > 4 &&
contentType[4] === '/' &&
contentType[0] === 't' &&
contentType[1] === 'e' &&
contentType[2] === 'x' &&
contentType[3] === 't'
)
}
module.exports = {
getResolveErrorBodyCallback,
isContentTypeApplicationJson,
isContentTypeText
}


@@ -1,13 +1,23 @@
'use strict'
const kUndiciError = Symbol.for('undici.error.UND_ERR')
class UndiciError extends Error {
constructor (message, options) {
super(message, options)
this.name = 'UndiciError'
this.code = 'UND_ERR'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kUndiciError] === true
}
get [kUndiciError] () {
return true
}
}
const kConnectTimeoutError = Symbol.for('undici.error.UND_ERR_CONNECT_TIMEOUT')
class ConnectTimeoutError extends UndiciError {
constructor (message) {
super(message)
@@ -15,8 +25,17 @@ class ConnectTimeoutError extends UndiciError {
this.message = message || 'Connect Timeout Error'
this.code = 'UND_ERR_CONNECT_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kConnectTimeoutError] === true
}
get [kConnectTimeoutError] () {
return true
}
}
const kHeadersTimeoutError = Symbol.for('undici.error.UND_ERR_HEADERS_TIMEOUT')
class HeadersTimeoutError extends UndiciError {
constructor (message) {
super(message)
@@ -24,8 +43,17 @@ class HeadersTimeoutError extends UndiciError {
this.message = message || 'Headers Timeout Error'
this.code = 'UND_ERR_HEADERS_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHeadersTimeoutError] === true
}
get [kHeadersTimeoutError] () {
return true
}
}
const kHeadersOverflowError = Symbol.for('undici.error.UND_ERR_HEADERS_OVERFLOW')
class HeadersOverflowError extends UndiciError {
constructor (message) {
super(message)
@@ -33,8 +61,17 @@ class HeadersOverflowError extends UndiciError {
this.message = message || 'Headers Overflow Error'
this.code = 'UND_ERR_HEADERS_OVERFLOW'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHeadersOverflowError] === true
}
get [kHeadersOverflowError] () {
return true
}
}
const kBodyTimeoutError = Symbol.for('undici.error.UND_ERR_BODY_TIMEOUT')
class BodyTimeoutError extends UndiciError {
constructor (message) {
super(message)
@@ -42,21 +79,17 @@ class BodyTimeoutError extends UndiciError {
this.message = message || 'Body Timeout Error'
this.code = 'UND_ERR_BODY_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kBodyTimeoutError] === true
}
class ResponseStatusCodeError extends UndiciError {
constructor (message, statusCode, headers, body) {
super(message)
this.name = 'ResponseStatusCodeError'
this.message = message || 'Response Status Code Error'
this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
this.body = body
this.status = statusCode
this.statusCode = statusCode
this.headers = headers
get [kBodyTimeoutError] () {
return true
}
}
const kInvalidArgumentError = Symbol.for('undici.error.UND_ERR_INVALID_ARG')
class InvalidArgumentError extends UndiciError {
constructor (message) {
super(message)
@@ -64,8 +97,17 @@ class InvalidArgumentError extends UndiciError {
this.message = message || 'Invalid Argument Error'
this.code = 'UND_ERR_INVALID_ARG'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInvalidArgumentError] === true
}
get [kInvalidArgumentError] () {
return true
}
}
const kInvalidReturnValueError = Symbol.for('undici.error.UND_ERR_INVALID_RETURN_VALUE')
class InvalidReturnValueError extends UndiciError {
constructor (message) {
super(message)
@@ -73,16 +115,35 @@ class InvalidReturnValueError extends UndiciError {
this.message = message || 'Invalid Return Value Error'
this.code = 'UND_ERR_INVALID_RETURN_VALUE'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInvalidReturnValueError] === true
}
get [kInvalidReturnValueError] () {
return true
}
}
const kAbortError = Symbol.for('undici.error.UND_ERR_ABORT')
class AbortError extends UndiciError {
constructor (message) {
super(message)
this.name = 'AbortError'
this.message = message || 'The operation was aborted'
this.code = 'UND_ERR_ABORT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kAbortError] === true
}
get [kAbortError] () {
return true
}
}
const kRequestAbortedError = Symbol.for('undici.error.UND_ERR_ABORTED')
class RequestAbortedError extends AbortError {
constructor (message) {
super(message)
@@ -90,8 +151,17 @@ class RequestAbortedError extends AbortError {
this.message = message || 'Request aborted'
this.code = 'UND_ERR_ABORTED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestAbortedError] === true
}
get [kRequestAbortedError] () {
return true
}
}
const kInformationalError = Symbol.for('undici.error.UND_ERR_INFO')
class InformationalError extends UndiciError {
constructor (message) {
super(message)
@@ -99,8 +169,17 @@ class InformationalError extends UndiciError {
this.message = message || 'Request information'
this.code = 'UND_ERR_INFO'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInformationalError] === true
}
get [kInformationalError] () {
return true
}
}
const kRequestContentLengthMismatchError = Symbol.for('undici.error.UND_ERR_REQ_CONTENT_LENGTH_MISMATCH')
class RequestContentLengthMismatchError extends UndiciError {
constructor (message) {
super(message)
@@ -108,8 +187,17 @@ class RequestContentLengthMismatchError extends UndiciError {
this.message = message || 'Request body length does not match content-length header'
this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestContentLengthMismatchError] === true
}
get [kRequestContentLengthMismatchError] () {
return true
}
}
const kResponseContentLengthMismatchError = Symbol.for('undici.error.UND_ERR_RES_CONTENT_LENGTH_MISMATCH')
class ResponseContentLengthMismatchError extends UndiciError {
constructor (message) {
super(message)
@@ -117,8 +205,17 @@ class ResponseContentLengthMismatchError extends UndiciError {
this.message = message || 'Response body length does not match content-length header'
this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseContentLengthMismatchError] === true
}
get [kResponseContentLengthMismatchError] () {
return true
}
}
const kClientDestroyedError = Symbol.for('undici.error.UND_ERR_DESTROYED')
class ClientDestroyedError extends UndiciError {
constructor (message) {
super(message)
@@ -126,8 +223,17 @@ class ClientDestroyedError extends UndiciError {
this.message = message || 'The client is destroyed'
this.code = 'UND_ERR_DESTROYED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kClientDestroyedError] === true
}
get [kClientDestroyedError] () {
return true
}
}
const kClientClosedError = Symbol.for('undici.error.UND_ERR_CLOSED')
class ClientClosedError extends UndiciError {
constructor (message) {
super(message)
@@ -135,8 +241,17 @@ class ClientClosedError extends UndiciError {
this.message = message || 'The client is closed'
this.code = 'UND_ERR_CLOSED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kClientClosedError] === true
}
get [kClientClosedError] () {
return true
}
}
const kSocketError = Symbol.for('undici.error.UND_ERR_SOCKET')
class SocketError extends UndiciError {
constructor (message, socket) {
super(message)
@@ -145,8 +260,17 @@ class SocketError extends UndiciError {
this.code = 'UND_ERR_SOCKET'
this.socket = socket
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kSocketError] === true
}
get [kSocketError] () {
return true
}
}
const kNotSupportedError = Symbol.for('undici.error.UND_ERR_NOT_SUPPORTED')
class NotSupportedError extends UndiciError {
constructor (message) {
super(message)
@@ -154,8 +278,17 @@ class NotSupportedError extends UndiciError {
this.message = message || 'Not supported error'
this.code = 'UND_ERR_NOT_SUPPORTED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kNotSupportedError] === true
}
get [kNotSupportedError] () {
return true
}
}
const kBalancedPoolMissingUpstreamError = Symbol.for('undici.error.UND_ERR_BPL_MISSING_UPSTREAM')
class BalancedPoolMissingUpstreamError extends UndiciError {
constructor (message) {
super(message)
@@ -163,8 +296,17 @@ class BalancedPoolMissingUpstreamError extends UndiciError {
this.message = message || 'No upstream has been added to the BalancedPool'
this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kBalancedPoolMissingUpstreamError] === true
}
get [kBalancedPoolMissingUpstreamError] () {
return true
}
}
const kHTTPParserError = Symbol.for('undici.error.UND_ERR_HTTP_PARSER')
class HTTPParserError extends Error {
constructor (message, code, data) {
super(message)
@@ -172,8 +314,17 @@ class HTTPParserError extends Error {
this.code = code ? `HPE_${code}` : undefined
this.data = data ? data.toString() : undefined
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHTTPParserError] === true
}
get [kHTTPParserError] () {
return true
}
}
const kResponseExceededMaxSizeError = Symbol.for('undici.error.UND_ERR_RES_EXCEEDED_MAX_SIZE')
class ResponseExceededMaxSizeError extends UndiciError {
constructor (message) {
super(message)
@@ -181,8 +332,17 @@ class ResponseExceededMaxSizeError extends UndiciError {
this.message = message || 'Response content exceeded max size'
this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseExceededMaxSizeError] === true
}
get [kResponseExceededMaxSizeError] () {
return true
}
}
const kRequestRetryError = Symbol.for('undici.error.UND_ERR_REQ_RETRY')
class RequestRetryError extends UndiciError {
constructor (message, code, { headers, data }) {
super(message)
@@ -193,8 +353,17 @@ class RequestRetryError extends UndiciError {
this.data = data
this.headers = headers
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestRetryError] === true
}
get [kRequestRetryError] () {
return true
}
}
const kResponseError = Symbol.for('undici.error.UND_ERR_RESPONSE')
class ResponseError extends UndiciError {
constructor (message, code, { headers, body }) {
super(message)
@@ -205,8 +374,17 @@ class ResponseError extends UndiciError {
this.body = body
this.headers = headers
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseError] === true
}
get [kResponseError] () {
return true
}
}
const kSecureProxyConnectionError = Symbol.for('undici.error.UND_ERR_PRX_TLS')
class SecureProxyConnectionError extends UndiciError {
constructor (cause, message, options = {}) {
super(message, { cause, ...options })
@@ -215,6 +393,32 @@ class SecureProxyConnectionError extends UndiciError {
this.code = 'UND_ERR_PRX_TLS'
this.cause = cause
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kSecureProxyConnectionError] === true
}
get [kSecureProxyConnectionError] () {
return true
}
}
const kMaxOriginsReachedError = Symbol.for('undici.error.UND_ERR_MAX_ORIGINS_REACHED')
class MaxOriginsReachedError extends UndiciError {
constructor (message) {
super(message)
this.name = 'MaxOriginsReachedError'
this.message = message || 'Maximum allowed origins reached'
this.code = 'UND_ERR_MAX_ORIGINS_REACHED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kMaxOriginsReachedError] === true
}
get [kMaxOriginsReachedError] () {
return true
}
}
module.exports = {
@@ -226,7 +430,6 @@ module.exports = {
BodyTimeoutError,
RequestContentLengthMismatchError,
ConnectTimeoutError,
ResponseStatusCodeError,
InvalidArgumentError,
InvalidReturnValueError,
RequestAbortedError,
@@ -240,5 +443,6 @@ module.exports = {
ResponseExceededMaxSizeError,
RequestRetryError,
ResponseError,
SecureProxyConnectionError
SecureProxyConnectionError,
MaxOriginsReachedError
}
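
The pattern repeated throughout this file (a `Symbol.for`-keyed brand getter plus a `Symbol.hasInstance` override on each class) keeps `instanceof` working when more than one copy of undici is loaded. A standalone sketch of the mechanism, reusing the same symbol key as above:

```js
const kUndiciError = Symbol.for('undici.error.UND_ERR')

class UndiciError extends Error {
  static [Symbol.hasInstance] (instance) {
    return instance && instance[kUndiciError] === true
  }

  get [kUndiciError] () {
    return true
  }
}

// A class standing in for a hypothetical second copy of the module:
// it never touches UndiciError, but carries the same registered symbol.
class OtherCopyError extends Error {
  get [kUndiciError] () { return true }
}

// Symbol.for() returns the same symbol in every copy, so the brand
// check succeeds across module instances.
console.log(new OtherCopyError() instanceof UndiciError) // true
```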


@@ -17,7 +17,8 @@ const {
serializePathWithQuery,
assertRequestHandler,
getServerName,
normalizedMethodRecords
normalizedMethodRecords,
getProtocolFromUrlString
} = require('./util')
const { channels } = require('./diagnostics.js')
const { headerNameLowerCasedRecord } = require('./constants')
@@ -141,8 +142,11 @@ class Request {
this.path = query ? serializePathWithQuery(path, query) : path
// TODO: shall we maybe standardize it to an URL object?
this.origin = origin
this.protocol = getProtocolFromUrlString(origin)
this.idempotent = idempotent == null
? method === 'HEAD' || method === 'GET'
: idempotent


@@ -102,13 +102,24 @@ function isBlobLike (object) {
}
}
/**
* @param {string} url The path to check for query strings or fragments.
* @returns {boolean} Returns true if the path contains a query string or fragment.
*/
function pathHasQueryOrFragment (url) {
return (
url.includes('?') ||
url.includes('#')
)
}
/**
* @param {string} url The URL to add the query params to
* @param {import('node:querystring').ParsedUrlQueryInput} queryParams The object to serialize into a URL query string
* @returns {string} The URL with the query params added
*/
function serializePathWithQuery (url, queryParams) {
if (url.includes('?') || url.includes('#')) {
if (pathHasQueryOrFragment(url)) {
throw new Error('Query params cannot be passed when url already contains "?" or "#".')
}
@@ -598,12 +609,11 @@ function ReadableStreamFrom (iterable) {
let iterator
return new ReadableStream(
{
async start () {
start () {
iterator = iterable[Symbol.asyncIterator]()
},
pull (controller) {
async function pull () {
const { done, value } = await iterator.next()
return iterator.next().then(({ done, value }) => {
if (done) {
queueMicrotask(() => {
controller.close()
@@ -614,15 +624,13 @@ function ReadableStreamFrom (iterable) {
if (buf.byteLength) {
controller.enqueue(new Uint8Array(buf))
} else {
return await pull()
return this.pull(controller)
}
}
}
return pull()
})
},
async cancel () {
await iterator.return()
cancel () {
return iterator.return()
},
type: 'bytes'
}
@@ -868,6 +876,30 @@ function onConnectTimeout (socket, opts) {
destroy(socket, new ConnectTimeoutError(message))
}
/**
* @param {string} urlString
* @returns {string}
*/
function getProtocolFromUrlString (urlString) {
if (
urlString[0] === 'h' &&
urlString[1] === 't' &&
urlString[2] === 't' &&
urlString[3] === 'p'
) {
switch (urlString[4]) {
case ':':
return 'http:'
case 's':
if (urlString[5] === ':') {
return 'https:'
}
}
}
// fallback if none of the usual suspects
return urlString.slice(0, urlString.indexOf(':') + 1)
}
const kEnumerableProperty = Object.create(null)
kEnumerableProperty.enumerable = true
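
A quick sketch of what the helper returns, assuming it is required from `lib/core/util.js`, where the export is added just below:

```js
const { getProtocolFromUrlString } = require('./lib/core/util')

console.log(getProtocolFromUrlString('http://example.com'))  // 'http:'  (fast path)
console.log(getProtocolFromUrlString('https://example.com')) // 'https:' (fast path)
console.log(getProtocolFromUrlString('ws://example.com'))    // 'ws:'    (slice fallback)
```
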
@@ -924,6 +956,7 @@ module.exports = {
assertRequestHandler,
getSocketInfo,
isFormDataLike,
pathHasQueryOrFragment,
serializePathWithQuery,
addAbortListener,
isValidHTTPToken,
@@ -938,5 +971,6 @@ module.exports = {
nodeMinor,
safeHTTPMethods: Object.freeze(['GET', 'HEAD', 'OPTIONS', 'TRACE']),
wrapRequestBody,
setupConnectTimeout
setupConnectTimeout,
getProtocolFromUrlString
}


@@ -1,6 +1,6 @@
'use strict'
const { InvalidArgumentError } = require('../core/errors')
const { InvalidArgumentError, MaxOriginsReachedError } = require('../core/errors')
const { kClients, kRunning, kClose, kDestroy, kDispatch, kUrl } = require('../core/symbols')
const DispatcherBase = require('./dispatcher-base')
const Pool = require('./pool')
@@ -13,6 +13,7 @@ const kOnConnectionError = Symbol('onConnectionError')
const kOnDrain = Symbol('onDrain')
const kFactory = Symbol('factory')
const kOptions = Symbol('options')
const kOrigins = Symbol('origins')
function defaultFactory (origin, opts) {
return opts && opts.connections === 1
@@ -21,7 +22,7 @@ function defaultFactory (origin, opts) {
}
class Agent extends DispatcherBase {
constructor ({ factory = defaultFactory, connect, ...options } = {}) {
constructor ({ factory = defaultFactory, maxOrigins = Infinity, connect, ...options } = {}) {
if (typeof factory !== 'function') {
throw new InvalidArgumentError('factory must be a function.')
}
@@ -30,42 +31,34 @@ class Agent extends DispatcherBase {
throw new InvalidArgumentError('connect must be a function or an object')
}
if (typeof maxOrigins !== 'number' || Number.isNaN(maxOrigins) || maxOrigins <= 0) {
throw new InvalidArgumentError('maxOrigins must be a number greater than 0')
}
super()
if (connect && typeof connect !== 'function') {
connect = { ...connect }
}
this[kOptions] = { ...util.deepClone(options), connect }
this[kOptions] = { ...util.deepClone(options), maxOrigins, connect }
this[kFactory] = factory
this[kClients] = new Map()
this[kOrigins] = new Set()
this[kOnDrain] = (origin, targets) => {
this.emit('drain', origin, [this, ...targets])
}
this[kOnConnect] = (origin, targets) => {
const result = this[kClients].get(origin)
if (result) {
result.count += 1
}
this.emit('connect', origin, [this, ...targets])
}
this[kOnDisconnect] = (origin, targets, err) => {
const result = this[kClients].get(origin)
if (result) {
result.count -= 1
if (result.count <= 0) {
this[kClients].delete(origin)
result.dispatcher.destroy()
}
}
this.emit('disconnect', origin, [this, ...targets], err)
}
this[kOnConnectionError] = (origin, targets, err) => {
// TODO: should this decrement result.count here?
this.emit('connectionError', origin, [this, ...targets], err)
}
}
@@ -86,39 +79,67 @@ class Agent extends DispatcherBase {
throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
}
if (this[kOrigins].size >= this[kOptions].maxOrigins && !this[kOrigins].has(key)) {
throw new MaxOriginsReachedError()
}
const result = this[kClients].get(key)
let dispatcher = result && result.dispatcher
if (!dispatcher) {
const closeClientIfUnused = (connected) => {
const result = this[kClients].get(key)
if (result) {
if (connected) result.count -= 1
if (result.count <= 0) {
this[kClients].delete(key)
result.dispatcher.close()
}
this[kOrigins].delete(key)
}
}
dispatcher = this[kFactory](opts.origin, this[kOptions])
.on('drain', this[kOnDrain])
.on('connect', this[kOnConnect])
.on('disconnect', this[kOnDisconnect])
.on('connectionError', this[kOnConnectionError])
.on('connect', (origin, targets) => {
const result = this[kClients].get(key)
if (result) {
result.count += 1
}
this[kOnConnect](origin, targets)
})
.on('disconnect', (origin, targets, err) => {
closeClientIfUnused(true)
this[kOnDisconnect](origin, targets, err)
})
.on('connectionError', (origin, targets, err) => {
closeClientIfUnused(false)
this[kOnConnectionError](origin, targets, err)
})
this[kClients].set(key, { count: 0, dispatcher })
this[kOrigins].add(key)
}
return dispatcher.dispatch(opts, handler)
}
async [kClose] () {
[kClose] () {
const closePromises = []
for (const { dispatcher } of this[kClients].values()) {
closePromises.push(dispatcher.close())
}
this[kClients].clear()
await Promise.all(closePromises)
return Promise.all(closePromises)
}
async [kDestroy] (err) {
[kDestroy] (err) {
const destroyPromises = []
for (const { dispatcher } of this[kClients].values()) {
destroyPromises.push(dispatcher.destroy(err))
}
this[kClients].clear()
await Promise.all(destroyPromises)
return Promise.all(destroyPromises)
}
get stats () {


@@ -64,11 +64,26 @@ function lazyllhttp () {
const llhttpWasmData = process.env.JEST_WORKER_ID ? require('../llhttp/llhttp-wasm.js') : undefined
let mod
// We disable wasm SIMD on ppc64 as it seems to be broken on Power 9 architectures.
let useWasmSIMD = process.arch !== 'ppc64'
// The UNDICI_NO_WASM_SIMD env variable explicitly overrides the default behavior
if (process.env.UNDICI_NO_WASM_SIMD === '1') {
useWasmSIMD = false
} else if (process.env.UNDICI_NO_WASM_SIMD === '0') {
useWasmSIMD = true
}
if (useWasmSIMD) {
try {
mod = new WebAssembly.Module(require('../llhttp/llhttp_simd-wasm.js'))
} catch (e) {
/* istanbul ignore next */
} catch {
}
}
/* istanbul ignore next */
if (!mod) {
// We could check if the error was caused by the simd option not
// being enabled, but the occurrence of this other error
// * https://github.com/emscripten-core/emscripten/issues/11495
@@ -325,10 +340,6 @@ class Parser {
currentBufferRef = chunk
currentParser = this
ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, chunk.length)
/* eslint-disable-next-line no-useless-catch */
} catch (err) {
/* istanbul ignore next: difficult to make a test case for */
throw err
} finally {
currentParser = null
currentBufferRef = null
@@ -760,7 +771,7 @@ function onParserTimeout (parser) {
* @param {import('net').Socket} socket
* @returns
*/
async function connectH1 (client, socket) {
function connectH1 (client, socket) {
client[kSocket] = socket
if (!llhttpInstance) {


@@ -77,7 +77,7 @@ function parseH2Headers (headers) {
return result
}
async function connectH2 (client, socket) {
function connectH2 (client, socket) {
client[kSocket] = socket
const session = http2.connect(client[kUrl], {
@@ -279,7 +279,7 @@ function shouldSendContentLength (method) {
function writeH2 (client, request) {
const requestTimeout = request.bodyTimeout ?? client[kBodyTimeout]
const session = client[kHTTP2Session]
const { method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
const { method, path, host, upgrade, expectContinue, signal, protocol, headers: reqHeaders } = request
let { body } = request
if (upgrade) {
@@ -292,6 +292,16 @@ function writeH2 (client, request) {
const key = reqHeaders[n + 0]
const val = reqHeaders[n + 1]
if (key === 'cookie') {
if (headers[key] != null) {
headers[key] = Array.isArray(headers[key]) ? (headers[key].push(val), headers[key]) : [headers[key], val]
} else {
headers[key] = val
}
continue
}
if (Array.isArray(val)) {
for (let i = 0; i < val.length; i++) {
if (headers[key]) {
@@ -387,7 +397,7 @@ function writeH2 (client, request) {
// :path and :scheme headers must be omitted when sending CONNECT
headers[HTTP2_HEADER_PATH] = path
headers[HTTP2_HEADER_SCHEME] = 'https'
headers[HTTP2_HEADER_SCHEME] = protocol === 'http:' ? 'http' : 'https'
// https://tools.ietf.org/html/rfc7231#section-4.3.1
// https://tools.ietf.org/html/rfc7231#section-4.3.2


@@ -296,8 +296,7 @@ class Client extends DispatcherBase {
}
[kDispatch] (opts, handler) {
const origin = opts.origin || this[kUrl].origin
const request = new Request(origin, opts, handler)
const request = new Request(this[kUrl].origin, opts, handler)
this[kQueue].push(request)
if (this[kResuming]) {
@@ -317,7 +316,7 @@ class Client extends DispatcherBase {
return this[kNeedDrain] < 2
}
async [kClose] () {
[kClose] () {
// TODO: for H2 we need to gracefully flush the remaining enqueued
// request and close each stream.
return new Promise((resolve) => {
@@ -329,7 +328,7 @@ class Client extends DispatcherBase {
})
}
async [kDestroy] (err) {
[kDestroy] (err) {
return new Promise((resolve) => {
const requests = this[kQueue].splice(this[kPendingIdx])
for (let i = 0; i < requests.length; i++) {
@@ -381,9 +380,9 @@ function onError (client, err) {
/**
* @param {Client} client
* @returns
* @returns {void}
*/
async function connect (client) {
function connect (client) {
assert(!client[kConnecting])
assert(!client[kHTTPContext])
@@ -417,8 +416,6 @@ async function connect (client) {
})
}
try {
const socket = await new Promise((resolve, reject) => {
client[kConnector]({
host,
hostname,
@@ -428,15 +425,14 @@ async function connect (client) {
localAddress: client[kLocalAddress]
}, (err, socket) => {
if (err) {
reject(err)
} else {
resolve(socket)
handleConnectError(client, err, { host, hostname, protocol, port })
client[kResume]()
return
}
})
})
if (client.destroyed) {
util.destroy(socket.on('error', noop), new ClientDestroyedError())
client[kResume]()
return
}
@@ -444,11 +440,13 @@ async function connect (client) {
try {
client[kHTTPContext] = socket.alpnProtocol === 'h2'
? await connectH2(client, socket)
: await connectH1(client, socket)
? connectH2(client, socket)
: connectH1(client, socket)
} catch (err) {
socket.destroy().on('error', noop)
throw err
handleConnectError(client, err, { host, hostname, protocol, port })
client[kResume]()
return
}
client[kConnecting] = false
@@ -473,8 +471,13 @@ async function connect (client) {
socket
})
}
client.emit('connect', client[kUrl], [client])
} catch (err) {
client[kResume]()
})
}
function handleConnectError (client, err, { host, hostname, protocol, port }) {
if (client.destroyed) {
return
}
@@ -510,9 +513,6 @@ async function connect (client) {
client.emit('connectionError', client[kUrl], [client], err)
}
client[kResume]()
}
function emitDrain (client) {
client[kNeedDrain] = 0
client.emit('drain', client[kUrl], [client])


@@ -13,19 +13,24 @@ const kOnDestroyed = Symbol('onDestroyed')
const kOnClosed = Symbol('onClosed')
class DispatcherBase extends Dispatcher {
constructor () {
super()
/** @type {boolean} */
[kDestroyed] = false;
this[kDestroyed] = false
this[kOnDestroyed] = null
this[kClosed] = false
this[kOnClosed] = []
}
/** @type {Array|null} */
[kOnDestroyed] = null;
/** @type {boolean} */
[kClosed] = false;
/** @type {Array} */
[kOnClosed] = []
/** @returns {boolean} */
get destroyed () {
return this[kDestroyed]
}
/** @returns {boolean} */
get closed () {
return this[kClosed]
}


@@ -46,24 +46,20 @@ class EnvHttpProxyAgent extends DispatcherBase {
return agent.dispatch(opts, handler)
}
async [kClose] () {
await this[kNoProxyAgent].close()
if (!this[kHttpProxyAgent][kClosed]) {
await this[kHttpProxyAgent].close()
}
if (!this[kHttpsProxyAgent][kClosed]) {
await this[kHttpsProxyAgent].close()
}
[kClose] () {
return Promise.all([
this[kNoProxyAgent].close(),
!this[kHttpProxyAgent][kClosed] && this[kHttpProxyAgent].close(),
!this[kHttpsProxyAgent][kClosed] && this[kHttpsProxyAgent].close()
])
}
async [kDestroy] (err) {
await this[kNoProxyAgent].destroy(err)
if (!this[kHttpProxyAgent][kDestroyed]) {
await this[kHttpProxyAgent].destroy(err)
}
if (!this[kHttpsProxyAgent][kDestroyed]) {
await this[kHttpsProxyAgent].destroy(err)
}
[kDestroy] (err) {
return Promise.all([
this[kNoProxyAgent].destroy(err),
!this[kHttpProxyAgent][kDestroyed] && this[kHttpProxyAgent].destroy(err),
!this[kHttpsProxyAgent][kDestroyed] && this[kHttpsProxyAgent].destroy(err)
])
}
#getProxyAgentForUrl (url) {


@@ -59,35 +59,21 @@ const kMask = kSize - 1
* @template T
*/
class FixedCircularBuffer {
constructor () {
/**
* @type {number}
*/
this.bottom = 0
/**
* @type {number}
*/
this.top = 0
/**
* @type {Array<T|undefined>}
*/
this.list = new Array(kSize).fill(undefined)
/**
* @type {T|null}
*/
this.next = null
}
/** @type {number} */
bottom = 0
/** @type {number} */
top = 0
/** @type {Array<T|undefined>} */
list = new Array(kSize).fill(undefined)
/** @type {T|null} */
next = null
/**
* @returns {boolean}
*/
/** @returns {boolean} */
isEmpty () {
return this.top === this.bottom
}
/**
* @returns {boolean}
*/
/** @returns {boolean} */
isFull () {
return ((this.top + 1) & kMask) === this.bottom
}
@@ -101,9 +87,7 @@ class FixedCircularBuffer {
this.top = (this.top + 1) & kMask
}
/**
* @returns {T|null}
*/
/** @returns {T|null} */
shift () {
const nextItem = this.list[this.bottom]
if (nextItem === undefined) { return null }
@@ -118,22 +102,16 @@ class FixedCircularBuffer {
*/
module.exports = class FixedQueue {
constructor () {
/**
* @type {FixedCircularBuffer<T>}
*/
/** @type {FixedCircularBuffer<T>} */
this.head = this.tail = new FixedCircularBuffer()
}
/**
* @returns {boolean}
*/
/** @returns {boolean} */
isEmpty () {
return this.head.isEmpty()
}
/**
* @param {T} data
*/
/** @param {T} data */
push (data) {
if (this.head.isFull()) {
// Head is full: Creates a new queue, sets the old queue's `.next` to it,
@ -143,9 +121,7 @@ module.exports = class FixedQueue {
this.head.push(data)
}
/**
* @returns {T|null}
*/
/** @returns {T|null} */
shift () {
const tail = this.tail
const next = tail.shift()
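
`FixedCircularBuffer` relies on a power-of-two capacity so that index wrap-around is a single bitwise AND; `kSize` is 2048 in this file (the constant sits above the hunk). The index arithmetic in isolation:

```js
'use strict'
// kSize is 2048 in this file; a power of two makes kMask an all-ones pattern.
const kSize = 2048
const kMask = kSize - 1 // 2047

// Advancing an index wraps to 0 via bitwise AND instead of a modulo:
console.log((2046 + 1) & kMask) // 2047
console.log((2047 + 1) & kMask) // 0

// isFull() leaves one slot unused so "full" and "empty" stay distinguishable:
const bottom = 0
const top = 2047
console.log(((top + 1) & kMask) === bottom) // true: buffer reports full
console.log(top === bottom)                 // false: and it is not empty
```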

View File

@ -12,8 +12,6 @@ class H2CClient extends DispatcherBase {
#client = null
constructor (origin, clientOpts) {
super()
if (typeof origin === 'string') {
origin = new URL(origin)
}
@ -47,6 +45,8 @@ class H2CClient extends DispatcherBase {
)
}
super()
this.#client = new Client(origin, {
...opts,
connect: this.#buildConnector(connect),
@ -110,12 +110,12 @@ class H2CClient extends DispatcherBase {
return this.#client.dispatch(opts, handler)
}
async [kClose] () {
await this.#client.close()
[kClose] () {
return this.#client.close()
}
async [kDestroy] () {
await this.#client.destroy()
[kDestroy] () {
return this.#client.destroy()
}
}

View File

@ -18,17 +18,16 @@ const kAddClient = Symbol('add client')
const kRemoveClient = Symbol('remove client')
class PoolBase extends DispatcherBase {
constructor () {
super()
[kQueue] = new FixedQueue();
this[kQueue] = new FixedQueue()
this[kClients] = []
this[kQueued] = 0
[kQueued] = 0;
const pool = this
[kClients] = [];
this[kOnDrain] = function onDrain (origin, targets) {
const queue = pool[kQueue]
[kNeedDrain] = false;
[kOnDrain] (client, origin, targets) {
const queue = this[kQueue]
let needDrain = false
@ -37,35 +36,37 @@ class PoolBase extends DispatcherBase {
if (!item) {
break
}
pool[kQueued]--
needDrain = !this.dispatch(item.opts, item.handler)
this[kQueued]--
needDrain = !client.dispatch(item.opts, item.handler)
}
this[kNeedDrain] = needDrain
client[kNeedDrain] = needDrain
if (!this[kNeedDrain] && pool[kNeedDrain]) {
pool[kNeedDrain] = false
pool.emit('drain', origin, [pool, ...targets])
if (!needDrain && this[kNeedDrain]) {
this[kNeedDrain] = false
this.emit('drain', origin, [this, ...targets])
}
if (pool[kClosedResolve] && queue.isEmpty()) {
Promise
.all(pool[kClients].map(c => c.close()))
.then(pool[kClosedResolve])
if (this[kClosedResolve] && queue.isEmpty()) {
const closeAll = new Array(this[kClients].length)
for (let i = 0; i < this[kClients].length; i++) {
closeAll[i] = this[kClients][i].close()
}
Promise.all(closeAll)
.then(this[kClosedResolve])
}
}
this[kOnConnect] = (origin, targets) => {
pool.emit('connect', origin, [pool, ...targets])
}
[kOnConnect] = (origin, targets) => {
this.emit('connect', origin, [this, ...targets])
};
this[kOnDisconnect] = (origin, targets, err) => {
pool.emit('disconnect', origin, [pool, ...targets], err)
}
[kOnDisconnect] = (origin, targets, err) => {
this.emit('disconnect', origin, [this, ...targets], err)
};
this[kOnConnectionError] = (origin, targets, err) => {
pool.emit('connectionError', origin, [pool, ...targets], err)
}
[kOnConnectionError] = (origin, targets, err) => {
this.emit('connectionError', origin, [this, ...targets], err)
}
get [kBusy] () {
@ -73,11 +74,19 @@ class PoolBase extends DispatcherBase {
}
get [kConnected] () {
return this[kClients].filter(client => client[kConnected]).length
let ret = 0
for (const { [kConnected]: connected } of this[kClients]) {
ret += connected
}
return ret
}
get [kFree] () {
return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length
let ret = 0
for (const { [kConnected]: connected, [kNeedDrain]: needDrain } of this[kClients]) {
ret += connected && !needDrain
}
return ret
}
get [kPending] () {
@ -108,17 +117,21 @@ class PoolBase extends DispatcherBase {
return new PoolStats(this)
}
async [kClose] () {
[kClose] () {
if (this[kQueue].isEmpty()) {
await Promise.all(this[kClients].map(c => c.close()))
const closeAll = new Array(this[kClients].length)
for (let i = 0; i < this[kClients].length; i++) {
closeAll[i] = this[kClients][i].close()
}
return Promise.all(closeAll)
} else {
await new Promise((resolve) => {
return new Promise((resolve) => {
this[kClosedResolve] = resolve
})
}
}
async [kDestroy] (err) {
[kDestroy] (err) {
while (true) {
const item = this[kQueue].shift()
if (!item) {
@ -127,7 +140,11 @@ class PoolBase extends DispatcherBase {
item.handler.onError(err)
}
await Promise.all(this[kClients].map(c => c.destroy(err)))
const destroyAll = new Array(this[kClients].length)
for (let i = 0; i < this[kClients].length; i++) {
destroyAll[i] = this[kClients][i].destroy(err)
}
return Promise.all(destroyAll)
}
[kDispatch] (opts, handler) {
@ -147,7 +164,7 @@ class PoolBase extends DispatcherBase {
[kAddClient] (client) {
client
.on('drain', this[kOnDrain])
.on('drain', this[kOnDrain].bind(this, client))
.on('connect', this[kOnConnect])
.on('disconnect', this[kOnDisconnect])
.on('connectionError', this[kOnConnectionError])
@ -157,7 +174,7 @@ class PoolBase extends DispatcherBase {
if (this[kNeedDrain]) {
queueMicrotask(() => {
if (this[kNeedDrain]) {
this[kOnDrain](client[kUrl], [this, client])
this[kOnDrain](client, client[kUrl], [client, this])
}
})
}
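
The reworked `kConnected`/`kFree` getters replace `filter(...).length` with a counting loop that destructures the symbol-keyed fields; `+=` coerces the booleans to 0 or 1, so no intermediate array is allocated. The pattern in isolation:

```js
'use strict'
const kConnected = Symbol('connected')
const kNeedDrain = Symbol('needDrain')

const clients = [
  { [kConnected]: true, [kNeedDrain]: false },
  { [kConnected]: true, [kNeedDrain]: true },
  { [kConnected]: false, [kNeedDrain]: false }
]

// Old shape: clients.filter(c => c[kConnected]).length allocates an array.
// New shape: a loop whose booleans coerce to 0/1 under +=.
let connected = 0
let free = 0
for (const { [kConnected]: isConnected, [kNeedDrain]: needDrain } of clients) {
  connected += isConnected
  free += isConnected && !needDrain
}
console.log(connected, free) // 2 1
```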

View File

@ -51,8 +51,6 @@ class Pool extends PoolBase {
throw new InvalidArgumentError('connect must be a function or an object')
}
super()
if (typeof connect !== 'function') {
connect = buildConnector({
...tls,
@ -65,6 +63,8 @@ class Pool extends PoolBase {
})
}
super()
this[kConnections] = connections || null
this[kUrl] = util.parseOrigin(origin)
this[kOptions] = { ...util.deepClone(options), connect, allowH2, clientTtl }

View File

@ -37,11 +37,12 @@ class Http1ProxyWrapper extends DispatcherBase {
#client
constructor (proxyUrl, { headers = {}, connect, factory }) {
super()
if (!proxyUrl) {
throw new InvalidArgumentError('Proxy URL is mandatory')
}
super()
this[kProxyHeaders] = headers
if (factory) {
this.#client = factory(proxyUrl, { connect })
@ -80,11 +81,11 @@ class Http1ProxyWrapper extends DispatcherBase {
return this.#client[kDispatch](opts, handler)
}
async [kClose] () {
[kClose] () {
return this.#client.close()
}
async [kDestroy] (err) {
[kDestroy] (err) {
return this.#client.destroy(err)
}
}
@ -220,14 +221,18 @@ class ProxyAgent extends DispatcherBase {
}
}
async [kClose] () {
await this[kAgent].close()
await this[kClient].close()
[kClose] () {
return Promise.all([
this[kAgent].close(),
this[kClient].close()
])
}
async [kDestroy] () {
await this[kAgent].destroy()
await this[kClient].destroy()
[kDestroy] () {
return Promise.all([
this[kAgent].destroy(),
this[kClient].destroy()
])
}
}

View File

@ -26,7 +26,25 @@ function getGlobalDispatcher () {
return globalThis[globalDispatcher]
}
// These are the globals that can be installed by undici.install().
// Not exported by index.js to avoid use outside of this module.
const installedExports = /** @type {const} */ (
[
'fetch',
'Headers',
'Response',
'Request',
'FormData',
'WebSocket',
'CloseEvent',
'ErrorEvent',
'MessageEvent',
'EventSource'
]
)
module.exports = {
setGlobalDispatcher,
getGlobalDispatcher
getGlobalDispatcher,
installedExports
}
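
`installedExports` names the globals that `undici.install()` places on `globalThis`. A hedged sketch of how such a list can drive installation (the real `install()` lives elsewhere in the package and may differ):

```js
'use strict'
// Sketch only: how a list like installedExports can drive installation.
// Requires the undici package; the real undici.install() is not in this file.
const undici = require('undici')

const installedExports = [
  'fetch', 'Headers', 'Response', 'Request', 'FormData',
  'WebSocket', 'CloseEvent', 'ErrorEvent', 'MessageEvent', 'EventSource'
]

function install (target = globalThis) {
  for (const name of installedExports) {
    target[name] = undici[name] // assumption: each name is exported by undici
  }
}

install()
console.log(typeof globalThis.fetch) // 'function'
```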

View File

@ -56,6 +56,22 @@ function needsRevalidation (result, cacheControlDirectives) {
return false
}
/**
* Check if we're within the stale-while-revalidate window for a stale response
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
* @returns {boolean}
*/
function withinStaleWhileRevalidateWindow (result) {
const staleWhileRevalidate = result.cacheControlDirectives?.['stale-while-revalidate']
if (!staleWhileRevalidate) {
return false
}
const now = Date.now()
const staleWhileRevalidateExpiry = result.staleAt + (staleWhileRevalidate * 1000)
return now <= staleWhileRevalidateExpiry
}
/**
* @param {DispatchFn} dispatch
* @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} globalOpts
@ -231,6 +247,51 @@ function handleResult (
return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
}
// RFC 5861: If we're within stale-while-revalidate window, serve stale immediately
// and revalidate in background
if (withinStaleWhileRevalidateWindow(result)) {
// Serve stale response immediately
sendCachedValue(handler, opts, result, age, null, true)
// Start background revalidation (fire-and-forget)
queueMicrotask(() => {
let headers = {
...opts.headers,
'if-modified-since': new Date(result.cachedAt).toUTCString()
}
if (result.etag) {
headers['if-none-match'] = result.etag
}
if (result.vary) {
headers = {
...headers,
...result.vary
}
}
// Background revalidation - update cache if we get new data
dispatch(
{
...opts,
headers
},
new CacheHandler(globalOpts, cacheKey, {
// Silent handler that just updates the cache
onRequestStart () {},
onRequestUpgrade () {},
onResponseStart () {},
onResponseData () {},
onResponseEnd () {},
onResponseError () {}
})
)
})
return true
}
let withinStaleIfErrorThreshold = false
const staleIfErrorExpiry = result.cacheControlDirectives['stale-if-error'] ?? reqCacheControl?.['stale-if-error']
if (staleIfErrorExpiry) {
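
The stale-while-revalidate handling above follows RFC 5861: a response that went stale at `staleAt` may still be served for `stale-while-revalidate` seconds while a revalidation request runs in the background. A worked check of the window arithmetic (timestamps hypothetical):

```js
'use strict'
// Mirrors withinStaleWhileRevalidateWindow above (timestamps hypothetical).
function withinStaleWhileRevalidateWindow (result, now = Date.now()) {
  const swr = result.cacheControlDirectives?.['stale-while-revalidate']
  if (!swr) return false
  return now <= result.staleAt + swr * 1000
}

// Cache-Control: max-age=60, stale-while-revalidate=30
const staleAt = Date.now() - 10_000 // went stale 10 seconds ago
const result = {
  staleAt,
  cacheControlDirectives: { 'stale-while-revalidate': 30 }
}

console.log(withinStaleWhileRevalidateWindow(result))                   // true: 10s into a 30s window
console.log(withinStaleWhileRevalidateWindow(result, staleAt + 31_000)) // false: window exceeded
```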

View File

@ -0,0 +1,253 @@
'use strict'
const { createInflate, createGunzip, createBrotliDecompress, createZstdDecompress } = require('node:zlib')
const { pipeline } = require('node:stream')
const DecoratorHandler = require('../handler/decorator-handler')
/** @typedef {import('node:stream').Transform} Transform */
/** @typedef {import('node:stream').Transform} Controller */
/** @typedef {Transform&import('node:zlib').Zlib} DecompressorStream */
/** @type {Record<string, () => DecompressorStream>} */
const supportedEncodings = {
gzip: createGunzip,
'x-gzip': createGunzip,
br: createBrotliDecompress,
deflate: createInflate,
compress: createInflate,
'x-compress': createInflate,
...(createZstdDecompress ? { zstd: createZstdDecompress } : {})
}
const defaultSkipStatusCodes = /** @type {const} */ ([204, 304])
let warningEmitted = /** @type {boolean} */ (false)
/**
* @typedef {Object} DecompressHandlerOptions
* @property {number[]|Readonly<number[]>} [skipStatusCodes=[204, 304]] - List of status codes to skip decompression for
* @property {boolean} [skipErrorResponses] - Whether to skip decompression for error responses (status codes >= 400)
*/
class DecompressHandler extends DecoratorHandler {
/** @type {Transform[]} */
#decompressors = []
/** @type {NodeJS.WritableStream&NodeJS.ReadableStream|null} */
#pipelineStream
/** @type {Readonly<number[]>} */
#skipStatusCodes
/** @type {boolean} */
#skipErrorResponses
constructor (handler, { skipStatusCodes = defaultSkipStatusCodes, skipErrorResponses = true } = {}) {
super(handler)
this.#skipStatusCodes = skipStatusCodes
this.#skipErrorResponses = skipErrorResponses
}
/**
* Determines if decompression should be skipped based on encoding and status code
* @param {string} contentEncoding - Content-Encoding header value
* @param {number} statusCode - HTTP status code of the response
* @returns {boolean} - True if decompression should be skipped
*/
#shouldSkipDecompression (contentEncoding, statusCode) {
if (!contentEncoding || statusCode < 200) return true
if (this.#skipStatusCodes.includes(statusCode)) return true
if (this.#skipErrorResponses && statusCode >= 400) return true
return false
}
/**
* Creates a chain of decompressors for multiple content encodings
*
* @param {string} encodings - Comma-separated list of content encodings
* @returns {Array<DecompressorStream>} - Array of decompressor streams
*/
#createDecompressionChain (encodings) {
const parts = encodings.split(',')
/** @type {DecompressorStream[]} */
const decompressors = []
for (let i = parts.length - 1; i >= 0; i--) {
const encoding = parts[i].trim()
if (!encoding) continue
if (!supportedEncodings[encoding]) {
decompressors.length = 0 // Clear if unsupported encoding
return decompressors // Unsupported encoding
}
decompressors.push(supportedEncodings[encoding]())
}
return decompressors
}
/**
* Sets up event handlers for a decompressor stream using readable events
* @param {DecompressorStream} decompressor - The decompressor stream
* @param {Controller} controller - The controller to coordinate with
* @returns {void}
*/
#setupDecompressorEvents (decompressor, controller) {
decompressor.on('readable', () => {
let chunk
while ((chunk = decompressor.read()) !== null) {
const result = super.onResponseData(controller, chunk)
if (result === false) {
break
}
}
})
decompressor.on('error', (error) => {
super.onResponseError(controller, error)
})
}
/**
* Sets up event handling for a single decompressor
* @param {Controller} controller - The controller to handle events
* @returns {void}
*/
#setupSingleDecompressor (controller) {
const decompressor = this.#decompressors[0]
this.#setupDecompressorEvents(decompressor, controller)
decompressor.on('end', () => {
super.onResponseEnd(controller, {})
})
}
/**
* Sets up event handling for multiple chained decompressors using pipeline
* @param {Controller} controller - The controller to handle events
* @returns {void}
*/
#setupMultipleDecompressors (controller) {
const lastDecompressor = this.#decompressors[this.#decompressors.length - 1]
this.#setupDecompressorEvents(lastDecompressor, controller)
this.#pipelineStream = pipeline(this.#decompressors, (err) => {
if (err) {
super.onResponseError(controller, err)
return
}
super.onResponseEnd(controller, {})
})
}
/**
* Cleans up decompressor references to prevent memory leaks
* @returns {void}
*/
#cleanupDecompressors () {
this.#decompressors.length = 0
this.#pipelineStream = null
}
/**
* @param {Controller} controller
* @param {number} statusCode
* @param {Record<string, string | string[] | undefined>} headers
* @param {string} statusMessage
* @returns {void}
*/
onResponseStart (controller, statusCode, headers, statusMessage) {
const contentEncoding = headers['content-encoding']
// If content encoding is not supported or status code is in skip list
if (this.#shouldSkipDecompression(contentEncoding, statusCode)) {
return super.onResponseStart(controller, statusCode, headers, statusMessage)
}
const decompressors = this.#createDecompressionChain(contentEncoding.toLowerCase())
if (decompressors.length === 0) {
this.#cleanupDecompressors()
return super.onResponseStart(controller, statusCode, headers, statusMessage)
}
this.#decompressors = decompressors
// Remove compression headers since we're decompressing
const { 'content-encoding': _, 'content-length': __, ...newHeaders } = headers
if (this.#decompressors.length === 1) {
this.#setupSingleDecompressor(controller)
} else {
this.#setupMultipleDecompressors(controller)
}
super.onResponseStart(controller, statusCode, newHeaders, statusMessage)
}
/**
* @param {Controller} controller
* @param {Buffer} chunk
* @returns {void}
*/
onResponseData (controller, chunk) {
if (this.#decompressors.length > 0) {
this.#decompressors[0].write(chunk)
return
}
super.onResponseData(controller, chunk)
}
/**
* @param {Controller} controller
* @param {Record<string, string | string[]> | undefined} trailers
* @returns {void}
*/
onResponseEnd (controller, trailers) {
if (this.#decompressors.length > 0) {
this.#decompressors[0].end()
this.#cleanupDecompressors()
return
}
super.onResponseEnd(controller, trailers)
}
/**
* @param {Controller} controller
* @param {Error} err
* @returns {void}
*/
onResponseError (controller, err) {
if (this.#decompressors.length > 0) {
for (const decompressor of this.#decompressors) {
decompressor.destroy(err)
}
this.#cleanupDecompressors()
}
super.onResponseError(controller, err)
}
}
/**
* Creates a decompression interceptor for HTTP responses
* @param {DecompressHandlerOptions} [options] - Options for the interceptor
* @returns {Function} - Interceptor function
*/
function createDecompressInterceptor (options = {}) {
// Emit experimental warning only once
if (!warningEmitted) {
process.emitWarning(
'DecompressInterceptor is experimental and subject to change',
'ExperimentalWarning'
)
warningEmitted = true
}
return (dispatch) => {
return (opts, handler) => {
const decompressHandler = new DecompressHandler(handler, options)
return dispatch(opts, decompressHandler)
}
}
}
module.exports = createDecompressInterceptor
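
The new file implements an experimental response-decompression interceptor built on `DecoratorHandler`. A hedged usage sketch, assuming it is exposed as `interceptors.decompress` alongside undici's other interceptors (the export wiring is not part of this file):

```js
'use strict'
// Assumes the interceptor is exported as interceptors.decompress;
// that wiring is not shown in this file.
const { Agent, interceptors, request } = require('undici')

const dispatcher = new Agent().compose(
  interceptors.decompress({ skipStatusCodes: [204, 304], skipErrorResponses: true })
)

async function main () {
  const { statusCode, body } = await request('https://example.com/', { dispatcher })
  // The handler strips content-encoding/content-length and emits decoded bytes.
  console.log(statusCode, (await body.text()).slice(0, 60))
}

main().catch(console.error)
```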

View File

@ -15,7 +15,7 @@ export declare const H_METHOD_MAP: {
[k: string]: number;
};
export declare const STATUSES_HTTP: number[];
export type CharList = Array<string | number>;
export type CharList = (string | number)[];
export declare const ALPHA: CharList;
export declare const NUM_MAP: {
0: number;
@ -95,3 +95,101 @@ export declare const SPECIAL_HEADERS: {
'transfer-encoding': number;
upgrade: number;
};
declare const _default: {
ERROR: IntDict;
TYPE: IntDict;
FLAGS: IntDict;
LENIENT_FLAGS: IntDict;
METHODS: IntDict;
STATUSES: IntDict;
FINISH: IntDict;
HEADER_STATE: IntDict;
ALPHA: CharList;
NUM_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
HEX_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
A: number;
B: number;
C: number;
D: number;
E: number;
F: number;
a: number;
b: number;
c: number;
d: number;
e: number;
f: number;
};
NUM: CharList;
ALPHANUM: CharList;
MARK: CharList;
USERINFO_CHARS: CharList;
URL_CHAR: CharList;
HEX: CharList;
TOKEN: CharList;
HEADER_CHARS: CharList;
CONNECTION_TOKEN_CHARS: CharList;
QUOTED_STRING: CharList;
HTAB_SP_VCHAR_OBS_TEXT: CharList;
MAJOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
MINOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
SPECIAL_HEADERS: {
connection: number;
'content-length': number;
'proxy-connection': number;
'transfer-encoding': number;
upgrade: number;
};
METHODS_HTTP: number[];
METHODS_ICE: number[];
METHODS_RTSP: number[];
METHOD_MAP: IntDict;
H_METHOD_MAP: {
[k: string]: number;
};
STATUSES_HTTP: number[];
};
export default _default;

View File

@ -40,6 +40,7 @@ exports.ERROR = {
CB_CHUNK_EXTENSION_NAME_COMPLETE: 34,
CB_CHUNK_EXTENSION_VALUE_COMPLETE: 35,
CB_RESET: 31,
CB_PROTOCOL_COMPLETE: 38,
};
exports.TYPE = {
BOTH: 0, // default
@ -495,4 +496,36 @@ exports.SPECIAL_HEADERS = {
'transfer-encoding': exports.HEADER_STATE.TRANSFER_ENCODING,
'upgrade': exports.HEADER_STATE.UPGRADE,
};
//# sourceMappingURL=constants.js.map
exports.default = {
ERROR: exports.ERROR,
TYPE: exports.TYPE,
FLAGS: exports.FLAGS,
LENIENT_FLAGS: exports.LENIENT_FLAGS,
METHODS: exports.METHODS,
STATUSES: exports.STATUSES,
FINISH: exports.FINISH,
HEADER_STATE: exports.HEADER_STATE,
ALPHA: exports.ALPHA,
NUM_MAP: exports.NUM_MAP,
HEX_MAP: exports.HEX_MAP,
NUM: exports.NUM,
ALPHANUM: exports.ALPHANUM,
MARK: exports.MARK,
USERINFO_CHARS: exports.USERINFO_CHARS,
URL_CHAR: exports.URL_CHAR,
HEX: exports.HEX,
TOKEN: exports.TOKEN,
HEADER_CHARS: exports.HEADER_CHARS,
CONNECTION_TOKEN_CHARS: exports.CONNECTION_TOKEN_CHARS,
QUOTED_STRING: exports.QUOTED_STRING,
HTAB_SP_VCHAR_OBS_TEXT: exports.HTAB_SP_VCHAR_OBS_TEXT,
MAJOR: exports.MAJOR,
MINOR: exports.MINOR,
SPECIAL_HEADERS: exports.SPECIAL_HEADERS,
METHODS_HTTP: exports.METHODS_HTTP,
METHODS_ICE: exports.METHODS_ICE,
METHODS_RTSP: exports.METHODS_RTSP,
METHOD_MAP: exports.METHOD_MAP,
H_METHOD_MAP: exports.H_METHOD_MAP,
STATUSES_HTTP: exports.STATUSES_HTTP,
};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@ -1,2 +1,2 @@
import { IntDict } from './constants';
export declare function enumToMap(obj: IntDict, filter?: ReadonlyArray<number>, exceptions?: ReadonlyArray<number>): IntDict;
import type { IntDict } from './constants';
export declare function enumToMap(obj: IntDict, filter?: readonly number[], exceptions?: readonly number[]): IntDict;

View File

@ -1,15 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.enumToMap = void 0;
exports.enumToMap = enumToMap;
function enumToMap(obj, filter = [], exceptions = []) {
var _a, _b;
const emptyFilter = ((_a = filter === null || filter === void 0 ? void 0 : filter.length) !== null && _a !== void 0 ? _a : 0) === 0;
const emptyExceptions = ((_b = exceptions === null || exceptions === void 0 ? void 0 : exceptions.length) !== null && _b !== void 0 ? _b : 0) === 0;
const emptyFilter = (filter?.length ?? 0) === 0;
const emptyExceptions = (exceptions?.length ?? 0) === 0;
return Object.fromEntries(Object.entries(obj).filter(([, value]) => {
return (typeof value === 'number' &&
(emptyFilter || filter.includes(value)) &&
(emptyExceptions || !exceptions.includes(value)));
}));
}
exports.enumToMap = enumToMap;
//# sourceMappingURL=utils.js.map
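
The `var _a` chains in the old output are TypeScript's downleveling of optional chaining and nullish coalescing; the recompiled file targets a runtime that has both natively. A quick equivalence check:

```js
'use strict'
// The compiled helper chain and the native syntax agree for every input:
function downleveled (filter) {
  var _a
  return ((_a = filter === null || filter === void 0 ? void 0 : filter.length) !== null && _a !== void 0 ? _a : 0) === 0
}

function native (filter) {
  return (filter?.length ?? 0) === 0
}

for (const input of [undefined, null, [], [1, 2], { length: 0 }]) {
  console.assert(downleveled(input) === native(input), String(input))
}
console.log('downleveled and native forms agree')
```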

View File

@ -1 +0,0 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/llhttp/utils.ts"],"names":[],"mappings":";;;AAEA,SAAgB,SAAS,CACvB,GAAY,EACZ,SAAgC,EAAE,EAClC,aAAoC,EAAE;;IAEtC,MAAM,WAAW,GAAG,CAAC,MAAA,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,MAAM,mCAAI,CAAC,CAAC,KAAK,CAAC,CAAC;IAChD,MAAM,eAAe,GAAG,CAAC,MAAA,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,MAAM,mCAAI,CAAC,CAAC,KAAK,CAAC,CAAC;IAExD,OAAO,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAE,AAAD,EAAG,KAAK,CAAE,EAAE,EAAE;QACnE,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;YACzB,CAAC,WAAW,IAAI,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;YACvC,CAAC,eAAe,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CACjD,CAAC;IACJ,CAAC,CAAC,CAAC,CAAC;AACN,CAAC;AAfD,8BAeC"}

View File

@ -1,8 +1,8 @@
> undici@7.14.0 build:wasm
> undici@7.16.0 build:wasm
> node build/wasm.js --docker
> docker run --rm --platform=linux/x86_64 --user 1001:118 --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/lib/llhttp,target=/home/node/build/lib/llhttp --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/build,target=/home/node/build/build --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/deps,target=/home/node/build/deps -t ghcr.io/nodejs/wasm-builder@sha256:975f391d907e42a75b8c72eb77c782181e941608687d4d8694c3e9df415a0970 node build/wasm.js
> docker run --rm --platform=linux/x86_64 --user 1001:1001 --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/lib/llhttp,target=/home/node/build/lib/llhttp --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/build,target=/home/node/build/build --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/deps,target=/home/node/build/deps -t ghcr.io/nodejs/wasm-builder@sha256:975f391d907e42a75b8c72eb77c782181e941608687d4d8694c3e9df415a0970 node build/wasm.js
alpine-baselayout-3.6.5-r0

View File

@ -29,16 +29,16 @@ const PendingInterceptorsFormatter = require('./pending-interceptors-formatter')
const { MockCallHistory } = require('./mock-call-history')
class MockAgent extends Dispatcher {
constructor (opts) {
constructor (opts = {}) {
super(opts)
const mockOptions = buildAndValidateMockOptions(opts)
this[kNetConnect] = true
this[kIsMockActive] = true
this[kMockAgentIsCallHistoryEnabled] = mockOptions?.enableCallHistory ?? false
this[kMockAgentAcceptsNonStandardSearchParameters] = mockOptions?.acceptNonStandardSearchParameters ?? false
this[kIgnoreTrailingSlash] = mockOptions?.ignoreTrailingSlash ?? false
this[kMockAgentIsCallHistoryEnabled] = mockOptions.enableCallHistory ?? false
this[kMockAgentAcceptsNonStandardSearchParameters] = mockOptions.acceptNonStandardSearchParameters ?? false
this[kIgnoreTrailingSlash] = mockOptions.ignoreTrailingSlash ?? false
// Instantiate Agent and encapsulate
if (opts?.agent && typeof opts.agent.dispatch !== 'function') {

View File

@ -2,6 +2,8 @@
const { UndiciError } = require('../core/errors')
const kMockNotMatchedError = Symbol.for('undici.error.UND_MOCK_ERR_MOCK_NOT_MATCHED')
/**
* The request does not match any registered mock dispatches.
*/
@ -12,6 +14,14 @@ class MockNotMatchedError extends UndiciError {
this.message = message || 'The request does not match any registered mock dispatches'
this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kMockNotMatchedError] === true
}
get [kMockNotMatchedError] () {
return true
}
}
module.exports = {
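
The `Symbol.hasInstance` brand makes `instanceof MockNotMatchedError` work across duplicated copies of the class (for example, two undici installs in one dependency tree), and the mock-agent hunk further below switches an internal check to `error.code` for the same reason. A self-contained sketch of the brand-check pattern:

```js
'use strict'
const kBrand = Symbol.for('example.error.brand')

class BrandedError extends Error {
  get [kBrand] () { return true }
  static [Symbol.hasInstance] (instance) {
    return Boolean(instance && instance[kBrand] === true)
  }
}

// A "different copy" of the class, as happens with duplicated dependencies:
class OtherCopy extends Error {
  get [kBrand] () { return true }
}

console.log(new OtherCopy() instanceof BrandedError) // true: the brand matches
console.log(new Error('x') instanceof BrandedError)  // false
```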

View File

@ -367,7 +367,7 @@ function buildMockDispatch () {
try {
mockDispatch.call(this, opts, handler)
} catch (error) {
if (error instanceof MockNotMatchedError) {
if (error.code === 'UND_MOCK_ERR_MOCK_NOT_MATCHED') {
const netConnect = agent[kGetNetConnect]()
if (netConnect === false) {
throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)
@ -398,7 +398,6 @@ function checkNetConnect (netConnect, origin) {
}
function buildAndValidateMockOptions (opts) {
if (opts) {
const { agent, ...mockOptions } = opts
if ('enableCallHistory' in mockOptions && typeof mockOptions.enableCallHistory !== 'boolean') {
@ -409,8 +408,11 @@ function buildAndValidateMockOptions (opts) {
throw new InvalidArgumentError('options.acceptNonStandardSearchParameters must be a boolean')
}
return mockOptions
if ('ignoreTrailingSlash' in mockOptions && typeof mockOptions.ignoreTrailingSlash !== 'boolean') {
throw new InvalidArgumentError('options.ignoreTrailingSlash must be a boolean')
}
return mockOptions
}
module.exports = {

View File

@ -1,7 +1,8 @@
'use strict'
const {
safeHTTPMethods
safeHTTPMethods,
pathHasQueryOrFragment
} = require('../core/util')
const { serializePathWithQuery } = require('../core/util')
@ -14,12 +15,10 @@ function makeCacheKey (opts) {
throw new Error('opts.origin is undefined')
}
let fullPath
try {
fullPath = serializePathWithQuery(opts.path || '/', opts.query)
} catch (error) {
// If fails (path already has query params), use as-is
fullPath = opts.path || '/'
let fullPath = opts.path || '/'
if (opts.query && !pathHasQueryOrFragment(opts.path)) {
fullPath = serializePathWithQuery(fullPath, opts.query)
}
return {
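
`makeCacheKey` now serializes `opts.query` into the path only when the path does not already carry a query or fragment, replacing the old try/catch around `serializePathWithQuery`. A sketch of the resulting behavior (helper bodies are stand-ins, not the core/util implementations):

```js
'use strict'
// Hypothetical stand-ins for the core/util helpers used above.
function pathHasQueryOrFragment (path) {
  return path.includes('?') || path.includes('#')
}

function serializePathWithQuery (path, query) {
  return `${path}?${new URLSearchParams(query)}`
}

function makeCacheKeyPath (opts) {
  let fullPath = opts.path || '/'
  if (opts.query && !pathHasQueryOrFragment(fullPath)) {
    fullPath = serializePathWithQuery(fullPath, opts.query)
  }
  return fullPath
}

console.log(makeCacheKeyPath({ path: '/a', query: { b: 1 } }))     // /a?b=1
console.log(makeCacheKeyPath({ path: '/a?x=1', query: { b: 1 } })) // /a?x=1 (used as-is)
```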

View File

@ -1,32 +1,20 @@
'use strict'
const IMF_DAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
const IMF_SPACES = [4, 7, 11, 16, 25]
const IMF_MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
const IMF_COLONS = [19, 22]
const ASCTIME_SPACES = [3, 7, 10, 19]
const RFC850_DAYS = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']
/**
* @see https://www.rfc-editor.org/rfc/rfc9110.html#name-date-time-formats
*
* @param {string} date
* @param {Date} [now]
* @returns {Date | undefined}
*/
function parseHttpDate (date, now) {
function parseHttpDate (date) {
// Sun, 06 Nov 1994 08:49:37 GMT ; IMF-fixdate
// Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format
// Sunday, 06-Nov-94 08:49:37 GMT ; obsolete RFC 850 format
date = date.toLowerCase()
switch (date[3]) {
case ',': return parseImfDate(date)
case ' ': return parseAscTimeDate(date)
default: return parseRfc850Date(date, now)
default: return parseRfc850Date(date)
}
}
@ -37,69 +25,207 @@ function parseHttpDate (date, now) {
* @returns {Date | undefined}
*/
function parseImfDate (date) {
if (date.length !== 29) {
if (
date.length !== 29 ||
date[4] !== ' ' ||
date[7] !== ' ' ||
date[11] !== ' ' ||
date[16] !== ' ' ||
date[19] !== ':' ||
date[22] !== ':' ||
date[25] !== ' ' ||
date[26] !== 'G' ||
date[27] !== 'M' ||
date[28] !== 'T'
) {
return undefined
}
if (!date.endsWith('gmt')) {
// Unsupported timezone
let weekday = -1
if (date[0] === 'S' && date[1] === 'u' && date[2] === 'n') { // Sunday
weekday = 0
} else if (date[0] === 'M' && date[1] === 'o' && date[2] === 'n') { // Monday
weekday = 1
} else if (date[0] === 'T' && date[1] === 'u' && date[2] === 'e') { // Tuesday
weekday = 2
} else if (date[0] === 'W' && date[1] === 'e' && date[2] === 'd') { // Wednesday
weekday = 3
} else if (date[0] === 'T' && date[1] === 'h' && date[2] === 'u') { // Thursday
weekday = 4
} else if (date[0] === 'F' && date[1] === 'r' && date[2] === 'i') { // Friday
weekday = 5
} else if (date[0] === 'S' && date[1] === 'a' && date[2] === 't') { // Saturday
weekday = 6
} else {
return undefined // Not a valid day of the week
}
let day = 0
if (date[5] === '0') {
// Zero-padded single-digit day, e.g. "Sun, 06 Nov 1994 08:49:37 GMT"
const code = date.charCodeAt(6)
if (code < 49 || code > 57) {
return undefined // Not a digit
}
day = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(5)
if (code1 < 49 || code1 > 51) {
return undefined // Not a digit between 1 and 3
}
const code2 = date.charCodeAt(6)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
day = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
let monthIdx = -1
if (
(date[8] === 'J' && date[9] === 'a' && date[10] === 'n')
) {
monthIdx = 0 // Jan
} else if (
(date[8] === 'F' && date[9] === 'e' && date[10] === 'b')
) {
monthIdx = 1 // Feb
} else if (
(date[8] === 'M' && date[9] === 'a')
) {
if (date[10] === 'r') {
monthIdx = 2 // Mar
} else if (date[10] === 'y') {
monthIdx = 4 // May
} else {
return undefined // Invalid month
}
} else if (
(date[8] === 'J')
) {
if (date[9] === 'a' && date[10] === 'n') {
monthIdx = 0 // Jan
} else if (date[9] === 'u') {
if (date[10] === 'n') {
monthIdx = 5 // Jun
} else if (date[10] === 'l') {
monthIdx = 6 // Jul
} else {
return undefined // Invalid month
}
} else {
return undefined // Invalid month
}
} else if (
(date[8] === 'A')
) {
if (date[9] === 'p' && date[10] === 'r') {
monthIdx = 3 // Apr
} else if (date[9] === 'u' && date[10] === 'g') {
monthIdx = 7 // Aug
} else {
return undefined // Invalid month
}
} else if (
(date[8] === 'S' && date[9] === 'e' && date[10] === 'p')
) {
monthIdx = 8 // Sep
} else if (
(date[8] === 'O' && date[9] === 'c' && date[10] === 't')
) {
monthIdx = 9 // Oct
} else if (
(date[8] === 'N' && date[9] === 'o' && date[10] === 'v')
) {
monthIdx = 10 // Nov
} else if (
(date[8] === 'D' && date[9] === 'e' && date[10] === 'c')
) {
monthIdx = 11 // Dec
} else {
// Not a valid month
return undefined
}
for (const spaceInx of IMF_SPACES) {
if (date[spaceInx] !== ' ') {
return undefined
const yearDigit1 = date.charCodeAt(12)
if (yearDigit1 < 48 || yearDigit1 > 57) {
return undefined // Not a digit
}
const yearDigit2 = date.charCodeAt(13)
if (yearDigit2 < 48 || yearDigit2 > 57) {
return undefined // Not a digit
}
const yearDigit3 = date.charCodeAt(14)
if (yearDigit3 < 48 || yearDigit3 > 57) {
return undefined // Not a digit
}
const yearDigit4 = date.charCodeAt(15)
if (yearDigit4 < 48 || yearDigit4 > 57) {
return undefined // Not a digit
}
const year = (yearDigit1 - 48) * 1000 + (yearDigit2 - 48) * 100 + (yearDigit3 - 48) * 10 + (yearDigit4 - 48)
let hour = 0
if (date[17] === '0') {
const code = date.charCodeAt(18)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
hour = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(17)
if (code1 < 48 || code1 > 50) {
return undefined // Not a digit between 0 and 2
}
const code2 = date.charCodeAt(18)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
if (code1 === 50 && code2 > 51) {
return undefined // Hour cannot be greater than 23
}
hour = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
for (const colonIdx of IMF_COLONS) {
if (date[colonIdx] !== ':') {
return undefined
let minute = 0
if (date[20] === '0') {
const code = date.charCodeAt(21)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
minute = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(20)
if (code1 < 48 || code1 > 53) {
return undefined // Not a digit between 0 and 5
}
const code2 = date.charCodeAt(21)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
minute = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
const dayName = date.substring(0, 3)
if (!IMF_DAYS.includes(dayName)) {
return undefined
let second = 0
if (date[23] === '0') {
const code = date.charCodeAt(24)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
second = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(23)
if (code1 < 48 || code1 > 53) {
return undefined // Not a digit between 0 and 5
}
const code2 = date.charCodeAt(24)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
second = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
const dayString = date.substring(5, 7)
const day = Number.parseInt(dayString)
if (isNaN(day) || (day < 10 && dayString[0] !== '0')) {
// Not a number, 0, or it's less than 10 and didn't start with a 0
return undefined
}
const month = date.substring(8, 11)
const monthIdx = IMF_MONTHS.indexOf(month)
if (monthIdx === -1) {
return undefined
}
const year = Number.parseInt(date.substring(12, 16))
if (isNaN(year)) {
return undefined
}
const hourString = date.substring(17, 19)
const hour = Number.parseInt(hourString)
if (isNaN(hour) || (hour < 10 && hourString[0] !== '0')) {
return undefined
}
const minuteString = date.substring(20, 22)
const minute = Number.parseInt(minuteString)
if (isNaN(minute) || (minute < 10 && minuteString[0] !== '0')) {
return undefined
}
const secondString = date.substring(23, 25)
const second = Number.parseInt(secondString)
if (isNaN(second) || (second < 10 && secondString[0] !== '0')) {
return undefined
}
return new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
const result = new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
return result.getUTCDay() === weekday ? result : undefined
}
/**
@ -111,147 +237,415 @@ function parseImfDate (date) {
function parseAscTimeDate (date) {
// This is assumed to be in UTC
if (date.length !== 24) {
if (
date.length !== 24 ||
date[7] !== ' ' ||
date[10] !== ' ' ||
date[19] !== ' '
) {
return undefined
}
for (const spaceIdx of ASCTIME_SPACES) {
if (date[spaceIdx] !== ' ') {
return undefined
}
let weekday = -1
if (date[0] === 'S' && date[1] === 'u' && date[2] === 'n') { // Sunday
weekday = 0
} else if (date[0] === 'M' && date[1] === 'o' && date[2] === 'n') { // Monday
weekday = 1
} else if (date[0] === 'T' && date[1] === 'u' && date[2] === 'e') { // Tuesday
weekday = 2
} else if (date[0] === 'W' && date[1] === 'e' && date[2] === 'd') { // Wednesday
weekday = 3
} else if (date[0] === 'T' && date[1] === 'h' && date[2] === 'u') { // Thursday
weekday = 4
} else if (date[0] === 'F' && date[1] === 'r' && date[2] === 'i') { // Friday
weekday = 5
} else if (date[0] === 'S' && date[1] === 'a' && date[2] === 't') { // Saturday
weekday = 6
} else {
return undefined // Not a valid day of the week
}
const dayName = date.substring(0, 3)
if (!IMF_DAYS.includes(dayName)) {
let monthIdx = -1
if (
(date[4] === 'J' && date[5] === 'a' && date[6] === 'n')
) {
monthIdx = 0 // Jan
} else if (
(date[4] === 'F' && date[5] === 'e' && date[6] === 'b')
) {
monthIdx = 1 // Feb
} else if (
(date[4] === 'M' && date[5] === 'a')
) {
if (date[6] === 'r') {
monthIdx = 2 // Mar
} else if (date[6] === 'y') {
monthIdx = 4 // May
} else {
return undefined // Invalid month
}
} else if (
(date[4] === 'J')
) {
if (date[5] === 'a' && date[6] === 'n') {
monthIdx = 0 // Jan
} else if (date[5] === 'u') {
if (date[6] === 'n') {
monthIdx = 5 // Jun
} else if (date[6] === 'l') {
monthIdx = 6 // Jul
} else {
return undefined // Invalid month
}
} else {
return undefined // Invalid month
}
} else if (
(date[4] === 'A')
) {
if (date[5] === 'p' && date[6] === 'r') {
monthIdx = 3 // Apr
} else if (date[5] === 'u' && date[6] === 'g') {
monthIdx = 7 // Aug
} else {
return undefined // Invalid month
}
} else if (
(date[4] === 'S' && date[5] === 'e' && date[6] === 'p')
) {
monthIdx = 8 // Sep
} else if (
(date[4] === 'O' && date[5] === 'c' && date[6] === 't')
) {
monthIdx = 9 // Oct
} else if (
(date[4] === 'N' && date[5] === 'o' && date[6] === 'v')
) {
monthIdx = 10 // Nov
} else if (
(date[4] === 'D' && date[5] === 'e' && date[6] === 'c')
) {
monthIdx = 11 // Dec
} else {
// Not a valid month
return undefined
}
const month = date.substring(4, 7)
const monthIdx = IMF_MONTHS.indexOf(month)
if (monthIdx === -1) {
return undefined
let day = 0
if (date[8] === ' ') {
// Single digit day, e.g. "Sun Nov 6 08:49:37 1994"
const code = date.charCodeAt(9)
if (code < 49 || code > 57) {
return undefined // Not a digit
}
day = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(8)
if (code1 < 49 || code1 > 51) {
return undefined // Not a digit between 1 and 3
}
const code2 = date.charCodeAt(9)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
day = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
const dayString = date.substring(8, 10)
const day = Number.parseInt(dayString)
if (isNaN(day) || (day < 10 && dayString[0] !== ' ')) {
return undefined
let hour = 0
if (date[11] === '0') {
const code = date.charCodeAt(12)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
hour = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(11)
if (code1 < 48 || code1 > 50) {
return undefined // Not a digit between 0 and 2
}
const code2 = date.charCodeAt(12)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
if (code1 === 50 && code2 > 51) {
return undefined // Hour cannot be greater than 23
}
hour = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
const hourString = date.substring(11, 13)
const hour = Number.parseInt(hourString)
if (isNaN(hour) || (hour < 10 && hourString[0] !== '0')) {
return undefined
let minute = 0
if (date[14] === '0') {
const code = date.charCodeAt(15)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
minute = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(14)
if (code1 < 48 || code1 > 53) {
return undefined // Not a digit between 0 and 5
}
const code2 = date.charCodeAt(15)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
minute = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
const minuteString = date.substring(14, 16)
const minute = Number.parseInt(minuteString)
if (isNaN(minute) || (minute < 10 && minuteString[0] !== '0')) {
return undefined
let second = 0
if (date[17] === '0') {
const code = date.charCodeAt(18)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
second = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(17)
if (code1 < 48 || code1 > 53) {
return undefined // Not a digit between 0 and 5
}
const code2 = date.charCodeAt(18)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
second = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
const secondString = date.substring(17, 19)
const second = Number.parseInt(secondString)
if (isNaN(second) || (second < 10 && secondString[0] !== '0')) {
return undefined
const yearDigit1 = date.charCodeAt(20)
if (yearDigit1 < 48 || yearDigit1 > 57) {
return undefined // Not a digit
}
const year = Number.parseInt(date.substring(20, 24))
if (isNaN(year)) {
return undefined
const yearDigit2 = date.charCodeAt(21)
if (yearDigit2 < 48 || yearDigit2 > 57) {
return undefined // Not a digit
}
const yearDigit3 = date.charCodeAt(22)
if (yearDigit3 < 48 || yearDigit3 > 57) {
return undefined // Not a digit
}
const yearDigit4 = date.charCodeAt(23)
if (yearDigit4 < 48 || yearDigit4 > 57) {
return undefined // Not a digit
}
const year = (yearDigit1 - 48) * 1000 + (yearDigit2 - 48) * 100 + (yearDigit3 - 48) * 10 + (yearDigit4 - 48)
return new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
const result = new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
return result.getUTCDay() === weekday ? result : undefined
}
/**
* @see https://httpwg.org/specs/rfc9110.html#obsolete.date.formats
*
* @param {string} date
* @param {Date} [now]
* @returns {Date | undefined}
*/
function parseRfc850Date (date, now = new Date()) {
if (!date.endsWith('gmt')) {
// Unsupported timezone
return undefined
}
function parseRfc850Date (date) {
let commaIndex = -1
const commaIndex = date.indexOf(',')
if (commaIndex === -1) {
return undefined
let weekday = -1
if (date[0] === 'S') {
if (date[1] === 'u' && date[2] === 'n' && date[3] === 'd' && date[4] === 'a' && date[5] === 'y') {
weekday = 0 // Sunday
commaIndex = 6
} else if (date[1] === 'a' && date[2] === 't' && date[3] === 'u' && date[4] === 'r' && date[5] === 'd' && date[6] === 'a' && date[7] === 'y') {
weekday = 6 // Saturday
commaIndex = 8
}
if ((date.length - commaIndex - 1) !== 23) {
return undefined
} else if (date[0] === 'M' && date[1] === 'o' && date[2] === 'n' && date[3] === 'd' && date[4] === 'a' && date[5] === 'y') {
weekday = 1 // Monday
commaIndex = 6
} else if (date[0] === 'T') {
if (date[1] === 'u' && date[2] === 'e' && date[3] === 's' && date[4] === 'd' && date[5] === 'a' && date[6] === 'y') {
weekday = 2 // Tuesday
commaIndex = 7
} else if (date[1] === 'h' && date[2] === 'u' && date[3] === 'r' && date[4] === 's' && date[5] === 'd' && date[6] === 'a' && date[7] === 'y') {
weekday = 4 // Thursday
commaIndex = 8
}
const dayName = date.substring(0, commaIndex)
if (!RFC850_DAYS.includes(dayName)) {
} else if (date[0] === 'W' && date[1] === 'e' && date[2] === 'd' && date[3] === 'n' && date[4] === 'e' && date[5] === 's' && date[6] === 'd' && date[7] === 'a' && date[8] === 'y') {
weekday = 3 // Wednesday
commaIndex = 9
} else if (date[0] === 'F' && date[1] === 'r' && date[2] === 'i' && date[3] === 'd' && date[4] === 'a' && date[5] === 'y') {
weekday = 5 // Friday
commaIndex = 6
} else {
// Not a valid day name
return undefined
}
if (
date[commaIndex] !== ',' ||
(date.length - commaIndex - 1) !== 23 ||
date[commaIndex + 1] !== ' ' ||
date[commaIndex + 4] !== '-' ||
date[commaIndex + 8] !== '-' ||
date[commaIndex + 11] !== ' ' ||
date[commaIndex + 14] !== ':' ||
date[commaIndex + 17] !== ':' ||
date[commaIndex + 20] !== ' '
date[commaIndex + 20] !== ' ' ||
date[commaIndex + 21] !== 'G' ||
date[commaIndex + 22] !== 'M' ||
date[commaIndex + 23] !== 'T'
) {
return undefined
}
const dayString = date.substring(commaIndex + 2, commaIndex + 4)
const day = Number.parseInt(dayString)
if (isNaN(day) || (day < 10 && dayString[0] !== '0')) {
// Not a number, or it's less than 10 and didn't start with a 0
return undefined
let day = 0
if (date[commaIndex + 2] === '0') {
// Zero-padded single-digit day, e.g. "Sunday, 06-Nov-94 08:49:37 GMT"
const code = date.charCodeAt(commaIndex + 3)
if (code < 49 || code > 57) {
return undefined // Not a digit
}
const month = date.substring(commaIndex + 5, commaIndex + 8)
const monthIdx = IMF_MONTHS.indexOf(month)
if (monthIdx === -1) {
return undefined
}
// As of this point year is just the decade (i.e. 94)
let year = Number.parseInt(date.substring(commaIndex + 9, commaIndex + 11))
if (isNaN(year)) {
return undefined
}
const currentYear = now.getUTCFullYear()
const currentDecade = currentYear % 100
const currentCentury = Math.floor(currentYear / 100)
if (year > currentDecade && year - currentDecade >= 50) {
// Over 50 years in future, go to previous century
year += (currentCentury - 1) * 100
day = code - 48 // Convert ASCII code to number
} else {
year += currentCentury * 100
const code1 = date.charCodeAt(commaIndex + 2)
if (code1 < 49 || code1 > 51) {
return undefined // Not a digit between 1 and 3
}
const code2 = date.charCodeAt(commaIndex + 3)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
day = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
const hourString = date.substring(commaIndex + 12, commaIndex + 14)
const hour = Number.parseInt(hourString)
if (isNaN(hour) || (hour < 10 && hourString[0] !== '0')) {
let monthIdx = -1
if (
(date[commaIndex + 5] === 'J' && date[commaIndex + 6] === 'a' && date[commaIndex + 7] === 'n')
) {
monthIdx = 0 // Jan
} else if (
(date[commaIndex + 5] === 'F' && date[commaIndex + 6] === 'e' && date[commaIndex + 7] === 'b')
) {
monthIdx = 1 // Feb
} else if (
(date[commaIndex + 5] === 'M' && date[commaIndex + 6] === 'a' && date[commaIndex + 7] === 'r')
) {
monthIdx = 2 // Mar
} else if (
(date[commaIndex + 5] === 'A' && date[commaIndex + 6] === 'p' && date[commaIndex + 7] === 'r')
) {
monthIdx = 3 // Apr
} else if (
(date[commaIndex + 5] === 'M' && date[commaIndex + 6] === 'a' && date[commaIndex + 7] === 'y')
) {
monthIdx = 4 // May
} else if (
(date[commaIndex + 5] === 'J' && date[commaIndex + 6] === 'u' && date[commaIndex + 7] === 'n')
) {
monthIdx = 5 // Jun
} else if (
(date[commaIndex + 5] === 'J' && date[commaIndex + 6] === 'u' && date[commaIndex + 7] === 'l')
) {
monthIdx = 6 // Jul
} else if (
(date[commaIndex + 5] === 'A' && date[commaIndex + 6] === 'u' && date[commaIndex + 7] === 'g')
) {
monthIdx = 7 // Aug
} else if (
(date[commaIndex + 5] === 'S' && date[commaIndex + 6] === 'e' && date[commaIndex + 7] === 'p')
) {
monthIdx = 8 // Sep
} else if (
(date[commaIndex + 5] === 'O' && date[commaIndex + 6] === 'c' && date[commaIndex + 7] === 't')
) {
monthIdx = 9 // Oct
} else if (
(date[commaIndex + 5] === 'N' && date[commaIndex + 6] === 'o' && date[commaIndex + 7] === 'v')
) {
monthIdx = 10 // Nov
} else if (
(date[commaIndex + 5] === 'D' && date[commaIndex + 6] === 'e' && date[commaIndex + 7] === 'c')
) {
monthIdx = 11 // Dec
} else {
// Not a valid month
return undefined
}
const minuteString = date.substring(commaIndex + 15, commaIndex + 17)
const minute = Number.parseInt(minuteString)
if (isNaN(minute) || (minute < 10 && minuteString[0] !== '0')) {
return undefined
const yearDigit1 = date.charCodeAt(commaIndex + 9)
if (yearDigit1 < 48 || yearDigit1 > 57) {
return undefined // Not a digit
}
const yearDigit2 = date.charCodeAt(commaIndex + 10)
if (yearDigit2 < 48 || yearDigit2 > 57) {
return undefined // Not a digit
}
const secondString = date.substring(commaIndex + 18, commaIndex + 20)
const second = Number.parseInt(secondString)
if (isNaN(second) || (second < 10 && secondString[0] !== '0')) {
return undefined
let year = (yearDigit1 - 48) * 10 + (yearDigit2 - 48) // Convert ASCII codes to number
// RFC 6265 states that the year is in the range 1970-2069.
// @see https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.1
//
// 3. If the year-value is greater than or equal to 70 and less than or
// equal to 99, increment the year-value by 1900.
// 4. If the year-value is greater than or equal to 0 and less than or
// equal to 69, increment the year-value by 2000.
year += year < 70 ? 2000 : 1900
let hour = 0
if (date[commaIndex + 12] === '0') {
const code = date.charCodeAt(commaIndex + 13)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
hour = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(commaIndex + 12)
if (code1 < 48 || code1 > 50) {
return undefined // Not a digit between 0 and 2
}
const code2 = date.charCodeAt(commaIndex + 13)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
if (code1 === 50 && code2 > 51) {
return undefined // Hour cannot be greater than 23
}
hour = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
return new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
let minute = 0
if (date[commaIndex + 15] === '0') {
const code = date.charCodeAt(commaIndex + 16)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
minute = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(commaIndex + 15)
if (code1 < 48 || code1 > 53) {
return undefined // Not a digit between 0 and 5
}
const code2 = date.charCodeAt(commaIndex + 16)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
minute = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
let second = 0
if (date[commaIndex + 18] === '0') {
const code = date.charCodeAt(commaIndex + 19)
if (code < 48 || code > 57) {
return undefined // Not a digit
}
second = code - 48 // Convert ASCII code to number
} else {
const code1 = date.charCodeAt(commaIndex + 18)
if (code1 < 48 || code1 > 53) {
return undefined // Not a digit between 0 and 5
}
const code2 = date.charCodeAt(commaIndex + 19)
if (code2 < 48 || code2 > 57) {
return undefined // Not a digit
}
second = (code1 - 48) * 10 + (code2 - 48) // Convert ASCII codes to number
}
const result = new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
return result.getUTCDay() === weekday ? result : undefined
}
module.exports = {
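
The rewritten parser dispatches on the fourth character of the date string: `,` selects IMF-fixdate, a space selects asctime, and anything else falls through to RFC 850. It also now rejects strings whose weekday name disagrees with the computed date, and widens two-digit RFC 850 years per RFC 6265 (70..99 map to 19xx, 00..69 to 20xx). A few illustrative checks using plain `Date` (illustration only, not the undici API):

```js
'use strict'
// The three shapes accepted, per RFC 9110 (dispatch is on date[3]):
//   'Sun, 06 Nov 1994 08:49:37 GMT'   date[3] === ','  -> parseImfDate
//   'Sun Nov  6 08:49:37 1994'        date[3] === ' '  -> parseAscTimeDate
//   'Sunday, 06-Nov-94 08:49:37 GMT'  otherwise        -> parseRfc850Date

// IMF-fixdate is also what Date.prototype.toUTCString produces:
console.log(new Date('Sun, 06 Nov 1994 08:49:37 GMT').toISOString()) // 1994-11-06T08:49:37.000Z

// RFC 6265 two-digit year widening used for the RFC 850 form:
const widen = (yy) => yy + (yy < 70 ? 2000 : 1900)
console.log(widen(94), widen(69)) // 1994 2069

// Weekday cross-check, as in the new code: 06 Nov 1994 really was a Sunday,
// so the parsed result is kept; a mismatched day name now returns undefined.
console.log(new Date(Date.UTC(1994, 10, 6)).getUTCDay()) // 0 (Sunday)
```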

View File

@ -186,7 +186,7 @@ webidl.converters.Cookie = webidl.dictionaryConverter([
{
converter: webidl.sequenceConverter(webidl.converters.DOMString),
key: 'unparsed',
defaultValue: () => new Array(0)
defaultValue: () => []
}
])

View File

@ -4,7 +4,7 @@ const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
const { isCTLExcludingHtab } = require('./util')
const { collectASequenceOfCodePointsFast } = require('../fetch/data-url')
const assert = require('node:assert')
const { unescape } = require('node:querystring')
const { unescape: qsUnescape } = require('node:querystring')
/**
* @description Parses the field-value attributes of a set-cookie header string.
@ -82,7 +82,7 @@ function parseSetCookie (header) {
// store arbitrary data in a cookie-value SHOULD encode that data, for
// example, using Base64 [RFC4648].
return {
name, value: unescape(value), ...parseUnparsedAttributes(unparsedAttributes)
name, value: qsUnescape(value), ...parseUnparsedAttributes(unparsedAttributes)
}
}

View File

@ -236,7 +236,7 @@ class EventSourceStream extends Transform {
this.buffer = this.buffer.subarray(this.pos + 1)
this.pos = 0
if (
this.event.data !== undefined || this.event.event || this.event.id || this.event.retry) {
this.event.data !== undefined || this.event.event || this.event.id !== undefined || this.event.retry) {
this.processEvent(this.event)
}
this.clearEvent()
@ -367,7 +367,7 @@ class EventSourceStream extends Transform {
this.state.reconnectionTime = parseInt(event.retry, 10)
}
if (event.id && isValidLastEventId(event.id)) {
if (event.id !== undefined && isValidLastEventId(event.id)) {
this.state.lastEventId = event.id
}

View File

@ -8,7 +8,6 @@ const { EventSourceStream } = require('./eventsource-stream')
const { parseMIMEType } = require('../fetch/data-url')
const { createFastMessageEvent } = require('../websocket/events')
const { isNetworkError } = require('../fetch/response')
const { delay } = require('./util')
const { kEnumerableProperty } = require('../../core/util')
const { environmentSettingsObject } = require('../fetch/util')
@ -318,9 +317,9 @@ class EventSource extends EventTarget {
/**
* @see https://html.spec.whatwg.org/multipage/server-sent-events.html#sse-processing-model
* @returns {Promise<void>}
* @returns {void}
*/
async #reconnect () {
#reconnect () {
// When a user agent is to reestablish the connection, the user agent must
// run the following steps. These steps are run in parallel, not as part of
// a task. (The tasks that it queues, of course, are run like normal tasks
@ -338,8 +337,7 @@ class EventSource extends EventTarget {
this.dispatchEvent(new Event('error'))
// 2. Wait a delay equal to the reconnection time of the event source.
await delay(this.#state.reconnectionTime)
setTimeout(() => {
// 5. Queue a task to run the following steps:
// 1. If the EventSource object's readyState attribute is not set to
@ -359,6 +357,7 @@ class EventSource extends EventTarget {
// 4. Fetch request and process the response obtained in this fashion, if any, as described earlier in this section.
this.#connect()
}, this.#state.reconnectionTime)?.unref()
}
/**
@ -383,9 +382,11 @@ class EventSource extends EventTarget {
this.removeEventListener('open', this.#events.open)
}
if (typeof fn === 'function') {
const listener = webidl.converters.EventHandlerNonNull(fn)
if (listener !== null) {
this.addEventListener('open', listener)
this.#events.open = fn
this.addEventListener('open', fn)
} else {
this.#events.open = null
}
@ -400,9 +401,11 @@ class EventSource extends EventTarget {
this.removeEventListener('message', this.#events.message)
}
if (typeof fn === 'function') {
const listener = webidl.converters.EventHandlerNonNull(fn)
if (listener !== null) {
this.addEventListener('message', listener)
this.#events.message = fn
this.addEventListener('message', fn)
} else {
this.#events.message = null
}
@ -417,9 +420,11 @@ class EventSource extends EventTarget {
this.removeEventListener('error', this.#events.error)
}
if (typeof fn === 'function') {
const listener = webidl.converters.EventHandlerNonNull(fn)
if (listener !== null) {
this.addEventListener('error', listener)
this.#events.error = fn
this.addEventListener('error', fn)
} else {
this.#events.error = null
}

View File

@ -23,15 +23,7 @@ function isASCIINumber (value) {
return true
}
// https://github.com/nodejs/undici/issues/2664
function delay (ms) {
return new Promise((resolve) => {
setTimeout(resolve, ms)
})
}
module.exports = {
isValidLastEventId,
isASCIINumber,
delay
isASCIINumber
}
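
With `delay()` removed, the EventSource reconnect path schedules work via `setTimeout(...).unref()` instead of awaiting a promise-wrapped timer, so a pending reconnect no longer keeps the Node.js process alive by itself. A sketch contrasting the two shapes:

```js
'use strict'
// Old shape: await a promise-wrapped timer; the pending timeout keeps
// the process alive until it fires.
function delay (ms) {
  return new Promise((resolve) => setTimeout(resolve, ms))
}

async function reconnectOld (connect, ms) {
  await delay(ms)
  connect()
}

// New shape: schedule directly and unref, so a pending reconnect does not
// pin the event loop on its own.
function reconnectNew (connect, ms) {
  setTimeout(connect, ms)?.unref()
}

reconnectNew(() => console.log('reconnect (new path)'), 10)
reconnectOld(() => console.log('reconnect (old path)'), 20)
```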

View File

@ -60,7 +60,7 @@ function extractBody (object, keepalive = false) {
// 4. Otherwise, set stream to a new ReadableStream object, and set
// up stream with byte reading support.
stream = new ReadableStream({
async pull (controller) {
pull (controller) {
const buffer = typeof source === 'string' ? textEncoder.encode(source) : source
if (buffer.byteLength) {
@ -110,22 +110,16 @@ function extractBody (object, keepalive = false) {
// Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
type = 'application/x-www-form-urlencoded;charset=UTF-8'
} else if (isArrayBuffer(object)) {
// BufferSource/ArrayBuffer
// Set source to a copy of the bytes held by object.
source = new Uint8Array(object.slice())
} else if (ArrayBuffer.isView(object)) {
// BufferSource/ArrayBufferView
// Set source to a copy of the bytes held by object.
source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
} else if (webidl.is.BufferSource(object)) {
source = isArrayBuffer(object)
? new Uint8Array(object.slice())
: new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
} else if (webidl.is.FormData(object)) {
const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}`
const prefix = `--${boundary}\r\nContent-Disposition: form-data`
/*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
const escape = (str) =>
const formdataEscape = (str) =>
str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
@ -143,13 +137,13 @@ function extractBody (object, keepalive = false) {
for (const [name, value] of object) {
if (typeof value === 'string') {
const chunk = textEncoder.encode(prefix +
`; name="${escape(normalizeLinefeeds(name))}"` +
`; name="${formdataEscape(normalizeLinefeeds(name))}"` +
`\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
blobParts.push(chunk)
length += chunk.byteLength
} else {
const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` +
(value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' +
const chunk = textEncoder.encode(`${prefix}; name="${formdataEscape(normalizeLinefeeds(name))}"` +
(value.name ? `; filename="${formdataEscape(value.name)}"` : '') + '\r\n' +
`Content-Type: ${
value.type || 'application/octet-stream'
}\r\n\r\n`)
@ -320,12 +314,6 @@ function cloneBody (body) {
}
}
function throwIfAborted (state) {
if (state.aborted) {
throw new DOMException('The operation was aborted.', 'AbortError')
}
}
function bodyMixinMethods (instance, getInternalState) {
const methods = {
blob () {
@ -443,24 +431,30 @@ function mixinBody (prototype, getInternalState) {
* @param {any} instance
* @param {(target: any) => any} getInternalState
*/
async function consumeBody (object, convertBytesToJSValue, instance, getInternalState) {
function consumeBody (object, convertBytesToJSValue, instance, getInternalState) {
try {
webidl.brandCheck(object, instance)
} catch (e) {
return Promise.reject(e)
}
const state = getInternalState(object)
// 1. If object is unusable, then return a promise rejected
// with a TypeError.
if (bodyUnusable(state)) {
throw new TypeError('Body is unusable: Body has already been read')
return Promise.reject(new TypeError('Body is unusable: Body has already been read'))
}
throwIfAborted(state)
if (state.aborted) {
return Promise.reject(new DOMException('The operation was aborted.', 'AbortError'))
}
// 2. Let promise be a new promise.
const promise = createDeferredPromise()
// 3. Let errorSteps given error be to reject promise with error.
const errorSteps = (error) => promise.reject(error)
const errorSteps = promise.reject
// 4. Let successSteps given a byte sequence data be to resolve
// promise with the result of running convertBytesToJSValue
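
`consumeBody` is no longer `async`: the brand-check, unusable-body, and aborted paths now return `Promise.reject(...)` explicitly, and `errorSteps` can alias `promise.reject` directly because the deferred's reject closes over its promise and needs no `this`. A minimal sketch of the shape (helper names hypothetical):

```js
'use strict'
// Sketch of the pattern: a non-async function that always returns a promise,
// rejecting early instead of throwing synchronously.
function createDeferredPromise () {
  let resolve, reject
  const promise = new Promise((res, rej) => { resolve = res; reject = rej })
  return { promise, resolve, reject }
}

function consume (state, convertBytesToJSValue) {
  // 1. Early failures become rejected promises, never synchronous throws.
  if (state.used) {
    return Promise.reject(new TypeError('Body is unusable'))
  }
  if (state.aborted) {
    return Promise.reject(new DOMException('The operation was aborted.', 'AbortError'))
  }

  // 2. A deferred promise; its reject closes over the promise, so it can be
  //    handed around unbound, as the new `errorSteps = promise.reject` does.
  const promise = createDeferredPromise()
  const errorSteps = promise.reject
  const successSteps = (bytes) => promise.resolve(convertBytesToJSValue(bytes))

  // Stand-in for pumping the body stream:
  queueMicrotask(() => {
    state.error ? errorSteps(state.error) : successSteps(state.bytes)
  })

  return promise.promise
}

consume({ bytes: Buffer.from('hi') }, (b) => b.toString()).then(console.log) // 'hi'
```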

View File

@ -14,7 +14,6 @@ const { HeadersList } = require('./headers')
const { Request, cloneRequest, getRequestDispatcher, getRequestState } = require('./request')
const zlib = require('node:zlib')
const {
bytesMatch,
makePolicyContainer,
clonePolicyContainer,
requestBadPort,
@ -62,7 +61,11 @@ const { dataURLProcessor, serializeAMimeType, minimizeSupportedMimeType } = requ
const { getGlobalDispatcher } = require('../../global')
const { webidl } = require('../webidl')
const { STATUS_CODES } = require('node:http')
const { bytesMatch } = require('../subresource-integrity/subresource-integrity')
const { createDeferredPromise } = require('../../util/promise')
const hasZstd = typeof zlib.createZstdDecompress === 'function'
const GET_OR_HEAD = ['GET', 'HEAD']
const defaultUserAgent = typeof __UNDICI_IS_NODE__ !== 'undefined' || typeof esbuildDetection !== 'undefined'
@ -2104,33 +2107,29 @@ async function httpNetworkFetch (
return false
}
/** @type {string[]} */
let codings = []
const headersList = new HeadersList()
for (let i = 0; i < rawHeaders.length; i += 2) {
headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString('latin1'), true)
}
const contentEncoding = headersList.get('content-encoding', true)
if (contentEncoding) {
// https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
// "All content-coding values are case-insensitive..."
codings = contentEncoding.toLowerCase().split(',').map((x) => x.trim())
}
const location = headersList.get('location', true)
this.body = new Readable({ read: resume })
const decoders = []
const willFollow = location && request.redirect === 'follow' &&
redirectStatusSet.has(status)
const decoders = []
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
if (codings.length !== 0 && request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
// https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
const contentEncoding = headersList.get('content-encoding', true)
// "All content-coding values are case-insensitive..."
/** @type {string[]} */
const codings = contentEncoding ? contentEncoding.toLowerCase().split(',') : []
for (let i = codings.length - 1; i >= 0; --i) {
const coding = codings[i]
const coding = codings[i].trim()
// https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
if (coding === 'x-gzip' || coding === 'gzip') {
decoders.push(zlib.createGunzip({
@ -2151,7 +2150,7 @@ async function httpNetworkFetch (
flush: zlib.constants.BROTLI_OPERATION_FLUSH,
finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH
}))
} else if (coding === 'zstd' && typeof zlib.createZstdDecompress === 'function') {
} else if (coding === 'zstd' && hasZstd) {
// Node.js v23.8.0+ and v22.15.0+ support Zstandard
decoders.push(zlib.createZstdDecompress({
flush: zlib.constants.ZSTD_e_continue,
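// Editorial illustration (not part of this commit): the loop above walks the
// content-codings right to left because senders apply encodings left to right.
// The same ordering, shown standalone with node:zlib:
//
//   const zlib = require('node:zlib')
//   // "Content-Encoding: gzip, br" means gzip was applied first, brotli
//   // second, so decoding must run brotli first, then gunzip:
//   const encoded = zlib.brotliCompressSync(zlib.gzipSync('hello'))
//   zlib.gunzipSync(zlib.brotliDecompressSync(encoded)).toString() // 'hello'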


@ -23,8 +23,6 @@ const { URLSerializer } = require('./data-url')
const { kConstruct } = require('../../core/symbols')
const assert = require('node:assert')
const { isArrayBuffer } = nodeUtil.types
const textEncoder = new TextEncoder('utf-8')
// https://fetch.spec.whatwg.org/#response-class
@ -120,7 +118,7 @@ class Response {
}
if (body !== null) {
body = webidl.converters.BodyInit(body)
body = webidl.converters.BodyInit(body, 'Response', 'body')
}
init = webidl.converters.ResponseInit(init)
@ -580,7 +578,7 @@ webidl.converters.XMLHttpRequestBodyInit = function (V, prefix, name) {
return V
}
if (ArrayBuffer.isView(V) || isArrayBuffer(V)) {
if (webidl.is.BufferSource(V)) {
return V
}


@ -11,20 +11,6 @@ const assert = require('node:assert')
const { isUint8Array } = require('node:util/types')
const { webidl } = require('../webidl')
let supportedHashes = []
// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')} */
let crypto
try {
crypto = require('node:crypto')
const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
/* c8 ignore next 3 */
} catch {
}
function responseURL (response) {
// https://fetch.spec.whatwg.org/#responses
// A response has an associated URL. It is a pointer to the last URL
@ -516,8 +502,8 @@ function determineRequestsReferrer (request) {
if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {
return 'no-referrer'
}
// 2. Return referrerOrigin
return referrerOrigin
// 2. Return referrerURL.
return referrerURL
}
}
}
@ -568,17 +554,11 @@ function stripURLForReferrer (url, originOnly = false) {
return url
}
const potentialleTrustworthyIPv4RegExp = new RegExp('^(?:' +
'(?:127\\.)' +
'(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\\.){2}' +
'(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[1-9])' +
')$')
const isPotentialleTrustworthyIPv4 = RegExp.prototype.test
.bind(/^127\.(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)\.){2}(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)$/)
const potentialleTrustworthyIPv6RegExp = new RegExp('^(?:' +
'(?:(?:0{1,4}):){7}(?:(?:0{0,3}1))|' +
'(?:(?:0{1,4}):){1,6}(?::(?:0{0,3}1))|' +
'(?:::(?:0{0,3}1))|' +
')$')
const isPotentiallyTrustworthyIPv6 = RegExp.prototype.test
.bind(/^(?:(?:0{1,4}:){7}|(?:0{1,4}:){1,6}:|::)0{0,3}1$/)
/**
* Check if host matches one of the CIDR notations 127.0.0.0/8 or ::1/128.
@ -593,11 +573,11 @@ function isOriginIPPotentiallyTrustworthy (origin) {
if (origin[0] === '[' && origin[origin.length - 1] === ']') {
origin = origin.slice(1, -1)
}
return potentialleTrustworthyIPv6RegExp.test(origin)
return isPotentiallyTrustworthyIPv6(origin)
}
// IPv4
return potentialleTrustworthyIPv4RegExp.test(origin)
return isPotentialleTrustworthyIPv4(origin)
}
/**
@ -698,206 +678,6 @@ function isURLPotentiallyTrustworthy (url) {
return isOriginPotentiallyTrustworthy(url.origin)
}
/**
* @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
* @param {Uint8Array} bytes
* @param {string} metadataList
*/
function bytesMatch (bytes, metadataList) {
// If node is not built with OpenSSL support, we cannot check
// a request's integrity, so allow it by default (the spec will
// allow requests if an invalid hash is given, as precedence).
/* istanbul ignore if: only if node is built with --without-ssl */
if (crypto === undefined) {
return true
}
// 1. Let parsedMetadata be the result of parsing metadataList.
const parsedMetadata = parseMetadata(metadataList)
// 2. If parsedMetadata is no metadata, return true.
if (parsedMetadata === 'no metadata') {
return true
}
// 3. If response is not eligible for integrity validation, return false.
// TODO
// 4. If parsedMetadata is the empty set, return true.
if (parsedMetadata.length === 0) {
return true
}
// 5. Let metadata be the result of getting the strongest
// metadata from parsedMetadata.
const strongest = getStrongestMetadata(parsedMetadata)
const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
// 6. For each item in metadata:
for (const item of metadata) {
// 1. Let algorithm be the alg component of item.
const algorithm = item.algo
// 2. Let expectedValue be the val component of item.
const expectedValue = item.hash
// See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
// "be liberal with padding". This is annoying, and it's not even in the spec.
// 3. Let actualValue be the result of applying algorithm to bytes.
let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
if (actualValue[actualValue.length - 1] === '=') {
if (actualValue[actualValue.length - 2] === '=') {
actualValue = actualValue.slice(0, -2)
} else {
actualValue = actualValue.slice(0, -1)
}
}
// 4. If actualValue is a case-sensitive match for expectedValue,
// return true.
if (compareBase64Mixed(actualValue, expectedValue)) {
return true
}
}
// 7. Return false.
return false
}
// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
/**
* @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
* @param {string} metadata
*/
function parseMetadata (metadata) {
// 1. Let result be the empty set.
/** @type {{ algo: string, hash: string }[]} */
const result = []
// 2. Let empty be equal to true.
let empty = true
// 3. For each token returned by splitting metadata on spaces:
for (const token of metadata.split(' ')) {
// 1. Set empty to false.
empty = false
// 2. Parse token as a hash-with-options.
const parsedToken = parseHashWithOptions.exec(token)
// 3. If token does not parse, continue to the next token.
if (
parsedToken === null ||
parsedToken.groups === undefined ||
parsedToken.groups.algo === undefined
) {
// Note: Chromium blocks the request at this point, but Firefox
// gives a warning that an invalid integrity was given. The
// correct behavior is to ignore these, and subsequently not
// check the integrity of the resource.
continue
}
// 4. Let algorithm be the hash-algo component of token.
const algorithm = parsedToken.groups.algo.toLowerCase()
// 5. If algorithm is a hash function recognized by the user
// agent, add the parsed token to result.
if (supportedHashes.includes(algorithm)) {
result.push(parsedToken.groups)
}
}
// 4. Return no metadata if empty is true, otherwise return result.
if (empty === true) {
return 'no metadata'
}
return result
}
/**
* @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
*/
function getStrongestMetadata (metadataList) {
// Let algorithm be the algo component of the first item in metadataList.
// Can be sha256
let algorithm = metadataList[0].algo
// If the algorithm is sha512, then it is the strongest
// and we can return immediately
if (algorithm[3] === '5') {
return algorithm
}
for (let i = 1; i < metadataList.length; ++i) {
const metadata = metadataList[i]
// If the algorithm is sha512, then it is the strongest
// and we can break the loop immediately
if (metadata.algo[3] === '5') {
algorithm = 'sha512'
break
// If the algorithm is sha384, then a potential sha256 or sha384 is ignored
} else if (algorithm[3] === '3') {
continue
// algorithm is sha256, check if algorithm is sha384 and if so, set it as
// the strongest
} else if (metadata.algo[3] === '3') {
algorithm = 'sha384'
}
}
return algorithm
}
function filterMetadataListByAlgorithm (metadataList, algorithm) {
if (metadataList.length === 1) {
return metadataList
}
let pos = 0
for (let i = 0; i < metadataList.length; ++i) {
if (metadataList[i].algo === algorithm) {
metadataList[pos++] = metadataList[i]
}
}
metadataList.length = pos
return metadataList
}
/**
* Compares two base64 strings, allowing for base64url
* in the second string.
*
* @param {string} actualValue always base64
* @param {string} expectedValue base64 or base64url
* @returns {boolean}
*/
function compareBase64Mixed (actualValue, expectedValue) {
if (actualValue.length !== expectedValue.length) {
return false
}
for (let i = 0; i < actualValue.length; ++i) {
if (actualValue[i] !== expectedValue[i]) {
if (
(actualValue[i] === '+' && expectedValue[i] === '-') ||
(actualValue[i] === '/' && expectedValue[i] === '_')
) {
continue
}
return false
}
}
return true
}
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
// TODO
@ -1761,7 +1541,6 @@ module.exports = {
isValidHeaderValue,
isErrorLike,
fullyReadBody,
bytesMatch,
readableStreamClose,
isomorphicEncode,
urlIsLocal,
@ -1770,7 +1549,6 @@ module.exports = {
readAllBytes,
simpleRangeHeaderValue,
buildContentRange,
parseMetadata,
createInflate,
extractMimeType,
getDecodeSplit,


@ -0,0 +1,9 @@
# Subresource Integrity
based on Editor's Draft, 12 June 2025
This module provides support for Subresource Integrity (SRI) in the context of web fetch operations. SRI is a security feature that allows clients to verify that fetched resources are delivered without unexpected manipulation.
## Links
- [Subresource Integrity](https://w3c.github.io/webappsec-subresource-integrity/)
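
For orientation, here is an editorial sketch (not part of the new file) of how the module's main export behaves. `bytesMatch` is internal to undici, not public API, and the require path is assumed to be relative to this directory:

```js
const { createHash } = require('node:crypto')
const { bytesMatch } = require('./subresource-integrity')

const bytes = new TextEncoder().encode('hello world')
const integrity = `sha512-${createHash('sha512').update(bytes).digest('base64')}`
bytesMatch(bytes, integrity)    // true: the digest matches
bytesMatch(bytes, 'sha512-AAA') // false: no listed hash matches
bytesMatch(bytes, 'md5-xyz')    // true: no valid algorithm parsed, so the check is skipped
```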


@ -0,0 +1,306 @@
'use strict'
const assert = require('node:assert')
/**
* @typedef {object} Metadata
* @property {SRIHashAlgorithm} alg - The algorithm used for the hash.
* @property {string} val - The base64-encoded hash value.
*/
/**
* @typedef {Metadata[]} MetadataList
*/
/**
* @typedef {('sha256' | 'sha384' | 'sha512')} SRIHashAlgorithm
*/
/**
* @type {Map<SRIHashAlgorithm, number>}
*
* The valid SRI hash algorithm token set is the ordered set « "sha256",
* "sha384", "sha512" » (corresponding to SHA-256, SHA-384, and SHA-512
* respectively). The ordering of this set is meaningful, with stronger
* algorithms appearing later in the set.
*
* @see https://w3c.github.io/webappsec-subresource-integrity/#valid-sri-hash-algorithm-token-set
*/
const validSRIHashAlgorithmTokenSet = new Map([['sha256', 0], ['sha384', 1], ['sha512', 2]])
// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')} */
let crypto
try {
crypto = require('node:crypto')
const cryptoHashes = crypto.getHashes()
// If no hashes are available, we cannot support SRI.
if (cryptoHashes.length === 0) {
validSRIHashAlgorithmTokenSet.clear()
}
for (const algorithm of validSRIHashAlgorithmTokenSet.keys()) {
// If the algorithm is not supported, remove it from the list.
if (cryptoHashes.includes(algorithm) === false) {
validSRIHashAlgorithmTokenSet.delete(algorithm)
}
}
/* c8 ignore next 4 */
} catch {
// If crypto is not available, we cannot support SRI.
validSRIHashAlgorithmTokenSet.clear()
}
/**
* @typedef GetSRIHashAlgorithmIndex
* @type {(algorithm: SRIHashAlgorithm) => number}
* @param {SRIHashAlgorithm} algorithm
* @returns {number} The index of the algorithm in the valid SRI hash algorithm
* token set.
*/
const getSRIHashAlgorithmIndex = /** @type {GetSRIHashAlgorithmIndex} */ (Map.prototype.get.bind(
validSRIHashAlgorithmTokenSet))
/**
* @typedef IsValidSRIHashAlgorithm
* @type {(algorithm: string) => algorithm is SRIHashAlgorithm}
* @param {*} algorithm
* @returns {algorithm is SRIHashAlgorithm}
*/
const isValidSRIHashAlgorithm = /** @type {IsValidSRIHashAlgorithm} */ (
Map.prototype.has.bind(validSRIHashAlgorithmTokenSet)
)
/**
* @param {Uint8Array} bytes
* @param {string} metadataList
* @returns {boolean}
*
* @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
*/
const bytesMatch = crypto === undefined || validSRIHashAlgorithmTokenSet.size === 0
// If node is not built with OpenSSL support, we cannot check
// a request's integrity, so allow it by default (the spec will
// allow requests if an invalid hash is given, as precedence).
? () => true
: (bytes, metadataList) => {
// 1. Let parsedMetadata be the result of parsing metadataList.
const parsedMetadata = parseMetadata(metadataList)
// 2. If parsedMetadata is the empty set, return true.
if (parsedMetadata.length === 0) {
return true
}
// 3. Let metadata be the result of getting the strongest
// metadata from parsedMetadata.
const metadata = getStrongestMetadata(parsedMetadata)
// 4. For each item in metadata:
for (const item of metadata) {
// 1. Let algorithm be the item["alg"].
const algorithm = item.alg
// 2. Let expectedValue be the item["val"].
const expectedValue = item.val
// See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
// "be liberal with padding". This is annoying, and it's not even in the spec.
// 3. Let actualValue be the result of applying algorithm to bytes.
const actualValue = applyAlgorithmToBytes(algorithm, bytes)
// 4. If actualValue is a case-sensitive match for expectedValue,
// return true.
if (caseSensitiveMatch(actualValue, expectedValue)) {
return true
}
}
// 5. Return false.
return false
}
/**
* @param {MetadataList} metadataList
* @returns {MetadataList} The strongest hash algorithm from the metadata list.
*/
function getStrongestMetadata (metadataList) {
// 1. Let result be the empty set and strongest be the empty string.
const result = []
/** @type {Metadata|null} */
let strongest = null
// 2. For each item in set:
for (const item of metadataList) {
// 1. Assert: item["alg"] is a valid SRI hash algorithm token.
assert(isValidSRIHashAlgorithm(item.alg), 'Invalid SRI hash algorithm token')
// 2. If result is the empty set, then:
if (result.length === 0) {
// 1. Append item to result.
result.push(item)
// 2. Set strongest to item.
strongest = item
// 3. Continue.
continue
}
// 3. Let currentAlgorithm be strongest["alg"], and currentAlgorithmIndex be
// the index of currentAlgorithm in the valid SRI hash algorithm token set.
const currentAlgorithm = /** @type {Metadata} */ (strongest).alg
const currentAlgorithmIndex = getSRIHashAlgorithmIndex(currentAlgorithm)
// 4. Let newAlgorithm be the item["alg"], and newAlgorithmIndex be the
// index of newAlgorithm in the valid SRI hash algorithm token set.
const newAlgorithm = item.alg
const newAlgorithmIndex = getSRIHashAlgorithmIndex(newAlgorithm)
// 5. If newAlgorithmIndex is less than currentAlgorithmIndex, then continue.
if (newAlgorithmIndex < currentAlgorithmIndex) {
continue
// 6. Otherwise, if newAlgorithmIndex is greater than
// currentAlgorithmIndex:
} else if (newAlgorithmIndex > currentAlgorithmIndex) {
// 1. Set strongest to item.
strongest = item
// 2. Set result to « item ».
result[0] = item
result.length = 1
// 7. Otherwise, newAlgorithmIndex and currentAlgorithmIndex are the same
// value. Append item to result.
} else {
result.push(item)
}
}
// 3. Return result.
return result
}
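// Editorial illustration (not part of this commit): with mixed algorithms only
// the strongest entries survive, duplicates of that strength included, e.g.
//
//   getStrongestMetadata([
//     { alg: 'sha256', val: 'a' },
//     { alg: 'sha512', val: 'b' },
//     { alg: 'sha512', val: 'c' }
//   ])
//   // -> [{ alg: 'sha512', val: 'b' }, { alg: 'sha512', val: 'c' }]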
/**
* @param {string} metadata
* @returns {MetadataList}
*
* @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
*/
function parseMetadata (metadata) {
// 1. Let result be the empty set.
/** @type {MetadataList} */
const result = []
// 2. For each item returned by splitting metadata on spaces:
for (const item of metadata.split(' ')) {
// 1. Let expression-and-options be the result of splitting item on U+003F (?).
const expressionAndOptions = item.split('?', 1)
// 2. Let algorithm-expression be expression-and-options[0].
const algorithmExpression = expressionAndOptions[0]
// 3. Let base64-value be the empty string.
let base64Value = ''
// 4. Let algorithm-and-value be the result of splitting algorithm-expression on U+002D (-).
const algorithmAndValue = [algorithmExpression.slice(0, 6), algorithmExpression.slice(7)]
// 5. Let algorithm be algorithm-and-value[0].
const algorithm = algorithmAndValue[0]
// 6. If algorithm is not a valid SRI hash algorithm token, then continue.
if (!isValidSRIHashAlgorithm(algorithm)) {
continue
}
// 7. If algorithm-and-value[1] exists, set base64-value to
// algorithm-and-value[1].
if (algorithmAndValue[1]) {
base64Value = algorithmAndValue[1]
}
// 8. Let metadata be the ordered map
// «["alg" → algorithm, "val" → base64-value]».
const metadata = {
alg: algorithm,
val: base64Value
}
// 9. Append metadata to result.
result.push(metadata)
}
// 3. Return result.
return result
}
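// Editorial illustration (not part of this commit): tokens with unrecognized
// algorithms are skipped rather than rejected, per the spec's forgiving parsing:
//
//   parseMetadata('sha384-abc sha256-def md5-nope')
//   // -> [{ alg: 'sha384', val: 'abc' }, { alg: 'sha256', val: 'def' }]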
/**
* Applies the specified hash algorithm to the given bytes
*
* @typedef {(algorithm: SRIHashAlgorithm, bytes: Uint8Array) => string} ApplyAlgorithmToBytes
* @param {SRIHashAlgorithm} algorithm
* @param {Uint8Array} bytes
* @returns {string}
*/
const applyAlgorithmToBytes = (algorithm, bytes) => {
return crypto.hash(algorithm, bytes, 'base64')
}
/**
* Compares two base64 strings, allowing for base64url
* in the second string.
*
* @param {string} actualValue base64 encoded string
* @param {string} expectedValue base64 or base64url encoded string
* @returns {boolean}
*/
function caseSensitiveMatch (actualValue, expectedValue) {
// Ignore padding characters from the end of the strings by
// decreasing the length by 1 or 2 if the last characters are `=`.
let actualValueLength = actualValue.length
if (actualValueLength !== 0 && actualValue[actualValueLength - 1] === '=') {
actualValueLength -= 1
}
if (actualValueLength !== 0 && actualValue[actualValueLength - 1] === '=') {
actualValueLength -= 1
}
let expectedValueLength = expectedValue.length
if (expectedValueLength !== 0 && expectedValue[expectedValueLength - 1] === '=') {
expectedValueLength -= 1
}
if (expectedValueLength !== 0 && expectedValue[expectedValueLength - 1] === '=') {
expectedValueLength -= 1
}
if (actualValueLength !== expectedValueLength) {
return false
}
for (let i = 0; i < actualValueLength; ++i) {
if (
actualValue[i] === expectedValue[i] ||
(actualValue[i] === '+' && expectedValue[i] === '-') ||
(actualValue[i] === '/' && expectedValue[i] === '_')
) {
continue
}
return false
}
return true
}
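// Editorial illustration (not part of this commit): trailing '=' padding is
// ignored and base64url characters match their base64 counterparts:
//
//   caseSensitiveMatch('abc+/w==', 'abc-_w') // true
//   caseSensitiveMatch('abc+/w', 'abd-_w')   // false ('c' !== 'd')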
module.exports = {
applyAlgorithmToBytes,
bytesMatch,
caseSensitiveMatch,
isValidSRIHashAlgorithm,
getStrongestMetadata,
parseMetadata
}


@ -160,7 +160,7 @@ webidl.util.TypeValueToString = function (o) {
webidl.util.markAsUncloneable = markAsUncloneable || (() => {})
// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint
webidl.util.ConvertToInt = function (V, bitLength, signedness, opts) {
webidl.util.ConvertToInt = function (V, bitLength, signedness, flags) {
let upperBound
let lowerBound
@ -204,7 +204,7 @@ webidl.util.ConvertToInt = function (V, bitLength, signedness, opts) {
// 6. If the conversion is to an IDL type associated
// with the [EnforceRange] extended attribute, then:
if (opts?.enforceRange === true) {
if (webidl.util.HasFlag(flags, webidl.attributes.EnforceRange)) {
// 1. If x is NaN, +∞, or −∞, then throw a TypeError.
if (
Number.isNaN(x) ||
@ -236,7 +236,7 @@ webidl.util.ConvertToInt = function (V, bitLength, signedness, opts) {
// 7. If x is not NaN and the conversion is to an IDL
// type associated with the [Clamp] extended
// attribute, then:
if (!Number.isNaN(x) && opts?.clamp === true) {
if (!Number.isNaN(x) && webidl.util.HasFlag(flags, webidl.attributes.Clamp)) {
// 1. Set x to min(max(x, lowerBound), upperBound).
x = Math.min(Math.max(x, lowerBound), upperBound)
@ -310,6 +310,25 @@ webidl.util.Stringify = function (V) {
}
}
webidl.util.IsResizableArrayBuffer = function (V) {
if (types.isArrayBuffer(V)) {
return V.resizable
}
if (types.isSharedArrayBuffer(V)) {
return V.growable
}
throw webidl.errors.exception({
header: 'IsResizableArrayBuffer',
message: `"${webidl.util.Stringify(V)}" is not an array buffer.`
})
}
webidl.util.HasFlag = function (flags, attributes) {
return typeof flags === 'number' && (flags & attributes) === attributes
}
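// Editorial illustration (not part of this commit) of the two helpers above,
// with hypothetical values:
//
//   webidl.util.IsResizableArrayBuffer(new ArrayBuffer(8, { maxByteLength: 16 })) // true
//   webidl.util.IsResizableArrayBuffer(new ArrayBuffer(8))                        // false
//
//   const flags = webidl.attributes.EnforceRange | webidl.attributes.AllowShared
//   webidl.util.HasFlag(flags, webidl.attributes.EnforceRange) // true
//   webidl.util.HasFlag(flags, webidl.attributes.Clamp)        // false
//   webidl.util.HasFlag(undefined, webidl.attributes.Clamp)    // false (no flags given)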
// https://webidl.spec.whatwg.org/#es-sequence
webidl.sequenceConverter = function (converter) {
return (V, prefix, argument, Iterable) => {
@ -514,13 +533,20 @@ webidl.is.URL = webidl.util.MakeTypeAssertion(URL)
webidl.is.AbortSignal = webidl.util.MakeTypeAssertion(AbortSignal)
webidl.is.MessagePort = webidl.util.MakeTypeAssertion(MessagePort)
webidl.is.BufferSource = function (V) {
return types.isArrayBuffer(V) || (
ArrayBuffer.isView(V) &&
types.isArrayBuffer(V.buffer)
)
}
// https://webidl.spec.whatwg.org/#es-DOMString
webidl.converters.DOMString = function (V, prefix, argument, opts) {
webidl.converters.DOMString = function (V, prefix, argument, flags) {
// 1. If V is null and the conversion is to an IDL type
// associated with the [LegacyNullToEmptyString]
// extended attribute, then return the DOMString value
// that represents the empty string.
if (V === null && opts?.legacyNullToEmptyString) {
if (V === null && webidl.util.HasFlag(flags, webidl.attributes.LegacyNullToEmptyString)) {
return ''
}
@ -599,7 +625,7 @@ webidl.converters.any = function (V) {
// https://webidl.spec.whatwg.org/#es-long-long
webidl.converters['long long'] = function (V, prefix, argument) {
// 1. Let x be ? ConvertToInt(V, 64, "signed").
const x = webidl.util.ConvertToInt(V, 64, 'signed', undefined, prefix, argument)
const x = webidl.util.ConvertToInt(V, 64, 'signed', 0, prefix, argument)
// 2. Return the IDL long long value that represents
// the same numeric value as x.
@ -609,7 +635,7 @@ webidl.converters['long long'] = function (V, prefix, argument) {
// https://webidl.spec.whatwg.org/#es-unsigned-long-long
webidl.converters['unsigned long long'] = function (V, prefix, argument) {
// 1. Let x be ? ConvertToInt(V, 64, "unsigned").
const x = webidl.util.ConvertToInt(V, 64, 'unsigned', undefined, prefix, argument)
const x = webidl.util.ConvertToInt(V, 64, 'unsigned', 0, prefix, argument)
// 2. Return the IDL unsigned long long value that
// represents the same numeric value as x.
@ -619,7 +645,7 @@ webidl.converters['unsigned long long'] = function (V, prefix, argument) {
// https://webidl.spec.whatwg.org/#es-unsigned-long
webidl.converters['unsigned long'] = function (V, prefix, argument) {
// 1. Let x be ? ConvertToInt(V, 32, "unsigned").
const x = webidl.util.ConvertToInt(V, 32, 'unsigned', undefined, prefix, argument)
const x = webidl.util.ConvertToInt(V, 32, 'unsigned', 0, prefix, argument)
// 2. Return the IDL unsigned long value that
// represents the same numeric value as x.
@ -627,9 +653,9 @@ webidl.converters['unsigned long'] = function (V, prefix, argument) {
}
// https://webidl.spec.whatwg.org/#es-unsigned-short
webidl.converters['unsigned short'] = function (V, prefix, argument, opts) {
webidl.converters['unsigned short'] = function (V, prefix, argument, flags) {
// 1. Let x be ? ConvertToInt(V, 16, "unsigned").
const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts, prefix, argument)
const x = webidl.util.ConvertToInt(V, 16, 'unsigned', flags, prefix, argument)
// 2. Return the IDL unsigned short value that represents
// the same numeric value as x.
@ -637,15 +663,16 @@ webidl.converters['unsigned short'] = function (V, prefix, argument, opts) {
}
// https://webidl.spec.whatwg.org/#idl-ArrayBuffer
webidl.converters.ArrayBuffer = function (V, prefix, argument, opts) {
// 1. If Type(V) is not Object, or V does not have an
webidl.converters.ArrayBuffer = function (V, prefix, argument, flags) {
// 1. If V is not an Object, or V does not have an
// [[ArrayBufferData]] internal slot, then throw a
// TypeError.
// 2. If IsSharedArrayBuffer(V) is true, then throw a
// TypeError.
// see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances
// see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances
if (
webidl.util.Type(V) !== OBJECT ||
!types.isAnyArrayBuffer(V)
!types.isArrayBuffer(V)
) {
throw webidl.errors.conversionFailed({
prefix,
@ -654,25 +681,14 @@ webidl.converters.ArrayBuffer = function (V, prefix, argument, opts) {
})
}
// 2. If the conversion is not to an IDL type associated
// with the [AllowShared] extended attribute, and
// IsSharedArrayBuffer(V) is true, then throw a
// TypeError.
if (opts?.allowShared === false && types.isSharedArrayBuffer(V)) {
throw webidl.errors.exception({
header: 'ArrayBuffer',
message: 'SharedArrayBuffer is not allowed.'
})
}
// 3. If the conversion is not to an IDL type associated
// with the [AllowResizable] extended attribute, and
// IsResizableArrayBuffer(V) is true, then throw a
// TypeError.
if (V.resizable || V.growable) {
if (!webidl.util.HasFlag(flags, webidl.attributes.AllowResizable) && webidl.util.IsResizableArrayBuffer(V)) {
throw webidl.errors.exception({
header: 'ArrayBuffer',
message: 'Received a resizable ArrayBuffer.'
header: prefix,
message: `${argument} cannot be a resizable ArrayBuffer.`
})
}
@ -681,7 +697,43 @@ webidl.converters.ArrayBuffer = function (V, prefix, argument, opts) {
return V
}
webidl.converters.TypedArray = function (V, T, prefix, name, opts) {
// https://webidl.spec.whatwg.org/#idl-SharedArrayBuffer
webidl.converters.SharedArrayBuffer = function (V, prefix, argument, flags) {
// 1. If V is not an Object, or V does not have an
// [[ArrayBufferData]] internal slot, then throw a
// TypeError.
// 2. If IsSharedArrayBuffer(V) is false, then throw a
// TypeError.
// see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances
if (
webidl.util.Type(V) !== OBJECT ||
!types.isSharedArrayBuffer(V)
) {
throw webidl.errors.conversionFailed({
prefix,
argument: `${argument} ("${webidl.util.Stringify(V)}")`,
types: ['SharedArrayBuffer']
})
}
// 3. If the conversion is not to an IDL type associated
// with the [AllowResizable] extended attribute, and
// IsResizableArrayBuffer(V) is true, then throw a
// TypeError.
if (!webidl.util.HasFlag(flags, webidl.attributes.AllowResizable) && webidl.util.IsResizableArrayBuffer(V)) {
throw webidl.errors.exception({
header: prefix,
message: `${argument} cannot be a resizable SharedArrayBuffer.`
})
}
// 4. Return the IDL SharedArrayBuffer value that is a
// reference to the same object as V.
return V
}
// https://webidl.spec.whatwg.org/#dfn-typed-array-type
webidl.converters.TypedArray = function (V, T, prefix, argument, flags) {
// 1. Let T be the IDL type V is being converted to.
// 2. If Type(V) is not Object, or V does not have a
@ -694,7 +746,7 @@ webidl.converters.TypedArray = function (V, T, prefix, name, opts) {
) {
throw webidl.errors.conversionFailed({
prefix,
argument: `${name} ("${webidl.util.Stringify(V)}")`,
argument: `${argument} ("${webidl.util.Stringify(V)}")`,
types: [T.name]
})
}
@ -703,10 +755,10 @@ webidl.converters.TypedArray = function (V, T, prefix, name, opts) {
// with the [AllowShared] extended attribute, and
// IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is
// true, then throw a TypeError.
if (opts?.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
if (!webidl.util.HasFlag(flags, webidl.attributes.AllowShared) && types.isSharedArrayBuffer(V.buffer)) {
throw webidl.errors.exception({
header: 'ArrayBuffer',
message: 'SharedArrayBuffer is not allowed.'
header: prefix,
message: `${argument} cannot be a view on a shared array buffer.`
})
}
@ -714,10 +766,10 @@ webidl.converters.TypedArray = function (V, T, prefix, name, opts) {
// with the [AllowResizable] extended attribute, and
// IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
// true, then throw a TypeError.
if (V.buffer.resizable || V.buffer.growable) {
if (!webidl.util.HasFlag(flags, webidl.attributes.AllowResizable) && webidl.util.IsResizableArrayBuffer(V.buffer)) {
throw webidl.errors.exception({
header: 'ArrayBuffer',
message: 'Received a resizable ArrayBuffer.'
header: prefix,
message: `${argument} cannot be a view on a resizable array buffer.`
})
}
@ -726,13 +778,15 @@ webidl.converters.TypedArray = function (V, T, prefix, name, opts) {
return V
}
webidl.converters.DataView = function (V, prefix, name, opts) {
// https://webidl.spec.whatwg.org/#idl-DataView
webidl.converters.DataView = function (V, prefix, argument, flags) {
// 1. If Type(V) is not Object, or V does not have a
// [[DataView]] internal slot, then throw a TypeError.
if (webidl.util.Type(V) !== OBJECT || !types.isDataView(V)) {
throw webidl.errors.exception({
header: prefix,
message: `${name} is not a DataView.`
throw webidl.errors.conversionFailed({
prefix,
argument: `${argument} ("${webidl.util.Stringify(V)}")`,
types: ['DataView']
})
}
@ -740,10 +794,10 @@ webidl.converters.DataView = function (V, prefix, name, opts) {
// with the [AllowShared] extended attribute, and
// IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true,
// then throw a TypeError.
if (opts?.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
if (!webidl.util.HasFlag(flags, webidl.attributes.AllowShared) && types.isSharedArrayBuffer(V.buffer)) {
throw webidl.errors.exception({
header: 'ArrayBuffer',
message: 'SharedArrayBuffer is not allowed.'
header: prefix,
message: `${argument} cannot be a view on a shared array buffer.`
})
}
@ -751,10 +805,10 @@ webidl.converters.DataView = function (V, prefix, name, opts) {
// with the [AllowResizable] extended attribute, and
// IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
// true, then throw a TypeError.
if (V.buffer.resizable || V.buffer.growable) {
if (!webidl.util.HasFlag(flags, webidl.attributes.AllowResizable) && webidl.util.IsResizableArrayBuffer(V.buffer)) {
throw webidl.errors.exception({
header: 'ArrayBuffer',
message: 'Received a resizable ArrayBuffer.'
header: prefix,
message: `${argument} cannot be a view on a resizable array buffer.`
})
}
@ -763,6 +817,85 @@ webidl.converters.DataView = function (V, prefix, name, opts) {
return V
}
// https://webidl.spec.whatwg.org/#ArrayBufferView
webidl.converters.ArrayBufferView = function (V, prefix, argument, flags) {
if (
webidl.util.Type(V) !== OBJECT ||
!types.isArrayBufferView(V)
) {
throw webidl.errors.conversionFailed({
prefix,
argument: `${argument} ("${webidl.util.Stringify(V)}")`,
types: ['ArrayBufferView']
})
}
if (!webidl.util.HasFlag(flags, webidl.attributes.AllowShared) && types.isSharedArrayBuffer(V.buffer)) {
throw webidl.errors.exception({
header: prefix,
message: `${argument} cannot be a view on a shared array buffer.`
})
}
if (!webidl.util.HasFlag(flags, webidl.attributes.AllowResizable) && webidl.util.IsResizableArrayBuffer(V.buffer)) {
throw webidl.errors.exception({
header: prefix,
message: `${argument} cannot be a view on a resizable array buffer.`
})
}
return V
}
// https://webidl.spec.whatwg.org/#BufferSource
webidl.converters.BufferSource = function (V, prefix, argument, flags) {
if (types.isArrayBuffer(V)) {
return webidl.converters.ArrayBuffer(V, prefix, argument, flags)
}
if (types.isArrayBufferView(V)) {
flags &= ~webidl.attributes.AllowShared
return webidl.converters.ArrayBufferView(V, prefix, argument, flags)
}
// Make this explicit for easier debugging
if (types.isSharedArrayBuffer(V)) {
throw webidl.errors.exception({
header: prefix,
message: `${argument} cannot be a SharedArrayBuffer.`
})
}
throw webidl.errors.conversionFailed({
prefix,
argument: `${argument} ("${webidl.util.Stringify(V)}")`,
types: ['ArrayBuffer', 'ArrayBufferView']
})
}
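// Editorial illustration (not part of this commit), assuming no extra flags:
//
//   webidl.converters.BufferSource(new ArrayBuffer(8), 'X', 'arg')       // the buffer
//   webidl.converters.BufferSource(new Uint8Array(8), 'X', 'arg')        // the view
//   webidl.converters.BufferSource(new SharedArrayBuffer(8), 'X', 'arg') // throws TypeError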
// https://webidl.spec.whatwg.org/#AllowSharedBufferSource
webidl.converters.AllowSharedBufferSource = function (V, prefix, argument, flags) {
if (types.isArrayBuffer(V)) {
return webidl.converters.ArrayBuffer(V, prefix, argument, flags)
}
if (types.isSharedArrayBuffer(V)) {
return webidl.converters.SharedArrayBuffer(V, prefix, argument, flags)
}
if (types.isArrayBufferView(V)) {
flags |= webidl.attributes.AllowShared
return webidl.converters.ArrayBufferView(V, prefix, argument, flags)
}
throw webidl.errors.conversionFailed({
prefix,
argument: `${argument} ("${webidl.util.Stringify(V)}")`,
types: ['ArrayBuffer', 'SharedArrayBuffer', 'ArrayBufferView']
})
}
webidl.converters['sequence<ByteString>'] = webidl.sequenceConverter(
webidl.converters.ByteString
)
@ -783,6 +916,34 @@ webidl.converters.AbortSignal = webidl.interfaceConverter(
'AbortSignal'
)
/**
* [LegacyTreatNonObjectAsNull]
* callback EventHandlerNonNull = any (Event event);
* typedef EventHandlerNonNull? EventHandler;
* @param {*} V
*/
webidl.converters.EventHandlerNonNull = function (V) {
if (webidl.util.Type(V) !== OBJECT) {
return null
}
// [I]f the value is not an object, it will be converted to null, and if the value is not callable,
// it will be converted to a callback function value that does nothing when called.
if (typeof V === 'function') {
return V
}
return () => {}
}
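// Editorial illustration (not part of this commit):
//
//   webidl.converters.EventHandlerNonNull('nope')      // null (primitive)
//   webidl.converters.EventHandlerNonNull({})          // () => {} (non-callable object)
//   webidl.converters.EventHandlerNonNull(console.log) // console.log, unchanged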
webidl.attributes = {
Clamp: 1 << 0,
EnforceRange: 1 << 1,
AllowShared: 1 << 2,
AllowResizable: 1 << 3,
LegacyNullToEmptyString: 1 << 4
}
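// Editorial illustration (not part of this commit): how the bit flags replace
// the old `opts` objects at converter call sites, with hypothetical values:
//
//   webidl.converters['unsigned short'](70000, 'X', 'code', webidl.attributes.Clamp)
//   // -> 65535 (clamped to the upper bound)
//   webidl.converters['unsigned short'](70000, 'X', 'code', webidl.attributes.EnforceRange)
//   // -> throws TypeError (out of range)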
module.exports = {
webidl
}


@ -303,11 +303,12 @@ function failWebsocketConnection (handler, code, reason, cause) {
handler.controller.abort()
if (handler.socket?.destroyed === false) {
if (!handler.socket) {
// If the connection was not established, we must still emit 'error' and 'close' events
handler.onSocketClose()
} else if (handler.socket.destroyed === false) {
handler.socket.destroy()
}
handler.onFail(code, reason, cause)
}
module.exports = {


@ -272,7 +272,7 @@ webidl.converters.MessageEventInit = webidl.dictionaryConverter([
{
key: 'ports',
converter: webidl.converters['sequence<MessagePort>'],
defaultValue: () => new Array(0)
defaultValue: () => []
}
])


@ -5,7 +5,28 @@ const { validateCloseCodeAndReason } = require('../util')
const { kConstruct } = require('../../../core/symbols')
const { kEnumerableProperty } = require('../../../core/util')
class WebSocketError extends DOMException {
function createInheritableDOMException () {
// https://github.com/nodejs/node/issues/59677
class Test extends DOMException {
get reason () {
return ''
}
}
if (new Test().reason !== undefined) {
return DOMException
}
return new Proxy(DOMException, {
construct (target, args, newTarget) {
const instance = Reflect.construct(target, args, target)
Object.setPrototypeOf(instance, newTarget.prototype)
return instance
}
})
}
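// Editorial illustration (not part of this commit) of the bug worked around
// above (nodejs/node#59677): on affected Node.js versions the DOMException
// constructor ignores new.target, so subclass members are lost:
//
//   class MyError extends DOMException { get reason () { return 'x' } }
//   new MyError().reason // undefined on affected versions, 'x' once fixed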
class WebSocketError extends createInheritableDOMException() {
#closeCode
#reason


@ -6,7 +6,6 @@ const { states, opcodes, sentCloseFrameState } = require('../constants')
const { webidl } = require('../../webidl')
const { getURLRecord, isValidSubprotocol, isEstablished, utf8Decode } = require('../util')
const { establishWebSocketConnection, failWebsocketConnection, closeWebSocketConnection } = require('../connection')
const { isArrayBuffer } = require('node:util/types')
const { channels } = require('../../../core/diagnostics')
const { WebsocketFrameSend } = require('../frame')
const { ByteParser } = require('../receiver')
@ -46,7 +45,6 @@ class WebSocketStream {
#handler = {
// https://whatpr.org/websockets/48/7b748d3...d5570f3.html#feedback-to-websocket-stream-from-the-protocol
onConnectionEstablished: (response, extensions) => this.#onConnectionEstablished(response, extensions),
onFail: (_code, _reason) => {},
onMessage: (opcode, data) => this.#onMessage(opcode, data),
onParserError: (err) => failWebsocketConnection(this.#handler, null, err.message),
onParserDrain: () => this.#handler.socket.resume(),
@ -200,6 +198,9 @@ class WebSocketStream {
}
#write (chunk) {
// See /websockets/stream/tentative/write.any.html
chunk = webidl.converters.WebSocketStreamWrite(chunk)
// 1. Let promise be a new promise created in stream's relevant realm.
const promise = createDeferredPromise()
@ -210,9 +211,9 @@ class WebSocketStream {
let opcode = null
// 4. If chunk is a BufferSource,
if (ArrayBuffer.isView(chunk) || isArrayBuffer(chunk)) {
if (webidl.is.BufferSource(chunk)) {
// 4.1. Set data to a copy of the bytes given chunk.
data = new Uint8Array(ArrayBuffer.isView(chunk) ? new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength) : chunk)
data = new Uint8Array(ArrayBuffer.isView(chunk) ? new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength) : chunk.slice())
// 4.2. Set opcode to a binary frame opcode.
opcode = opcodes.BINARY
@ -227,7 +228,7 @@ class WebSocketStream {
string = webidl.converters.DOMString(chunk)
} catch (e) {
promise.reject(e)
return
return promise.promise
}
// 5.2. Set data to the result of UTF-8 encoding string.
@ -250,7 +251,7 @@ class WebSocketStream {
}
// 6.3. Queue a global task on the WebSocket task source given stream's relevant global object to resolve promise with undefined.
return promise
return promise.promise
}
/** @type {import('../websocket').Handler['onConnectionEstablished']} */
@ -476,7 +477,7 @@ webidl.converters.WebSocketStreamOptions = webidl.dictionaryConverter([
webidl.converters.WebSocketCloseInfo = webidl.dictionaryConverter([
{
key: 'closeCode',
converter: (V) => webidl.converters['unsigned short'](V, { enforceRange: true })
converter: (V) => webidl.converters['unsigned short'](V, webidl.attributes.EnforceRange)
},
{
key: 'reason',
@ -485,4 +486,12 @@ webidl.converters.WebSocketCloseInfo = webidl.dictionaryConverter([
}
])
webidl.converters.WebSocketStreamWrite = function (V) {
if (typeof V === 'string') {
return webidl.converters.USVString(V)
}
return webidl.converters.BufferSource(V)
}
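// Editorial illustration (not part of this commit):
//
//   webidl.converters.WebSocketStreamWrite('hi')              // 'hi'
//   webidl.converters.WebSocketStreamWrite(new Uint8Array(2)) // the view, unchanged
//   webidl.converters.WebSocketStreamWrite(42)                // throws TypeError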
module.exports = { WebSocketStream }


@ -28,7 +28,6 @@ const { channels } = require('../../core/diagnostics')
/**
* @typedef {object} Handler
* @property {(response: any, extensions?: string[]) => void} onConnectionEstablished
* @property {(code: number, reason: any) => void} onFail
* @property {(opcode: number, data: Buffer) => void} onMessage
* @property {(error: Error) => void} onParserError
* @property {() => void} onParserDrain
@ -64,7 +63,6 @@ class WebSocket extends EventTarget {
/** @type {Handler} */
#handler = {
onConnectionEstablished: (response, extensions) => this.#onConnectionEstablished(response, extensions),
onFail: (code, reason, cause) => this.#onFail(code, reason, cause),
onMessage: (opcode, data) => this.#onMessage(opcode, data),
onParserError: (err) => failWebsocketConnection(this.#handler, null, err.message),
onParserDrain: () => this.#onParserDrain(),
@ -195,7 +193,7 @@ class WebSocket extends EventTarget {
const prefix = 'WebSocket.close'
if (code !== undefined) {
code = webidl.converters['unsigned short'](code, prefix, 'code', { clamp: true })
code = webidl.converters['unsigned short'](code, prefix, 'code', webidl.attributes.Clamp)
}
if (reason !== undefined) {
@ -355,9 +353,11 @@ class WebSocket extends EventTarget {
this.removeEventListener('open', this.#events.open)
}
if (typeof fn === 'function') {
const listener = webidl.converters.EventHandlerNonNull(fn)
if (listener !== null) {
this.addEventListener('open', listener)
this.#events.open = fn
this.addEventListener('open', fn)
} else {
this.#events.open = null
}
@ -376,9 +376,11 @@ class WebSocket extends EventTarget {
this.removeEventListener('error', this.#events.error)
}
if (typeof fn === 'function') {
const listener = webidl.converters.EventHandlerNonNull(fn)
if (listener !== null) {
this.addEventListener('error', listener)
this.#events.error = fn
this.addEventListener('error', fn)
} else {
this.#events.error = null
}
@ -397,9 +399,11 @@ class WebSocket extends EventTarget {
this.removeEventListener('close', this.#events.close)
}
if (typeof fn === 'function') {
const listener = webidl.converters.EventHandlerNonNull(fn)
if (listener !== null) {
this.addEventListener('close', listener)
this.#events.close = fn
this.addEventListener('close', fn)
} else {
this.#events.close = null
}
@ -418,9 +422,11 @@ class WebSocket extends EventTarget {
this.removeEventListener('message', this.#events.message)
}
if (typeof fn === 'function') {
const listener = webidl.converters.EventHandlerNonNull(fn)
if (listener !== null) {
this.addEventListener('message', listener)
this.#events.message = fn
this.addEventListener('message', fn)
} else {
this.#events.message = null
}
@ -498,26 +504,6 @@ class WebSocket extends EventTarget {
}
}
#onFail (code, reason, cause) {
if (reason) {
// TODO: process.nextTick
fireEvent('error', this, (type, init) => new ErrorEvent(type, init), {
error: new Error(reason, cause ? { cause } : undefined),
message: reason
})
}
if (!this.#handler.wasEverConnected) {
this.#handler.readyState = states.CLOSED
// If the WebSocket connection could not be established, it is also said
// that _The WebSocket Connection is Closed_, but not _cleanly_.
fireEvent('close', this, (type, init) => new CloseEvent(type, init), {
wasClean: false, code, reason
})
}
}
#onMessage (type, data) {
// 1. If ready state is not OPEN (1), then return.
if (this.#handler.readyState !== states.OPEN) {
@ -578,18 +564,11 @@ class WebSocket extends EventTarget {
let code = 1005
let reason = ''
const result = this.#parser.closingInfo
const result = this.#parser?.closingInfo
if (result && !result.error) {
code = result.code ?? 1005
reason = result.reason
} else if (!this.#handler.closeState.has(sentCloseFrameState.RECEIVED)) {
// If _The WebSocket
// Connection is Closed_ and no Close control frame was received by the
// endpoint (such as could occur if the underlying transport connection
// is lost), _The WebSocket Connection Close Code_ is considered to be
// 1006.
code = 1006
}
// 1. Change the ready state to CLOSED (3).
@ -599,7 +578,18 @@ class WebSocket extends EventTarget {
// connection, or if the WebSocket connection was closed
// after being flagged as full, fire an event named error
// at the WebSocket object.
// TODO
if (!this.#handler.closeState.has(sentCloseFrameState.RECEIVED)) {
// If _The WebSocket
// Connection is Closed_ and no Close control frame was received by the
// endpoint (such as could occur if the underlying transport connection
// is lost), _The WebSocket Connection Close Code_ is considered to be
// 1006.
code = 1006
fireEvent('error', this, (type, init) => new ErrorEvent(type, init), {
error: new TypeError(reason)
})
}
// 3. Fire an event named close at the WebSocket object,
// using CloseEvent, with the wasClean attribute
@ -708,7 +698,7 @@ webidl.converters.WebSocketInit = webidl.dictionaryConverter([
{
key: 'protocols',
converter: webidl.converters['DOMString or sequence<DOMString>'],
defaultValue: () => new Array(0)
defaultValue: () => []
},
{
key: 'dispatcher',
@ -735,7 +725,7 @@ webidl.converters.WebSocketSendData = function (V) {
return V
}
if (ArrayBuffer.isView(V) || isArrayBuffer(V)) {
if (webidl.is.BufferSource(V)) {
return V
}
}

deps/undici/src/package-lock.json (generated, vendored): file diff suppressed because it is too large (830 changed lines).

@ -1,6 +1,6 @@
{
"name": "undici",
"version": "7.14.0",
"version": "7.16.0",
"description": "An HTTP/1.1 client, written from scratch for Node.js",
"homepage": "https://undici.nodejs.org",
"bugs": {
@ -69,16 +69,17 @@
"lint:fix": "eslint --fix --cache",
"test": "npm run test:javascript && cross-env NODE_V8_COVERAGE= npm run test:typescript",
"test:javascript": "npm run test:javascript:no-jest && npm run test:jest",
"test:javascript:no-jest": "npm run generate-pem && npm run test:unit && npm run test:node-fetch && npm run test:cache && npm run test:cache-interceptor && npm run test:interceptors && npm run test:fetch && npm run test:cookies && npm run test:eventsource && npm run test:wpt && npm run test:websocket && npm run test:node-test && npm run test:cache-tests",
"test:javascript:no-jest": "npm run generate-pem && npm run test:unit && npm run test:fetch && npm run test:node-fetch && npm run test:cache && npm run test:cache-interceptor && npm run test:interceptors && npm run test:cookies && npm run test:eventsource && npm run test:subresource-integrity && npm run test:wpt && npm run test:websocket && npm run test:node-test && npm run test:cache-tests",
"test:javascript:without-intl": "npm run test:javascript:no-jest",
"test:busboy": "borp -p \"test/busboy/*.js\"",
"test:cache": "borp -p \"test/cache/*.js\"",
"test:sqlite": "NODE_OPTIONS=--experimental-sqlite borp -p \"test/cache-interceptor/*.js\"",
"test:sqlite": "cross-env NODE_OPTIONS=--experimental-sqlite borp -p \"test/cache-interceptor/*.js\"",
"test:cache-interceptor": "borp -p \"test/cache-interceptor/*.js\"",
"test:cookies": "borp -p \"test/cookie/*.js\"",
"test:eventsource": "npm run build:node && borp --expose-gc -p \"test/eventsource/*.js\"",
"test:fuzzing": "node test/fuzzing/fuzzing.test.js",
"test:fetch": "npm run build:node && borp --timeout 180000 --expose-gc --concurrency 1 -p \"test/fetch/*.js\" && npm run test:webidl && npm run test:busboy",
"test:subresource-integrity": "borp -p \"test/subresource-integrity/*.js\"",
"test:h2": "npm run test:h2:core && npm run test:h2:fetch",
"test:h2:core": "borp -p \"test/+(http2|h2)*.js\"",
"test:h2:fetch": "npm run build:node && borp -p \"test/fetch/http2*.js\"",
@ -94,8 +95,8 @@
"test:websocket": "borp -p \"test/websocket/**/*.js\"",
"test:websocket:autobahn": "node test/autobahn/client.js",
"test:websocket:autobahn:report": "node test/autobahn/report.js",
"test:wpt": "node test/wpt/start-fetch.mjs && node test/wpt/start-mimesniff.mjs && node test/wpt/start-xhr.mjs && node test/wpt/start-websockets.mjs && node test/wpt/start-cacheStorage.mjs && node test/wpt/start-eventsource.mjs",
"test:wpt:withoutintl": "node test/wpt/start-fetch.mjs && node test/wpt/start-mimesniff.mjs && node test/wpt/start-xhr.mjs && node test/wpt/start-cacheStorage.mjs && node test/wpt/start-eventsource.mjs",
"test:wpt:setup": "node test/web-platform-tests/wpt-runner.mjs setup",
"test:wpt": "npm run test:wpt:setup && node test/web-platform-tests/wpt-runner.mjs run /fetch /mimesniff /xhr /websockets /serviceWorkers /eventsource",
"test:cache-tests": "node test/cache-interceptor/cache-tests.mjs --ci",
"coverage": "npm run coverage:clean && cross-env NODE_V8_COVERAGE=./coverage/tmp npm run test:javascript && npm run coverage:report",
"coverage:ci": "npm run coverage:clean && cross-env NODE_V8_COVERAGE=./coverage/tmp npm run test:javascript && npm run coverage:report:ci",
@ -107,7 +108,7 @@
"prepare": "husky && node ./scripts/platform-shell.js"
},
"devDependencies": {
"@fastify/busboy": "3.1.1",
"@fastify/busboy": "3.2.0",
"@matteo.collina/tspl": "^0.2.0",
"@metcoder95/https-pem": "^1.0.0",
"@sinonjs/fake-timers": "^12.0.0",
@ -122,6 +123,7 @@
"fast-check": "^4.1.1",
"husky": "^9.0.7",
"jest": "^30.0.5",
"jsondiffpatch": "^0.7.3",
"neostandard": "^0.12.0",
"node-forge": "^1.3.1",
"proxy": "^2.1.1",


@ -1,17 +0,0 @@
'use strict'
/* istanbul ignore file */
const [major, minor, patch] = process.versions.node.split('.').map(v => Number(v))
const required = process.argv.pop().split('.').map(v => Number(v))
const badMajor = major < required[0]
const badMinor = major === required[0] && minor < required[1]
const badPatch = major === required[0] && minor === required[1] && patch < required[2]
if (badMajor || badMinor || badPatch) {
console.log(`Required Node.js >=${required.join('.')}, got ${process.versions.node}`)
console.log('Skipping')
} else {
process.exit(1)
}


@ -24,6 +24,7 @@ declare namespace Agent {
factory?(origin: string | URL, opts: Object): Dispatcher;
interceptors?: { Agent?: readonly Dispatcher.DispatchInterceptor[] } & Pool.Options['interceptors']
maxOrigins?: number
}
export interface DispatchOptions extends Dispatcher.DispatchOptions {


@ -16,7 +16,6 @@ declare namespace DiagnosticsChannel {
statusText: string;
headers: Array<Buffer>;
}
type Error = unknown
interface ConnectParams {
host: URL['host'];
hostname: URL['hostname'];


@ -49,21 +49,6 @@ declare namespace Errors {
headers: IncomingHttpHeaders | string[] | null
}
export class ResponseStatusCodeError extends UndiciError {
constructor (
message?: string,
statusCode?: number,
headers?: IncomingHttpHeaders | string[] | null,
body?: null | Record<string, any> | string
)
name: 'ResponseStatusCodeError'
code: 'UND_ERR_RESPONSE_STATUS_CODE'
body: null | Record<string, any> | string
status: number
statusCode: number
headers: IncomingHttpHeaders | string[] | null
}
/** Passed an invalid argument. */
export class InvalidArgumentError extends UndiciError {
name: 'InvalidArgumentError'
@ -168,4 +153,9 @@ declare namespace Errors {
name: 'SecureProxyConnectionError'
code: 'UND_ERR_PRX_TLS'
}
class MaxOriginsReachedError extends UndiciError {
name: 'MaxOriginsReachedError'
code: 'UND_ERR_MAX_ORIGINS_REACHED'
}
}
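// Editorial illustration (not part of this commit): the new error pairs with
// the Agent `maxOrigins` option added in this release. Assumed behavior:
//
//   const { Agent, errors } = require('undici')
//   const agent = new Agent({ maxOrigins: 1 })
//   // a request to a second distinct origin while the first is still pooled
//   // is expected to reject with errors.MaxOriginsReachedError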


@ -9,6 +9,10 @@ declare namespace Interceptors {
export type DumpInterceptorOpts = { maxSize?: number }
export type RetryInterceptorOpts = RetryHandler.RetryOptions
export type RedirectInterceptorOpts = { maxRedirections?: number }
export type DecompressInterceptorOpts = {
skipErrorResponses?: boolean
skipStatusCodes?: number[]
}
export type ResponseErrorInterceptorOpts = { throwOnError: boolean }
export type CacheInterceptorOpts = CacheHandler.CacheOptions
@ -28,6 +32,7 @@ declare namespace Interceptors {
export function dump (opts?: DumpInterceptorOpts): Dispatcher.DispatcherComposeInterceptor
export function retry (opts?: RetryInterceptorOpts): Dispatcher.DispatcherComposeInterceptor
export function redirect (opts?: RedirectInterceptorOpts): Dispatcher.DispatcherComposeInterceptor
export function decompress (opts?: DecompressInterceptorOpts): Dispatcher.DispatcherComposeInterceptor
export function responseError (opts?: ResponseErrorInterceptorOpts): Dispatcher.DispatcherComposeInterceptor
export function dns (opts?: DNSInterceptorOpts): Dispatcher.DispatcherComposeInterceptor
export function cache (opts?: CacheInterceptorOpts): Dispatcher.DispatcherComposeInterceptor
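// Editorial illustration (not part of this commit): composing the decompress
// interceptor declared above. Assumed usage, mirroring the other interceptors:
//
//   const { Agent, interceptors } = require('undici')
//   const dispatcher = new Agent().compose(
//     interceptors.decompress({ skipErrorResponses: true })
//   )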


@ -18,9 +18,11 @@ declare class SnapshotRecorder {
}
declare namespace SnapshotRecorder {
type SnapshotRecorderMode = 'record' | 'playback' | 'update'
export interface Options {
snapshotPath?: string
mode?: 'record' | 'playback' | 'update'
mode?: SnapshotRecorderMode
maxSnapshots?: number
autoFlush?: boolean
flushInterval?: number
@ -77,7 +79,7 @@ declare class SnapshotAgent extends MockAgent {
saveSnapshots (filePath?: string): Promise<void>
loadSnapshots (filePath?: string): Promise<void>
getRecorder (): SnapshotRecorder
getMode (): 'record' | 'playback' | 'update'
getMode (): SnapshotRecorder.SnapshotRecorderMode
clearSnapshots (): void
resetCallCounts (): void
deleteSnapshot (requestOpts: any): boolean
@ -87,7 +89,7 @@ declare class SnapshotAgent extends MockAgent {
declare namespace SnapshotAgent {
export interface Options extends MockAgent.Options {
mode?: 'record' | 'playback' | 'update'
mode?: SnapshotRecorder.SnapshotRecorderMode
snapshotPath?: string
maxSnapshots?: number
autoFlush?: boolean


@ -10,11 +10,6 @@ type SequenceConverter<T> = (object: unknown, iterable?: IterableIterator<T>) =>
type RecordConverter<K extends string, V> = (object: unknown) => Record<K, V>
interface ConvertToIntOpts {
clamp?: boolean
enforceRange?: boolean
}
interface WebidlErrors {
/**
* @description Instantiate an error
@ -74,7 +69,7 @@ interface WebidlUtil {
V: unknown,
bitLength: number,
signedness: 'signed' | 'unsigned',
opts?: ConvertToIntOpts
flags?: number
): number
/**
@ -94,15 +89,17 @@ interface WebidlUtil {
* This is only effective in some newer Node.js versions.
*/
markAsUncloneable (V: any): void
IsResizableArrayBuffer (V: ArrayBufferLike): boolean
HasFlag (flag: number, attributes: number): boolean
}
interface WebidlConverters {
/**
* @see https://webidl.spec.whatwg.org/#es-DOMString
*/
DOMString (V: unknown, prefix: string, argument: string, opts?: {
legacyNullToEmptyString: boolean
}): string
DOMString (V: unknown, prefix: string, argument: string, flags?: number): string
/**
* @see https://webidl.spec.whatwg.org/#es-ByteString
@ -142,39 +139,78 @@ interface WebidlConverters {
/**
* @see https://webidl.spec.whatwg.org/#es-unsigned-short
*/
['unsigned short'] (V: unknown, opts?: ConvertToIntOpts): number
['unsigned short'] (V: unknown, flags?: number): number
/**
* @see https://webidl.spec.whatwg.org/#idl-ArrayBuffer
*/
ArrayBuffer (V: unknown): ArrayBufferLike
ArrayBuffer (V: unknown, opts: { allowShared: false }): ArrayBuffer
ArrayBuffer (
V: unknown,
prefix: string,
argument: string,
options?: { allowResizable: boolean }
): ArrayBuffer
/**
* @see https://webidl.spec.whatwg.org/#idl-SharedArrayBuffer
*/
SharedArrayBuffer (
V: unknown,
prefix: string,
argument: string,
options?: { allowResizable: boolean }
): SharedArrayBuffer
/**
* @see https://webidl.spec.whatwg.org/#es-buffer-source-types
*/
TypedArray (
V: unknown,
TypedArray: NodeJS.TypedArray | ArrayBufferLike
): NodeJS.TypedArray | ArrayBufferLike
TypedArray (
V: unknown,
TypedArray: NodeJS.TypedArray | ArrayBufferLike,
opts?: { allowShared: false }
): NodeJS.TypedArray | ArrayBuffer
T: new () => NodeJS.TypedArray,
prefix: string,
argument: string,
flags?: number
): NodeJS.TypedArray
/**
* @see https://webidl.spec.whatwg.org/#es-buffer-source-types
*/
DataView (V: unknown, opts?: { allowShared: boolean }): DataView
DataView (
V: unknown,
prefix: string,
argument: string,
flags?: number
): DataView
/**
* @see https://webidl.spec.whatwg.org/#es-buffer-source-types
*/
ArrayBufferView (
V: unknown,
prefix: string,
argument: string,
flags?: number
): NodeJS.ArrayBufferView
/**
* @see https://webidl.spec.whatwg.org/#BufferSource
*/
BufferSource (
V: unknown,
opts?: { allowShared: boolean }
): NodeJS.TypedArray | ArrayBufferLike | DataView
prefix: string,
argument: string,
flags?: number
): ArrayBuffer | NodeJS.ArrayBufferView
/**
* @see https://webidl.spec.whatwg.org/#AllowSharedBufferSource
*/
AllowSharedBufferSource (
V: unknown,
prefix: string,
argument: string,
flags?: number
): ArrayBuffer | SharedArrayBuffer | NodeJS.ArrayBufferView
['sequence<ByteString>']: SequenceConverter<string>
@ -192,6 +228,13 @@ interface WebidlConverters {
*/
RequestInit (V: unknown): undici.RequestInit
/**
* @see https://html.spec.whatwg.org/multipage/webappapis.html#eventhandlernonnull
*/
EventHandlerNonNull (V: unknown): Function | null
WebSocketStreamWrite (V: unknown): ArrayBuffer | NodeJS.TypedArray | string
[Key: string]: (...args: any[]) => unknown
}
@ -210,6 +253,10 @@ interface WebidlIs {
AbortSignal: WebidlIsFunction<AbortSignal>
MessagePort: WebidlIsFunction<MessagePort>
USVString: WebidlIsFunction<string>
/**
* @see https://webidl.spec.whatwg.org/#BufferSource
*/
BufferSource: WebidlIsFunction<ArrayBuffer | NodeJS.TypedArray>
}
export interface Webidl {
@ -217,6 +264,7 @@ export interface Webidl {
util: WebidlUtil
converters: WebidlConverters
is: WebidlIs
attributes: WebIDLExtendedAttributes
/**
* @description Performs a brand-check on {@param V} to ensure it is a
@ -278,3 +326,16 @@ export interface Webidl {
argumentLengthCheck (args: { length: number }, min: number, context: string): void
}
interface WebIDLExtendedAttributes {
/** https://webidl.spec.whatwg.org/#Clamp */
Clamp: number
/** https://webidl.spec.whatwg.org/#EnforceRange */
EnforceRange: number
/** https://webidl.spec.whatwg.org/#AllowShared */
AllowShared: number
/** https://webidl.spec.whatwg.org/#AllowResizable */
AllowResizable: number
/** https://webidl.spec.whatwg.org/#LegacyNullToEmptyString */
LegacyNullToEmptyString: number
}

deps/undici/undici.js (vendored): file diff suppressed because one or more lines are too long (1242 changed lines).

@ -2,5 +2,5 @@
// Refer to tools/dep_updaters/update-undici.sh
#ifndef SRC_UNDICI_VERSION_H_
#define SRC_UNDICI_VERSION_H_
#define UNDICI_VERSION "7.14.0"
#define UNDICI_VERSION "7.16.0"
#endif // SRC_UNDICI_VERSION_H_