mirror of
https://github.com/zebrajr/node.git
synced 2026-01-15 12:15:26 +00:00
deps: update undici to 6.10.2
PR-URL: https://github.com/nodejs/node/pull/52227 Reviewed-By: Matthew Aitken <maitken033380023@gmail.com> Reviewed-By: Filip Skokan <panva.ip@gmail.com> Reviewed-By: Rafael Gonzaga <rafael.nunu@hotmail.com> Reviewed-By: Luigi Pinca <luigipinca@gmail.com> Reviewed-By: Mohammed Keyvanzadeh <mohammadkeyvanzade94@gmail.com>
This commit is contained in:
committed by
GitHub
parent
29de7f82cd
commit
511be6cee7
76
deps/undici/src/README.md
vendored
76
deps/undici/src/README.md
vendored
@@ -17,24 +17,42 @@ npm i undici
|
||||
|
||||
## Benchmarks
|
||||
|
||||
The benchmark is a simple `hello world` [example](benchmarks/benchmark.js) using a
|
||||
The benchmark is a simple getting data [example](https://github.com/nodejs/undici/blob/main/benchmarks/benchmark.js) using a
|
||||
50 TCP connections with a pipelining depth of 10 running on Node 20.10.0.
|
||||
|
||||
```
|
||||
│ Tests │ Samples │ Result │ Tolerance │ Difference with slowest │
|
||||
|─────────────────────|─────────|─────────────────|───────────|─────────────────────────|
|
||||
│ got │ 45 │ 1661.71 req/sec │ ± 2.93 % │ - │
|
||||
│ node-fetch │ 20 │ 2164.81 req/sec │ ± 2.63 % │ + 30.28 % │
|
||||
│ undici - fetch │ 35 │ 2274.27 req/sec │ ± 2.70 % │ + 36.86 % │
|
||||
│ http - no keepalive │ 15 │ 2376.04 req/sec │ ± 2.99 % │ + 42.99 % │
|
||||
│ axios │ 25 │ 2612.93 req/sec │ ± 2.89 % │ + 57.24 % │
|
||||
│ request │ 40 │ 2712.19 req/sec │ ± 2.92 % │ + 63.22 % │
|
||||
│ http - keepalive │ 45 │ 4393.25 req/sec │ ± 2.86 % │ + 164.38 % │
|
||||
│ undici - pipeline │ 45 │ 5484.69 req/sec │ ± 2.87 % │ + 230.06 % │
|
||||
│ undici - request │ 55 │ 7773.98 req/sec │ ± 2.93 % │ + 367.83 % │
|
||||
│ undici - stream │ 70 │ 8425.96 req/sec │ ± 2.91 % │ + 407.07 % │
|
||||
│ undici - dispatch │ 50 │ 9488.99 req/sec │ ± 2.85 % │ + 471.04 % │
|
||||
```
|
||||
| _Tests_ | _Samples_ | _Result_ | _Tolerance_ | _Difference with slowest_ |
|
||||
| :-----------------: | :-------: | :--------------: | :---------: | :-----------------------: |
|
||||
| undici - fetch | 30 | 3704.43 req/sec | ± 2.95 % | - |
|
||||
| http - no keepalive | 20 | 4275.30 req/sec | ± 2.60 % | + 15.41 % |
|
||||
| node-fetch | 10 | 4759.42 req/sec | ± 0.87 % | + 28.48 % |
|
||||
| request | 40 | 4803.37 req/sec | ± 2.77 % | + 29.67 % |
|
||||
| axios | 45 | 4951.97 req/sec | ± 2.88 % | + 33.68 % |
|
||||
| got | 10 | 5969.67 req/sec | ± 2.64 % | + 61.15 % |
|
||||
| superagent | 10 | 9471.48 req/sec | ± 1.50 % | + 155.68 % |
|
||||
| http - keepalive | 25 | 10327.49 req/sec | ± 2.95 % | + 178.79 % |
|
||||
| undici - pipeline | 10 | 15053.41 req/sec | ± 1.63 % | + 306.36 % |
|
||||
| undici - request | 10 | 19264.24 req/sec | ± 1.74 % | + 420.03 % |
|
||||
| undici - stream | 15 | 20317.29 req/sec | ± 2.13 % | + 448.46 % |
|
||||
| undici - dispatch | 10 | 24883.28 req/sec | ± 1.54 % | + 571.72 % |
|
||||
|
||||
The benchmark is a simple sending data [example](https://github.com/nodejs/undici/blob/main/benchmarks/post-benchmark.js) using a
|
||||
50 TCP connections with a pipelining depth of 10 running on Node 20.10.0.
|
||||
|
||||
| _Tests_ | _Samples_ | _Result_ | _Tolerance_ | _Difference with slowest_ |
|
||||
| :-----------------: | :-------: | :-------------: | :---------: | :-----------------------: |
|
||||
| undici - fetch | 20 | 1968.42 req/sec | ± 2.63 % | - |
|
||||
| http - no keepalive | 25 | 2330.30 req/sec | ± 2.99 % | + 18.38 % |
|
||||
| node-fetch | 20 | 2485.36 req/sec | ± 2.70 % | + 26.26 % |
|
||||
| got | 15 | 2787.68 req/sec | ± 2.56 % | + 41.62 % |
|
||||
| request | 30 | 2805.10 req/sec | ± 2.59 % | + 42.50 % |
|
||||
| axios | 10 | 3040.45 req/sec | ± 1.72 % | + 54.46 % |
|
||||
| superagent | 20 | 3358.29 req/sec | ± 2.51 % | + 70.61 % |
|
||||
| http - keepalive | 20 | 3477.94 req/sec | ± 2.51 % | + 76.69 % |
|
||||
| undici - pipeline | 25 | 3812.61 req/sec | ± 2.80 % | + 93.69 % |
|
||||
| undici - request | 10 | 6067.00 req/sec | ± 0.94 % | + 208.22 % |
|
||||
| undici - stream | 10 | 6391.61 req/sec | ± 1.98 % | + 224.71 % |
|
||||
| undici - dispatch | 10 | 6397.00 req/sec | ± 1.48 % | + 224.98 % |
|
||||
|
||||
|
||||
## Quick Start
|
||||
|
||||
@@ -60,10 +78,14 @@ console.log('trailers', trailers)
|
||||
|
||||
The `body` mixins are the most common way to format the request/response body. Mixins include:
|
||||
|
||||
- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata)
|
||||
- [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
|
||||
- [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
|
||||
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
|
||||
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
|
||||
|
||||
> [!NOTE]
|
||||
> The body returned from `undici.request` does not implement `.formData()`.
|
||||
|
||||
Example usage:
|
||||
|
||||
```js
|
||||
@@ -123,14 +145,14 @@ Returns a promise with the result of the `Dispatcher.stream` method.
|
||||
|
||||
Calls `options.dispatcher.stream(options, factory)`.
|
||||
|
||||
See [Dispatcher.stream](docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details.
|
||||
See [Dispatcher.stream](./docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details.
|
||||
|
||||
### `undici.pipeline([url, options, ]handler): Duplex`
|
||||
|
||||
Arguments:
|
||||
|
||||
* **url** `string | URL | UrlObject`
|
||||
* **options** [`PipelineOptions`](docs/api/Dispatcher.md#parameter-pipelineoptions)
|
||||
* **options** [`PipelineOptions`](./docs/api/Dispatcher.md#parameter-pipelineoptions)
|
||||
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
|
||||
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
|
||||
* **maxRedirections** `Integer` - Default: `0`
|
||||
@@ -140,7 +162,7 @@ Returns: `stream.Duplex`
|
||||
|
||||
Calls `options.dispatch.pipeline(options, handler)`.
|
||||
|
||||
See [Dispatcher.pipeline](docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details.
|
||||
See [Dispatcher.pipeline](./docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details.
|
||||
|
||||
### `undici.connect([url, options]): Promise`
|
||||
|
||||
@@ -149,7 +171,7 @@ Starts two-way communications with the requested resource using [HTTP CONNECT](h
|
||||
Arguments:
|
||||
|
||||
* **url** `string | URL | UrlObject`
|
||||
* **options** [`ConnectOptions`](docs/api/Dispatcher.md#parameter-connectoptions)
|
||||
* **options** [`ConnectOptions`](./docs/api/Dispatcher.md#parameter-connectoptions)
|
||||
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
|
||||
* **maxRedirections** `Integer` - Default: `0`
|
||||
* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional)
|
||||
@@ -158,7 +180,7 @@ Returns a promise with the result of the `Dispatcher.connect` method.
|
||||
|
||||
Calls `options.dispatch.connect(options)`.
|
||||
|
||||
See [Dispatcher.connect](docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details.
|
||||
See [Dispatcher.connect](./docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details.
|
||||
|
||||
### `undici.fetch(input[, init]): Promise`
|
||||
|
||||
@@ -226,7 +248,7 @@ await fetch('https://example.com', { body: data, method: 'POST', duplex: 'half'
|
||||
|
||||
- half
|
||||
|
||||
In this implementation of fetch, `request.duplex` must be set if `request.body` is `ReadableStream` or `Async Iterables`. And fetch requests are currently always be full duplex. More detail refer to [Fetch Standard.](https://fetch.spec.whatwg.org/#dom-requestinit-duplex)
|
||||
In this implementation of fetch, `request.duplex` must be set if `request.body` is `ReadableStream` or `Async Iterables`, however, fetch requests are currently always full duplex. For more detail refer to the [Fetch Standard.](https://fetch.spec.whatwg.org/#dom-requestinit-duplex).
|
||||
|
||||
#### `response.body`
|
||||
|
||||
@@ -297,7 +319,7 @@ Upgrade to a different protocol. See [MDN - HTTP - Protocol upgrade mechanism](h
|
||||
Arguments:
|
||||
|
||||
* **url** `string | URL | UrlObject`
|
||||
* **options** [`UpgradeOptions`](docs/api/Dispatcher.md#parameter-upgradeoptions)
|
||||
* **options** [`UpgradeOptions`](./docs/api/Dispatcher.md#parameter-upgradeoptions)
|
||||
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
|
||||
* **maxRedirections** `Integer` - Default: `0`
|
||||
* **callback** `(error: Error | null, data: UpgradeData) => void` (optional)
|
||||
@@ -306,7 +328,7 @@ Returns a promise with the result of the `Dispatcher.upgrade` method.
|
||||
|
||||
Calls `options.dispatcher.upgrade(options)`.
|
||||
|
||||
See [Dispatcher.upgrade](docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details.
|
||||
See [Dispatcher.upgrade](./docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details.
|
||||
|
||||
### `undici.setGlobalDispatcher(dispatcher)`
|
||||
|
||||
@@ -400,9 +422,9 @@ Refs: https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
|
||||
|
||||
If you experience problem when connecting to a remote server that is resolved by your DNS servers to a IPv6 (AAAA record)
|
||||
first, there are chances that your local router or ISP might have problem connecting to IPv6 networks. In that case
|
||||
undici will throw an error with code `UND_ERR_CONNECT_TIMEOUT`.
|
||||
undici will throw an error with code `UND_ERR_CONNECT_TIMEOUT`.
|
||||
|
||||
If the target server resolves to both a IPv6 and IPv4 (A records) address and you are using a compatible Node version
|
||||
If the target server resolves to both a IPv6 and IPv4 (A records) address and you are using a compatible Node version
|
||||
(18.3.0 and above), you can fix the problem by providing the `autoSelectFamily` option (support by both `undici.request`
|
||||
and `undici.Agent`) which will enable the family autoselection algorithm when establishing the connection.
|
||||
|
||||
|
||||
27
deps/undici/src/docs/api/Fetch.md
vendored
27
deps/undici/src/docs/api/Fetch.md
vendored
@@ -1,27 +0,0 @@
|
||||
# Fetch
|
||||
|
||||
Undici exposes a fetch() method that starts the process of fetching a resource from the network.
|
||||
|
||||
Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch).
|
||||
|
||||
## File
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/File)
|
||||
|
||||
In Node versions v18.13.0 and above and v19.2.0 and above, undici will default to using Node's [File](https://nodejs.org/api/buffer.html#class-file) class. In versions where it's not available, it will default to the undici one.
|
||||
|
||||
## FormData
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData)
|
||||
|
||||
## Response
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response)
|
||||
|
||||
## Request
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request)
|
||||
|
||||
## Header
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
|
||||
BIN
deps/undici/src/docs/assets/lifecycle-diagram.png
vendored
BIN
deps/undici/src/docs/assets/lifecycle-diagram.png
vendored
Binary file not shown.
|
Before Width: | Height: | Size: 46 KiB |
@@ -29,7 +29,8 @@ Returns: `Client`
|
||||
* **pipelining** `number | null` (optional) - Default: `1` - The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Carefully consider your workload and environment before enabling concurrent requests as pipelining may reduce performance if used incorrectly. Pipelining is sensitive to network stack settings as well as head of line blocking caused by e.g. long running requests. Set to `0` to disable keep-alive connections.
|
||||
* **connect** `ConnectOptions | Function | null` (optional) - Default: `null`.
|
||||
* **strictContentLength** `Boolean` (optional) - Default: `true` - Whether to treat request content length mismatches as errors. If true, an error is thrown when the request content-length header doesn't match the length of the request body.
|
||||
* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time.
|
||||
<!-- TODO: Remove once we drop its support -->
|
||||
* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time. **Note: this is deprecated in favor of [Dispatcher#compose](./Dispatcher.md#dispatcher). Support will be dropped in the next major.**
|
||||
* **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version.
|
||||
* **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details.
|
||||
* **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation.
|
||||
@@ -19,9 +19,9 @@ diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => {
|
||||
console.log('completed', request.completed)
|
||||
console.log('method', request.method)
|
||||
console.log('path', request.path)
|
||||
console.log('headers') // raw text, e.g: 'bar: bar\r\n'
|
||||
console.log('headers') // array of strings, e.g: ['foo', 'bar']
|
||||
request.addHeader('hello', 'world')
|
||||
console.log('headers', request.headers) // e.g. 'bar: bar\r\nhello: world\r\n'
|
||||
console.log('headers', request.headers) // e.g. ['foo', 'bar', 'hello', 'world']
|
||||
})
|
||||
```
|
||||
|
||||
@@ -817,6 +817,141 @@ try {
|
||||
}
|
||||
```
|
||||
|
||||
### `Dispatcher.compose(interceptors[, interceptor])`
|
||||
|
||||
Compose a new dispatcher from the current dispatcher and the given interceptors.
|
||||
|
||||
> _Notes_:
|
||||
> - The order of the interceptors matters. The first interceptor will be the first to be called.
|
||||
> - It is important to note that the `interceptor` function should return a function that follows the `Dispatcher.dispatch` signature.
|
||||
> - Any fork of the chain of `interceptors` can lead to unexpected results.
|
||||
|
||||
Arguments:
|
||||
|
||||
* **interceptors** `Interceptor[interceptor[]]`: It is an array of `Interceptor` functions passed as only argument, or several interceptors passed as separate arguments.
|
||||
|
||||
Returns: `Dispatcher`.
|
||||
|
||||
#### Parameter: `Interceptor`
|
||||
|
||||
A function that takes a `dispatch` method and returns a `dispatch`-like function.
|
||||
|
||||
#### Example 1 - Basic Compose
|
||||
|
||||
```js
|
||||
const { Client, RedirectHandler } = require('undici')
|
||||
|
||||
const redirectInterceptor = dispatch => {
|
||||
return (opts, handler) => {
|
||||
const { maxRedirections } = opts
|
||||
|
||||
if (!maxRedirections) {
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
|
||||
const redirectHandler = new RedirectHandler(
|
||||
dispatch,
|
||||
maxRedirections,
|
||||
opts,
|
||||
handler
|
||||
)
|
||||
opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
|
||||
return dispatch(opts, redirectHandler)
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client('http://localhost:3000')
|
||||
.compose(redirectInterceptor)
|
||||
|
||||
await client.request({ path: '/', method: 'GET' })
|
||||
```
|
||||
|
||||
#### Example 2 - Chained Compose
|
||||
|
||||
```js
|
||||
const { Client, RedirectHandler, RetryHandler } = require('undici')
|
||||
|
||||
const redirectInterceptor = dispatch => {
|
||||
return (opts, handler) => {
|
||||
const { maxRedirections } = opts
|
||||
|
||||
if (!maxRedirections) {
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
|
||||
const redirectHandler = new RedirectHandler(
|
||||
dispatch,
|
||||
maxRedirections,
|
||||
opts,
|
||||
handler
|
||||
)
|
||||
opts = { ...opts, maxRedirections: 0 }
|
||||
return dispatch(opts, redirectHandler)
|
||||
}
|
||||
}
|
||||
|
||||
const retryInterceptor = dispatch => {
|
||||
return function retryInterceptor (opts, handler) {
|
||||
return dispatch(
|
||||
opts,
|
||||
new RetryHandler(opts, {
|
||||
handler,
|
||||
dispatch
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client('http://localhost:3000')
|
||||
.compose(redirectInterceptor)
|
||||
.compose(retryInterceptor)
|
||||
|
||||
await client.request({ path: '/', method: 'GET' })
|
||||
```
|
||||
|
||||
#### Pre-built interceptors
|
||||
|
||||
##### `redirect`
|
||||
|
||||
The `redirect` interceptor allows you to customize the way your dispatcher handles redirects.
|
||||
|
||||
It accepts the same arguments as the [`RedirectHandler` constructor](./RedirectHandler.md).
|
||||
|
||||
**Example - Basic Redirect Interceptor**
|
||||
|
||||
```js
|
||||
const { Client, interceptors } = require("undici");
|
||||
const { redirect } = interceptors;
|
||||
|
||||
const client = new Client("http://example.com").compose(
|
||||
redirect({ maxRedirections: 3, throwOnMaxRedirects: true })
|
||||
);
|
||||
client.request({ path: "/" })
|
||||
```
|
||||
|
||||
##### `retry`
|
||||
|
||||
The `retry` interceptor allows you to customize the way your dispatcher handles retries.
|
||||
|
||||
It accepts the same arguments as the [`RetryHandler` constructor](./RetryHandler.md).
|
||||
|
||||
**Example - Basic Retry Interceptor**
|
||||
|
||||
```js
|
||||
const { Client, interceptors } = require("undici");
|
||||
const { retry } = interceptors;
|
||||
|
||||
const client = new Client("http://example.com").compose(
|
||||
retry({
|
||||
maxRetries: 3,
|
||||
minTimeout: 1000,
|
||||
maxTimeout: 10000,
|
||||
timeoutFactor: 2,
|
||||
retryAfter: true,
|
||||
})
|
||||
);
|
||||
```
|
||||
|
||||
## Instance Events
|
||||
|
||||
### Event: `'connect'`
|
||||
@@ -855,10 +990,12 @@ Emitted when dispatcher is no longer busy.
|
||||
|
||||
## Parameter: `UndiciHeaders`
|
||||
|
||||
* `Record<string, string | string[] | undefined> | string[] | null`
|
||||
|
||||
Header arguments such as `options.headers` in [`Client.dispatch`](Client.md#clientdispatchoptions-handlers) can be specified in two forms; either as an object specified by the `Record<string, string | string[] | undefined>` (`IncomingHttpHeaders`) type, or an array of strings. An array representation of a header list must have an even length or an `InvalidArgumentError` will be thrown.
|
||||
* `Record<string, string | string[] | undefined> | string[] | Iterable<[string, string | string[] | undefined]> | null`
|
||||
|
||||
Header arguments such as `options.headers` in [`Client.dispatch`](Client.md#clientdispatchoptions-handlers) can be specified in three forms:
|
||||
* As an object specified by the `Record<string, string | string[] | undefined>` (`IncomingHttpHeaders`) type.
|
||||
* As an array of strings. An array representation of a header list must have an even length, or an `InvalidArgumentError` will be thrown.
|
||||
* As an iterable that can encompass `Headers`, `Map`, or a custom iterator returning key-value pairs.
|
||||
Keys are lowercase and values are not modified.
|
||||
|
||||
Response headers will derive a `host` from the `url` of the [Client](Client.md#class-client) instance if no `host` header was previously specified.
|
||||
@@ -886,3 +1023,37 @@ Response headers will derive a `host` from the `url` of the [Client](Client.md#c
|
||||
'accept', '*/*'
|
||||
]
|
||||
```
|
||||
|
||||
### Example 3 - Iterable
|
||||
|
||||
```js
|
||||
new Headers({
|
||||
'content-length': '123',
|
||||
'content-type': 'text/plain',
|
||||
connection: 'keep-alive',
|
||||
host: 'mysite.com',
|
||||
accept: '*/*'
|
||||
})
|
||||
```
|
||||
or
|
||||
```js
|
||||
new Map([
|
||||
['content-length', '123'],
|
||||
['content-type', 'text/plain'],
|
||||
['connection', 'keep-alive'],
|
||||
['host', 'mysite.com'],
|
||||
['accept', '*/*']
|
||||
])
|
||||
```
|
||||
or
|
||||
```js
|
||||
{
|
||||
*[Symbol.iterator] () {
|
||||
yield ['content-length', '123']
|
||||
yield ['content-type', 'text/plain']
|
||||
yield ['connection', 'keep-alive']
|
||||
yield ['host', 'mysite.com']
|
||||
yield ['accept', '*/*']
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -26,6 +26,7 @@ import { errors } from 'undici'
|
||||
| `ResponseContentLengthMismatchError` | `UND_ERR_RES_CONTENT_LENGTH_MISMATCH` | response body does not match content-length header |
|
||||
| `InformationalError` | `UND_ERR_INFO` | expected error with reason |
|
||||
| `ResponseExceededMaxSizeError` | `UND_ERR_RES_EXCEEDED_MAX_SIZE` | response body exceed the max size allowed |
|
||||
| `SecureProxyConnectionError` | `UND_ERR_PRX_TLS` | tls connection to a proxy failed |
|
||||
|
||||
### `SocketError`
|
||||
|
||||
57
deps/undici/src/docs/docs/api/Fetch.md
vendored
Normal file
57
deps/undici/src/docs/docs/api/Fetch.md
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
# Fetch
|
||||
|
||||
Undici exposes a fetch() method that starts the process of fetching a resource from the network.
|
||||
|
||||
Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch).
|
||||
|
||||
## File
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/File)
|
||||
|
||||
In Node versions v18.13.0 and above and v19.2.0 and above, undici will default to using Node's [File](https://nodejs.org/api/buffer.html#class-file) class. In versions where it's not available, it will default to the undici one.
|
||||
|
||||
## FormData
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData).
|
||||
|
||||
If any parameters are passed to the FormData constructor other than `undefined`, an error will be thrown. Other parameters are ignored.
|
||||
|
||||
## Response
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response)
|
||||
|
||||
## Request
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request)
|
||||
|
||||
## Header
|
||||
|
||||
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
|
||||
|
||||
# Body Mixins
|
||||
|
||||
`Response` and `Request` body inherit body mixin methods. These methods include:
|
||||
|
||||
- [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
|
||||
- [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
|
||||
- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata)
|
||||
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
|
||||
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
|
||||
|
||||
There is an ongoing discussion regarding `.formData()` and its usefulness and performance in server environments. It is recommended to use a dedicated library for parsing `multipart/form-data` bodies, such as [Busboy](https://www.npmjs.com/package/busboy) or [@fastify/busboy](https://www.npmjs.com/package/@fastify/busboy).
|
||||
|
||||
These libraries can be interfaced with fetch with the following example code:
|
||||
|
||||
```mjs
|
||||
import { Busboy } from '@fastify/busboy'
|
||||
import { Readable } from 'node:stream'
|
||||
|
||||
const response = await fetch('...')
|
||||
const busboy = new Busboy({
|
||||
headers: {
|
||||
'content-type': response.headers.get('content-type')
|
||||
}
|
||||
})
|
||||
|
||||
Readable.fromWeb(response.body).pipe(busboy)
|
||||
```
|
||||
@@ -16,7 +16,9 @@ Returns: `ProxyAgent`
|
||||
|
||||
Extends: [`AgentOptions`](Agent.md#parameter-agentoptions)
|
||||
|
||||
* **uri** `string` (required) - It can be passed either by a string or a object containing `uri` as string.
|
||||
* **uri** `string | URL` (required) - The URI of the proxy server. This can be provided as a string, as an instance of the URL class, or as an object with a `uri` property of type string.
|
||||
If the `uri` is provided as a string or `uri` is an object with an `uri` property of type string, then it will be parsed into a `URL` object according to the [WHATWG URL Specification](https://url.spec.whatwg.org).
|
||||
For detailed information on the parsing process and potential validation errors, please refer to the ["Writing" section](https://url.spec.whatwg.org/#writing) of the WHATWG URL Specification.
|
||||
* **token** `string` (optional) - It can be passed by a string of token for authentication.
|
||||
* **auth** `string` (**deprecated**) - Use token.
|
||||
* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` (optional) - Default: `(origin, opts) => new Pool(origin, opts)`
|
||||
@@ -30,6 +32,8 @@ import { ProxyAgent } from 'undici'
|
||||
|
||||
const proxyAgent = new ProxyAgent('my.proxy.server')
|
||||
// or
|
||||
const proxyAgent = new ProxyAgent(new URL('my.proxy.server'))
|
||||
// or
|
||||
const proxyAgent = new ProxyAgent({ uri: 'my.proxy.server' })
|
||||
```
|
||||
|
||||
45
deps/undici/src/docs/docs/api/RetryAgent.md
vendored
Normal file
45
deps/undici/src/docs/docs/api/RetryAgent.md
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
# Class: RetryAgent
|
||||
|
||||
Extends: `undici.Dispatcher`
|
||||
|
||||
An `undici.Dispatcher` that automatically retries a request.
|
||||
Wraps a `undici.RetryHandler`.
|
||||
|
||||
## `new RetryAgent(dispatcher, [options])`
|
||||
|
||||
Arguments:
|
||||
|
||||
* **dispatcher** `undici.Dispatcher` (required) - the dispatcher to wrap
|
||||
* **options** `RetryHandlerOptions` (optional) - the options
|
||||
|
||||
Returns: `RetryAgent`
|
||||
|
||||
### Parameter: `RetryHandlerOptions`
|
||||
|
||||
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
|
||||
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
|
||||
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
|
||||
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
|
||||
- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
|
||||
- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
|
||||
-
|
||||
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
|
||||
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
|
||||
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', 'UND_ERR_SOCKET']`
|
||||
|
||||
**`RetryContext`**
|
||||
|
||||
- `state`: `RetryState` - Current retry state. It can be mutated.
|
||||
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
|
||||
|
||||
Example:
|
||||
|
||||
```js
|
||||
import { Agent, RetryAgent } from 'undici'
|
||||
|
||||
const agent = new RetryAgent(new Agent())
|
||||
|
||||
const res = await agent.request('http://example.com')
|
||||
console.log(res.statusCode)
|
||||
console.log(await res.body.text())
|
||||
```
|
||||
@@ -28,13 +28,19 @@ Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions).
|
||||
-
|
||||
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
|
||||
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
|
||||
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN',
|
||||
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', 'UND_ERR_SOCKET']`
|
||||
|
||||
**`RetryContext`**
|
||||
|
||||
- `state`: `RetryState` - Current retry state. It can be mutated.
|
||||
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
|
||||
|
||||
**`RetryState`**
|
||||
|
||||
It represents the retry state for a given request.
|
||||
|
||||
- `counter`: `number` - Current retry attempt.
|
||||
|
||||
### Parameter `RetryHandlers`
|
||||
|
||||
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every retry.
|
||||
@@ -21,10 +21,39 @@ An Undici [Client](Client.md) can be best described as a state machine. The foll
|
||||
* At any point in time, the *destroy* event will transition the `Client` from the **processing** state to the **destroyed** state, destroying any queued requests.
|
||||
* The **destroyed** state is a final state and the `Client` is no longer functional.
|
||||
|
||||

|
||||
A state diagram representing an Undici Client instance:
|
||||
|
||||
> The diagram was generated using Mermaid.js Live Editor. Modify the state diagram [here](https://mermaid-js.github.io/mermaid-live-editor/#/edit/eyJjb2RlIjoic3RhdGVEaWFncmFtLXYyXG4gICAgWypdIC0tPiBpZGxlXG4gICAgaWRsZSAtLT4gcGVuZGluZyA6IGNvbm5lY3RcbiAgICBpZGxlIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95L2Nsb3NlXG4gICAgXG4gICAgcGVuZGluZyAtLT4gaWRsZSA6IHRpbWVvdXRcbiAgICBwZW5kaW5nIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95XG5cbiAgICBzdGF0ZSBjbG9zZV9mb3JrIDw8Zm9yaz4-XG4gICAgcGVuZGluZyAtLT4gY2xvc2VfZm9yayA6IGNsb3NlXG4gICAgY2xvc2VfZm9yayAtLT4gcHJvY2Vzc2luZ1xuICAgIGNsb3NlX2ZvcmsgLS0-IGRlc3Ryb3llZFxuXG4gICAgcGVuZGluZyAtLT4gcHJvY2Vzc2luZyA6IHByb2Nlc3NcblxuICAgIHByb2Nlc3NpbmcgLS0-IHBlbmRpbmcgOiBrZWVwYWxpdmVcbiAgICBwcm9jZXNzaW5nIC0tPiBkZXN0cm95ZWQgOiBkb25lXG4gICAgcHJvY2Vzc2luZyAtLT4gZGVzdHJveWVkIDogZGVzdHJveVxuXG4gICAgc3RhdGUgcHJvY2Vzc2luZyB7XG4gICAgICAgIHJ1bm5pbmcgLS0-IGJ1c3kgOiBuZWVkRHJhaW5cbiAgICAgICAgYnVzeSAtLT4gcnVubmluZyA6IGRyYWluQ29tcGxldGVcbiAgICAgICAgcnVubmluZyAtLT4gWypdIDoga2VlcGFsaXZlXG4gICAgICAgIHJ1bm5pbmcgLS0-IGNsb3NpbmcgOiBjbG9zZVxuICAgICAgICBjbG9zaW5nIC0tPiBbKl0gOiBkb25lXG4gICAgICAgIFsqXSAtLT4gcnVubmluZ1xuICAgIH1cbiAgICAiLCJtZXJtYWlkIjp7InRoZW1lIjoiYmFzZSJ9LCJ1cGRhdGVFZGl0b3IiOmZhbHNlfQ)
|
||||
```mermaid
|
||||
stateDiagram-v2
|
||||
[*] --> idle
|
||||
idle --> pending : connect
|
||||
idle --> destroyed : destroy/close
|
||||
|
||||
pending --> idle : timeout
|
||||
pending --> destroyed : destroy
|
||||
|
||||
state close_fork <<fork>>
|
||||
pending --> close_fork : close
|
||||
close_fork --> processing
|
||||
close_fork --> destroyed
|
||||
|
||||
pending --> processing : process
|
||||
|
||||
processing --> pending : keepalive
|
||||
processing --> destroyed : done
|
||||
processing --> destroyed : destroy
|
||||
|
||||
destroyed --> [*]
|
||||
|
||||
state processing {
|
||||
[*] --> running
|
||||
running --> closing : close
|
||||
running --> busy : needDrain
|
||||
busy --> running : drainComplete
|
||||
running --> [*] : keepalive
|
||||
closing --> [*] : done
|
||||
}
|
||||
```
|
||||
## State details
|
||||
|
||||
### idle
|
||||
@@ -17,7 +17,7 @@ If you proxy requires basic authentication, you can send it via the `proxy-autho
|
||||
```js
|
||||
import { Client } from 'undici'
|
||||
import { createServer } from 'http'
|
||||
import proxy from 'proxy'
|
||||
import { createProxy } from 'proxy'
|
||||
|
||||
const server = await buildServer()
|
||||
const proxyServer = await buildProxy()
|
||||
@@ -59,7 +59,7 @@ function buildServer () {
|
||||
|
||||
function buildProxy () {
|
||||
return new Promise((resolve, reject) => {
|
||||
const server = proxy(createServer())
|
||||
const server = createProxy(createServer())
|
||||
server.listen(0, () => resolve(server))
|
||||
})
|
||||
}
|
||||
@@ -70,7 +70,7 @@ function buildProxy () {
|
||||
```js
|
||||
import { Client } from 'undici'
|
||||
import { createServer } from 'http'
|
||||
import proxy from 'proxy'
|
||||
import { createProxy } from 'proxy'
|
||||
|
||||
const server = await buildServer()
|
||||
const proxyServer = await buildProxy()
|
||||
@@ -78,8 +78,8 @@ const proxyServer = await buildProxy()
|
||||
const serverUrl = `http://localhost:${server.address().port}`
|
||||
const proxyUrl = `http://localhost:${proxyServer.address().port}`
|
||||
|
||||
proxyServer.authenticate = function (req, fn) {
|
||||
fn(null, req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`)
|
||||
proxyServer.authenticate = function (req) {
|
||||
return req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`
|
||||
}
|
||||
|
||||
server.on('request', (req, res) => {
|
||||
@@ -119,7 +119,7 @@ function buildServer () {
|
||||
|
||||
function buildProxy () {
|
||||
return new Promise((resolve, reject) => {
|
||||
const server = proxy(createServer())
|
||||
const server = createProxy(createServer())
|
||||
server.listen(0, () => resolve(server))
|
||||
})
|
||||
}
|
||||
17
deps/undici/src/index-fetch.js
vendored
17
deps/undici/src/index-fetch.js
vendored
@@ -1,20 +1,21 @@
|
||||
'use strict'
|
||||
|
||||
const fetchImpl = require('./lib/fetch').fetch
|
||||
const fetchImpl = require('./lib/web/fetch').fetch
|
||||
|
||||
module.exports.fetch = function fetch (resource, init = undefined) {
|
||||
return fetchImpl(resource, init).catch((err) => {
|
||||
if (typeof err === 'object') {
|
||||
if (err && typeof err === 'object') {
|
||||
Error.captureStackTrace(err, this)
|
||||
}
|
||||
throw err
|
||||
})
|
||||
}
|
||||
module.exports.FormData = require('./lib/fetch/formdata').FormData
|
||||
module.exports.Headers = require('./lib/fetch/headers').Headers
|
||||
module.exports.Response = require('./lib/fetch/response').Response
|
||||
module.exports.Request = require('./lib/fetch/request').Request
|
||||
module.exports.FormData = require('./lib/web/fetch/formdata').FormData
|
||||
module.exports.Headers = require('./lib/web/fetch/headers').Headers
|
||||
module.exports.Response = require('./lib/web/fetch/response').Response
|
||||
module.exports.Request = require('./lib/web/fetch/request').Request
|
||||
|
||||
module.exports.WebSocket = require('./lib/websocket/websocket').WebSocket
|
||||
module.exports.WebSocket = require('./lib/web/websocket/websocket').WebSocket
|
||||
module.exports.MessageEvent = require('./lib/web/websocket/events').MessageEvent
|
||||
|
||||
module.exports.EventSource = require('./lib/eventsource/eventsource').EventSource
|
||||
module.exports.EventSource = require('./lib/web/eventsource/eventsource').EventSource
|
||||
|
||||
60
deps/undici/src/index.js
vendored
60
deps/undici/src/index.js
vendored
@@ -1,11 +1,13 @@
|
||||
'use strict'
|
||||
|
||||
const Client = require('./lib/client')
|
||||
const Dispatcher = require('./lib/dispatcher')
|
||||
const Client = require('./lib/dispatcher/client')
|
||||
const Dispatcher = require('./lib/dispatcher/dispatcher')
|
||||
const Pool = require('./lib/dispatcher/pool')
|
||||
const BalancedPool = require('./lib/dispatcher/balanced-pool')
|
||||
const Agent = require('./lib/dispatcher/agent')
|
||||
const ProxyAgent = require('./lib/dispatcher/proxy-agent')
|
||||
const RetryAgent = require('./lib/dispatcher/retry-agent')
|
||||
const errors = require('./lib/core/errors')
|
||||
const Pool = require('./lib/pool')
|
||||
const BalancedPool = require('./lib/balanced-pool')
|
||||
const Agent = require('./lib/agent')
|
||||
const util = require('./lib/core/util')
|
||||
const { InvalidArgumentError } = errors
|
||||
const api = require('./lib/api')
|
||||
@@ -14,12 +16,11 @@ const MockClient = require('./lib/mock/mock-client')
|
||||
const MockAgent = require('./lib/mock/mock-agent')
|
||||
const MockPool = require('./lib/mock/mock-pool')
|
||||
const mockErrors = require('./lib/mock/mock-errors')
|
||||
const ProxyAgent = require('./lib/proxy-agent')
|
||||
const RetryHandler = require('./lib/handler/RetryHandler')
|
||||
const RetryHandler = require('./lib/handler/retry-handler')
|
||||
const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
|
||||
const DecoratorHandler = require('./lib/handler/DecoratorHandler')
|
||||
const RedirectHandler = require('./lib/handler/RedirectHandler')
|
||||
const createRedirectInterceptor = require('./lib/interceptor/redirectInterceptor')
|
||||
const DecoratorHandler = require('./lib/handler/decorator-handler')
|
||||
const RedirectHandler = require('./lib/handler/redirect-handler')
|
||||
const createRedirectInterceptor = require('./lib/interceptor/redirect-interceptor')
|
||||
|
||||
Object.assign(Dispatcher.prototype, api)
|
||||
|
||||
@@ -29,11 +30,16 @@ module.exports.Pool = Pool
|
||||
module.exports.BalancedPool = BalancedPool
|
||||
module.exports.Agent = Agent
|
||||
module.exports.ProxyAgent = ProxyAgent
|
||||
module.exports.RetryAgent = RetryAgent
|
||||
module.exports.RetryHandler = RetryHandler
|
||||
|
||||
module.exports.DecoratorHandler = DecoratorHandler
|
||||
module.exports.RedirectHandler = RedirectHandler
|
||||
module.exports.createRedirectInterceptor = createRedirectInterceptor
|
||||
module.exports.interceptors = {
|
||||
redirect: require('./lib/interceptor/redirect'),
|
||||
retry: require('./lib/interceptor/retry')
|
||||
}
|
||||
|
||||
module.exports.buildConnector = buildConnector
|
||||
module.exports.errors = errors
|
||||
@@ -94,50 +100,54 @@ function makeDispatcher (fn) {
|
||||
module.exports.setGlobalDispatcher = setGlobalDispatcher
|
||||
module.exports.getGlobalDispatcher = getGlobalDispatcher
|
||||
|
||||
const fetchImpl = require('./lib/fetch').fetch
|
||||
const fetchImpl = require('./lib/web/fetch').fetch
|
||||
module.exports.fetch = async function fetch (init, options = undefined) {
|
||||
try {
|
||||
return await fetchImpl(init, options)
|
||||
} catch (err) {
|
||||
if (typeof err === 'object') {
|
||||
if (err && typeof err === 'object') {
|
||||
Error.captureStackTrace(err, this)
|
||||
}
|
||||
|
||||
throw err
|
||||
}
|
||||
}
|
||||
module.exports.Headers = require('./lib/fetch/headers').Headers
|
||||
module.exports.Response = require('./lib/fetch/response').Response
|
||||
module.exports.Request = require('./lib/fetch/request').Request
|
||||
module.exports.FormData = require('./lib/fetch/formdata').FormData
|
||||
module.exports.File = require('./lib/fetch/file').File
|
||||
module.exports.FileReader = require('./lib/fileapi/filereader').FileReader
|
||||
module.exports.Headers = require('./lib/web/fetch/headers').Headers
|
||||
module.exports.Response = require('./lib/web/fetch/response').Response
|
||||
module.exports.Request = require('./lib/web/fetch/request').Request
|
||||
module.exports.FormData = require('./lib/web/fetch/formdata').FormData
|
||||
module.exports.File = require('./lib/web/fetch/file').File
|
||||
module.exports.FileReader = require('./lib/web/fileapi/filereader').FileReader
|
||||
|
||||
const { setGlobalOrigin, getGlobalOrigin } = require('./lib/fetch/global')
|
||||
const { setGlobalOrigin, getGlobalOrigin } = require('./lib/web/fetch/global')
|
||||
|
||||
module.exports.setGlobalOrigin = setGlobalOrigin
|
||||
module.exports.getGlobalOrigin = getGlobalOrigin
|
||||
|
||||
const { CacheStorage } = require('./lib/cache/cachestorage')
|
||||
const { kConstruct } = require('./lib/cache/symbols')
|
||||
const { CacheStorage } = require('./lib/web/cache/cachestorage')
|
||||
const { kConstruct } = require('./lib/web/cache/symbols')
|
||||
|
||||
// Cache & CacheStorage are tightly coupled with fetch. Even if it may run
|
||||
// in an older version of Node, it doesn't have any use without fetch.
|
||||
module.exports.caches = new CacheStorage(kConstruct)
|
||||
|
||||
const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/cookies')
|
||||
const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/web/cookies')
|
||||
|
||||
module.exports.deleteCookie = deleteCookie
|
||||
module.exports.getCookies = getCookies
|
||||
module.exports.getSetCookies = getSetCookies
|
||||
module.exports.setCookie = setCookie
|
||||
|
||||
const { parseMIMEType, serializeAMimeType } = require('./lib/fetch/dataURL')
|
||||
const { parseMIMEType, serializeAMimeType } = require('./lib/web/fetch/data-url')
|
||||
|
||||
module.exports.parseMIMEType = parseMIMEType
|
||||
module.exports.serializeAMimeType = serializeAMimeType
|
||||
|
||||
module.exports.WebSocket = require('./lib/websocket/websocket').WebSocket
|
||||
const { CloseEvent, ErrorEvent, MessageEvent } = require('./lib/web/websocket/events')
|
||||
module.exports.WebSocket = require('./lib/web/websocket/websocket').WebSocket
|
||||
module.exports.CloseEvent = CloseEvent
|
||||
module.exports.ErrorEvent = ErrorEvent
|
||||
module.exports.MessageEvent = MessageEvent
|
||||
|
||||
module.exports.request = makeDispatcher(api.request)
|
||||
module.exports.stream = makeDispatcher(api.stream)
|
||||
@@ -150,6 +160,6 @@ module.exports.MockPool = MockPool
|
||||
module.exports.MockAgent = MockAgent
|
||||
module.exports.mockErrors = mockErrors
|
||||
|
||||
const { EventSource } = require('./lib/eventsource/eventsource')
|
||||
const { EventSource } = require('./lib/web/eventsource/eventsource')
|
||||
|
||||
module.exports.EventSource = EventSource
|
||||
|
||||
5
deps/undici/src/lib/api/api-request.js
vendored
5
deps/undici/src/lib/api/api-request.js
vendored
@@ -1,6 +1,6 @@
|
||||
'use strict'
|
||||
|
||||
const Readable = require('./readable')
|
||||
const { Readable } = require('./readable')
|
||||
const {
|
||||
InvalidArgumentError,
|
||||
RequestAbortedError
|
||||
@@ -91,7 +91,8 @@ class RequestHandler extends AsyncResource {
|
||||
|
||||
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
|
||||
const contentType = parsedHeaders['content-type']
|
||||
const body = new Readable({ resume, abort, contentType, highWaterMark })
|
||||
const contentLength = parsedHeaders['content-length']
|
||||
const body = new Readable({ resume, abort, contentType, contentLength, highWaterMark })
|
||||
|
||||
this.callback = null
|
||||
this.res = body
|
||||
|
||||
32
deps/undici/src/lib/api/readable.js
vendored
32
deps/undici/src/lib/api/readable.js
vendored
@@ -11,16 +11,18 @@ const { ReadableStreamFrom } = require('../core/util')
|
||||
const kConsume = Symbol('kConsume')
|
||||
const kReading = Symbol('kReading')
|
||||
const kBody = Symbol('kBody')
|
||||
const kAbort = Symbol('abort')
|
||||
const kAbort = Symbol('kAbort')
|
||||
const kContentType = Symbol('kContentType')
|
||||
const kContentLength = Symbol('kContentLength')
|
||||
|
||||
const noop = () => {}
|
||||
|
||||
module.exports = class BodyReadable extends Readable {
|
||||
class BodyReadable extends Readable {
|
||||
constructor ({
|
||||
resume,
|
||||
abort,
|
||||
contentType = '',
|
||||
contentLength,
|
||||
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
|
||||
}) {
|
||||
super({
|
||||
@@ -35,6 +37,7 @@ module.exports = class BodyReadable extends Readable {
|
||||
this[kConsume] = null
|
||||
this[kBody] = null
|
||||
this[kContentType] = contentType
|
||||
this[kContentLength] = contentLength
|
||||
|
||||
// Is stream being consumed through Readable API?
|
||||
// This is an optimization so that we avoid checking
|
||||
@@ -146,7 +149,7 @@ module.exports = class BodyReadable extends Readable {
|
||||
}
|
||||
|
||||
async dump (opts) {
|
||||
let limit = Number.isFinite(opts?.limit) ? opts.limit : 262144
|
||||
let limit = Number.isFinite(opts?.limit) ? opts.limit : 128 * 1024
|
||||
const signal = opts?.signal
|
||||
|
||||
if (signal != null && (typeof signal !== 'object' || !('aborted' in signal))) {
|
||||
@@ -160,6 +163,10 @@ module.exports = class BodyReadable extends Readable {
|
||||
}
|
||||
|
||||
return await new Promise((resolve, reject) => {
|
||||
if (this[kContentLength] > limit) {
|
||||
this.destroy(new AbortError())
|
||||
}
|
||||
|
||||
const onAbort = () => {
|
||||
this.destroy(signal.reason ?? new AbortError())
|
||||
}
|
||||
@@ -284,16 +291,17 @@ function chunksDecode (chunks, length) {
|
||||
return ''
|
||||
}
|
||||
const buffer = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks, length)
|
||||
const bufferLength = buffer.length
|
||||
|
||||
// Skip BOM.
|
||||
const start =
|
||||
buffer.length >= 3 &&
|
||||
// Skip BOM.
|
||||
buffer[0] === 0xef &&
|
||||
buffer[1] === 0xbb &&
|
||||
buffer[2] === 0xbf
|
||||
? 3
|
||||
: 0
|
||||
return buffer.utf8Slice(start, buffer.length - start)
|
||||
bufferLength > 2 &&
|
||||
buffer[0] === 0xef &&
|
||||
buffer[1] === 0xbb &&
|
||||
buffer[2] === 0xbf
|
||||
? 3
|
||||
: 0
|
||||
return buffer.utf8Slice(start, bufferLength)
|
||||
}
|
||||
|
||||
function consumeEnd (consume) {
|
||||
@@ -347,3 +355,5 @@ function consumeFinish (consume, err) {
|
||||
consume.length = 0
|
||||
consume.body = null
|
||||
}
|
||||
|
||||
module.exports = { Readable: BodyReadable, chunksDecode }
|
||||
|
||||
80
deps/undici/src/lib/api/util.js
vendored
80
deps/undici/src/lib/api/util.js
vendored
@@ -2,45 +2,85 @@ const assert = require('node:assert')
|
||||
const {
|
||||
ResponseStatusCodeError
|
||||
} = require('../core/errors')
|
||||
const { toUSVString } = require('../core/util')
|
||||
|
||||
const { chunksDecode } = require('./readable')
|
||||
const CHUNK_LIMIT = 128 * 1024
|
||||
|
||||
async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
|
||||
assert(body)
|
||||
|
||||
let chunks = []
|
||||
let limit = 0
|
||||
let length = 0
|
||||
|
||||
for await (const chunk of body) {
|
||||
chunks.push(chunk)
|
||||
limit += chunk.length
|
||||
if (limit > 128 * 1024) {
|
||||
length += chunk.length
|
||||
if (length > CHUNK_LIMIT) {
|
||||
chunks = null
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
const message = `Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`
|
||||
|
||||
if (statusCode === 204 || !contentType || !chunks) {
|
||||
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
|
||||
queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers)))
|
||||
return
|
||||
}
|
||||
|
||||
const stackTraceLimit = Error.stackTraceLimit
|
||||
Error.stackTraceLimit = 0
|
||||
let payload
|
||||
|
||||
try {
|
||||
if (contentType.startsWith('application/json')) {
|
||||
const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
|
||||
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
|
||||
return
|
||||
if (isContentTypeApplicationJson(contentType)) {
|
||||
payload = JSON.parse(chunksDecode(chunks, length))
|
||||
} else if (isContentTypeText(contentType)) {
|
||||
payload = chunksDecode(chunks, length)
|
||||
}
|
||||
|
||||
if (contentType.startsWith('text/')) {
|
||||
const payload = toUSVString(Buffer.concat(chunks))
|
||||
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
|
||||
return
|
||||
}
|
||||
} catch (err) {
|
||||
// Process in a fallback if error
|
||||
} catch {
|
||||
// process in a callback to avoid throwing in the microtask queue
|
||||
} finally {
|
||||
Error.stackTraceLimit = stackTraceLimit
|
||||
}
|
||||
|
||||
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
|
||||
queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers, payload)))
|
||||
}
|
||||
|
||||
module.exports = { getResolveErrorBodyCallback }
|
||||
const isContentTypeApplicationJson = (contentType) => {
|
||||
return (
|
||||
contentType.length > 15 &&
|
||||
contentType[11] === '/' &&
|
||||
contentType[0] === 'a' &&
|
||||
contentType[1] === 'p' &&
|
||||
contentType[2] === 'p' &&
|
||||
contentType[3] === 'l' &&
|
||||
contentType[4] === 'i' &&
|
||||
contentType[5] === 'c' &&
|
||||
contentType[6] === 'a' &&
|
||||
contentType[7] === 't' &&
|
||||
contentType[8] === 'i' &&
|
||||
contentType[9] === 'o' &&
|
||||
contentType[10] === 'n' &&
|
||||
contentType[12] === 'j' &&
|
||||
contentType[13] === 's' &&
|
||||
contentType[14] === 'o' &&
|
||||
contentType[15] === 'n'
|
||||
)
|
||||
}
|
||||
|
||||
const isContentTypeText = (contentType) => {
|
||||
return (
|
||||
contentType.length > 4 &&
|
||||
contentType[4] === '/' &&
|
||||
contentType[0] === 't' &&
|
||||
contentType[1] === 'e' &&
|
||||
contentType[2] === 'x' &&
|
||||
contentType[3] === 't'
|
||||
)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getResolveErrorBodyCallback,
|
||||
isContentTypeApplicationJson,
|
||||
isContentTypeText
|
||||
}
|
||||
|
||||
5
deps/undici/src/lib/cache/symbols.js
vendored
5
deps/undici/src/lib/cache/symbols.js
vendored
@@ -1,5 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = {
|
||||
kConstruct: require('../core/symbols').kConstruct
|
||||
}
|
||||
2295
deps/undici/src/lib/client.js
vendored
2295
deps/undici/src/lib/client.js
vendored
File diff suppressed because it is too large
Load Diff
2
deps/undici/src/lib/core/connect.js
vendored
2
deps/undici/src/lib/core/connect.js
vendored
@@ -185,7 +185,7 @@ function setupTimeout (onConnectTimeout, timeout) {
|
||||
function onConnectTimeout (socket) {
|
||||
let message = 'Connect Timeout Error'
|
||||
if (Array.isArray(socket.autoSelectFamilyAttemptedAddresses)) {
|
||||
message = +` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')})`
|
||||
message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')})`
|
||||
}
|
||||
util.destroy(socket, new ConnectTimeoutError(message))
|
||||
}
|
||||
|
||||
13
deps/undici/src/lib/core/errors.js
vendored
13
deps/undici/src/lib/core/errors.js
vendored
@@ -195,6 +195,16 @@ class RequestRetryError extends UndiciError {
|
||||
}
|
||||
}
|
||||
|
||||
class SecureProxyConnectionError extends UndiciError {
|
||||
constructor (cause, message, options) {
|
||||
super(message, { cause, ...(options ?? {}) })
|
||||
this.name = 'SecureProxyConnectionError'
|
||||
this.message = message || 'Secure Proxy Connection failed'
|
||||
this.code = 'UND_ERR_PRX_TLS'
|
||||
this.cause = cause
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
AbortError,
|
||||
HTTPParserError,
|
||||
@@ -216,5 +226,6 @@ module.exports = {
|
||||
ResponseContentLengthMismatchError,
|
||||
BalancedPoolMissingUpstreamError,
|
||||
ResponseExceededMaxSizeError,
|
||||
RequestRetryError
|
||||
RequestRetryError,
|
||||
SecureProxyConnectionError
|
||||
}
|
||||
|
||||
210
deps/undici/src/lib/core/request.js
vendored
210
deps/undici/src/lib/core/request.js
vendored
@@ -5,29 +5,27 @@ const {
|
||||
NotSupportedError
|
||||
} = require('./errors')
|
||||
const assert = require('node:assert')
|
||||
const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')
|
||||
const util = require('./util')
|
||||
const {
|
||||
isValidHTTPToken,
|
||||
isValidHeaderChar,
|
||||
isStream,
|
||||
destroy,
|
||||
isBuffer,
|
||||
isFormDataLike,
|
||||
isIterable,
|
||||
isBlobLike,
|
||||
buildURL,
|
||||
validateHandler,
|
||||
getServerName
|
||||
} = require('./util')
|
||||
const { channels } = require('./diagnostics.js')
|
||||
const { headerNameLowerCasedRecord } = require('./constants')
|
||||
|
||||
// headerCharRegex have been lifted from
|
||||
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
|
||||
|
||||
/**
|
||||
* Matches if val contains an invalid field-vchar
|
||||
* field-value = *( field-content / obs-fold )
|
||||
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
|
||||
* field-vchar = VCHAR / obs-text
|
||||
*/
|
||||
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
|
||||
|
||||
// Verifies that a given path is valid does not contain control chars \x00 to \x20
|
||||
const invalidPathRegex = /[^\u0021-\u00ff]/
|
||||
|
||||
const kHandler = Symbol('handler')
|
||||
|
||||
let extractBody
|
||||
|
||||
class Request {
|
||||
constructor (origin, {
|
||||
path,
|
||||
@@ -42,7 +40,8 @@ class Request {
|
||||
bodyTimeout,
|
||||
reset,
|
||||
throwOnError,
|
||||
expectContinue
|
||||
expectContinue,
|
||||
servername
|
||||
}, handler) {
|
||||
if (typeof path !== 'string') {
|
||||
throw new InvalidArgumentError('path must be a string')
|
||||
@@ -58,7 +57,7 @@ class Request {
|
||||
|
||||
if (typeof method !== 'string') {
|
||||
throw new InvalidArgumentError('method must be a string')
|
||||
} else if (!util.isValidHTTPToken(method)) {
|
||||
} else if (!isValidHTTPToken(method)) {
|
||||
throw new InvalidArgumentError('invalid request method')
|
||||
}
|
||||
|
||||
@@ -94,13 +93,13 @@ class Request {
|
||||
|
||||
if (body == null) {
|
||||
this.body = null
|
||||
} else if (util.isStream(body)) {
|
||||
} else if (isStream(body)) {
|
||||
this.body = body
|
||||
|
||||
const rState = this.body._readableState
|
||||
if (!rState || !rState.autoDestroy) {
|
||||
this.endHandler = function autoDestroy () {
|
||||
util.destroy(this)
|
||||
destroy(this)
|
||||
}
|
||||
this.body.on('end', this.endHandler)
|
||||
}
|
||||
@@ -113,7 +112,7 @@ class Request {
|
||||
}
|
||||
}
|
||||
this.body.on('error', this.errorHandler)
|
||||
} else if (util.isBuffer(body)) {
|
||||
} else if (isBuffer(body)) {
|
||||
this.body = body.byteLength ? body : null
|
||||
} else if (ArrayBuffer.isView(body)) {
|
||||
this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
|
||||
@@ -121,7 +120,7 @@ class Request {
|
||||
this.body = body.byteLength ? Buffer.from(body) : null
|
||||
} else if (typeof body === 'string') {
|
||||
this.body = body.length ? Buffer.from(body) : null
|
||||
} else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
|
||||
} else if (isFormDataLike(body) || isIterable(body) || isBlobLike(body)) {
|
||||
this.body = body
|
||||
} else {
|
||||
throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
|
||||
@@ -133,7 +132,7 @@ class Request {
|
||||
|
||||
this.upgrade = upgrade || null
|
||||
|
||||
this.path = query ? util.buildURL(path, query) : path
|
||||
this.path = query ? buildURL(path, query) : path
|
||||
|
||||
this.origin = origin
|
||||
|
||||
@@ -151,7 +150,7 @@ class Request {
|
||||
|
||||
this.contentType = null
|
||||
|
||||
this.headers = ''
|
||||
this.headers = []
|
||||
|
||||
// Only for H2
|
||||
this.expectContinue = expectContinue != null ? expectContinue : false
|
||||
@@ -164,35 +163,26 @@ class Request {
|
||||
processHeader(this, headers[i], headers[i + 1])
|
||||
}
|
||||
} else if (headers && typeof headers === 'object') {
|
||||
const keys = Object.keys(headers)
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i]
|
||||
processHeader(this, key, headers[key])
|
||||
if (headers[Symbol.iterator]) {
|
||||
for (const header of headers) {
|
||||
if (!Array.isArray(header) || header.length !== 2) {
|
||||
throw new InvalidArgumentError('headers must be in key-value pair format')
|
||||
}
|
||||
processHeader(this, header[0], header[1])
|
||||
}
|
||||
} else {
|
||||
const keys = Object.keys(headers)
|
||||
for (let i = 0; i < keys.length; ++i) {
|
||||
processHeader(this, keys[i], headers[keys[i]])
|
||||
}
|
||||
}
|
||||
} else if (headers != null) {
|
||||
throw new InvalidArgumentError('headers must be an object or an array')
|
||||
}
|
||||
|
||||
if (util.isFormDataLike(this.body)) {
|
||||
if (!extractBody) {
|
||||
extractBody = require('../fetch/body.js').extractBody
|
||||
}
|
||||
validateHandler(handler, method, upgrade)
|
||||
|
||||
const [bodyStream, contentType] = extractBody(body)
|
||||
if (this.contentType == null) {
|
||||
this.contentType = contentType
|
||||
this.headers += `content-type: ${contentType}\r\n`
|
||||
}
|
||||
this.body = bodyStream.stream
|
||||
this.contentLength = bodyStream.length
|
||||
} else if (util.isBlobLike(body) && this.contentType == null && body.type) {
|
||||
this.contentType = body.type
|
||||
this.headers += `content-type: ${body.type}\r\n`
|
||||
}
|
||||
|
||||
util.validateHandler(handler, method, upgrade)
|
||||
|
||||
this.servername = util.getServerName(this.host)
|
||||
this.servername = servername || getServerName(this.host)
|
||||
|
||||
this[kHandler] = handler
|
||||
|
||||
@@ -320,81 +310,13 @@ class Request {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: adjust to support H2
|
||||
addHeader (key, value) {
|
||||
processHeader(this, key, value)
|
||||
return this
|
||||
}
|
||||
|
||||
static [kHTTP1BuildRequest] (origin, opts, handler) {
|
||||
// TODO: Migrate header parsing here, to make Requests
|
||||
// HTTP agnostic
|
||||
return new Request(origin, opts, handler)
|
||||
}
|
||||
|
||||
static [kHTTP2BuildRequest] (origin, opts, handler) {
|
||||
const headers = opts.headers
|
||||
opts = { ...opts, headers: null }
|
||||
|
||||
const request = new Request(origin, opts, handler)
|
||||
|
||||
request.headers = {}
|
||||
|
||||
if (Array.isArray(headers)) {
|
||||
if (headers.length % 2 !== 0) {
|
||||
throw new InvalidArgumentError('headers array must be even')
|
||||
}
|
||||
for (let i = 0; i < headers.length; i += 2) {
|
||||
processHeader(request, headers[i], headers[i + 1], true)
|
||||
}
|
||||
} else if (headers && typeof headers === 'object') {
|
||||
const keys = Object.keys(headers)
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i]
|
||||
processHeader(request, key, headers[key], true)
|
||||
}
|
||||
} else if (headers != null) {
|
||||
throw new InvalidArgumentError('headers must be an object or an array')
|
||||
}
|
||||
|
||||
return request
|
||||
}
|
||||
|
||||
static [kHTTP2CopyHeaders] (raw) {
|
||||
const rawHeaders = raw.split('\r\n')
|
||||
const headers = {}
|
||||
|
||||
for (const header of rawHeaders) {
|
||||
const [key, value] = header.split(': ')
|
||||
|
||||
if (value == null || value.length === 0) continue
|
||||
|
||||
if (headers[key]) {
|
||||
headers[key] += `,${value}`
|
||||
} else {
|
||||
headers[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
return headers
|
||||
}
|
||||
}
|
||||
|
||||
function processHeaderValue (key, val, skipAppend) {
|
||||
if (val && typeof val === 'object') {
|
||||
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||
}
|
||||
|
||||
val = val != null ? `${val}` : ''
|
||||
|
||||
if (headerCharRegex.exec(val) !== null) {
|
||||
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||
}
|
||||
|
||||
return skipAppend ? val : `${key}: ${val}\r\n`
|
||||
}
|
||||
|
||||
function processHeader (request, key, val, skipAppend = false) {
|
||||
function processHeader (request, key, val) {
|
||||
if (val && (typeof val === 'object' && !Array.isArray(val))) {
|
||||
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||
} else if (val === undefined) {
|
||||
@@ -405,15 +327,44 @@ function processHeader (request, key, val, skipAppend = false) {
|
||||
|
||||
if (headerName === undefined) {
|
||||
headerName = key.toLowerCase()
|
||||
if (headerNameLowerCasedRecord[headerName] === undefined && !util.isValidHTTPToken(headerName)) {
|
||||
if (headerNameLowerCasedRecord[headerName] === undefined && !isValidHTTPToken(headerName)) {
|
||||
throw new InvalidArgumentError('invalid header key')
|
||||
}
|
||||
}
|
||||
|
||||
if (request.host === null && headerName === 'host') {
|
||||
if (headerCharRegex.exec(val) !== null) {
|
||||
if (Array.isArray(val)) {
|
||||
const arr = []
|
||||
for (let i = 0; i < val.length; i++) {
|
||||
if (typeof val[i] === 'string') {
|
||||
if (!isValidHeaderChar(val[i])) {
|
||||
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||
}
|
||||
arr.push(val[i])
|
||||
} else if (val[i] === null) {
|
||||
arr.push('')
|
||||
} else if (typeof val[i] === 'object') {
|
||||
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||
} else {
|
||||
arr.push(`${val[i]}`)
|
||||
}
|
||||
}
|
||||
val = arr
|
||||
} else if (typeof val === 'string') {
|
||||
if (!isValidHeaderChar(val)) {
|
||||
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||
}
|
||||
} else if (val === null) {
|
||||
val = ''
|
||||
} else if (typeof val === 'object') {
|
||||
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||
} else {
|
||||
val = `${val}`
|
||||
}
|
||||
|
||||
if (request.host === null && headerName === 'host') {
|
||||
if (typeof val !== 'string') {
|
||||
throw new InvalidArgumentError('invalid host header')
|
||||
}
|
||||
// Consumed by Client
|
||||
request.host = val
|
||||
} else if (request.contentLength === null && headerName === 'content-length') {
|
||||
@@ -423,35 +374,22 @@ function processHeader (request, key, val, skipAppend = false) {
|
||||
}
|
||||
} else if (request.contentType === null && headerName === 'content-type') {
|
||||
request.contentType = val
|
||||
if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
|
||||
else request.headers += processHeaderValue(key, val)
|
||||
request.headers.push(key, val)
|
||||
} else if (headerName === 'transfer-encoding' || headerName === 'keep-alive' || headerName === 'upgrade') {
|
||||
throw new InvalidArgumentError(`invalid ${headerName} header`)
|
||||
} else if (headerName === 'connection') {
|
||||
const value = typeof val === 'string' ? val.toLowerCase() : null
|
||||
if (value !== 'close' && value !== 'keep-alive') {
|
||||
throw new InvalidArgumentError('invalid connection header')
|
||||
} else if (value === 'close') {
|
||||
}
|
||||
|
||||
if (value === 'close') {
|
||||
request.reset = true
|
||||
}
|
||||
} else if (headerName === 'expect') {
|
||||
throw new NotSupportedError('expect header not supported')
|
||||
} else if (Array.isArray(val)) {
|
||||
for (let i = 0; i < val.length; i++) {
|
||||
if (skipAppend) {
|
||||
if (request.headers[key]) {
|
||||
request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
|
||||
} else {
|
||||
request.headers[key] = processHeaderValue(key, val[i], skipAppend)
|
||||
}
|
||||
} else {
|
||||
request.headers += processHeaderValue(key, val[i])
|
||||
}
|
||||
}
|
||||
} else if (skipAppend) {
|
||||
request.headers[key] = processHeaderValue(key, val, skipAppend)
|
||||
} else {
|
||||
request.headers += processHeaderValue(key, val)
|
||||
request.headers.push(key, val)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
11
deps/undici/src/lib/core/symbols.js
vendored
11
deps/undici/src/lib/core/symbols.js
vendored
@@ -33,6 +33,8 @@ module.exports = {
|
||||
kNeedDrain: Symbol('need drain'),
|
||||
kReset: Symbol('reset'),
|
||||
kDestroyed: Symbol.for('nodejs.stream.destroyed'),
|
||||
kResume: Symbol('resume'),
|
||||
kOnError: Symbol('on error'),
|
||||
kMaxHeadersSize: Symbol('max headers size'),
|
||||
kRunningIdx: Symbol('running index'),
|
||||
kPendingIdx: Symbol('pending index'),
|
||||
@@ -54,10 +56,9 @@ module.exports = {
|
||||
kMaxResponseSize: Symbol('max response size'),
|
||||
kHTTP2Session: Symbol('http2Session'),
|
||||
kHTTP2SessionState: Symbol('http2Session state'),
|
||||
kHTTP2BuildRequest: Symbol('http2 build request'),
|
||||
kHTTP1BuildRequest: Symbol('http1 build request'),
|
||||
kHTTP2CopyHeaders: Symbol('http2 copy headers'),
|
||||
kHTTPConnVersion: Symbol('http connection version'),
|
||||
kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
|
||||
kConstruct: Symbol('constructable')
|
||||
kConstruct: Symbol('constructable'),
|
||||
kListeners: Symbol('listeners'),
|
||||
kHTTPContext: Symbol('http context'),
|
||||
kMaxConcurrentStreams: Symbol('max concurrent streams')
|
||||
}
|
||||
|
||||
72
deps/undici/src/lib/core/tree.js
vendored
72
deps/undici/src/lib/core/tree.js
vendored
@@ -17,7 +17,7 @@ class TstNode {
|
||||
/** @type {number} */
|
||||
code
|
||||
/**
|
||||
* @param {Uint8Array} key
|
||||
* @param {string} key
|
||||
* @param {any} value
|
||||
* @param {number} index
|
||||
*/
|
||||
@@ -25,7 +25,11 @@ class TstNode {
|
||||
if (index === undefined || index >= key.length) {
|
||||
throw new TypeError('Unreachable')
|
||||
}
|
||||
this.code = key[index]
|
||||
const code = this.code = key.charCodeAt(index)
|
||||
// check code is ascii string
|
||||
if (code > 0x7F) {
|
||||
throw new TypeError('key must be ascii string')
|
||||
}
|
||||
if (key.length !== ++index) {
|
||||
this.middle = new TstNode(key, value, index)
|
||||
} else {
|
||||
@@ -34,33 +38,45 @@ class TstNode {
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} key
|
||||
* @param {string} key
|
||||
* @param {any} value
|
||||
* @param {number} index
|
||||
*/
|
||||
add (key, value, index) {
|
||||
if (index === undefined || index >= key.length) {
|
||||
add (key, value) {
|
||||
const length = key.length
|
||||
if (length === 0) {
|
||||
throw new TypeError('Unreachable')
|
||||
}
|
||||
const code = key[index]
|
||||
if (this.code === code) {
|
||||
if (key.length === ++index) {
|
||||
this.value = value
|
||||
} else if (this.middle !== null) {
|
||||
this.middle.add(key, value, index)
|
||||
} else {
|
||||
this.middle = new TstNode(key, value, index)
|
||||
let index = 0
|
||||
let node = this
|
||||
while (true) {
|
||||
const code = key.charCodeAt(index)
|
||||
// check code is ascii string
|
||||
if (code > 0x7F) {
|
||||
throw new TypeError('key must be ascii string')
|
||||
}
|
||||
} else if (this.code < code) {
|
||||
if (this.left !== null) {
|
||||
this.left.add(key, value, index)
|
||||
if (node.code === code) {
|
||||
if (length === ++index) {
|
||||
node.value = value
|
||||
break
|
||||
} else if (node.middle !== null) {
|
||||
node = node.middle
|
||||
} else {
|
||||
node.middle = new TstNode(key, value, index)
|
||||
break
|
||||
}
|
||||
} else if (node.code < code) {
|
||||
if (node.left !== null) {
|
||||
node = node.left
|
||||
} else {
|
||||
node.left = new TstNode(key, value, index)
|
||||
break
|
||||
}
|
||||
} else if (node.right !== null) {
|
||||
node = node.right
|
||||
} else {
|
||||
this.left = new TstNode(key, value, index)
|
||||
node.right = new TstNode(key, value, index)
|
||||
break
|
||||
}
|
||||
} else if (this.right !== null) {
|
||||
this.right.add(key, value, index)
|
||||
} else {
|
||||
this.right = new TstNode(key, value, index)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,7 +91,10 @@ class TstNode {
|
||||
while (node !== null && index < keylength) {
|
||||
let code = key[index]
|
||||
// A-Z
|
||||
if (code >= 0x41 && code <= 0x5a) {
|
||||
// First check if it is bigger than 0x5a.
|
||||
// Lowercase letters have higher char codes than uppercase ones.
|
||||
// Also we assume that headers will mostly contain lowercase characters.
|
||||
if (code <= 0x5a && code >= 0x41) {
|
||||
// Lowercase for uppercase.
|
||||
code |= 32
|
||||
}
|
||||
@@ -100,19 +119,20 @@ class TernarySearchTree {
|
||||
node = null
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} key
|
||||
* @param {string} key
|
||||
* @param {any} value
|
||||
* */
|
||||
insert (key, value) {
|
||||
if (this.node === null) {
|
||||
this.node = new TstNode(key, value, 0)
|
||||
} else {
|
||||
this.node.add(key, value, 0)
|
||||
this.node.add(key, value)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} key
|
||||
* @return {any}
|
||||
*/
|
||||
lookup (key) {
|
||||
return this.node?.search(key)?.value ?? null
|
||||
@@ -123,7 +143,7 @@ const tree = new TernarySearchTree()
|
||||
|
||||
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
|
||||
const key = headerNameLowerCasedRecord[wellknownHeaderNames[i]]
|
||||
tree.insert(Buffer.from(key), key)
|
||||
tree.insert(key, key)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
|
||||
71
deps/undici/src/lib/core/util.js
vendored
71
deps/undici/src/lib/core/util.js
vendored
@@ -182,8 +182,8 @@ function bodyLength (body) {
|
||||
return null
|
||||
}
|
||||
|
||||
function isDestroyed (stream) {
|
||||
return !stream || !!(stream.destroyed || stream[kDestroyed])
|
||||
function isDestroyed (body) {
|
||||
return body && !!(body.destroyed || body[kDestroyed] || (stream.isDestroyed?.(body)))
|
||||
}
|
||||
|
||||
function isReadableAborted (stream) {
|
||||
@@ -204,9 +204,9 @@ function destroy (stream, err) {
|
||||
|
||||
stream.destroy(err)
|
||||
} else if (err) {
|
||||
process.nextTick((stream, err) => {
|
||||
queueMicrotask(() => {
|
||||
stream.emit('error', err)
|
||||
}, stream, err)
|
||||
})
|
||||
}
|
||||
|
||||
if (stream.destroyed !== true) {
|
||||
@@ -279,22 +279,30 @@ function parseHeaders (headers, obj) {
|
||||
}
|
||||
|
||||
function parseRawHeaders (headers) {
|
||||
const ret = []
|
||||
const len = headers.length
|
||||
const ret = new Array(len)
|
||||
|
||||
let hasContentLength = false
|
||||
let contentDispositionIdx = -1
|
||||
let key
|
||||
let val
|
||||
let kLen = 0
|
||||
|
||||
for (let n = 0; n < headers.length; n += 2) {
|
||||
const key = headers[n + 0].toString()
|
||||
const val = headers[n + 1].toString('utf8')
|
||||
key = headers[n]
|
||||
val = headers[n + 1]
|
||||
|
||||
if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
|
||||
ret.push(key, val)
|
||||
typeof key !== 'string' && (key = key.toString())
|
||||
typeof val !== 'string' && (val = val.toString('utf8'))
|
||||
|
||||
kLen = key.length
|
||||
if (kLen === 14 && key[7] === '-' && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
|
||||
hasContentLength = true
|
||||
} else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
|
||||
contentDispositionIdx = ret.push(key, val) - 1
|
||||
} else {
|
||||
ret.push(key, val)
|
||||
} else if (kLen === 19 && key[7] === '-' && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
|
||||
contentDispositionIdx = n + 1
|
||||
}
|
||||
ret[n] = key
|
||||
ret[n + 1] = val
|
||||
}
|
||||
|
||||
// See https://github.com/nodejs/node/pull/46528
|
||||
@@ -432,19 +440,22 @@ function addAbortListener (signal, listener) {
|
||||
return () => signal.removeListener('abort', listener)
|
||||
}
|
||||
|
||||
const hasToWellFormed = !!String.prototype.toWellFormed
|
||||
const hasToWellFormed = typeof String.prototype.toWellFormed === 'function'
|
||||
const hasIsWellFormed = typeof String.prototype.isWellFormed === 'function'
|
||||
|
||||
/**
|
||||
* @param {string} val
|
||||
*/
|
||||
function toUSVString (val) {
|
||||
if (hasToWellFormed) {
|
||||
return `${val}`.toWellFormed()
|
||||
} else if (nodeUtil.toUSVString) {
|
||||
return nodeUtil.toUSVString(val)
|
||||
}
|
||||
return hasToWellFormed ? `${val}`.toWellFormed() : nodeUtil.toUSVString(val)
|
||||
}
|
||||
|
||||
return `${val}`
|
||||
/**
|
||||
* @param {string} val
|
||||
*/
|
||||
// TODO: move this to webidl
|
||||
function isUSVString (val) {
|
||||
return hasIsWellFormed ? `${val}`.isWellFormed() : toUSVString(val) === `${val}`
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -493,6 +504,24 @@ function isValidHTTPToken (characters) {
|
||||
return true
|
||||
}
|
||||
|
||||
// headerCharRegex have been lifted from
|
||||
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
|
||||
|
||||
/**
|
||||
* Matches if val contains an invalid field-vchar
|
||||
* field-value = *( field-content / obs-fold )
|
||||
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
|
||||
* field-vchar = VCHAR / obs-text
|
||||
*/
|
||||
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
|
||||
|
||||
/**
|
||||
* @param {string} characters
|
||||
*/
|
||||
function isValidHeaderChar (characters) {
|
||||
return !headerCharRegex.test(characters)
|
||||
}
|
||||
|
||||
// Parsed accordingly to RFC 9110
|
||||
// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
|
||||
function parseRangeHeader (range) {
|
||||
@@ -518,6 +547,7 @@ module.exports = {
|
||||
isErrored,
|
||||
isReadable,
|
||||
toUSVString,
|
||||
isUSVString,
|
||||
isReadableAborted,
|
||||
isBlobLike,
|
||||
parseOrigin,
|
||||
@@ -543,6 +573,7 @@ module.exports = {
|
||||
buildURL,
|
||||
addAbortListener,
|
||||
isValidHTTPToken,
|
||||
isValidHeaderChar,
|
||||
isTokenCharCode,
|
||||
parseRangeHeader,
|
||||
nodeMajor,
|
||||
|
||||
19
deps/undici/src/lib/dispatcher.js
vendored
19
deps/undici/src/lib/dispatcher.js
vendored
@@ -1,19 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
const EventEmitter = require('node:events')
|
||||
|
||||
class Dispatcher extends EventEmitter {
|
||||
dispatch () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
close () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
destroy () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Dispatcher
|
||||
@@ -1,12 +1,12 @@
|
||||
'use strict'
|
||||
|
||||
const { InvalidArgumentError } = require('./core/errors')
|
||||
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols')
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('../core/symbols')
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const Pool = require('./pool')
|
||||
const Client = require('./client')
|
||||
const util = require('./core/util')
|
||||
const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
|
||||
const util = require('../core/util')
|
||||
const createRedirectInterceptor = require('../interceptor/redirect-interceptor')
|
||||
|
||||
const kOnConnect = Symbol('onConnect')
|
||||
const kOnDisconnect = Symbol('onDisconnect')
|
||||
@@ -3,7 +3,7 @@
|
||||
const {
|
||||
BalancedPoolMissingUpstreamError,
|
||||
InvalidArgumentError
|
||||
} = require('./core/errors')
|
||||
} = require('../core/errors')
|
||||
const {
|
||||
PoolBase,
|
||||
kClients,
|
||||
@@ -13,8 +13,8 @@ const {
|
||||
kGetDispatcher
|
||||
} = require('./pool-base')
|
||||
const Pool = require('./pool')
|
||||
const { kUrl, kInterceptors } = require('./core/symbols')
|
||||
const { parseOrigin } = require('./core/util')
|
||||
const { kUrl, kInterceptors } = require('../core/symbols')
|
||||
const { parseOrigin } = require('../core/util')
|
||||
const kFactory = Symbol('factory')
|
||||
|
||||
const kOptions = Symbol('options')
|
||||
1352
deps/undici/src/lib/dispatcher/client-h1.js
vendored
Normal file
1352
deps/undici/src/lib/dispatcher/client-h1.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
639
deps/undici/src/lib/dispatcher/client-h2.js
vendored
Normal file
639
deps/undici/src/lib/dispatcher/client-h2.js
vendored
Normal file
@@ -0,0 +1,639 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { pipeline } = require('node:stream')
|
||||
const util = require('../core/util.js')
|
||||
const {
|
||||
RequestContentLengthMismatchError,
|
||||
RequestAbortedError,
|
||||
SocketError,
|
||||
InformationalError
|
||||
} = require('../core/errors.js')
|
||||
const {
|
||||
kUrl,
|
||||
kReset,
|
||||
kClient,
|
||||
kRunning,
|
||||
kPending,
|
||||
kQueue,
|
||||
kPendingIdx,
|
||||
kRunningIdx,
|
||||
kError,
|
||||
kSocket,
|
||||
kStrictContentLength,
|
||||
kOnError,
|
||||
// HTTP2
|
||||
kMaxConcurrentStreams,
|
||||
kHTTP2Session,
|
||||
kResume
|
||||
} = require('../core/symbols.js')
|
||||
|
||||
const kOpenStreams = Symbol('open streams')
|
||||
|
||||
// Experimental
|
||||
let h2ExperimentalWarned = false
|
||||
|
||||
/** @type {import('http2')} */
|
||||
let http2
|
||||
try {
|
||||
http2 = require('node:http2')
|
||||
} catch {
|
||||
// @ts-ignore
|
||||
http2 = { constants: {} }
|
||||
}
|
||||
|
||||
const {
|
||||
constants: {
|
||||
HTTP2_HEADER_AUTHORITY,
|
||||
HTTP2_HEADER_METHOD,
|
||||
HTTP2_HEADER_PATH,
|
||||
HTTP2_HEADER_SCHEME,
|
||||
HTTP2_HEADER_CONTENT_LENGTH,
|
||||
HTTP2_HEADER_EXPECT,
|
||||
HTTP2_HEADER_STATUS
|
||||
}
|
||||
} = http2
|
||||
|
||||
async function connectH2 (client, socket) {
|
||||
client[kSocket] = socket
|
||||
|
||||
if (!h2ExperimentalWarned) {
|
||||
h2ExperimentalWarned = true
|
||||
process.emitWarning('H2 support is experimental, expect them to change at any time.', {
|
||||
code: 'UNDICI-H2'
|
||||
})
|
||||
}
|
||||
|
||||
const session = http2.connect(client[kUrl], {
|
||||
createConnection: () => socket,
|
||||
peerMaxConcurrentStreams: client[kMaxConcurrentStreams]
|
||||
})
|
||||
|
||||
session[kOpenStreams] = 0
|
||||
session[kClient] = client
|
||||
session[kSocket] = socket
|
||||
session.on('error', onHttp2SessionError)
|
||||
session.on('frameError', onHttp2FrameError)
|
||||
session.on('end', onHttp2SessionEnd)
|
||||
session.on('goaway', onHTTP2GoAway)
|
||||
session.on('close', function () {
|
||||
const { [kClient]: client } = this
|
||||
|
||||
const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))
|
||||
|
||||
client[kSocket] = null
|
||||
|
||||
assert(client[kPending] === 0)
|
||||
|
||||
// Fail entire queue.
|
||||
const requests = client[kQueue].splice(client[kRunningIdx])
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
const request = requests[i]
|
||||
errorRequest(client, request, err)
|
||||
}
|
||||
|
||||
client[kPendingIdx] = client[kRunningIdx]
|
||||
|
||||
assert(client[kRunning] === 0)
|
||||
|
||||
client.emit('disconnect', client[kUrl], [client], err)
|
||||
|
||||
client[kResume]()
|
||||
})
|
||||
session.unref()
|
||||
|
||||
client[kHTTP2Session] = session
|
||||
socket[kHTTP2Session] = session
|
||||
|
||||
socket.on('error', function (err) {
|
||||
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
|
||||
|
||||
this[kError] = err
|
||||
|
||||
this[kClient][kOnError](err)
|
||||
})
|
||||
socket.on('end', function () {
|
||||
util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
|
||||
})
|
||||
|
||||
let closed = false
|
||||
socket.on('close', () => {
|
||||
closed = true
|
||||
})
|
||||
|
||||
return {
|
||||
version: 'h2',
|
||||
defaultPipelining: Infinity,
|
||||
write (...args) {
|
||||
// TODO (fix): return
|
||||
writeH2(client, ...args)
|
||||
},
|
||||
resume () {
|
||||
|
||||
},
|
||||
destroy (err, callback) {
|
||||
session.destroy(err)
|
||||
if (closed) {
|
||||
queueMicrotask(callback)
|
||||
} else {
|
||||
socket.destroy(err).on('close', callback)
|
||||
}
|
||||
},
|
||||
get destroyed () {
|
||||
return socket.destroyed
|
||||
},
|
||||
busy () {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function onHttp2SessionError (err) {
|
||||
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
|
||||
|
||||
this[kSocket][kError] = err
|
||||
|
||||
this[kClient][kOnError](err)
|
||||
}
|
||||
|
||||
function onHttp2FrameError (type, code, id) {
|
||||
const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
|
||||
|
||||
if (id === 0) {
|
||||
this[kSocket][kError] = err
|
||||
this[kClient][kOnError](err)
|
||||
}
|
||||
}
|
||||
|
||||
function onHttp2SessionEnd () {
|
||||
this.destroy(new SocketError('other side closed'))
|
||||
util.destroy(this[kSocket], new SocketError('other side closed'))
|
||||
}
|
||||
|
||||
function onHTTP2GoAway (code) {
|
||||
const client = this[kClient]
|
||||
const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`)
|
||||
client[kSocket] = null
|
||||
client[kHTTP2Session] = null
|
||||
|
||||
if (client.destroyed) {
|
||||
assert(this[kPending] === 0)
|
||||
|
||||
// Fail entire queue.
|
||||
const requests = client[kQueue].splice(client[kRunningIdx])
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
const request = requests[i]
|
||||
errorRequest(this, request, err)
|
||||
}
|
||||
} else if (client[kRunning] > 0) {
|
||||
// Fail head of pipeline.
|
||||
const request = client[kQueue][client[kRunningIdx]]
|
||||
client[kQueue][client[kRunningIdx]++] = null
|
||||
|
||||
errorRequest(client, request, err)
|
||||
}
|
||||
|
||||
client[kPendingIdx] = client[kRunningIdx]
|
||||
|
||||
assert(client[kRunning] === 0)
|
||||
|
||||
client.emit('disconnect',
|
||||
client[kUrl],
|
||||
[client],
|
||||
err
|
||||
)
|
||||
|
||||
client[kResume]()
|
||||
}
|
||||
|
||||
function errorRequest (client, request, err) {
|
||||
try {
|
||||
request.onError(err)
|
||||
assert(request.aborted)
|
||||
} catch (err) {
|
||||
client.emit('error', err)
|
||||
}
|
||||
}
|
||||
|
||||
// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
|
||||
function shouldSendContentLength (method) {
|
||||
return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'
|
||||
}
|
||||
|
||||
function writeH2 (client, request) {
|
||||
const session = client[kHTTP2Session]
|
||||
const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
|
||||
|
||||
if (upgrade) {
|
||||
errorRequest(client, request, new Error('Upgrade not supported for H2'))
|
||||
return false
|
||||
}
|
||||
|
||||
if (request.aborted) {
|
||||
return false
|
||||
}
|
||||
|
||||
const headers = {}
|
||||
for (let n = 0; n < reqHeaders.length; n += 2) {
|
||||
const key = reqHeaders[n + 0]
|
||||
const val = reqHeaders[n + 1]
|
||||
|
||||
if (Array.isArray(val)) {
|
||||
for (let i = 0; i < val.length; i++) {
|
||||
if (headers[key]) {
|
||||
headers[key] += `,${val[i]}`
|
||||
} else {
|
||||
headers[key] = val[i]
|
||||
}
|
||||
}
|
||||
} else {
|
||||
headers[key] = val
|
||||
}
|
||||
}
|
||||
|
||||
/** @type {import('node:http2').ClientHttp2Stream} */
|
||||
let stream
|
||||
|
||||
const { hostname, port } = client[kUrl]
|
||||
|
||||
headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? `:${port}` : ''}`
|
||||
headers[HTTP2_HEADER_METHOD] = method
|
||||
|
||||
try {
|
||||
// We are already connected, streams are pending.
|
||||
// We can call on connect, and wait for abort
|
||||
request.onConnect((err) => {
|
||||
if (request.aborted || request.completed) {
|
||||
return
|
||||
}
|
||||
|
||||
err = err || new RequestAbortedError()
|
||||
|
||||
if (stream != null) {
|
||||
util.destroy(stream, err)
|
||||
|
||||
session[kOpenStreams] -= 1
|
||||
if (session[kOpenStreams] === 0) {
|
||||
session.unref()
|
||||
}
|
||||
}
|
||||
|
||||
errorRequest(client, request, err)
|
||||
})
|
||||
} catch (err) {
|
||||
errorRequest(client, request, err)
|
||||
}
|
||||
|
||||
if (method === 'CONNECT') {
|
||||
session.ref()
|
||||
// We are already connected, streams are pending, first request
|
||||
// will create a new stream. We trigger a request to create the stream and wait until
|
||||
// `ready` event is triggered
|
||||
// We disabled endStream to allow the user to write to the stream
|
||||
stream = session.request(headers, { endStream: false, signal })
|
||||
|
||||
if (stream.id && !stream.pending) {
|
||||
request.onUpgrade(null, null, stream)
|
||||
++session[kOpenStreams]
|
||||
} else {
|
||||
stream.once('ready', () => {
|
||||
request.onUpgrade(null, null, stream)
|
||||
++session[kOpenStreams]
|
||||
})
|
||||
}
|
||||
|
||||
stream.once('close', () => {
|
||||
session[kOpenStreams] -= 1
|
||||
// TODO(HTTP/2): unref only if current streams count is 0
|
||||
if (session[kOpenStreams] === 0) session.unref()
|
||||
})
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// https://tools.ietf.org/html/rfc7540#section-8.3
|
||||
// :path and :scheme headers must be omitted when sending CONNECT
|
||||
|
||||
headers[HTTP2_HEADER_PATH] = path
|
||||
headers[HTTP2_HEADER_SCHEME] = 'https'
|
||||
|
||||
// https://tools.ietf.org/html/rfc7231#section-4.3.1
|
||||
// https://tools.ietf.org/html/rfc7231#section-4.3.2
|
||||
// https://tools.ietf.org/html/rfc7231#section-4.3.5
|
||||
|
||||
// Sending a payload body on a request that does not
|
||||
// expect it can cause undefined behavior on some
|
||||
// servers and corrupt connection state. Do not
|
||||
// re-use the connection for further requests.
|
||||
|
||||
const expectsPayload = (
|
||||
method === 'PUT' ||
|
||||
method === 'POST' ||
|
||||
method === 'PATCH'
|
||||
)
|
||||
|
||||
if (body && typeof body.read === 'function') {
|
||||
// Try to read EOF in order to get length.
|
||||
body.read(0)
|
||||
}
|
||||
|
||||
let contentLength = util.bodyLength(body)
|
||||
|
||||
if (contentLength == null) {
|
||||
contentLength = request.contentLength
|
||||
}
|
||||
|
||||
if (contentLength === 0 || !expectsPayload) {
|
||||
// https://tools.ietf.org/html/rfc7230#section-3.3.2
|
||||
// A user agent SHOULD NOT send a Content-Length header field when
|
||||
// the request message does not contain a payload body and the method
|
||||
// semantics do not anticipate such a body.
|
||||
|
||||
contentLength = null
|
||||
}
|
||||
|
||||
// https://github.com/nodejs/undici/issues/2046
|
||||
// A user agent may send a Content-Length header with 0 value, this should be allowed.
|
||||
if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
|
||||
if (client[kStrictContentLength]) {
|
||||
errorRequest(client, request, new RequestContentLengthMismatchError())
|
||||
return false
|
||||
}
|
||||
|
||||
process.emitWarning(new RequestContentLengthMismatchError())
|
||||
}
|
||||
|
||||
if (contentLength != null) {
|
||||
assert(body, 'no body must not have content length')
|
||||
headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
|
||||
}
|
||||
|
||||
session.ref()
|
||||
|
||||
const shouldEndStream = method === 'GET' || method === 'HEAD' || body === null
|
||||
if (expectContinue) {
|
||||
headers[HTTP2_HEADER_EXPECT] = '100-continue'
|
||||
stream = session.request(headers, { endStream: shouldEndStream, signal })
|
||||
|
||||
stream.once('continue', writeBodyH2)
|
||||
} else {
|
||||
stream = session.request(headers, {
|
||||
endStream: shouldEndStream,
|
||||
signal
|
||||
})
|
||||
writeBodyH2()
|
||||
}
|
||||
|
||||
// Increment counter as we have new several streams open
|
||||
++session[kOpenStreams]
|
||||
|
||||
stream.once('response', headers => {
|
||||
const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
|
||||
request.onResponseStarted()
|
||||
|
||||
if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {
|
||||
stream.pause()
|
||||
}
|
||||
|
||||
stream.on('data', (chunk) => {
|
||||
if (request.onData(chunk) === false) {
|
||||
stream.pause()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
stream.once('end', () => {
|
||||
// When state is null, it means we haven't consumed body and the stream still do not have
|
||||
// a state.
|
||||
// Present specially when using pipeline or stream
|
||||
if (stream.state?.state == null || stream.state.state < 6) {
|
||||
request.onComplete([])
|
||||
return
|
||||
}
|
||||
|
||||
// Stream is closed or half-closed-remote (6), decrement counter and cleanup
|
||||
// It does not have sense to continue working with the stream as we do not
|
||||
// have yet RST_STREAM support on client-side
|
||||
session[kOpenStreams] -= 1
|
||||
if (session[kOpenStreams] === 0) {
|
||||
session.unref()
|
||||
}
|
||||
|
||||
const err = new InformationalError('HTTP/2: stream half-closed (remote)')
|
||||
errorRequest(client, request, err)
|
||||
util.destroy(stream, err)
|
||||
})
|
||||
|
||||
stream.once('close', () => {
|
||||
session[kOpenStreams] -= 1
|
||||
// TODO(HTTP/2): unref only if current streams count is 0
|
||||
if (session[kOpenStreams] === 0) {
|
||||
session.unref()
|
||||
}
|
||||
})
|
||||
|
||||
stream.once('error', function (err) {
|
||||
if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
|
||||
session[kOpenStreams] -= 1
|
||||
util.destroy(stream, err)
|
||||
}
|
||||
})
|
||||
|
||||
stream.once('frameError', (type, code) => {
|
||||
const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
|
||||
errorRequest(client, request, err)
|
||||
|
||||
if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
|
||||
session[kOpenStreams] -= 1
|
||||
util.destroy(stream, err)
|
||||
}
|
||||
})
|
||||
|
||||
// stream.on('aborted', () => {
|
||||
// // TODO(HTTP/2): Support aborted
|
||||
// })
|
||||
|
||||
// stream.on('timeout', () => {
|
||||
// // TODO(HTTP/2): Support timeout
|
||||
// })
|
||||
|
||||
// stream.on('push', headers => {
|
||||
// // TODO(HTTP/2): Support push
|
||||
// })
|
||||
|
||||
// stream.on('trailers', headers => {
|
||||
// // TODO(HTTP/2): Support trailers
|
||||
// })
|
||||
|
||||
return true
|
||||
|
||||
function writeBodyH2 () {
|
||||
/* istanbul ignore else: assertion */
|
||||
if (!body) {
|
||||
request.onRequestSent()
|
||||
} else if (util.isBuffer(body)) {
|
||||
assert(contentLength === body.byteLength, 'buffer body must have content length')
|
||||
stream.cork()
|
||||
stream.write(body)
|
||||
stream.uncork()
|
||||
stream.end()
|
||||
request.onBodySent(body)
|
||||
request.onRequestSent()
|
||||
} else if (util.isBlobLike(body)) {
|
||||
if (typeof body.stream === 'function') {
|
||||
writeIterable({
|
||||
client,
|
||||
request,
|
||||
contentLength,
|
||||
h2stream: stream,
|
||||
expectsPayload,
|
||||
body: body.stream(),
|
||||
socket: client[kSocket],
|
||||
header: ''
|
||||
})
|
||||
} else {
|
||||
writeBlob({
|
||||
body,
|
||||
client,
|
||||
request,
|
||||
contentLength,
|
||||
expectsPayload,
|
||||
h2stream: stream,
|
||||
header: '',
|
||||
socket: client[kSocket]
|
||||
})
|
||||
}
|
||||
} else if (util.isStream(body)) {
|
||||
writeStream({
|
||||
body,
|
||||
client,
|
||||
request,
|
||||
contentLength,
|
||||
expectsPayload,
|
||||
socket: client[kSocket],
|
||||
h2stream: stream,
|
||||
header: ''
|
||||
})
|
||||
} else if (util.isIterable(body)) {
|
||||
writeIterable({
|
||||
body,
|
||||
client,
|
||||
request,
|
||||
contentLength,
|
||||
expectsPayload,
|
||||
header: '',
|
||||
h2stream: stream,
|
||||
socket: client[kSocket]
|
||||
})
|
||||
} else {
|
||||
assert(false)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
|
||||
assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')
|
||||
|
||||
// For HTTP/2, is enough to pipe the stream
|
||||
const pipe = pipeline(
|
||||
body,
|
||||
h2stream,
|
||||
(err) => {
|
||||
if (err) {
|
||||
util.destroy(body, err)
|
||||
util.destroy(h2stream, err)
|
||||
} else {
|
||||
request.onRequestSent()
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
pipe.on('data', onPipeData)
|
||||
pipe.once('end', () => {
|
||||
pipe.removeListener('data', onPipeData)
|
||||
util.destroy(pipe)
|
||||
})
|
||||
|
||||
function onPipeData (chunk) {
|
||||
request.onBodySent(chunk)
|
||||
}
|
||||
}
|
||||
|
||||
async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
|
||||
assert(contentLength === body.size, 'blob body must have content length')
|
||||
|
||||
try {
|
||||
if (contentLength != null && contentLength !== body.size) {
|
||||
throw new RequestContentLengthMismatchError()
|
||||
}
|
||||
|
||||
const buffer = Buffer.from(await body.arrayBuffer())
|
||||
|
||||
h2stream.cork()
|
||||
h2stream.write(buffer)
|
||||
h2stream.uncork()
|
||||
|
||||
request.onBodySent(buffer)
|
||||
request.onRequestSent()
|
||||
|
||||
if (!expectsPayload) {
|
||||
socket[kReset] = true
|
||||
}
|
||||
|
||||
client[kResume]()
|
||||
} catch (err) {
|
||||
util.destroy(h2stream)
|
||||
}
|
||||
}
|
||||
|
||||
async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
|
||||
assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
|
||||
|
||||
let callback = null
|
||||
function onDrain () {
|
||||
if (callback) {
|
||||
const cb = callback
|
||||
callback = null
|
||||
cb()
|
||||
}
|
||||
}
|
||||
|
||||
const waitForDrain = () => new Promise((resolve, reject) => {
|
||||
assert(callback === null)
|
||||
|
||||
if (socket[kError]) {
|
||||
reject(socket[kError])
|
||||
} else {
|
||||
callback = resolve
|
||||
}
|
||||
})
|
||||
|
||||
h2stream
|
||||
.on('close', onDrain)
|
||||
.on('drain', onDrain)
|
||||
|
||||
try {
|
||||
// It's up to the user to somehow abort the async iterable.
|
||||
for await (const chunk of body) {
|
||||
if (socket[kError]) {
|
||||
throw socket[kError]
|
||||
}
|
||||
|
||||
const res = h2stream.write(chunk)
|
||||
request.onBodySent(chunk)
|
||||
if (!res) {
|
||||
await waitForDrain()
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
h2stream.destroy(err)
|
||||
} finally {
|
||||
request.onRequestSent()
|
||||
h2stream.end()
|
||||
h2stream
|
||||
.off('close', onDrain)
|
||||
.off('drain', onDrain)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = connectH2
|
||||
621
deps/undici/src/lib/dispatcher/client.js
vendored
Normal file
621
deps/undici/src/lib/dispatcher/client.js
vendored
Normal file
@@ -0,0 +1,621 @@
|
||||
// @ts-check
|
||||
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const net = require('node:net')
|
||||
const http = require('node:http')
|
||||
const util = require('../core/util.js')
|
||||
const { channels } = require('../core/diagnostics.js')
|
||||
const Request = require('../core/request.js')
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const {
|
||||
InvalidArgumentError,
|
||||
InformationalError,
|
||||
ClientDestroyedError
|
||||
} = require('../core/errors.js')
|
||||
const buildConnector = require('../core/connect.js')
|
||||
const {
|
||||
kUrl,
|
||||
kServerName,
|
||||
kClient,
|
||||
kBusy,
|
||||
kConnect,
|
||||
kResuming,
|
||||
kRunning,
|
||||
kPending,
|
||||
kSize,
|
||||
kQueue,
|
||||
kConnected,
|
||||
kConnecting,
|
||||
kNeedDrain,
|
||||
kKeepAliveDefaultTimeout,
|
||||
kHostHeader,
|
||||
kPendingIdx,
|
||||
kRunningIdx,
|
||||
kError,
|
||||
kPipelining,
|
||||
kKeepAliveTimeoutValue,
|
||||
kMaxHeadersSize,
|
||||
kKeepAliveMaxTimeout,
|
||||
kKeepAliveTimeoutThreshold,
|
||||
kHeadersTimeout,
|
||||
kBodyTimeout,
|
||||
kStrictContentLength,
|
||||
kConnector,
|
||||
kMaxRedirections,
|
||||
kMaxRequests,
|
||||
kCounter,
|
||||
kClose,
|
||||
kDestroy,
|
||||
kDispatch,
|
||||
kInterceptors,
|
||||
kLocalAddress,
|
||||
kMaxResponseSize,
|
||||
kOnError,
|
||||
kHTTPContext,
|
||||
kMaxConcurrentStreams,
|
||||
kResume
|
||||
} = require('../core/symbols.js')
|
||||
const connectH1 = require('./client-h1.js')
|
||||
const connectH2 = require('./client-h2.js')
|
||||
let deprecatedInterceptorWarned = false
|
||||
|
||||
const kClosedResolve = Symbol('kClosedResolve')
|
||||
|
||||
function getPipelining (client) {
|
||||
return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {import('../../types/client.js').default}
|
||||
*/
|
||||
class Client extends DispatcherBase {
|
||||
/**
|
||||
*
|
||||
* @param {string|URL} url
|
||||
* @param {import('../../types/client.js').Client.Options} options
|
||||
*/
|
||||
constructor (url, {
|
||||
interceptors,
|
||||
maxHeaderSize,
|
||||
headersTimeout,
|
||||
socketTimeout,
|
||||
requestTimeout,
|
||||
connectTimeout,
|
||||
bodyTimeout,
|
||||
idleTimeout,
|
||||
keepAlive,
|
||||
keepAliveTimeout,
|
||||
maxKeepAliveTimeout,
|
||||
keepAliveMaxTimeout,
|
||||
keepAliveTimeoutThreshold,
|
||||
socketPath,
|
||||
pipelining,
|
||||
tls,
|
||||
strictContentLength,
|
||||
maxCachedSessions,
|
||||
maxRedirections,
|
||||
connect,
|
||||
maxRequestsPerClient,
|
||||
localAddress,
|
||||
maxResponseSize,
|
||||
autoSelectFamily,
|
||||
autoSelectFamilyAttemptTimeout,
|
||||
// h2
|
||||
maxConcurrentStreams,
|
||||
allowH2
|
||||
} = {}) {
|
||||
super()
|
||||
|
||||
if (keepAlive !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
|
||||
}
|
||||
|
||||
if (socketTimeout !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
|
||||
}
|
||||
|
||||
if (requestTimeout !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
|
||||
}
|
||||
|
||||
if (idleTimeout !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
|
||||
}
|
||||
|
||||
if (maxKeepAliveTimeout !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
|
||||
}
|
||||
|
||||
if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {
|
||||
throw new InvalidArgumentError('invalid maxHeaderSize')
|
||||
}
|
||||
|
||||
if (socketPath != null && typeof socketPath !== 'string') {
|
||||
throw new InvalidArgumentError('invalid socketPath')
|
||||
}
|
||||
|
||||
if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
|
||||
throw new InvalidArgumentError('invalid connectTimeout')
|
||||
}
|
||||
|
||||
if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
|
||||
throw new InvalidArgumentError('invalid keepAliveTimeout')
|
||||
}
|
||||
|
||||
if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
|
||||
throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
|
||||
}
|
||||
|
||||
if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
|
||||
throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
|
||||
}
|
||||
|
||||
if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
|
||||
throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
|
||||
}
|
||||
|
||||
if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
|
||||
throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
|
||||
}
|
||||
|
||||
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
|
||||
throw new InvalidArgumentError('connect must be a function or an object')
|
||||
}
|
||||
|
||||
if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
|
||||
throw new InvalidArgumentError('maxRedirections must be a positive number')
|
||||
}
|
||||
|
||||
if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
|
||||
throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
|
||||
}
|
||||
|
||||
if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
|
||||
throw new InvalidArgumentError('localAddress must be valid string IP address')
|
||||
}
|
||||
|
||||
if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
|
||||
throw new InvalidArgumentError('maxResponseSize must be a positive number')
|
||||
}
|
||||
|
||||
if (
|
||||
autoSelectFamilyAttemptTimeout != null &&
|
||||
(!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
|
||||
) {
|
||||
throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
|
||||
}
|
||||
|
||||
// h2
|
||||
if (allowH2 != null && typeof allowH2 !== 'boolean') {
|
||||
throw new InvalidArgumentError('allowH2 must be a valid boolean value')
|
||||
}
|
||||
|
||||
if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
|
||||
throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
|
||||
}
|
||||
|
||||
if (typeof connect !== 'function') {
|
||||
connect = buildConnector({
|
||||
...tls,
|
||||
maxCachedSessions,
|
||||
allowH2,
|
||||
socketPath,
|
||||
timeout: connectTimeout,
|
||||
...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
|
||||
...connect
|
||||
})
|
||||
}
|
||||
|
||||
if (interceptors?.Client && Array.isArray(interceptors.Client)) {
|
||||
this[kInterceptors] = interceptors.Client
|
||||
if (!deprecatedInterceptorWarned) {
|
||||
deprecatedInterceptorWarned = true
|
||||
process.emitWarning('Client.Options#interceptor is deprecated. Use Dispatcher#compose instead.', {
|
||||
code: 'UNDICI-CLIENT-INTERCEPTOR-DEPRECATED'
|
||||
})
|
||||
}
|
||||
} else {
|
||||
this[kInterceptors] = [createRedirectInterceptor({ maxRedirections })]
|
||||
}
|
||||
|
||||
this[kUrl] = util.parseOrigin(url)
|
||||
this[kConnector] = connect
|
||||
this[kPipelining] = pipelining != null ? pipelining : 1
|
||||
this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize
|
||||
this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
|
||||
this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
|
||||
this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold
|
||||
this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
|
||||
this[kServerName] = null
|
||||
this[kLocalAddress] = localAddress != null ? localAddress : null
|
||||
this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming
|
||||
this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming
|
||||
this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
|
||||
this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
|
||||
this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
|
||||
this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
|
||||
this[kMaxRedirections] = maxRedirections
|
||||
this[kMaxRequests] = maxRequestsPerClient
|
||||
this[kClosedResolve] = null
|
||||
this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
|
||||
this[kMaxConcurrentStreams] = maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
|
||||
this[kHTTPContext] = null
|
||||
|
||||
// kQueue is built up of 3 sections separated by
|
||||
// the kRunningIdx and kPendingIdx indices.
|
||||
// | complete | running | pending |
|
||||
// ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
|
||||
// kRunningIdx points to the first running element.
|
||||
// kPendingIdx points to the first pending element.
|
||||
// This implements a fast queue with an amortized
|
||||
// time of O(1).
|
||||
|
||||
this[kQueue] = []
|
||||
this[kRunningIdx] = 0
|
||||
this[kPendingIdx] = 0
|
||||
|
||||
this[kResume] = (sync) => resume(this, sync)
|
||||
this[kOnError] = (err) => onError(this, err)
|
||||
}
|
||||
|
||||
get pipelining () {
|
||||
return this[kPipelining]
|
||||
}
|
||||
|
||||
set pipelining (value) {
|
||||
this[kPipelining] = value
|
||||
this[kResume](true)
|
||||
}
|
||||
|
||||
get [kPending] () {
|
||||
return this[kQueue].length - this[kPendingIdx]
|
||||
}
|
||||
|
||||
get [kRunning] () {
|
||||
return this[kPendingIdx] - this[kRunningIdx]
|
||||
}
|
||||
|
||||
get [kSize] () {
|
||||
return this[kQueue].length - this[kRunningIdx]
|
||||
}
|
||||
|
||||
get [kConnected] () {
|
||||
return !!this[kHTTPContext] && !this[kConnecting] && !this[kHTTPContext].destroyed
|
||||
}
|
||||
|
||||
get [kBusy] () {
|
||||
return Boolean(
|
||||
this[kHTTPContext]?.busy(null) ||
|
||||
(this[kSize] >= (getPipelining(this) || 1)) ||
|
||||
this[kPending] > 0
|
||||
)
|
||||
}
|
||||
|
||||
/* istanbul ignore: only used for test */
|
||||
[kConnect] (cb) {
|
||||
connect(this)
|
||||
this.once('connect', cb)
|
||||
}
|
||||
|
||||
[kDispatch] (opts, handler) {
|
||||
const origin = opts.origin || this[kUrl].origin
|
||||
const request = new Request(origin, opts, handler)
|
||||
|
||||
this[kQueue].push(request)
|
||||
if (this[kResuming]) {
|
||||
// Do nothing.
|
||||
} else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
|
||||
// Wait a tick in case stream/iterator is ended in the same tick.
|
||||
this[kResuming] = 1
|
||||
queueMicrotask(() => resume(this))
|
||||
} else {
|
||||
this[kResume](true)
|
||||
}
|
||||
|
||||
if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
|
||||
this[kNeedDrain] = 2
|
||||
}
|
||||
|
||||
return this[kNeedDrain] < 2
|
||||
}
|
||||
|
||||
async [kClose] () {
|
||||
// TODO: for H2 we need to gracefully flush the remaining enqueued
|
||||
// request and close each stream.
|
||||
return new Promise((resolve) => {
|
||||
if (this[kSize]) {
|
||||
this[kClosedResolve] = resolve
|
||||
} else {
|
||||
resolve(null)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async [kDestroy] (err) {
|
||||
return new Promise((resolve) => {
|
||||
const requests = this[kQueue].splice(this[kPendingIdx])
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
const request = requests[i]
|
||||
errorRequest(this, request, err)
|
||||
}
|
||||
|
||||
const callback = () => {
|
||||
if (this[kClosedResolve]) {
|
||||
// TODO (fix): Should we error here with ClientDestroyedError?
|
||||
this[kClosedResolve]()
|
||||
this[kClosedResolve] = null
|
||||
}
|
||||
resolve(null)
|
||||
}
|
||||
|
||||
if (this[kHTTPContext]) {
|
||||
this[kHTTPContext].destroy(err, callback)
|
||||
this[kHTTPContext] = null
|
||||
} else {
|
||||
queueMicrotask(callback)
|
||||
}
|
||||
|
||||
this[kResume]()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const createRedirectInterceptor = require('../interceptor/redirect-interceptor.js')
|
||||
|
||||
function onError (client, err) {
|
||||
if (
|
||||
client[kRunning] === 0 &&
|
||||
err.code !== 'UND_ERR_INFO' &&
|
||||
err.code !== 'UND_ERR_SOCKET'
|
||||
) {
|
||||
// Error is not caused by running request and not a recoverable
|
||||
// socket error.
|
||||
|
||||
assert(client[kPendingIdx] === client[kRunningIdx])
|
||||
|
||||
const requests = client[kQueue].splice(client[kRunningIdx])
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
const request = requests[i]
|
||||
errorRequest(client, request, err)
|
||||
}
|
||||
assert(client[kSize] === 0)
|
||||
}
|
||||
}
|
||||
|
||||
async function connect (client) {
|
||||
assert(!client[kConnecting])
|
||||
assert(!client[kHTTPContext])
|
||||
|
||||
let { host, hostname, protocol, port } = client[kUrl]
|
||||
|
||||
// Resolve ipv6
|
||||
if (hostname[0] === '[') {
|
||||
const idx = hostname.indexOf(']')
|
||||
|
||||
assert(idx !== -1)
|
||||
const ip = hostname.substring(1, idx)
|
||||
|
||||
assert(net.isIP(ip))
|
||||
hostname = ip
|
||||
}
|
||||
|
||||
client[kConnecting] = true
|
||||
|
||||
if (channels.beforeConnect.hasSubscribers) {
|
||||
channels.beforeConnect.publish({
|
||||
connectParams: {
|
||||
host,
|
||||
hostname,
|
||||
protocol,
|
||||
port,
|
||||
version: client[kHTTPContext]?.version,
|
||||
servername: client[kServerName],
|
||||
localAddress: client[kLocalAddress]
|
||||
},
|
||||
connector: client[kConnector]
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
const socket = await new Promise((resolve, reject) => {
|
||||
client[kConnector]({
|
||||
host,
|
||||
hostname,
|
||||
protocol,
|
||||
port,
|
||||
servername: client[kServerName],
|
||||
localAddress: client[kLocalAddress]
|
||||
}, (err, socket) => {
|
||||
if (err) {
|
||||
reject(err)
|
||||
} else {
|
||||
resolve(socket)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
if (client.destroyed) {
|
||||
util.destroy(socket.on('error', () => {}), new ClientDestroyedError())
|
||||
return
|
||||
}
|
||||
|
||||
assert(socket)
|
||||
|
||||
try {
|
||||
client[kHTTPContext] = socket.alpnProtocol === 'h2'
|
||||
? await connectH2(client, socket)
|
||||
: await connectH1(client, socket)
|
||||
} catch (err) {
|
||||
socket.destroy().on('error', () => {})
|
||||
throw err
|
||||
}
|
||||
|
||||
client[kConnecting] = false
|
||||
|
||||
socket[kCounter] = 0
|
||||
socket[kMaxRequests] = client[kMaxRequests]
|
||||
socket[kClient] = client
|
||||
socket[kError] = null
|
||||
|
||||
if (channels.connected.hasSubscribers) {
|
||||
channels.connected.publish({
|
||||
connectParams: {
|
||||
host,
|
||||
hostname,
|
||||
protocol,
|
||||
port,
|
||||
version: client[kHTTPContext]?.version,
|
||||
servername: client[kServerName],
|
||||
localAddress: client[kLocalAddress]
|
||||
},
|
||||
connector: client[kConnector],
|
||||
socket
|
||||
})
|
||||
}
|
||||
client.emit('connect', client[kUrl], [client])
|
||||
} catch (err) {
|
||||
if (client.destroyed) {
|
||||
return
|
||||
}
|
||||
|
||||
client[kConnecting] = false
|
||||
|
||||
if (channels.connectError.hasSubscribers) {
|
||||
channels.connectError.publish({
|
||||
connectParams: {
|
||||
host,
|
||||
hostname,
|
||||
protocol,
|
||||
port,
|
||||
version: client[kHTTPContext]?.version,
|
||||
servername: client[kServerName],
|
||||
localAddress: client[kLocalAddress]
|
||||
},
|
||||
connector: client[kConnector],
|
||||
error: err
|
||||
})
|
||||
}
|
||||
|
||||
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
|
||||
assert(client[kRunning] === 0)
|
||||
while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
|
||||
const request = client[kQueue][client[kPendingIdx]++]
|
||||
errorRequest(client, request, err)
|
||||
}
|
||||
} else {
|
||||
onError(client, err)
|
||||
}
|
||||
|
||||
client.emit('connectionError', client[kUrl], [client], err)
|
||||
}
|
||||
|
||||
client[kResume]()
|
||||
}
|
||||
|
||||
function emitDrain (client) {
|
||||
client[kNeedDrain] = 0
|
||||
client.emit('drain', client[kUrl], [client])
|
||||
}
|
||||
|
||||
function resume (client, sync) {
|
||||
if (client[kResuming] === 2) {
|
||||
return
|
||||
}
|
||||
|
||||
client[kResuming] = 2
|
||||
|
||||
_resume(client, sync)
|
||||
client[kResuming] = 0
|
||||
|
||||
if (client[kRunningIdx] > 256) {
|
||||
client[kQueue].splice(0, client[kRunningIdx])
|
||||
client[kPendingIdx] -= client[kRunningIdx]
|
||||
client[kRunningIdx] = 0
|
||||
}
|
||||
}
|
||||
|
||||
function _resume (client, sync) {
|
||||
while (true) {
|
||||
if (client.destroyed) {
|
||||
assert(client[kPending] === 0)
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kClosedResolve] && !client[kSize]) {
|
||||
client[kClosedResolve]()
|
||||
client[kClosedResolve] = null
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kHTTPContext]) {
|
||||
client[kHTTPContext].resume()
|
||||
}
|
||||
|
||||
if (client[kBusy]) {
|
||||
client[kNeedDrain] = 2
|
||||
} else if (client[kNeedDrain] === 2) {
|
||||
if (sync) {
|
||||
client[kNeedDrain] = 1
|
||||
queueMicrotask(() => emitDrain(client))
|
||||
} else {
|
||||
emitDrain(client)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (client[kPending] === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kRunning] >= (getPipelining(client) || 1)) {
|
||||
return
|
||||
}
|
||||
|
||||
const request = client[kQueue][client[kPendingIdx]]
|
||||
|
||||
if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
|
||||
if (client[kRunning] > 0) {
|
||||
return
|
||||
}
|
||||
|
||||
client[kServerName] = request.servername
|
||||
client[kHTTPContext]?.destroy(new InformationalError('servername changed'))
|
||||
}
|
||||
|
||||
if (client[kConnecting]) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!client[kHTTPContext]) {
|
||||
connect(client)
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kHTTPContext].destroyed) {
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kHTTPContext].busy(request)) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!request.aborted && client[kHTTPContext].write(request)) {
|
||||
client[kPendingIdx]++
|
||||
} else {
|
||||
client[kQueue].splice(client[kPendingIdx], 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function errorRequest (client, request, err) {
|
||||
try {
|
||||
request.onError(err)
|
||||
assert(request.aborted)
|
||||
} catch (err) {
|
||||
client.emit('error', err)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Client
|
||||
@@ -5,8 +5,8 @@ const {
|
||||
ClientDestroyedError,
|
||||
ClientClosedError,
|
||||
InvalidArgumentError
|
||||
} = require('./core/errors')
|
||||
const { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols')
|
||||
} = require('../core/errors')
|
||||
const { kDestroy, kClose, kDispatch, kInterceptors } = require('../core/symbols')
|
||||
|
||||
const kDestroyed = Symbol('destroyed')
|
||||
const kClosed = Symbol('closed')
|
||||
65
deps/undici/src/lib/dispatcher/dispatcher.js
vendored
Normal file
65
deps/undici/src/lib/dispatcher/dispatcher.js
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
'use strict'
|
||||
const EventEmitter = require('node:events')
|
||||
|
||||
class Dispatcher extends EventEmitter {
|
||||
dispatch () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
close () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
destroy () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
compose (...args) {
|
||||
// So we handle [interceptor1, interceptor2] or interceptor1, interceptor2, ...
|
||||
const interceptors = Array.isArray(args[0]) ? args[0] : args
|
||||
let dispatch = this.dispatch.bind(this)
|
||||
|
||||
for (const interceptor of interceptors) {
|
||||
if (interceptor == null) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (typeof interceptor !== 'function') {
|
||||
throw new TypeError(`invalid interceptor, expected function received ${typeof interceptor}`)
|
||||
}
|
||||
|
||||
dispatch = interceptor(dispatch)
|
||||
|
||||
if (dispatch == null || typeof dispatch !== 'function' || dispatch.length !== 2) {
|
||||
throw new TypeError('invalid interceptor')
|
||||
}
|
||||
}
|
||||
|
||||
return new ComposedDispatcher(this, dispatch)
|
||||
}
|
||||
}
|
||||
|
||||
class ComposedDispatcher extends Dispatcher {
|
||||
#dispatcher = null
|
||||
#dispatch = null
|
||||
|
||||
constructor (dispatcher, dispatch) {
|
||||
super()
|
||||
this.#dispatcher = dispatcher
|
||||
this.#dispatch = dispatch
|
||||
}
|
||||
|
||||
dispatch (...args) {
|
||||
this.#dispatch(...args)
|
||||
}
|
||||
|
||||
close (...args) {
|
||||
return this.#dispatcher.close(...args)
|
||||
}
|
||||
|
||||
destroy (...args) {
|
||||
return this.#dispatcher.destroy(...args)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Dispatcher
|
||||
@@ -1,8 +1,8 @@
|
||||
'use strict'
|
||||
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const FixedQueue = require('./node/fixed-queue')
|
||||
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('./core/symbols')
|
||||
const FixedQueue = require('./fixed-queue')
|
||||
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('../core/symbols')
|
||||
const PoolStats = require('./pool-stats')
|
||||
|
||||
const kClients = Symbol('clients')
|
||||
@@ -158,7 +158,7 @@ class PoolBase extends DispatcherBase {
|
||||
this[kClients].push(client)
|
||||
|
||||
if (this[kNeedDrain]) {
|
||||
process.nextTick(() => {
|
||||
queueMicrotask(() => {
|
||||
if (this[kNeedDrain]) {
|
||||
this[kOnDrain](client[kUrl], [this, client])
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('./core/symbols')
|
||||
const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('../core/symbols')
|
||||
const kPool = Symbol('pool')
|
||||
|
||||
class PoolStats {
|
||||
@@ -10,10 +10,10 @@ const {
|
||||
const Client = require('./client')
|
||||
const {
|
||||
InvalidArgumentError
|
||||
} = require('./core/errors')
|
||||
const util = require('./core/util')
|
||||
const { kUrl, kInterceptors } = require('./core/symbols')
|
||||
const buildConnector = require('./core/connect')
|
||||
} = require('../core/errors')
|
||||
const util = require('../core/util')
|
||||
const { kUrl, kInterceptors } = require('../core/symbols')
|
||||
const buildConnector = require('../core/connect')
|
||||
|
||||
const kOptions = Symbol('options')
|
||||
const kConnections = Symbol('connections')
|
||||
@@ -1,12 +1,12 @@
|
||||
'use strict'
|
||||
|
||||
const { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols')
|
||||
const { kProxy, kClose, kDestroy, kInterceptors } = require('../core/symbols')
|
||||
const { URL } = require('node:url')
|
||||
const Agent = require('./agent')
|
||||
const Pool = require('./pool')
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const { InvalidArgumentError, RequestAbortedError } = require('./core/errors')
|
||||
const buildConnector = require('./core/connect')
|
||||
const { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require('../core/errors')
|
||||
const buildConnector = require('../core/connect')
|
||||
|
||||
const kAgent = Symbol('proxy agent')
|
||||
const kClient = Symbol('proxy client')
|
||||
@@ -19,55 +19,34 @@ function defaultProtocolPort (protocol) {
|
||||
return protocol === 'https:' ? 443 : 80
|
||||
}
|
||||
|
||||
function buildProxyOptions (opts) {
|
||||
if (typeof opts === 'string') {
|
||||
opts = { uri: opts }
|
||||
}
|
||||
|
||||
if (!opts || !opts.uri) {
|
||||
throw new InvalidArgumentError('Proxy opts.uri is mandatory')
|
||||
}
|
||||
|
||||
return {
|
||||
uri: opts.uri,
|
||||
protocol: opts.protocol || 'https'
|
||||
}
|
||||
}
|
||||
|
||||
function defaultFactory (origin, opts) {
|
||||
return new Pool(origin, opts)
|
||||
}
|
||||
|
||||
class ProxyAgent extends DispatcherBase {
|
||||
constructor (opts) {
|
||||
super(opts)
|
||||
this[kProxy] = buildProxyOptions(opts)
|
||||
this[kAgent] = new Agent(opts)
|
||||
this[kInterceptors] = opts.interceptors?.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
|
||||
? opts.interceptors.ProxyAgent
|
||||
: []
|
||||
super()
|
||||
|
||||
if (typeof opts === 'string') {
|
||||
opts = { uri: opts }
|
||||
}
|
||||
|
||||
if (!opts || !opts.uri) {
|
||||
throw new InvalidArgumentError('Proxy opts.uri is mandatory')
|
||||
if (!opts || (typeof opts === 'object' && !(opts instanceof URL) && !opts.uri)) {
|
||||
throw new InvalidArgumentError('Proxy uri is mandatory')
|
||||
}
|
||||
|
||||
const { clientFactory = defaultFactory } = opts
|
||||
|
||||
if (typeof clientFactory !== 'function') {
|
||||
throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
|
||||
}
|
||||
|
||||
const url = this.#getUrl(opts)
|
||||
const { href, origin, port, protocol, username, password, hostname: proxyHostname } = url
|
||||
|
||||
this[kProxy] = { uri: href, protocol }
|
||||
this[kInterceptors] = opts.interceptors?.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
|
||||
? opts.interceptors.ProxyAgent
|
||||
: []
|
||||
this[kRequestTls] = opts.requestTls
|
||||
this[kProxyTls] = opts.proxyTls
|
||||
this[kProxyHeaders] = opts.headers || {}
|
||||
|
||||
const resolvedUrl = new URL(opts.uri)
|
||||
const { origin, port, username, password } = resolvedUrl
|
||||
|
||||
if (opts.auth && opts.token) {
|
||||
throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
|
||||
} else if (opts.auth) {
|
||||
@@ -81,7 +60,7 @@ class ProxyAgent extends DispatcherBase {
|
||||
|
||||
const connect = buildConnector({ ...opts.proxyTls })
|
||||
this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
|
||||
this[kClient] = clientFactory(resolvedUrl, { connect })
|
||||
this[kClient] = clientFactory(url, { connect })
|
||||
this[kAgent] = new Agent({
|
||||
...opts,
|
||||
connect: async (opts, callback) => {
|
||||
@@ -98,7 +77,8 @@ class ProxyAgent extends DispatcherBase {
|
||||
headers: {
|
||||
...this[kProxyHeaders],
|
||||
host: requestedHost
|
||||
}
|
||||
},
|
||||
servername: this[kProxyTls]?.servername || proxyHostname
|
||||
})
|
||||
if (statusCode !== 200) {
|
||||
socket.on('error', () => {}).destroy()
|
||||
@@ -116,7 +96,12 @@ class ProxyAgent extends DispatcherBase {
|
||||
}
|
||||
this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
|
||||
} catch (err) {
|
||||
callback(err)
|
||||
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
|
||||
// Throw a custom error to avoid loop in client.js#connect
|
||||
callback(new SecureProxyConnectionError(err))
|
||||
} else {
|
||||
callback(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -138,6 +123,20 @@ class ProxyAgent extends DispatcherBase {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {import('../types/proxy-agent').ProxyAgent.Options | string | URL} opts
|
||||
* @returns {URL}
|
||||
*/
|
||||
#getUrl (opts) {
|
||||
if (typeof opts === 'string') {
|
||||
return new URL(opts)
|
||||
} else if (opts instanceof URL) {
|
||||
return opts
|
||||
} else {
|
||||
return new URL(opts.uri)
|
||||
}
|
||||
}
|
||||
|
||||
async [kClose] () {
|
||||
await this[kAgent].close()
|
||||
await this[kClient].close()
|
||||
35
deps/undici/src/lib/dispatcher/retry-agent.js
vendored
Normal file
35
deps/undici/src/lib/dispatcher/retry-agent.js
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict'
|
||||
|
||||
const Dispatcher = require('./dispatcher')
|
||||
const RetryHandler = require('../handler/retry-handler')
|
||||
|
||||
class RetryAgent extends Dispatcher {
|
||||
#agent = null
|
||||
#options = null
|
||||
constructor (agent, options = {}) {
|
||||
super(options)
|
||||
this.#agent = agent
|
||||
this.#options = options
|
||||
}
|
||||
|
||||
dispatch (opts, handler) {
|
||||
const retry = new RetryHandler({
|
||||
...opts,
|
||||
retryOptions: this.#options
|
||||
}, {
|
||||
dispatch: this.#agent.dispatch.bind(this.#agent),
|
||||
handler
|
||||
})
|
||||
return this.#agent.dispatch(opts, retry)
|
||||
}
|
||||
|
||||
close () {
|
||||
return this.#agent.close()
|
||||
}
|
||||
|
||||
destroy () {
|
||||
return this.#agent.destroy()
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = RetryAgent
|
||||
2
deps/undici/src/lib/global.js
vendored
2
deps/undici/src/lib/global.js
vendored
@@ -4,7 +4,7 @@
|
||||
// this version number must be increased to avoid conflicts.
|
||||
const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
|
||||
const { InvalidArgumentError } = require('./core/errors')
|
||||
const Agent = require('./agent')
|
||||
const Agent = require('./dispatcher/agent')
|
||||
|
||||
if (getGlobalDispatcher() === undefined) {
|
||||
setGlobalDispatcher(new Agent())
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
'use strict'
|
||||
const assert = require('node:assert')
|
||||
|
||||
const { kRetryHandlerDefaultRetry } = require('../core/symbols')
|
||||
@@ -37,7 +38,7 @@ class RetryHandler {
|
||||
retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
|
||||
retryAfter: retryAfter ?? true,
|
||||
maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
|
||||
timeout: minTimeout ?? 500, // .5s
|
||||
minTimeout: minTimeout ?? 500, // .5s
|
||||
timeoutFactor: timeoutFactor ?? 2,
|
||||
maxRetries: maxRetries ?? 5,
|
||||
// What errors we should retry
|
||||
@@ -53,11 +54,13 @@ class RetryHandler {
|
||||
'ENETUNREACH',
|
||||
'EHOSTDOWN',
|
||||
'EHOSTUNREACH',
|
||||
'EPIPE'
|
||||
'EPIPE',
|
||||
'UND_ERR_SOCKET'
|
||||
]
|
||||
}
|
||||
|
||||
this.retryCount = 0
|
||||
this.retryCountCheckpoint = 0
|
||||
this.start = 0
|
||||
this.end = null
|
||||
this.etag = null
|
||||
@@ -103,23 +106,19 @@ class RetryHandler {
|
||||
const { method, retryOptions } = opts
|
||||
const {
|
||||
maxRetries,
|
||||
timeout,
|
||||
minTimeout,
|
||||
maxTimeout,
|
||||
timeoutFactor,
|
||||
statusCodes,
|
||||
errorCodes,
|
||||
methods
|
||||
} = retryOptions
|
||||
let { counter, currentTimeout } = state
|
||||
|
||||
currentTimeout =
|
||||
currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout
|
||||
const { counter } = state
|
||||
|
||||
// Any code that is not a Undici's originated and allowed to retry
|
||||
if (
|
||||
code &&
|
||||
code !== 'UND_ERR_REQ_RETRY' &&
|
||||
code !== 'UND_ERR_SOCKET' &&
|
||||
!errorCodes.includes(code)
|
||||
) {
|
||||
cb(err)
|
||||
@@ -159,9 +158,7 @@ class RetryHandler {
|
||||
const retryTimeout =
|
||||
retryAfterHeader > 0
|
||||
? Math.min(retryAfterHeader, maxTimeout)
|
||||
: Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)
|
||||
|
||||
state.currentTimeout = retryTimeout
|
||||
: Math.min(minTimeout * timeoutFactor ** (counter - 1), maxTimeout)
|
||||
|
||||
setTimeout(() => cb(null), retryTimeout)
|
||||
}
|
||||
@@ -245,14 +242,12 @@ class RetryHandler {
|
||||
}
|
||||
|
||||
const { start, size, end = size } = range
|
||||
|
||||
assert(
|
||||
start != null && Number.isFinite(start) && this.start !== start,
|
||||
start != null && Number.isFinite(start),
|
||||
'content-range mismatch'
|
||||
)
|
||||
assert(Number.isFinite(start))
|
||||
assert(
|
||||
end != null && Number.isFinite(end) && this.end !== end,
|
||||
end != null && Number.isFinite(end),
|
||||
'invalid content-length'
|
||||
)
|
||||
|
||||
@@ -309,10 +304,19 @@ class RetryHandler {
|
||||
return this.handler.onError(err)
|
||||
}
|
||||
|
||||
// We reconcile in case of a mix between network errors
|
||||
// and server error response
|
||||
if (this.retryCount - this.retryCountCheckpoint > 0) {
|
||||
// We count the difference between the last checkpoint and the current retry count
|
||||
this.retryCount = this.retryCountCheckpoint + (this.retryCount - this.retryCountCheckpoint)
|
||||
} else {
|
||||
this.retryCount += 1
|
||||
}
|
||||
|
||||
this.retryOpts.retry(
|
||||
err,
|
||||
{
|
||||
state: { counter: this.retryCount++, currentTimeout: this.retryAfter },
|
||||
state: { counter: this.retryCount },
|
||||
opts: { retryOptions: this.retryOpts, ...this.opts }
|
||||
},
|
||||
onRetry.bind(this)
|
||||
@@ -334,6 +338,7 @@ class RetryHandler {
|
||||
}
|
||||
|
||||
try {
|
||||
this.retryCountCheckpoint = this.retryCount
|
||||
this.dispatch(this.opts, this)
|
||||
} catch (err) {
|
||||
this.handler.onError(err)
|
||||
@@ -1,6 +1,6 @@
|
||||
'use strict'
|
||||
|
||||
const RedirectHandler = require('../handler/RedirectHandler')
|
||||
const RedirectHandler = require('../handler/redirect-handler')
|
||||
|
||||
function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {
|
||||
return (dispatch) => {
|
||||
24
deps/undici/src/lib/interceptor/redirect.js
vendored
Normal file
24
deps/undici/src/lib/interceptor/redirect.js
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
'use strict'
|
||||
const RedirectHandler = require('../handler/redirect-handler')
|
||||
|
||||
module.exports = opts => {
|
||||
const globalMaxRedirections = opts?.maxRedirections
|
||||
return dispatch => {
|
||||
return function redirectInterceptor (opts, handler) {
|
||||
const { maxRedirections = globalMaxRedirections, ...baseOpts } = opts
|
||||
|
||||
if (!maxRedirections) {
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
|
||||
const redirectHandler = new RedirectHandler(
|
||||
dispatch,
|
||||
maxRedirections,
|
||||
opts,
|
||||
handler
|
||||
)
|
||||
|
||||
return dispatch(baseOpts, redirectHandler)
|
||||
}
|
||||
}
|
||||
}
|
||||
19
deps/undici/src/lib/interceptor/retry.js
vendored
Normal file
19
deps/undici/src/lib/interceptor/retry.js
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
'use strict'
|
||||
const RetryHandler = require('../handler/retry-handler')
|
||||
|
||||
module.exports = globalOpts => {
|
||||
return dispatch => {
|
||||
return function retryInterceptor (opts, handler) {
|
||||
return dispatch(
|
||||
opts,
|
||||
new RetryHandler(
|
||||
{ ...opts, retryOptions: { ...globalOpts, ...opts.retryOptions } },
|
||||
{
|
||||
handler,
|
||||
dispatch
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
BIN
deps/undici/src/lib/llhttp/llhttp.wasm
vendored
BIN
deps/undici/src/lib/llhttp/llhttp.wasm
vendored
Binary file not shown.
BIN
deps/undici/src/lib/llhttp/llhttp_simd.wasm
vendored
BIN
deps/undici/src/lib/llhttp/llhttp_simd.wasm
vendored
Binary file not shown.
4
deps/undici/src/lib/mock/mock-agent.js
vendored
4
deps/undici/src/lib/mock/mock-agent.js
vendored
@@ -1,7 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
const { kClients } = require('../core/symbols')
|
||||
const Agent = require('../agent')
|
||||
const Agent = require('../dispatcher/agent')
|
||||
const {
|
||||
kAgent,
|
||||
kMockAgentSet,
|
||||
@@ -17,7 +17,7 @@ const MockClient = require('./mock-client')
|
||||
const MockPool = require('./mock-pool')
|
||||
const { matchValue, buildMockOptions } = require('./mock-utils')
|
||||
const { InvalidArgumentError, UndiciError } = require('../core/errors')
|
||||
const Dispatcher = require('../dispatcher')
|
||||
const Dispatcher = require('../dispatcher/dispatcher')
|
||||
const Pluralizer = require('./pluralizer')
|
||||
const PendingInterceptorsFormatter = require('./pending-interceptors-formatter')
|
||||
|
||||
|
||||
2
deps/undici/src/lib/mock/mock-client.js
vendored
2
deps/undici/src/lib/mock/mock-client.js
vendored
@@ -1,7 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
const { promisify } = require('node:util')
|
||||
const Client = require('../client')
|
||||
const Client = require('../dispatcher/client')
|
||||
const { buildMockDispatch } = require('./mock-utils')
|
||||
const {
|
||||
kDispatches,
|
||||
|
||||
4
deps/undici/src/lib/mock/mock-interceptor.js
vendored
4
deps/undici/src/lib/mock/mock-interceptor.js
vendored
@@ -74,7 +74,7 @@ class MockInterceptor {
|
||||
if (opts.query) {
|
||||
opts.path = buildURL(opts.path, opts.query)
|
||||
} else {
|
||||
// Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811
|
||||
// Matches https://github.com/nodejs/undici/blob/main/lib/web/fetch/index.js#L1811
|
||||
const parsedURL = new URL(opts.path, 'data://')
|
||||
opts.path = parsedURL.pathname + parsedURL.search
|
||||
}
|
||||
@@ -106,7 +106,7 @@ class MockInterceptor {
|
||||
if (typeof data === 'undefined') {
|
||||
throw new InvalidArgumentError('data must be defined')
|
||||
}
|
||||
if (typeof responseOptions !== 'object') {
|
||||
if (typeof responseOptions !== 'object' || responseOptions === null) {
|
||||
throw new InvalidArgumentError('responseOptions must be an object')
|
||||
}
|
||||
}
|
||||
|
||||
2
deps/undici/src/lib/mock/mock-pool.js
vendored
2
deps/undici/src/lib/mock/mock-pool.js
vendored
@@ -1,7 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
const { promisify } = require('node:util')
|
||||
const Pool = require('../pool')
|
||||
const Pool = require('../dispatcher/pool')
|
||||
const { buildMockDispatch } = require('./mock-utils')
|
||||
const {
|
||||
kDispatches,
|
||||
|
||||
10
deps/undici/src/lib/mock/mock-utils.js
vendored
10
deps/undici/src/lib/mock/mock-utils.js
vendored
@@ -138,19 +138,20 @@ function getMockDispatch (mockDispatches, key) {
|
||||
// Match method
|
||||
matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))
|
||||
if (matchedMockDispatches.length === 0) {
|
||||
throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)
|
||||
throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}' on path '${resolvedPath}'`)
|
||||
}
|
||||
|
||||
// Match body
|
||||
matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
|
||||
if (matchedMockDispatches.length === 0) {
|
||||
throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)
|
||||
throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}' on path '${resolvedPath}'`)
|
||||
}
|
||||
|
||||
// Match headers
|
||||
matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
|
||||
if (matchedMockDispatches.length === 0) {
|
||||
throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)
|
||||
const headers = typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers
|
||||
throw new MockNotMatchedError(`Mock dispatch not matched for headers '${headers}' on path '${resolvedPath}'`)
|
||||
}
|
||||
|
||||
return matchedMockDispatches[0]
|
||||
@@ -357,5 +358,6 @@ module.exports = {
|
||||
buildMockDispatch,
|
||||
checkNetConnect,
|
||||
buildMockOptions,
|
||||
getHeaderByName
|
||||
getHeaderByName,
|
||||
buildHeadersFromArray
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
'use strict'
|
||||
|
||||
const { kConstruct } = require('./symbols')
|
||||
const { urlEquals, fieldValues: getFieldValues } = require('./util')
|
||||
const { kEnumerableProperty, isDisturbed } = require('../core/util')
|
||||
const { urlEquals, getFieldValues } = require('./util')
|
||||
const { kEnumerableProperty, isDisturbed } = require('../../core/util')
|
||||
const { webidl } = require('../fetch/webidl')
|
||||
const { Response, cloneResponse, fromInnerResponse } = require('../fetch/response')
|
||||
const { Request, fromInnerRequest } = require('../fetch/request')
|
||||
@@ -10,7 +10,6 @@ const { kState } = require('../fetch/symbols')
|
||||
const { fetching } = require('../fetch/index')
|
||||
const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')
|
||||
const assert = require('node:assert')
|
||||
const { getGlobalDispatcher } = require('../global')
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
|
||||
@@ -150,7 +149,6 @@ class Cache {
|
||||
// 5.7
|
||||
fetchControllers.push(fetching({
|
||||
request: r,
|
||||
dispatcher: getGlobalDispatcher(),
|
||||
processResponse (response) {
|
||||
// 1.
|
||||
if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
|
||||
@@ -3,7 +3,7 @@
|
||||
const { kConstruct } = require('./symbols')
|
||||
const { Cache } = require('./cache')
|
||||
const { webidl } = require('../fetch/webidl')
|
||||
const { kEnumerableProperty } = require('../core/util')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
|
||||
class CacheStorage {
|
||||
/**
|
||||
5
deps/undici/src/lib/web/cache/symbols.js
vendored
Normal file
5
deps/undici/src/lib/web/cache/symbols.js
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = {
|
||||
kConstruct: require('../../core/symbols').kConstruct
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { URLSerializer } = require('../fetch/dataURL')
|
||||
const { URLSerializer } = require('../fetch/data-url')
|
||||
const { isValidHeaderName } = require('../fetch/util')
|
||||
|
||||
/**
|
||||
@@ -23,7 +23,7 @@ function urlEquals (A, B, excludeFragment = false) {
|
||||
* @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
|
||||
* @param {string} header
|
||||
*/
|
||||
function fieldValues (header) {
|
||||
function getFieldValues (header) {
|
||||
assert(header !== null)
|
||||
|
||||
const values = []
|
||||
@@ -31,13 +31,9 @@ function fieldValues (header) {
|
||||
for (let value of header.split(',')) {
|
||||
value = value.trim()
|
||||
|
||||
if (!value.length) {
|
||||
continue
|
||||
} else if (!isValidHeaderName(value)) {
|
||||
continue
|
||||
if (isValidHeaderName(value)) {
|
||||
values.push(value)
|
||||
}
|
||||
|
||||
values.push(value)
|
||||
}
|
||||
|
||||
return values
|
||||
@@ -45,5 +41,5 @@ function fieldValues (header) {
|
||||
|
||||
module.exports = {
|
||||
urlEquals,
|
||||
fieldValues
|
||||
getFieldValues
|
||||
}
|
||||
@@ -102,7 +102,7 @@ function setCookie (headers, cookie) {
|
||||
const str = stringify(cookie)
|
||||
|
||||
if (str) {
|
||||
headers.append('Set-Cookie', stringify(cookie))
|
||||
headers.append('Set-Cookie', str)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
|
||||
const { isCTLExcludingHtab } = require('./util')
|
||||
const { collectASequenceOfCodePointsFast } = require('../fetch/dataURL')
|
||||
const { collectASequenceOfCodePointsFast } = require('../fetch/data-url')
|
||||
const assert = require('node:assert')
|
||||
|
||||
/**
|
||||
@@ -1,24 +1,25 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { kHeadersList } = require('../core/symbols')
|
||||
const { kHeadersList } = require('../../core/symbols')
|
||||
|
||||
/**
|
||||
* @param {string} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isCTLExcludingHtab (value) {
|
||||
if (value.length === 0) {
|
||||
return false
|
||||
}
|
||||
|
||||
for (const char of value) {
|
||||
const code = char.charCodeAt(0)
|
||||
for (let i = 0; i < value.length; ++i) {
|
||||
const code = value.charCodeAt(i)
|
||||
|
||||
if (
|
||||
(code >= 0x00 || code <= 0x08) ||
|
||||
(code >= 0x0A || code <= 0x1F) ||
|
||||
(code >= 0x00 && code <= 0x08) ||
|
||||
(code >= 0x0A && code <= 0x1F) ||
|
||||
code === 0x7F
|
||||
) {
|
||||
return false
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -31,28 +32,29 @@ function isCTLExcludingHtab (value) {
|
||||
* @param {string} name
|
||||
*/
|
||||
function validateCookieName (name) {
|
||||
for (const char of name) {
|
||||
const code = char.charCodeAt(0)
|
||||
for (let i = 0; i < name.length; ++i) {
|
||||
const code = name.charCodeAt(i)
|
||||
|
||||
if (
|
||||
(code <= 0x20 || code > 0x7F) ||
|
||||
char === '(' ||
|
||||
char === ')' ||
|
||||
char === '>' ||
|
||||
char === '<' ||
|
||||
char === '@' ||
|
||||
char === ',' ||
|
||||
char === ';' ||
|
||||
char === ':' ||
|
||||
char === '\\' ||
|
||||
char === '"' ||
|
||||
char === '/' ||
|
||||
char === '[' ||
|
||||
char === ']' ||
|
||||
char === '?' ||
|
||||
char === '=' ||
|
||||
char === '{' ||
|
||||
char === '}'
|
||||
code < 0x21 || // exclude CTLs (0-31), SP and HT
|
||||
code > 0x7E || // exclude non-ascii and DEL
|
||||
code === 0x22 || // "
|
||||
code === 0x28 || // (
|
||||
code === 0x29 || // )
|
||||
code === 0x3C || // <
|
||||
code === 0x3E || // >
|
||||
code === 0x40 || // @
|
||||
code === 0x2C || // ,
|
||||
code === 0x3B || // ;
|
||||
code === 0x3A || // :
|
||||
code === 0x5C || // \
|
||||
code === 0x2F || // /
|
||||
code === 0x5B || // [
|
||||
code === 0x5D || // ]
|
||||
code === 0x3F || // ?
|
||||
code === 0x3D || // =
|
||||
code === 0x7B || // {
|
||||
code === 0x7D // }
|
||||
) {
|
||||
throw new Error('Invalid cookie name')
|
||||
}
|
||||
@@ -68,18 +70,30 @@ function validateCookieName (name) {
|
||||
* @param {string} value
|
||||
*/
|
||||
function validateCookieValue (value) {
|
||||
for (const char of value) {
|
||||
const code = char.charCodeAt(0)
|
||||
let len = value.length
|
||||
let i = 0
|
||||
|
||||
// if the value is wrapped in DQUOTE
|
||||
if (value[0] === '"') {
|
||||
if (len === 1 || value[len - 1] !== '"') {
|
||||
throw new Error('Invalid cookie value')
|
||||
}
|
||||
--len
|
||||
++i
|
||||
}
|
||||
|
||||
while (i < len) {
|
||||
const code = value.charCodeAt(i++)
|
||||
|
||||
if (
|
||||
code < 0x21 || // exclude CTLs (0-31)
|
||||
code === 0x22 ||
|
||||
code === 0x2C ||
|
||||
code === 0x3B ||
|
||||
code === 0x5C ||
|
||||
code > 0x7E // non-ascii
|
||||
code > 0x7E || // non-ascii and DEL (127)
|
||||
code === 0x22 || // "
|
||||
code === 0x2C || // ,
|
||||
code === 0x3B || // ;
|
||||
code === 0x5C // \
|
||||
) {
|
||||
throw new Error('Invalid header value')
|
||||
throw new Error('Invalid cookie value')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -89,10 +103,14 @@ function validateCookieValue (value) {
|
||||
* @param {string} path
|
||||
*/
|
||||
function validateCookiePath (path) {
|
||||
for (const char of path) {
|
||||
const code = char.charCodeAt(0)
|
||||
for (let i = 0; i < path.length; ++i) {
|
||||
const code = path.charCodeAt(i)
|
||||
|
||||
if (code < 0x21 || char === ';') {
|
||||
if (
|
||||
code < 0x20 || // exclude CTLs (0-31)
|
||||
code === 0x7F || // DEL
|
||||
code === 0x3B // ;
|
||||
) {
|
||||
throw new Error('Invalid cookie path')
|
||||
}
|
||||
}
|
||||
@@ -113,6 +131,18 @@ function validateCookieDomain (domain) {
|
||||
}
|
||||
}
|
||||
|
||||
const IMFDays = [
|
||||
'Sun', 'Mon', 'Tue', 'Wed',
|
||||
'Thu', 'Fri', 'Sat'
|
||||
]
|
||||
|
||||
const IMFMonths = [
|
||||
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
||||
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
|
||||
]
|
||||
|
||||
const IMFPaddedNumbers = Array(61).fill(0).map((_, i) => i.toString().padStart(2, '0'))
|
||||
|
||||
/**
|
||||
* @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
|
||||
* @param {number|Date} date
|
||||
@@ -159,25 +189,7 @@ function toIMFDate (date) {
|
||||
date = new Date(date)
|
||||
}
|
||||
|
||||
const days = [
|
||||
'Sun', 'Mon', 'Tue', 'Wed',
|
||||
'Thu', 'Fri', 'Sat'
|
||||
]
|
||||
|
||||
const months = [
|
||||
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
||||
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
|
||||
]
|
||||
|
||||
const dayName = days[date.getUTCDay()]
|
||||
const day = date.getUTCDate().toString().padStart(2, '0')
|
||||
const month = months[date.getUTCMonth()]
|
||||
const year = date.getUTCFullYear()
|
||||
const hour = date.getUTCHours().toString().padStart(2, '0')
|
||||
const minute = date.getUTCMinutes().toString().padStart(2, '0')
|
||||
const second = date.getUTCSeconds().toString().padStart(2, '0')
|
||||
|
||||
return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`
|
||||
return `${IMFDays[date.getUTCDay()]}, ${IMFPaddedNumbers[date.getUTCDate()]} ${IMFMonths[date.getUTCMonth()]} ${date.getUTCFullYear()} ${IMFPaddedNumbers[date.getUTCHours()]}:${IMFPaddedNumbers[date.getUTCMinutes()]}:${IMFPaddedNumbers[date.getUTCSeconds()]} GMT`
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -286,6 +298,10 @@ function getHeadersList (headers) {
|
||||
|
||||
module.exports = {
|
||||
isCTLExcludingHtab,
|
||||
validateCookieName,
|
||||
validateCookiePath,
|
||||
validateCookieValue,
|
||||
toIMFDate,
|
||||
stringify,
|
||||
getHeadersList
|
||||
}
|
||||
@@ -6,11 +6,11 @@ const { makeRequest } = require('../fetch/request')
|
||||
const { getGlobalOrigin } = require('../fetch/global')
|
||||
const { webidl } = require('../fetch/webidl')
|
||||
const { EventSourceStream } = require('./eventsource-stream')
|
||||
const { parseMIMEType } = require('../fetch/dataURL')
|
||||
const { parseMIMEType } = require('../fetch/data-url')
|
||||
const { MessageEvent } = require('../websocket/events')
|
||||
const { isNetworkError } = require('../fetch/response')
|
||||
const { getGlobalDispatcher } = require('../global')
|
||||
const { delay } = require('./util')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
|
||||
let experimentalWarned = false
|
||||
|
||||
@@ -316,10 +316,7 @@ class EventSource extends EventTarget {
|
||||
})
|
||||
}
|
||||
|
||||
this.#controller = fetching({
|
||||
...fetchParam,
|
||||
dispatcher: getGlobalDispatcher()
|
||||
})
|
||||
this.#controller = fetching(fetchParam)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -463,6 +460,16 @@ const constantsPropertyDescriptors = {
|
||||
Object.defineProperties(EventSource, constantsPropertyDescriptors)
|
||||
Object.defineProperties(EventSource.prototype, constantsPropertyDescriptors)
|
||||
|
||||
Object.defineProperties(EventSource.prototype, {
|
||||
close: kEnumerableProperty,
|
||||
onerror: kEnumerableProperty,
|
||||
onmessage: kEnumerableProperty,
|
||||
onopen: kEnumerableProperty,
|
||||
readyState: kEnumerableProperty,
|
||||
url: kEnumerableProperty,
|
||||
withCredentials: kEnumerableProperty
|
||||
})
|
||||
|
||||
webidl.converters.EventSourceInitDict = webidl.dictionaryConverter([
|
||||
{ key: 'withCredentials', converter: webidl.converters.boolean, defaultValue: false }
|
||||
])
|
||||
@@ -1,7 +1,6 @@
|
||||
'use strict'
|
||||
|
||||
const Busboy = require('@fastify/busboy')
|
||||
const util = require('../core/util')
|
||||
const util = require('../../core/util')
|
||||
const {
|
||||
ReadableStreamFrom,
|
||||
isBlobLike,
|
||||
@@ -9,23 +8,20 @@ const {
|
||||
readableStreamClose,
|
||||
createDeferredPromise,
|
||||
fullyReadBody,
|
||||
extractMimeType
|
||||
extractMimeType,
|
||||
utf8DecodeBytes
|
||||
} = require('./util')
|
||||
const { FormData } = require('./formdata')
|
||||
const { kState } = require('./symbols')
|
||||
const { webidl } = require('./webidl')
|
||||
const { Blob, File: NativeFile } = require('node:buffer')
|
||||
const { kBodyUsed } = require('../core/symbols')
|
||||
const { Blob } = require('node:buffer')
|
||||
const assert = require('node:assert')
|
||||
const { isErrored } = require('../core/util')
|
||||
const { isUint8Array, isArrayBuffer } = require('util/types')
|
||||
const { File: UndiciFile } = require('./file')
|
||||
const { serializeAMimeType } = require('./dataURL')
|
||||
const { isErrored } = require('../../core/util')
|
||||
const { isArrayBuffer } = require('node:util/types')
|
||||
const { serializeAMimeType } = require('./data-url')
|
||||
const { multipartFormDataParser } = require('./formdata-parser')
|
||||
|
||||
/** @type {globalThis['File']} */
|
||||
const File = NativeFile ?? UndiciFile
|
||||
const textEncoder = new TextEncoder()
|
||||
const textDecoder = new TextDecoder()
|
||||
|
||||
// https://fetch.spec.whatwg.org/#concept-bodyinit-extract
|
||||
function extractBody (object, keepalive = false) {
|
||||
@@ -275,45 +271,18 @@ function cloneBody (body) {
|
||||
|
||||
// 1. Let « out1, out2 » be the result of teeing body’s stream.
|
||||
const [out1, out2] = body.stream.tee()
|
||||
const out2Clone = structuredClone(out2, { transfer: [out2] })
|
||||
// This, for whatever reasons, unrefs out2Clone which allows
|
||||
// the process to exit by itself.
|
||||
const [, finalClone] = out2Clone.tee()
|
||||
|
||||
// 2. Set body’s stream to out1.
|
||||
body.stream = out1
|
||||
|
||||
// 3. Return a body whose stream is out2 and other members are copied from body.
|
||||
return {
|
||||
stream: finalClone,
|
||||
stream: out2,
|
||||
length: body.length,
|
||||
source: body.source
|
||||
}
|
||||
}
|
||||
|
||||
async function * consumeBody (body) {
|
||||
if (body) {
|
||||
if (isUint8Array(body)) {
|
||||
yield body
|
||||
} else {
|
||||
const stream = body.stream
|
||||
|
||||
if (util.isDisturbed(stream)) {
|
||||
throw new TypeError('The body has already been consumed.')
|
||||
}
|
||||
|
||||
if (stream.locked) {
|
||||
throw new TypeError('The stream is locked.')
|
||||
}
|
||||
|
||||
// Compat.
|
||||
stream[kBodyUsed] = true
|
||||
|
||||
yield * stream
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function throwIfAborted (state) {
|
||||
if (state.aborted) {
|
||||
throw new DOMException('The operation was aborted.', 'AbortError')
|
||||
@@ -328,7 +297,7 @@ function bodyMixinMethods (instance) {
|
||||
// given a byte sequence bytes: return a Blob whose
|
||||
// contents are bytes and whose type attribute is this’s
|
||||
// MIME type.
|
||||
return specConsumeBody(this, (bytes) => {
|
||||
return consumeBody(this, (bytes) => {
|
||||
let mimeType = bodyMimeType(this)
|
||||
|
||||
if (mimeType === null) {
|
||||
@@ -348,7 +317,7 @@ function bodyMixinMethods (instance) {
|
||||
// of running consume body with this and the following step
|
||||
// given a byte sequence bytes: return a new ArrayBuffer
|
||||
// whose contents are bytes.
|
||||
return specConsumeBody(this, (bytes) => {
|
||||
return consumeBody(this, (bytes) => {
|
||||
return new Uint8Array(bytes).buffer
|
||||
}, instance)
|
||||
},
|
||||
@@ -356,127 +325,65 @@ function bodyMixinMethods (instance) {
|
||||
text () {
|
||||
// The text() method steps are to return the result of running
|
||||
// consume body with this and UTF-8 decode.
|
||||
return specConsumeBody(this, utf8DecodeBytes, instance)
|
||||
return consumeBody(this, utf8DecodeBytes, instance)
|
||||
},
|
||||
|
||||
json () {
|
||||
// The json() method steps are to return the result of running
|
||||
// consume body with this and parse JSON from bytes.
|
||||
return specConsumeBody(this, parseJSONFromBytes, instance)
|
||||
return consumeBody(this, parseJSONFromBytes, instance)
|
||||
},
|
||||
|
||||
async formData () {
|
||||
webidl.brandCheck(this, instance)
|
||||
formData () {
|
||||
// The formData() method steps are to return the result of running
|
||||
// consume body with this and the following step given a byte sequence bytes:
|
||||
return consumeBody(this, (value) => {
|
||||
// 1. Let mimeType be the result of get the MIME type with this.
|
||||
const mimeType = bodyMimeType(this)
|
||||
|
||||
throwIfAborted(this[kState])
|
||||
// 2. If mimeType is non-null, then switch on mimeType’s essence and run
|
||||
// the corresponding steps:
|
||||
if (mimeType !== null) {
|
||||
switch (mimeType.essence) {
|
||||
case 'multipart/form-data': {
|
||||
// 1. ... [long step]
|
||||
const parsed = multipartFormDataParser(value, mimeType)
|
||||
|
||||
// 1. Let mimeType be the result of get the MIME type with this.
|
||||
const mimeType = bodyMimeType(this)
|
||||
// 2. If that fails for some reason, then throw a TypeError.
|
||||
if (parsed === 'failure') {
|
||||
throw new TypeError('Failed to parse body as FormData.')
|
||||
}
|
||||
|
||||
// If mimeType’s essence is "multipart/form-data", then:
|
||||
if (mimeType !== null && mimeType.essence === 'multipart/form-data') {
|
||||
const headers = {}
|
||||
for (const [key, value] of this.headers) headers[key] = value
|
||||
// 3. Return a new FormData object, appending each entry,
|
||||
// resulting from the parsing operation, to its entry list.
|
||||
const fd = new FormData()
|
||||
fd[kState] = parsed
|
||||
|
||||
const responseFormData = new FormData()
|
||||
|
||||
let busboy
|
||||
|
||||
try {
|
||||
busboy = new Busboy({
|
||||
headers,
|
||||
preservePath: true
|
||||
})
|
||||
} catch (err) {
|
||||
throw new DOMException(`${err}`, 'AbortError')
|
||||
}
|
||||
|
||||
busboy.on('field', (name, value) => {
|
||||
responseFormData.append(name, value)
|
||||
})
|
||||
busboy.on('file', (name, value, filename, encoding, mimeType) => {
|
||||
const chunks = []
|
||||
|
||||
if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {
|
||||
let base64chunk = ''
|
||||
|
||||
value.on('data', (chunk) => {
|
||||
base64chunk += chunk.toString().replace(/[\r\n]/gm, '')
|
||||
|
||||
const end = base64chunk.length - base64chunk.length % 4
|
||||
chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64'))
|
||||
|
||||
base64chunk = base64chunk.slice(end)
|
||||
})
|
||||
value.on('end', () => {
|
||||
chunks.push(Buffer.from(base64chunk, 'base64'))
|
||||
responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
|
||||
})
|
||||
} else {
|
||||
value.on('data', (chunk) => {
|
||||
chunks.push(chunk)
|
||||
})
|
||||
value.on('end', () => {
|
||||
responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
const busboyResolve = new Promise((resolve, reject) => {
|
||||
busboy.on('finish', resolve)
|
||||
busboy.on('error', (err) => reject(new TypeError(err)))
|
||||
})
|
||||
|
||||
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk)
|
||||
busboy.end()
|
||||
await busboyResolve
|
||||
|
||||
return responseFormData
|
||||
} else if (mimeType !== null && mimeType.essence === 'application/x-www-form-urlencoded') {
|
||||
// Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then:
|
||||
|
||||
// 1. Let entries be the result of parsing bytes.
|
||||
let entries
|
||||
try {
|
||||
let text = ''
|
||||
// application/x-www-form-urlencoded parser will keep the BOM.
|
||||
// https://url.spec.whatwg.org/#concept-urlencoded-parser
|
||||
// Note that streaming decoder is stateful and cannot be reused
|
||||
const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true })
|
||||
|
||||
for await (const chunk of consumeBody(this[kState].body)) {
|
||||
if (!isUint8Array(chunk)) {
|
||||
throw new TypeError('Expected Uint8Array chunk')
|
||||
return fd
|
||||
}
|
||||
case 'application/x-www-form-urlencoded': {
|
||||
// 1. Let entries be the result of parsing bytes.
|
||||
const entries = new URLSearchParams(value.toString())
|
||||
|
||||
// 2. If entries is failure, then throw a TypeError.
|
||||
|
||||
// 3. Return a new FormData object whose entry list is entries.
|
||||
const fd = new FormData()
|
||||
|
||||
for (const [name, value] of entries) {
|
||||
fd.append(name, value)
|
||||
}
|
||||
|
||||
return fd
|
||||
}
|
||||
text += streamingDecoder.decode(chunk, { stream: true })
|
||||
}
|
||||
text += streamingDecoder.decode()
|
||||
entries = new URLSearchParams(text)
|
||||
} catch (err) {
|
||||
// istanbul ignore next: Unclear when new URLSearchParams can fail on a string.
|
||||
// 2. If entries is failure, then throw a TypeError.
|
||||
throw new TypeError(undefined, { cause: err })
|
||||
}
|
||||
|
||||
// 3. Return a new FormData object whose entries are entries.
|
||||
const formData = new FormData()
|
||||
for (const [name, value] of entries) {
|
||||
formData.append(name, value)
|
||||
}
|
||||
return formData
|
||||
} else {
|
||||
// Wait a tick before checking if the request has been aborted.
|
||||
// Otherwise, a TypeError can be thrown when an AbortError should.
|
||||
await Promise.resolve()
|
||||
|
||||
throwIfAborted(this[kState])
|
||||
|
||||
// Otherwise, throw a TypeError.
|
||||
throw webidl.errors.exception({
|
||||
header: `${instance.name}.formData`,
|
||||
message: 'Could not parse content as FormData.'
|
||||
})
|
||||
}
|
||||
// 3. Throw a TypeError.
|
||||
throw new TypeError(
|
||||
'Content-Type was not one of "multipart/form-data" or "application/x-www-form-urlencoded".'
|
||||
)
|
||||
}, instance)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -493,7 +400,7 @@ function mixinBody (prototype) {
|
||||
* @param {(value: unknown) => unknown} convertBytesToJSValue
|
||||
* @param {Response|Request} instance
|
||||
*/
|
||||
async function specConsumeBody (object, convertBytesToJSValue, instance) {
|
||||
async function consumeBody (object, convertBytesToJSValue, instance) {
|
||||
webidl.brandCheck(object, instance)
|
||||
|
||||
throwIfAborted(object[kState])
|
||||
@@ -545,32 +452,6 @@ function bodyUnusable (body) {
|
||||
return body != null && (body.stream.locked || util.isDisturbed(body.stream))
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://encoding.spec.whatwg.org/#utf-8-decode
|
||||
* @param {Buffer} buffer
|
||||
*/
|
||||
function utf8DecodeBytes (buffer) {
|
||||
if (buffer.length === 0) {
|
||||
return ''
|
||||
}
|
||||
|
||||
// 1. Let buffer be the result of peeking three bytes from
|
||||
// ioQueue, converted to a byte sequence.
|
||||
|
||||
// 2. If buffer is 0xEF 0xBB 0xBF, then read three
|
||||
// bytes from ioQueue. (Do nothing with those bytes.)
|
||||
if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) {
|
||||
buffer = buffer.subarray(3)
|
||||
}
|
||||
|
||||
// 3. Process a queue with an instance of UTF-8’s
|
||||
// decoder, ioQueue, output, and "replacement".
|
||||
const output = textDecoder.decode(buffer)
|
||||
|
||||
// 4. Return output.
|
||||
return output
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
|
||||
* @param {Uint8Array} bytes
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
@@ -626,7 +628,6 @@ function removeASCIIWhitespace (str, leading = true, trailing = true) {
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {boolean} leading
|
||||
* @param {boolean} trailing
|
||||
@@ -736,5 +737,7 @@ module.exports = {
|
||||
collectAnHTTPQuotedString,
|
||||
serializeAMimeType,
|
||||
removeChars,
|
||||
minimizeSupportedMimeType
|
||||
minimizeSupportedMimeType,
|
||||
HTTP_TOKEN_CODEPOINTS,
|
||||
isomorphicDecode
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
'use strict'
|
||||
|
||||
const { kConnected, kSize } = require('../core/symbols')
|
||||
const { kConnected, kSize } = require('../../core/symbols')
|
||||
|
||||
class CompatWeakRef {
|
||||
constructor (value) {
|
||||
@@ -1,12 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const { EOL } = require('node:os')
|
||||
const { Blob, File: NativeFile } = require('node:buffer')
|
||||
const { types } = require('node:util')
|
||||
const { kState } = require('./symbols')
|
||||
const { isBlobLike } = require('./util')
|
||||
const { webidl } = require('./webidl')
|
||||
const { parseMIMEType, serializeAMimeType } = require('./dataURL')
|
||||
const { kEnumerableProperty } = require('../core/util')
|
||||
const { parseMIMEType, serializeAMimeType } = require('./data-url')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
|
||||
class File extends Blob {
|
||||
@@ -307,17 +309,14 @@ function processBlobParts (parts, options) {
|
||||
*/
|
||||
function convertLineEndingsNative (s) {
|
||||
// 1. Let native line ending be be the code point U+000A LF.
|
||||
let nativeLineEnding = '\n'
|
||||
|
||||
// 2. If the underlying platform’s conventions are to
|
||||
// represent newlines as a carriage return and line feed
|
||||
// sequence, set native line ending to the code point
|
||||
// U+000D CR followed by the code point U+000A LF.
|
||||
if (process.platform === 'win32') {
|
||||
nativeLineEnding = '\r\n'
|
||||
}
|
||||
// NOTE: We are using the native line ending for the current
|
||||
// platform, provided by node's os module.
|
||||
|
||||
return s.replace(/\r?\n/g, nativeLineEnding)
|
||||
return s.replace(/\r?\n/g, EOL)
|
||||
}
|
||||
|
||||
// If this function is moved to ./util.js, some tools (such as
|
||||
502
deps/undici/src/lib/web/fetch/formdata-parser.js
vendored
Normal file
502
deps/undici/src/lib/web/fetch/formdata-parser.js
vendored
Normal file
@@ -0,0 +1,502 @@
|
||||
'use strict'
|
||||
|
||||
const { toUSVString, isUSVString, bufferToLowerCasedHeaderName } = require('../../core/util')
|
||||
const { utf8DecodeBytes } = require('./util')
|
||||
const { HTTP_TOKEN_CODEPOINTS, isomorphicDecode } = require('./data-url')
|
||||
const { isFileLike, File: UndiciFile } = require('./file')
|
||||
const { makeEntry } = require('./formdata')
|
||||
const assert = require('node:assert')
|
||||
const { File: NodeFile } = require('node:buffer')
|
||||
|
||||
const File = globalThis.File ?? NodeFile ?? UndiciFile
|
||||
|
||||
const formDataNameBuffer = Buffer.from('form-data; name="')
|
||||
const filenameBuffer = Buffer.from('; filename')
|
||||
const dd = Buffer.from('--')
|
||||
const ddcrlf = Buffer.from('--\r\n')
|
||||
|
||||
/**
|
||||
* @param {string} chars
|
||||
*/
|
||||
function isAsciiString (chars) {
|
||||
for (let i = 0; i < chars.length; ++i) {
|
||||
if ((chars.charCodeAt(i) & ~0x7F) !== 0) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://andreubotella.github.io/multipart-form-data/#multipart-form-data-boundary
|
||||
* @param {string} boundary
|
||||
*/
|
||||
function validateBoundary (boundary) {
|
||||
const length = boundary.length
|
||||
|
||||
// - its length is greater or equal to 27 and lesser or equal to 70, and
|
||||
if (length < 27 || length > 70) {
|
||||
return false
|
||||
}
|
||||
|
||||
// - it is composed by bytes in the ranges 0x30 to 0x39, 0x41 to 0x5A, or
|
||||
// 0x61 to 0x7A, inclusive (ASCII alphanumeric), or which are 0x27 ('),
|
||||
// 0x2D (-) or 0x5F (_).
|
||||
for (let i = 0; i < length; ++i) {
|
||||
const cp = boundary.charCodeAt(i)
|
||||
|
||||
if (!(
|
||||
(cp >= 0x30 && cp <= 0x39) ||
|
||||
(cp >= 0x41 && cp <= 0x5a) ||
|
||||
(cp >= 0x61 && cp <= 0x7a) ||
|
||||
cp === 0x27 ||
|
||||
cp === 0x2d ||
|
||||
cp === 0x5f
|
||||
)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://andreubotella.github.io/multipart-form-data/#escape-a-multipart-form-data-name
|
||||
* @param {string} name
|
||||
* @param {string} [encoding='utf-8']
|
||||
* @param {boolean} [isFilename=false]
|
||||
*/
|
||||
function escapeFormDataName (name, encoding = 'utf-8', isFilename = false) {
|
||||
// 1. If isFilename is true:
|
||||
if (isFilename) {
|
||||
// 1.1. Set name to the result of converting name into a scalar value string.
|
||||
name = toUSVString(name)
|
||||
} else {
|
||||
// 2. Otherwise:
|
||||
|
||||
// 2.1. Assert: name is a scalar value string.
|
||||
assert(isUSVString(name))
|
||||
|
||||
// 2.2. Replace every occurrence of U+000D (CR) not followed by U+000A (LF),
|
||||
// and every occurrence of U+000A (LF) not preceded by U+000D (CR), in
|
||||
// name, by a string consisting of U+000D (CR) and U+000A (LF).
|
||||
name = name.replace(/\r\n?|\r?\n/g, '\r\n')
|
||||
}
|
||||
|
||||
// 3. Let encoded be the result of encoding name with encoding.
|
||||
assert(Buffer.isEncoding(encoding))
|
||||
|
||||
// 4. Replace every 0x0A (LF) bytes in encoded with the byte sequence `%0A`,
|
||||
// 0x0D (CR) with `%0D` and 0x22 (") with `%22`.
|
||||
name = name
|
||||
.replace(/\n/g, '%0A')
|
||||
.replace(/\r/g, '%0D')
|
||||
.replace(/"/g, '%22')
|
||||
|
||||
// 5. Return encoded.
|
||||
return Buffer.from(name, encoding) // encoded
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://andreubotella.github.io/multipart-form-data/#multipart-form-data-parser
|
||||
* @param {Buffer} input
|
||||
* @param {ReturnType<import('./data-url')['parseMIMEType']>} mimeType
|
||||
*/
|
||||
function multipartFormDataParser (input, mimeType) {
|
||||
// 1. Assert: mimeType’s essence is "multipart/form-data".
|
||||
assert(mimeType !== 'failure' && mimeType.essence === 'multipart/form-data')
|
||||
|
||||
const boundaryString = mimeType.parameters.get('boundary')
|
||||
|
||||
// 2. If mimeType’s parameters["boundary"] does not exist, return failure.
|
||||
// Otherwise, let boundary be the result of UTF-8 decoding mimeType’s
|
||||
// parameters["boundary"].
|
||||
if (boundaryString === undefined) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
const boundary = Buffer.from(`--${boundaryString}`, 'utf8')
|
||||
|
||||
// 3. Let entry list be an empty entry list.
|
||||
const entryList = []
|
||||
|
||||
// 4. Let position be a pointer to a byte in input, initially pointing at
|
||||
// the first byte.
|
||||
const position = { position: 0 }
|
||||
|
||||
// Note: undici addition, allow \r\n before the body.
|
||||
if (input[0] === 0x0d && input[1] === 0x0a) {
|
||||
position.position += 2
|
||||
}
|
||||
|
||||
// 5. While true:
|
||||
while (true) {
|
||||
// 5.1. If position points to a sequence of bytes starting with 0x2D 0x2D
|
||||
// (`--`) followed by boundary, advance position by 2 + the length of
|
||||
// boundary. Otherwise, return failure.
|
||||
// Note: boundary is padded with 2 dashes already, no need to add 2.
|
||||
if (input.subarray(position.position, position.position + boundary.length).equals(boundary)) {
|
||||
position.position += boundary.length
|
||||
} else {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 5.2. If position points to the sequence of bytes 0x2D 0x2D 0x0D 0x0A
|
||||
// (`--` followed by CR LF) followed by the end of input, return entry list.
|
||||
// Note: a body does NOT need to end with CRLF. It can end with --.
|
||||
if (
|
||||
(position.position === input.length - 2 && bufferStartsWith(input, dd, position)) ||
|
||||
(position.position === input.length - 4 && bufferStartsWith(input, ddcrlf, position))
|
||||
) {
|
||||
return entryList
|
||||
}
|
||||
|
||||
// 5.3. If position does not point to a sequence of bytes starting with 0x0D
|
||||
// 0x0A (CR LF), return failure.
|
||||
if (input[position.position] !== 0x0d || input[position.position + 1] !== 0x0a) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 5.4. Advance position by 2. (This skips past the newline.)
|
||||
position.position += 2
|
||||
|
||||
// 5.5. Let name, filename and contentType be the result of parsing
|
||||
// multipart/form-data headers on input and position, if the result
|
||||
// is not failure. Otherwise, return failure.
|
||||
const result = parseMultipartFormDataHeaders(input, position)
|
||||
|
||||
if (result === 'failure') {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
let { name, filename, contentType, encoding } = result
|
||||
|
||||
// 5.6. Advance position by 2. (This skips past the empty line that marks
|
||||
// the end of the headers.)
|
||||
position.position += 2
|
||||
|
||||
// 5.7. Let body be the empty byte sequence.
|
||||
let body
|
||||
|
||||
// 5.8. Body loop: While position is not past the end of input:
|
||||
// TODO: the steps here are completely wrong
|
||||
{
|
||||
const boundaryIndex = input.indexOf(boundary.subarray(2), position.position)
|
||||
|
||||
if (boundaryIndex === -1) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
body = input.subarray(position.position, boundaryIndex - 4)
|
||||
|
||||
position.position += body.length
|
||||
|
||||
// Note: position must be advanced by the body's length before being
|
||||
// decoded, otherwise the parsing will fail.
|
||||
if (encoding === 'base64') {
|
||||
body = Buffer.from(body.toString(), 'base64')
|
||||
}
|
||||
}
|
||||
|
||||
// 5.9. If position does not point to a sequence of bytes starting with
|
||||
// 0x0D 0x0A (CR LF), return failure. Otherwise, advance position by 2.
|
||||
if (input[position.position] !== 0x0d || input[position.position + 1] !== 0x0a) {
|
||||
return 'failure'
|
||||
} else {
|
||||
position.position += 2
|
||||
}
|
||||
|
||||
// 5.10. If filename is not null:
|
||||
let value
|
||||
|
||||
if (filename !== null) {
|
||||
// 5.10.1. If contentType is null, set contentType to "text/plain".
|
||||
contentType ??= 'text/plain'
|
||||
|
||||
// 5.10.2. If contentType is not an ASCII string, set contentType to the empty string.
|
||||
|
||||
// Note: `buffer.isAscii` can be used at zero-cost, but converting a string to a buffer is a high overhead.
|
||||
// Content-Type is a relatively small string, so it is faster to use `String#charCodeAt`.
|
||||
if (!isAsciiString(contentType)) {
|
||||
contentType = ''
|
||||
}
|
||||
|
||||
// 5.10.3. Let value be a new File object with name filename, type contentType, and body body.
|
||||
value = new File([body], filename, { type: contentType })
|
||||
} else {
|
||||
// 5.11. Otherwise:
|
||||
|
||||
// 5.11.1. Let value be the UTF-8 decoding without BOM of body.
|
||||
value = utf8DecodeBytes(Buffer.from(body))
|
||||
}
|
||||
|
||||
// 5.12. Assert: name is a scalar value string and value is either a scalar value string or a File object.
|
||||
assert(isUSVString(name))
|
||||
assert((typeof value === 'string' && isUSVString(value)) || isFileLike(value))
|
||||
|
||||
// 5.13. Create an entry with name and value, and append it to entry list.
|
||||
entryList.push(makeEntry(name, value, filename))
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://andreubotella.github.io/multipart-form-data/#parse-multipart-form-data-headers
|
||||
* @param {Buffer} input
|
||||
* @param {{ position: number }} position
|
||||
*/
|
||||
function parseMultipartFormDataHeaders (input, position) {
|
||||
// 1. Let name, filename and contentType be null.
|
||||
let name = null
|
||||
let filename = null
|
||||
let contentType = null
|
||||
let encoding = null
|
||||
|
||||
// 2. While true:
|
||||
while (true) {
|
||||
// 2.1. If position points to a sequence of bytes starting with 0x0D 0x0A (CR LF):
|
||||
if (input[position.position] === 0x0d && input[position.position + 1] === 0x0a) {
|
||||
// 2.1.1. If name is null, return failure.
|
||||
if (name === null) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 2.1.2. Return name, filename and contentType.
|
||||
return { name, filename, contentType, encoding }
|
||||
}
|
||||
|
||||
// 2.2. Let header name be the result of collecting a sequence of bytes that are
|
||||
// not 0x0A (LF), 0x0D (CR) or 0x3A (:), given position.
|
||||
let headerName = collectASequenceOfBytes(
|
||||
(char) => char !== 0x0a && char !== 0x0d && char !== 0x3a,
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 2.3. Remove any HTTP tab or space bytes from the start or end of header name.
|
||||
headerName = removeChars(headerName, true, true, (char) => char === 0x9 || char === 0x20)
|
||||
|
||||
// 2.4. If header name does not match the field-name token production, return failure.
|
||||
if (!HTTP_TOKEN_CODEPOINTS.test(headerName.toString())) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 2.5. If the byte at position is not 0x3A (:), return failure.
|
||||
if (input[position.position] !== 0x3a) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 2.6. Advance position by 1.
|
||||
position.position++
|
||||
|
||||
// 2.7. Collect a sequence of bytes that are HTTP tab or space bytes given position.
|
||||
// (Do nothing with those bytes.)
|
||||
collectASequenceOfBytes(
|
||||
(char) => char === 0x20 || char === 0x09,
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 2.8. Byte-lowercase header name and switch on the result:
|
||||
switch (bufferToLowerCasedHeaderName(headerName)) {
|
||||
case 'content-disposition': {
|
||||
// 1. Set name and filename to null.
|
||||
name = filename = null
|
||||
|
||||
// 2. If position does not point to a sequence of bytes starting with
|
||||
// `form-data; name="`, return failure.
|
||||
if (!bufferStartsWith(input, formDataNameBuffer, position)) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 3. Advance position so it points at the byte after the next 0x22 (")
|
||||
// byte (the one in the sequence of bytes matched above).
|
||||
position.position += 17
|
||||
|
||||
// 4. Set name to the result of parsing a multipart/form-data name given
|
||||
// input and position, if the result is not failure. Otherwise, return
|
||||
// failure.
|
||||
name = parseMultipartFormDataName(input, position)
|
||||
|
||||
if (name === null) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 5. If position points to a sequence of bytes starting with `; filename="`:
|
||||
if (bufferStartsWith(input, filenameBuffer, position)) {
|
||||
// Note: undici also handles filename*
|
||||
let check = position.position + filenameBuffer.length
|
||||
|
||||
if (input[check] === 0x2a) {
|
||||
position.position += 1
|
||||
check += 1
|
||||
}
|
||||
|
||||
if (input[check] !== 0x3d || input[check + 1] !== 0x22) { // ="
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 1. Advance position so it points at the byte after the next 0x22 (") byte
|
||||
// (the one in the sequence of bytes matched above).
|
||||
position.position += 12
|
||||
|
||||
// 2. Set filename to the result of parsing a multipart/form-data name given
|
||||
// input and position, if the result is not failure. Otherwise, return failure.
|
||||
filename = parseMultipartFormDataName(input, position)
|
||||
|
||||
if (filename === null) {
|
||||
return 'failure'
|
||||
}
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
case 'content-type': {
|
||||
// 1. Let header value be the result of collecting a sequence of bytes that are
|
||||
// not 0x0A (LF) or 0x0D (CR), given position.
|
||||
let headerValue = collectASequenceOfBytes(
|
||||
(char) => char !== 0x0a && char !== 0x0d,
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 2. Remove any HTTP tab or space bytes from the end of header value.
|
||||
headerValue = removeChars(headerValue, false, true, (char) => char === 0x9 || char === 0x20)
|
||||
|
||||
// 3. Set contentType to the isomorphic decoding of header value.
|
||||
contentType = isomorphicDecode(headerValue)
|
||||
|
||||
break
|
||||
}
|
||||
case 'content-transfer-encoding': {
|
||||
let headerValue = collectASequenceOfBytes(
|
||||
(char) => char !== 0x0a && char !== 0x0d,
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
headerValue = removeChars(headerValue, false, true, (char) => char === 0x9 || char === 0x20)
|
||||
|
||||
encoding = isomorphicDecode(headerValue)
|
||||
|
||||
break
|
||||
}
|
||||
default: {
|
||||
// Collect a sequence of bytes that are not 0x0A (LF) or 0x0D (CR), given position.
|
||||
// (Do nothing with those bytes.)
|
||||
collectASequenceOfBytes(
|
||||
(char) => char !== 0x0a && char !== 0x0d,
|
||||
input,
|
||||
position
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// 2.9. If position does not point to a sequence of bytes starting with 0x0D 0x0A
|
||||
// (CR LF), return failure. Otherwise, advance position by 2 (past the newline).
|
||||
if (input[position.position] !== 0x0d && input[position.position + 1] !== 0x0a) {
|
||||
return 'failure'
|
||||
} else {
|
||||
position.position += 2
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://andreubotella.github.io/multipart-form-data/#parse-a-multipart-form-data-name
|
||||
* @param {Buffer} input
|
||||
* @param {{ position: number }} position
|
||||
*/
|
||||
function parseMultipartFormDataName (input, position) {
|
||||
// 1. Assert: The byte at (position - 1) is 0x22 (").
|
||||
assert(input[position.position - 1] === 0x22)
|
||||
|
||||
// 2. Let name be the result of collecting a sequence of bytes that are not 0x0A (LF), 0x0D (CR) or 0x22 ("), given position.
|
||||
/** @type {string | Buffer} */
|
||||
let name = collectASequenceOfBytes(
|
||||
(char) => char !== 0x0a && char !== 0x0d && char !== 0x22,
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 3. If the byte at position is not 0x22 ("), return failure. Otherwise, advance position by 1.
|
||||
if (input[position.position] !== 0x22) {
|
||||
return null // name could be 'failure'
|
||||
} else {
|
||||
position.position++
|
||||
}
|
||||
|
||||
// 4. Replace any occurrence of the following subsequences in name with the given byte:
|
||||
// - `%0A`: 0x0A (LF)
|
||||
// - `%0D`: 0x0D (CR)
|
||||
// - `%22`: 0x22 (")
|
||||
name = new TextDecoder().decode(name)
|
||||
.replace(/%0A/ig, '\n')
|
||||
.replace(/%0D/ig, '\r')
|
||||
.replace(/%22/g, '"')
|
||||
|
||||
// 5. Return the UTF-8 decoding without BOM of name.
|
||||
return name
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {(char: number) => boolean} condition
|
||||
* @param {Buffer} input
|
||||
* @param {{ position: number }} position
|
||||
*/
|
||||
function collectASequenceOfBytes (condition, input, position) {
|
||||
let start = position.position
|
||||
|
||||
while (start < input.length && condition(input[start])) {
|
||||
++start
|
||||
}
|
||||
|
||||
return input.subarray(position.position, (position.position = start))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Buffer} buf
|
||||
* @param {boolean} leading
|
||||
* @param {boolean} trailing
|
||||
* @param {(charCode: number) => boolean} predicate
|
||||
* @returns {Buffer}
|
||||
*/
|
||||
function removeChars (buf, leading, trailing, predicate) {
|
||||
let lead = 0
|
||||
let trail = buf.length - 1
|
||||
|
||||
if (leading) {
|
||||
while (lead < buf.length && predicate(buf[lead])) lead++
|
||||
}
|
||||
|
||||
if (trailing) {
|
||||
while (trail > 0 && predicate(buf[trail])) trail--
|
||||
}
|
||||
|
||||
return lead === 0 && trail === buf.length - 1 ? buf : buf.subarray(lead, trail + 1)
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if {@param buffer} starts with {@param start}
|
||||
* @param {Buffer} buffer
|
||||
* @param {Buffer} start
|
||||
* @param {{ position: number }} position
|
||||
*/
|
||||
function bufferStartsWith (buffer, start, position) {
|
||||
if (buffer.length < start.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
for (let i = 0; i < start.length; i++) {
|
||||
if (start[i] !== buffer[position.position + i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
multipartFormDataParser,
|
||||
validateBoundary,
|
||||
escapeFormDataName
|
||||
}
|
||||
@@ -1,11 +1,12 @@
|
||||
'use strict'
|
||||
|
||||
const { isBlobLike, toUSVString, makeIterator } = require('./util')
|
||||
const { isBlobLike, iteratorMixin } = require('./util')
|
||||
const { kState } = require('./symbols')
|
||||
const { kEnumerableProperty } = require('../core/util')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
const { File: UndiciFile, FileLike, isFileLike } = require('./file')
|
||||
const { webidl } = require('./webidl')
|
||||
const { File: NativeFile } = require('node:buffer')
|
||||
const nodeUtil = require('node:util')
|
||||
|
||||
/** @type {globalThis['File']} */
|
||||
const File = NativeFile ?? UndiciFile
|
||||
@@ -133,7 +134,7 @@ class FormData {
|
||||
? webidl.converters.Blob(value, { strict: false })
|
||||
: webidl.converters.USVString(value)
|
||||
filename = arguments.length === 3
|
||||
? toUSVString(filename)
|
||||
? webidl.converters.USVString(filename)
|
||||
: undefined
|
||||
|
||||
// 2. Let entry be the result of creating an entry with name, value, and
|
||||
@@ -155,61 +156,32 @@ class FormData {
|
||||
}
|
||||
}
|
||||
|
||||
entries () {
|
||||
webidl.brandCheck(this, FormData)
|
||||
[nodeUtil.inspect.custom] (depth, options) {
|
||||
const state = this[kState].reduce((a, b) => {
|
||||
if (a[b.name]) {
|
||||
if (Array.isArray(a[b.name])) {
|
||||
a[b.name].push(b.value)
|
||||
} else {
|
||||
a[b.name] = [a[b.name], b.value]
|
||||
}
|
||||
} else {
|
||||
a[b.name] = b.value
|
||||
}
|
||||
|
||||
return makeIterator(
|
||||
() => this[kState],
|
||||
'FormData',
|
||||
'key+value',
|
||||
'name', 'value'
|
||||
)
|
||||
}
|
||||
return a
|
||||
}, { __proto__: null })
|
||||
|
||||
keys () {
|
||||
webidl.brandCheck(this, FormData)
|
||||
options.depth ??= depth
|
||||
options.colors ??= true
|
||||
|
||||
return makeIterator(
|
||||
() => this[kState],
|
||||
'FormData',
|
||||
'key',
|
||||
'name', 'value'
|
||||
)
|
||||
}
|
||||
const output = nodeUtil.formatWithOptions(options, state)
|
||||
|
||||
values () {
|
||||
webidl.brandCheck(this, FormData)
|
||||
|
||||
return makeIterator(
|
||||
() => this[kState],
|
||||
'FormData',
|
||||
'value',
|
||||
'name', 'value'
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {(value: string, key: string, self: FormData) => void} callbackFn
|
||||
* @param {unknown} thisArg
|
||||
*/
|
||||
forEach (callbackFn, thisArg = globalThis) {
|
||||
webidl.brandCheck(this, FormData)
|
||||
|
||||
webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' })
|
||||
|
||||
if (typeof callbackFn !== 'function') {
|
||||
throw new TypeError(
|
||||
"Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'."
|
||||
)
|
||||
}
|
||||
|
||||
for (const [key, value] of this) {
|
||||
callbackFn.call(thisArg, value, key, this)
|
||||
}
|
||||
// remove [Object null prototype]
|
||||
return `FormData ${output.slice(output.indexOf(']') + 2)}`
|
||||
}
|
||||
}
|
||||
|
||||
FormData.prototype[Symbol.iterator] = FormData.prototype.entries
|
||||
iteratorMixin('FormData', FormData, kState, 'name', 'value')
|
||||
|
||||
Object.defineProperties(FormData.prototype, {
|
||||
append: kEnumerableProperty,
|
||||
@@ -218,11 +190,6 @@ Object.defineProperties(FormData.prototype, {
|
||||
getAll: kEnumerableProperty,
|
||||
has: kEnumerableProperty,
|
||||
set: kEnumerableProperty,
|
||||
entries: kEnumerableProperty,
|
||||
keys: kEnumerableProperty,
|
||||
values: kEnumerableProperty,
|
||||
forEach: kEnumerableProperty,
|
||||
[Symbol.iterator]: { enumerable: false },
|
||||
[Symbol.toStringTag]: {
|
||||
value: 'FormData',
|
||||
configurable: true
|
||||
@@ -238,15 +205,12 @@ Object.defineProperties(FormData.prototype, {
|
||||
*/
|
||||
function makeEntry (name, value, filename) {
|
||||
// 1. Set name to the result of converting name into a scalar value string.
|
||||
// "To convert a string into a scalar value string, replace any surrogates
|
||||
// with U+FFFD."
|
||||
// see: https://nodejs.org/dist/latest-v20.x/docs/api/buffer.html#buftostringencoding-start-end
|
||||
name = Buffer.from(name).toString('utf8')
|
||||
// Note: This operation was done by the webidl converter USVString.
|
||||
|
||||
// 2. If value is a string, then set value to the result of converting
|
||||
// value into a scalar value string.
|
||||
if (typeof value === 'string') {
|
||||
value = Buffer.from(value).toString('utf8')
|
||||
// Note: This operation was done by the webidl converter USVString.
|
||||
} else {
|
||||
// 3. Otherwise:
|
||||
|
||||
@@ -277,4 +241,4 @@ function makeEntry (name, value, filename) {
|
||||
return { name, value }
|
||||
}
|
||||
|
||||
module.exports = { FormData }
|
||||
module.exports = { FormData, makeEntry }
|
||||
@@ -2,16 +2,17 @@
|
||||
|
||||
'use strict'
|
||||
|
||||
const { kHeadersList, kConstruct } = require('../core/symbols')
|
||||
const { kHeadersList, kConstruct } = require('../../core/symbols')
|
||||
const { kGuard } = require('./symbols')
|
||||
const { kEnumerableProperty } = require('../core/util')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
const {
|
||||
makeIterator,
|
||||
iteratorMixin,
|
||||
isValidHeaderName,
|
||||
isValidHeaderValue
|
||||
} = require('./util')
|
||||
const { webidl } = require('./webidl')
|
||||
const assert = require('node:assert')
|
||||
const util = require('util')
|
||||
|
||||
const kHeadersMap = Symbol('headers map')
|
||||
const kHeadersSortedMap = Symbol('headers map sorted')
|
||||
@@ -120,6 +121,10 @@ function appendHeader (headers, name, value) {
|
||||
// privileged no-CORS request headers from headers
|
||||
}
|
||||
|
||||
function compareHeaderName (a, b) {
|
||||
return a[0] < b[0] ? -1 : 1
|
||||
}
|
||||
|
||||
class HeadersList {
|
||||
/** @type {[string, string][]|null} */
|
||||
cookies = null
|
||||
@@ -237,7 +242,7 @@ class HeadersList {
|
||||
|
||||
* [Symbol.iterator] () {
|
||||
// use the lowercased name
|
||||
for (const [name, { value }] of this[kHeadersMap]) {
|
||||
for (const { 0: name, 1: { value } } of this[kHeadersMap]) {
|
||||
yield [name, value]
|
||||
}
|
||||
}
|
||||
@@ -253,6 +258,79 @@ class HeadersList {
|
||||
|
||||
return headers
|
||||
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#convert-header-names-to-a-sorted-lowercase-set
|
||||
toSortedArray () {
|
||||
const size = this[kHeadersMap].size
|
||||
const array = new Array(size)
|
||||
// In most cases, you will use the fast-path.
|
||||
// fast-path: Use binary insertion sort for small arrays.
|
||||
if (size <= 32) {
|
||||
if (size === 0) {
|
||||
// If empty, it is an empty array. To avoid the first index assignment.
|
||||
return array
|
||||
}
|
||||
// Improve performance by unrolling loop and avoiding double-loop.
|
||||
// Double-loop-less version of the binary insertion sort.
|
||||
const iterator = this[kHeadersMap][Symbol.iterator]()
|
||||
const firstValue = iterator.next().value
|
||||
// set [name, value] to first index.
|
||||
array[0] = [firstValue[0], firstValue[1].value]
|
||||
// https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
|
||||
// 3.2.2. Assert: value is non-null.
|
||||
assert(firstValue[1].value !== null)
|
||||
for (
|
||||
let i = 1, j = 0, right = 0, left = 0, pivot = 0, x, value;
|
||||
i < size;
|
||||
++i
|
||||
) {
|
||||
// get next value
|
||||
value = iterator.next().value
|
||||
// set [name, value] to current index.
|
||||
x = array[i] = [value[0], value[1].value]
|
||||
// https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
|
||||
// 3.2.2. Assert: value is non-null.
|
||||
assert(x[1] !== null)
|
||||
left = 0
|
||||
right = i
|
||||
// binary search
|
||||
while (left < right) {
|
||||
// middle index
|
||||
pivot = left + ((right - left) >> 1)
|
||||
// compare header name
|
||||
if (array[pivot][0] <= x[0]) {
|
||||
left = pivot + 1
|
||||
} else {
|
||||
right = pivot
|
||||
}
|
||||
}
|
||||
if (i !== pivot) {
|
||||
j = i
|
||||
while (j > left) {
|
||||
array[j] = array[--j]
|
||||
}
|
||||
array[left] = x
|
||||
}
|
||||
}
|
||||
/* c8 ignore next 4 */
|
||||
if (!iterator.next().done) {
|
||||
// This is for debugging and will never be called.
|
||||
throw new TypeError('Unreachable')
|
||||
}
|
||||
return array
|
||||
} else {
|
||||
// This case would be a rare occurrence.
|
||||
// slow-path: fallback
|
||||
let i = 0
|
||||
for (const { 0: name, 1: { value } } of this[kHeadersMap]) {
|
||||
array[i++] = [name, value]
|
||||
// https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
|
||||
// 3.2.2. Assert: value is non-null.
|
||||
assert(value !== null)
|
||||
}
|
||||
return array.sort(compareHeaderName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#headers-class
|
||||
@@ -454,12 +532,19 @@ class Headers {
|
||||
|
||||
// 2. Let names be the result of convert header names to a sorted-lowercase
|
||||
// set with all the names of the headers in list.
|
||||
const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)
|
||||
const names = this[kHeadersList].toSortedArray()
|
||||
|
||||
const cookies = this[kHeadersList].cookies
|
||||
|
||||
// fast-path
|
||||
if (cookies === null || cookies.length === 1) {
|
||||
// Note: The non-null assertion of value has already been done by `HeadersList#toSortedArray`
|
||||
return (this[kHeadersList][kHeadersSortedMap] = names)
|
||||
}
|
||||
|
||||
// 3. For each name of names:
|
||||
for (let i = 0; i < names.length; ++i) {
|
||||
const [name, value] = names[i]
|
||||
const { 0: name, 1: value } = names[i]
|
||||
// 1. If name is `set-cookie`, then:
|
||||
if (name === 'set-cookie') {
|
||||
// 1. Let values be a list of all values of headers in list whose name
|
||||
@@ -476,80 +561,29 @@ class Headers {
|
||||
// 1. Let value be the result of getting name from list.
|
||||
|
||||
// 2. Assert: value is non-null.
|
||||
assert(value !== null)
|
||||
// Note: This operation was done by `HeadersList#toSortedArray`.
|
||||
|
||||
// 3. Append (name, value) to headers.
|
||||
headers.push([name, value])
|
||||
}
|
||||
}
|
||||
|
||||
this[kHeadersList][kHeadersSortedMap] = headers
|
||||
|
||||
// 4. Return headers.
|
||||
return headers
|
||||
return (this[kHeadersList][kHeadersSortedMap] = headers)
|
||||
}
|
||||
|
||||
keys () {
|
||||
webidl.brandCheck(this, Headers)
|
||||
[util.inspect.custom] (depth, options) {
|
||||
options.depth ??= depth
|
||||
|
||||
return makeIterator(
|
||||
() => this[kHeadersSortedMap],
|
||||
'Headers',
|
||||
'key',
|
||||
0, 1
|
||||
)
|
||||
}
|
||||
|
||||
values () {
|
||||
webidl.brandCheck(this, Headers)
|
||||
|
||||
return makeIterator(
|
||||
() => this[kHeadersSortedMap],
|
||||
'Headers',
|
||||
'value',
|
||||
0, 1
|
||||
)
|
||||
}
|
||||
|
||||
entries () {
|
||||
webidl.brandCheck(this, Headers)
|
||||
|
||||
return makeIterator(
|
||||
() => this[kHeadersSortedMap],
|
||||
'Headers',
|
||||
'key+value',
|
||||
0, 1
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {(value: string, key: string, self: Headers) => void} callbackFn
|
||||
* @param {unknown} thisArg
|
||||
*/
|
||||
forEach (callbackFn, thisArg = globalThis) {
|
||||
webidl.brandCheck(this, Headers)
|
||||
|
||||
webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })
|
||||
|
||||
if (typeof callbackFn !== 'function') {
|
||||
throw new TypeError(
|
||||
"Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'."
|
||||
)
|
||||
}
|
||||
|
||||
for (const [key, value] of this) {
|
||||
callbackFn.call(thisArg, value, key, this)
|
||||
}
|
||||
}
|
||||
|
||||
[Symbol.for('nodejs.util.inspect.custom')] () {
|
||||
webidl.brandCheck(this, Headers)
|
||||
|
||||
return this[kHeadersList]
|
||||
return `Headers ${util.formatWithOptions(options, this[kHeadersList].entries)}`
|
||||
}
|
||||
}
|
||||
|
||||
Headers.prototype[Symbol.iterator] = Headers.prototype.entries
|
||||
Object.defineProperty(Headers.prototype, util.inspect.custom, {
|
||||
enumerable: false
|
||||
})
|
||||
|
||||
iteratorMixin('Headers', Headers, kHeadersSortedMap, 0, 1)
|
||||
|
||||
Object.defineProperties(Headers.prototype, {
|
||||
append: kEnumerableProperty,
|
||||
@@ -558,11 +592,6 @@ Object.defineProperties(Headers.prototype, {
|
||||
has: kEnumerableProperty,
|
||||
set: kEnumerableProperty,
|
||||
getSetCookie: kEnumerableProperty,
|
||||
keys: kEnumerableProperty,
|
||||
values: kEnumerableProperty,
|
||||
entries: kEnumerableProperty,
|
||||
forEach: kEnumerableProperty,
|
||||
[Symbol.iterator]: { enumerable: false },
|
||||
[Symbol.toStringTag]: {
|
||||
value: 'Headers',
|
||||
configurable: true
|
||||
@@ -571,8 +600,10 @@ Object.defineProperties(Headers.prototype, {
|
||||
|
||||
webidl.converters.HeadersInit = function (V) {
|
||||
if (webidl.util.Type(V) === 'Object') {
|
||||
if (V[Symbol.iterator]) {
|
||||
return webidl.converters['sequence<sequence<ByteString>>'](V)
|
||||
const iterator = Reflect.get(V, Symbol.iterator)
|
||||
|
||||
if (typeof iterator === 'function') {
|
||||
return webidl.converters['sequence<sequence<ByteString>>'](V, iterator.bind(V))
|
||||
}
|
||||
|
||||
return webidl.converters['record<ByteString, ByteString>'](V)
|
||||
@@ -587,6 +618,8 @@ webidl.converters.HeadersInit = function (V) {
|
||||
|
||||
module.exports = {
|
||||
fill,
|
||||
// for test.
|
||||
compareHeaderName,
|
||||
Headers,
|
||||
HeadersList
|
||||
}
|
||||
@@ -10,7 +10,7 @@ const {
|
||||
fromInnerResponse
|
||||
} = require('./response')
|
||||
const { HeadersList } = require('./headers')
|
||||
const { Request, makeRequest } = require('./request')
|
||||
const { Request, cloneRequest } = require('./request')
|
||||
const zlib = require('node:zlib')
|
||||
const {
|
||||
bytesMatch,
|
||||
@@ -47,7 +47,7 @@ const {
|
||||
createInflate,
|
||||
extractMimeType
|
||||
} = require('./util')
|
||||
const { kState } = require('./symbols')
|
||||
const { kState, kDispatcher } = require('./symbols')
|
||||
const assert = require('node:assert')
|
||||
const { safelyExtractBody, extractBody } = require('./body')
|
||||
const {
|
||||
@@ -59,13 +59,17 @@ const {
|
||||
} = require('./constants')
|
||||
const EE = require('node:events')
|
||||
const { Readable, pipeline } = require('node:stream')
|
||||
const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor, bufferToLowerCasedHeaderName } = require('../core/util')
|
||||
const { dataURLProcessor, serializeAMimeType, minimizeSupportedMimeType } = require('./dataURL')
|
||||
const { getGlobalDispatcher } = require('../global')
|
||||
const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor, bufferToLowerCasedHeaderName } = require('../../core/util')
|
||||
const { dataURLProcessor, serializeAMimeType, minimizeSupportedMimeType } = require('./data-url')
|
||||
const { getGlobalDispatcher } = require('../../global')
|
||||
const { webidl } = require('./webidl')
|
||||
const { STATUS_CODES } = require('node:http')
|
||||
const GET_OR_HEAD = ['GET', 'HEAD']
|
||||
|
||||
const defaultUserAgent = typeof __UNDICI_IS_NODE__ !== 'undefined' || typeof esbuildDetection !== 'undefined'
|
||||
? 'node'
|
||||
: 'undici'
|
||||
|
||||
/** @type {import('buffer').resolveObjectURL} */
|
||||
let resolveObjectURL
|
||||
|
||||
@@ -77,12 +81,6 @@ class Fetch extends EE {
|
||||
this.connection = null
|
||||
this.dump = false
|
||||
this.state = 'ongoing'
|
||||
// 2 terminated listeners get added per request,
|
||||
// but only 1 gets removed. If there are 20 redirects,
|
||||
// 21 listeners will be added.
|
||||
// See https://github.com/nodejs/undici/issues/1711
|
||||
// TODO (fix): Find and fix root cause for leaked listener.
|
||||
this.setMaxListeners(21)
|
||||
}
|
||||
|
||||
terminate (reason) {
|
||||
@@ -206,7 +204,7 @@ function fetch (input, init = undefined) {
|
||||
const processResponse = (response) => {
|
||||
// 1. If locallyAborted is true, terminate these substeps.
|
||||
if (locallyAborted) {
|
||||
return Promise.resolve()
|
||||
return
|
||||
}
|
||||
|
||||
// 2. If response’s aborted flag is set, then:
|
||||
@@ -219,14 +217,14 @@ function fetch (input, init = undefined) {
|
||||
// deserializedError.
|
||||
|
||||
abortFetch(p, request, responseObject, controller.serializedAbortReason)
|
||||
return Promise.resolve()
|
||||
return
|
||||
}
|
||||
|
||||
// 3. If response is a network error, then reject p with a TypeError
|
||||
// and terminate these substeps.
|
||||
if (response.type === 'error') {
|
||||
p.reject(new TypeError('fetch failed', { cause: response.error }))
|
||||
return Promise.resolve()
|
||||
return
|
||||
}
|
||||
|
||||
// 4. Set responseObject to the result of creating a Response object,
|
||||
@@ -241,7 +239,7 @@ function fetch (input, init = undefined) {
|
||||
request,
|
||||
processResponseEndOfBody: handleFetchDone,
|
||||
processResponse,
|
||||
dispatcher: init?.dispatcher ?? getGlobalDispatcher() // undici
|
||||
dispatcher: requestObject[kDispatcher] // undici
|
||||
})
|
||||
|
||||
// 14. Return p.
|
||||
@@ -363,9 +361,9 @@ function fetching ({
|
||||
processResponseEndOfBody,
|
||||
processResponseConsumeBody,
|
||||
useParallelQueue = false,
|
||||
dispatcher // undici
|
||||
dispatcher = getGlobalDispatcher() // undici
|
||||
}) {
|
||||
// This has bitten me in the ass more times than I'd like to admit.
|
||||
// Ensure that the dispatcher is set accordingly
|
||||
assert(dispatcher)
|
||||
|
||||
// 1. Let taskDestination be null.
|
||||
@@ -1114,7 +1112,6 @@ function fetchFinale (fetchParams, response) {
|
||||
controller.enqueue(value)
|
||||
}
|
||||
},
|
||||
queuingStrategy: new ByteLengthQueuingStrategy({ highWaterMark: 16384 }),
|
||||
type: 'bytes'
|
||||
})
|
||||
|
||||
@@ -1408,7 +1405,7 @@ async function httpNetworkOrCacheFetch (
|
||||
// Otherwise:
|
||||
|
||||
// 1. Set httpRequest to a clone of request.
|
||||
httpRequest = makeRequest(request)
|
||||
httpRequest = cloneRequest(request)
|
||||
|
||||
// 2. Set httpFetchParams to a copy of fetchParams.
|
||||
httpFetchParams = { ...fetchParams }
|
||||
@@ -1478,7 +1475,7 @@ async function httpNetworkOrCacheFetch (
|
||||
// user agents should append `User-Agent`/default `User-Agent` value to
|
||||
// httpRequest’s header list.
|
||||
if (!httpRequest.headersList.contains('user-agent', true)) {
|
||||
httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node', true)
|
||||
httpRequest.headersList.append('user-agent', defaultUserAgent)
|
||||
}
|
||||
|
||||
// 15. If httpRequest’s cache mode is "default" and httpRequest’s header
|
||||
@@ -1929,7 +1926,6 @@ async function httpNetworkFetch (
|
||||
// cancelAlgorithm set to cancelAlgorithm.
|
||||
const stream = new ReadableStream(
|
||||
{
|
||||
highWaterMark: 16384,
|
||||
async start (controller) {
|
||||
fetchParams.controller.controller = controller
|
||||
},
|
||||
@@ -1939,15 +1935,14 @@ async function httpNetworkFetch (
|
||||
async cancel (reason) {
|
||||
await cancelAlgorithm(reason)
|
||||
},
|
||||
type: 'bytes',
|
||||
queuingStrategy: new ByteLengthQueuingStrategy({ highWaterMark: 16384 })
|
||||
type: 'bytes'
|
||||
}
|
||||
)
|
||||
|
||||
// 17. Run these steps, but abort when the ongoing fetch is terminated:
|
||||
|
||||
// 1. Set response’s body to a new body whose stream is stream.
|
||||
response.body = { stream }
|
||||
response.body = { stream, source: null, length: null }
|
||||
|
||||
// 2. If response is not a network error and request’s cache mode is
|
||||
// not "no-store", then update response in httpCache for request.
|
||||
@@ -1966,6 +1961,7 @@ async function httpNetworkFetch (
|
||||
// 19. Run these steps in parallel:
|
||||
|
||||
// 1. Run these steps, but abort when fetchParams is canceled:
|
||||
fetchParams.controller.onAborted = onAborted
|
||||
fetchParams.controller.on('terminated', onAborted)
|
||||
fetchParams.controller.resume = async () => {
|
||||
// 1. While true
|
||||
@@ -2149,7 +2145,15 @@ async function httpNetworkFetch (
|
||||
const keys = Object.keys(rawHeaders)
|
||||
for (let i = 0; i < keys.length; ++i) {
|
||||
// The header names are already in lowercase.
|
||||
headersList.append(keys[i], rawHeaders[keys[i]], true)
|
||||
const key = keys[i]
|
||||
const value = rawHeaders[key]
|
||||
if (key === 'set-cookie') {
|
||||
for (let j = 0; j < value.length; ++j) {
|
||||
headersList.append(key, value[j], true)
|
||||
}
|
||||
} else {
|
||||
headersList.append(key, value, true)
|
||||
}
|
||||
}
|
||||
// For H2, The header names are already in lowercase,
|
||||
// so we can avoid the `HeadersList#get` call here.
|
||||
@@ -2234,6 +2238,10 @@ async function httpNetworkFetch (
|
||||
fetchParams.controller.off('terminated', this.abort)
|
||||
}
|
||||
|
||||
if (fetchParams.controller.onAborted) {
|
||||
fetchParams.controller.off('terminated', fetchParams.controller.onAborted)
|
||||
}
|
||||
|
||||
fetchParams.controller.ended = true
|
||||
|
||||
this.body.push(null)
|
||||
@@ -4,8 +4,9 @@
|
||||
|
||||
const { extractBody, mixinBody, cloneBody } = require('./body')
|
||||
const { Headers, fill: fillHeaders, HeadersList } = require('./headers')
|
||||
const { FinalizationRegistry } = require('../compat/dispatcher-weakref')()
|
||||
const util = require('../core/util')
|
||||
const { FinalizationRegistry } = require('./dispatcher-weakref')()
|
||||
const util = require('../../core/util')
|
||||
const nodeUtil = require('node:util')
|
||||
const {
|
||||
isValidHTTPToken,
|
||||
sameOrigin,
|
||||
@@ -24,11 +25,11 @@ const {
|
||||
requestDuplex
|
||||
} = require('./constants')
|
||||
const { kEnumerableProperty } = util
|
||||
const { kHeaders, kSignal, kState, kGuard, kRealm } = require('./symbols')
|
||||
const { kHeaders, kSignal, kState, kGuard, kRealm, kDispatcher } = require('./symbols')
|
||||
const { webidl } = require('./webidl')
|
||||
const { getGlobalOrigin } = require('./global')
|
||||
const { URLSerializer } = require('./dataURL')
|
||||
const { kHeadersList, kConstruct } = require('../core/symbols')
|
||||
const { URLSerializer } = require('./data-url')
|
||||
const { kHeadersList, kConstruct } = require('../../core/symbols')
|
||||
const assert = require('node:assert')
|
||||
const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('node:events')
|
||||
|
||||
@@ -78,6 +79,8 @@ class Request {
|
||||
|
||||
// 5. If input is a string, then:
|
||||
if (typeof input === 'string') {
|
||||
this[kDispatcher] = init.dispatcher
|
||||
|
||||
// 1. Let parsedURL be the result of parsing input with baseURL.
|
||||
// 2. If parsedURL is failure, then throw a TypeError.
|
||||
let parsedURL
|
||||
@@ -101,6 +104,8 @@ class Request {
|
||||
// 5. Set fallbackMode to "cors".
|
||||
fallbackMode = 'cors'
|
||||
} else {
|
||||
this[kDispatcher] = init.dispatcher || input[kDispatcher]
|
||||
|
||||
// 6. Otherwise:
|
||||
|
||||
// 7. Assert: input is a Request object.
|
||||
@@ -700,7 +705,7 @@ class Request {
|
||||
}
|
||||
|
||||
// Returns a boolean indicating whether or not request is for a history
|
||||
// navigation (a.k.a. back-foward navigation).
|
||||
// navigation (a.k.a. back-forward navigation).
|
||||
get isHistoryNavigation () {
|
||||
webidl.brandCheck(this, Request)
|
||||
|
||||
@@ -767,6 +772,34 @@ class Request {
|
||||
// 4. Return clonedRequestObject.
|
||||
return fromInnerRequest(clonedRequest, ac.signal, this[kHeaders][kGuard], this[kRealm])
|
||||
}
|
||||
|
||||
[nodeUtil.inspect.custom] (depth, options) {
|
||||
if (options.depth === null) {
|
||||
options.depth = 2
|
||||
}
|
||||
|
||||
options.colors ??= true
|
||||
|
||||
const properties = {
|
||||
method: this.method,
|
||||
url: this.url,
|
||||
headers: this.headers,
|
||||
destination: this.destination,
|
||||
referrer: this.referrer,
|
||||
referrerPolicy: this.referrerPolicy,
|
||||
mode: this.mode,
|
||||
credentials: this.credentials,
|
||||
cache: this.cache,
|
||||
redirect: this.redirect,
|
||||
integrity: this.integrity,
|
||||
keepalive: this.keepalive,
|
||||
isReloadNavigation: this.isReloadNavigation,
|
||||
isHistoryNavigation: this.isHistoryNavigation,
|
||||
signal: this.signal
|
||||
}
|
||||
|
||||
return `Request ${nodeUtil.formatWithOptions(options, properties)}`
|
||||
}
|
||||
}
|
||||
|
||||
mixinBody(Request)
|
||||
@@ -979,7 +1012,11 @@ webidl.converters.RequestInit = webidl.dictionaryConverter([
|
||||
key: 'duplex',
|
||||
converter: webidl.converters.DOMString,
|
||||
allowedValues: requestDuplex
|
||||
},
|
||||
{
|
||||
key: 'dispatcher', // undici specific option
|
||||
converter: webidl.converters.any
|
||||
}
|
||||
])
|
||||
|
||||
module.exports = { Request, makeRequest, fromInnerRequest }
|
||||
module.exports = { Request, makeRequest, fromInnerRequest, cloneRequest }
|
||||
@@ -2,7 +2,8 @@
|
||||
|
||||
const { Headers, HeadersList, fill } = require('./headers')
|
||||
const { extractBody, cloneBody, mixinBody } = require('./body')
|
||||
const util = require('../core/util')
|
||||
const util = require('../../core/util')
|
||||
const nodeUtil = require('node:util')
|
||||
const { kEnumerableProperty } = util
|
||||
const {
|
||||
isValidReasonPhrase,
|
||||
@@ -21,8 +22,8 @@ const { kState, kHeaders, kGuard, kRealm } = require('./symbols')
|
||||
const { webidl } = require('./webidl')
|
||||
const { FormData } = require('./formdata')
|
||||
const { getGlobalOrigin } = require('./global')
|
||||
const { URLSerializer } = require('./dataURL')
|
||||
const { kHeadersList, kConstruct } = require('../core/symbols')
|
||||
const { URLSerializer } = require('./data-url')
|
||||
const { kHeadersList, kConstruct } = require('../../core/symbols')
|
||||
const assert = require('node:assert')
|
||||
const { types } = require('node:util')
|
||||
|
||||
@@ -252,6 +253,28 @@ class Response {
|
||||
// clonedResponse, this’s headers’s guard, and this’s relevant Realm.
|
||||
return fromInnerResponse(clonedResponse, this[kHeaders][kGuard], this[kRealm])
|
||||
}
|
||||
|
||||
[nodeUtil.inspect.custom] (depth, options) {
|
||||
if (options.depth === null) {
|
||||
options.depth = 2
|
||||
}
|
||||
|
||||
options.colors ??= true
|
||||
|
||||
const properties = {
|
||||
status: this.status,
|
||||
statusText: this.statusText,
|
||||
headers: this.headers,
|
||||
body: this.body,
|
||||
bodyUsed: this.bodyUsed,
|
||||
ok: this.ok,
|
||||
redirected: this.redirected,
|
||||
type: this.type,
|
||||
url: this.url
|
||||
}
|
||||
|
||||
return `Response ${nodeUtil.formatWithOptions(options, properties)}`
|
||||
}
|
||||
}
|
||||
|
||||
mixinBody(Response)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user