feat: init

Nick 2024-08-10 02:59:22 -05:00
parent 8379d09058
commit 2cfa016090
2929 changed files with 299087 additions and 3 deletions

21
node_modules/undici/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) Matteo Collina and Undici contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

466
node_modules/undici/README.md generated vendored Normal file

@@ -0,0 +1,466 @@
# undici
[![Node CI](https://github.com/nodejs/undici/actions/workflows/nodejs.yml/badge.svg)](https://github.com/nodejs/undici/actions/workflows/nodejs.yml) [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/) [![npm version](https://badge.fury.io/js/undici.svg)](https://badge.fury.io/js/undici) [![codecov](https://codecov.io/gh/nodejs/undici/branch/main/graph/badge.svg?token=yZL6LtXkOA)](https://codecov.io/gh/nodejs/undici)
An HTTP/1.1 client, written from scratch for Node.js.
> Undici means eleven in Italian. 1.1 -> 11 -> Eleven -> Undici.
> It is also a Stranger Things reference.
## How to get involved
Have a question about using Undici? Open a [Q&A Discussion](https://github.com/nodejs/undici/discussions/new) or join our official OpenJS [Slack](https://openjs-foundation.slack.com/archives/C01QF9Q31QD) channel.
Looking to contribute? Start by reading the [contributing guide](./CONTRIBUTING.md)
## Install
```
npm i undici
```
## Benchmarks
The benchmark is a simple data-fetching [example](https://github.com/nodejs/undici/blob/main/benchmarks/benchmark.js) using
50 TCP connections with a pipelining depth of 10, running on Node 20.10.0.
| _Tests_ | _Samples_ | _Result_ | _Tolerance_ | _Difference with slowest_ |
| :-----------------: | :-------: | :--------------: | :---------: | :-----------------------: |
| undici - fetch | 30 | 3704.43 req/sec | ± 2.95 % | - |
| http - no keepalive | 20 | 4275.30 req/sec | ± 2.60 % | + 15.41 % |
| node-fetch | 10 | 4759.42 req/sec | ± 0.87 % | + 28.48 % |
| request | 40 | 4803.37 req/sec | ± 2.77 % | + 29.67 % |
| axios | 45 | 4951.97 req/sec | ± 2.88 % | + 33.68 % |
| got | 10 | 5969.67 req/sec | ± 2.64 % | + 61.15 % |
| superagent | 10 | 9471.48 req/sec | ± 1.50 % | + 155.68 % |
| http - keepalive | 25 | 10327.49 req/sec | ± 2.95 % | + 178.79 % |
| undici - pipeline | 10 | 15053.41 req/sec | ± 1.63 % | + 306.36 % |
| undici - request | 10 | 19264.24 req/sec | ± 1.74 % | + 420.03 % |
| undici - stream | 15 | 20317.29 req/sec | ± 2.13 % | + 448.46 % |
| undici - dispatch | 10 | 24883.28 req/sec | ± 1.54 % | + 571.72 % |
The benchmark is a simple data-sending [example](https://github.com/nodejs/undici/blob/main/benchmarks/post-benchmark.js) using
50 TCP connections with a pipelining depth of 10, running on Node 20.10.0.
| _Tests_ | _Samples_ | _Result_ | _Tolerance_ | _Difference with slowest_ |
| :-----------------: | :-------: | :-------------: | :---------: | :-----------------------: |
| undici - fetch | 20 | 1968.42 req/sec | ± 2.63 % | - |
| http - no keepalive | 25 | 2330.30 req/sec | ± 2.99 % | + 18.38 % |
| node-fetch | 20 | 2485.36 req/sec | ± 2.70 % | + 26.26 % |
| got | 15 | 2787.68 req/sec | ± 2.56 % | + 41.62 % |
| request | 30 | 2805.10 req/sec | ± 2.59 % | + 42.50 % |
| axios | 10 | 3040.45 req/sec | ± 1.72 % | + 54.46 % |
| superagent | 20 | 3358.29 req/sec | ± 2.51 % | + 70.61 % |
| http - keepalive | 20 | 3477.94 req/sec | ± 2.51 % | + 76.69 % |
| undici - pipeline | 25 | 3812.61 req/sec | ± 2.80 % | + 93.69 % |
| undici - request | 10 | 6067.00 req/sec | ± 0.94 % | + 208.22 % |
| undici - stream | 10 | 6391.61 req/sec | ± 1.98 % | + 224.71 % |
| undici - dispatch | 10 | 6397.00 req/sec | ± 1.48 % | + 224.98 % |
## Quick Start
```js
import { request } from 'undici'
const {
statusCode,
headers,
trailers,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode)
console.log('headers', headers)
for await (const data of body) { console.log('data', data) }
console.log('trailers', trailers)
```
## Body Mixins
The `body` mixins are the most common way to format the request/response body. Mixins include:
- [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
- [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
> [!NOTE]
> The body returned from `undici.request` does not implement `.formData()`.
Example usage:
```js
import { request } from 'undici'
const {
statusCode,
headers,
trailers,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode)
console.log('headers', headers)
console.log('data', await body.json())
console.log('trailers', trailers)
```
_Note: Once a mixin has been called then the body cannot be reused, thus calling additional mixins on `.body`, e.g. `.body.json(); .body.text()` will result in an error `TypeError: unusable` being thrown and returned through the `Promise` rejection._
Should you need to access the `body` in plain-text after using a mixin, the best practice is to use the `.text()` mixin first and then manually parse the text to the desired format.
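For instance, a minimal sketch (assuming a hypothetical local endpoint at `http://localhost:3000/foo` that returns JSON) reads the body once as text and then parses it manually:
```js
import { request } from 'undici'

const { body } = await request('http://localhost:3000/foo')

// Read the body exactly once as text...
const text = await body.text()
// ...then derive other representations from the string instead of calling another mixin.
const data = JSON.parse(text)
console.log(text.length, data)
```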
For more information about their behavior, please reference the body mixin from the [Fetch Standard](https://fetch.spec.whatwg.org/#body-mixin).
## Common API Methods
This section documents our most commonly used API methods. Additional APIs are documented in their own files within the [docs](./docs/) folder and are accessible via the navigation list on the left side of the docs site.
### `undici.request([url, options]): Promise`
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`RequestOptions`](./docs/api/Dispatcher.md#parameter-requestoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
* **maxRedirections** `Integer` - Default: `0`
Returns a promise with the result of the `Dispatcher.request` method.
Calls `options.dispatcher.request(options)`.
See [Dispatcher.request](./docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details, and [request examples](./examples/README.md) for examples.
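As a minimal sketch of the options listed above (the URL and option values are placeholders):
```js
import { request } from 'undici'

// Hypothetical local endpoint; `method` and `maxRedirections` are the options described above.
const { statusCode, headers, body } = await request('http://localhost:3000/resource', {
  method: 'GET',
  maxRedirections: 2
})

console.log(statusCode, headers['content-type'])
console.log(await body.text())
```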
### `undici.stream([url, options, ]factory): Promise`
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`StreamOptions`](./docs/api/Dispatcher.md#parameter-streamoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
* **maxRedirections** `Integer` - Default: `0`
* **factory** `Dispatcher.stream.factory`
Returns a promise with the result of the `Dispatcher.stream` method.
Calls `options.dispatcher.stream(options, factory)`.
See [Dispatcher.stream](./docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details.
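A minimal sketch of streaming a response straight into a file (the URL and file name are hypothetical); the `factory` receives the status code, headers, and the `opaque` value passed in the options, and must return a `Writable`:
```js
import { stream } from 'undici'
import { createWriteStream } from 'node:fs'

await stream(
  'http://localhost:3000/foo',
  { method: 'GET', opaque: { filename: './response.out' } },
  // The response body is piped into the Writable returned here.
  ({ statusCode, headers, opaque }) => createWriteStream(opaque.filename)
)
```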
### `undici.pipeline([url, options, ]handler): Duplex`
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`PipelineOptions`](./docs/api/Dispatcher.md#parameter-pipelineoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
* **maxRedirections** `Integer` - Default: `0`
* **handler** `Dispatcher.pipeline.handler`
Returns: `stream.Duplex`
Calls `options.dispatcher.pipeline(options, handler)`.
See [Dispatcher.pipeline](./docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details.
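A minimal sketch (assuming a hypothetical local echo-style endpoint) of plugging the returned `Duplex` into `stream.pipeline`: data written to it becomes the request body, and the `Readable` returned by the handler becomes its readable side:
```js
import { pipeline } from 'undici'
import { pipeline as streamPipeline } from 'node:stream/promises'
import { Readable, Writable } from 'node:stream'

await streamPipeline(
  Readable.from(['hello undici']), // request body
  pipeline('http://localhost:3000/echo', { method: 'POST' }, ({ statusCode, body }) => body),
  new Writable({
    write (chunk, _encoding, callback) {
      console.log('response chunk:', chunk.toString())
      callback()
    }
  })
)
```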
### `undici.connect([url, options]): Promise`
Starts two-way communications with the requested resource using [HTTP CONNECT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/CONNECT).
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`ConnectOptions`](./docs/api/Dispatcher.md#parameter-connectoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **maxRedirections** `Integer` - Default: `0`
* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional)
Returns a promise with the result of the `Dispatcher.connect` method.
Calls `options.dispatcher.connect(options)`.
See [Dispatcher.connect](./docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details.
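A minimal sketch, assuming a hypothetical local server (or proxy) that accepts `CONNECT` requests; the resolved value includes the raw socket for two-way traffic:
```js
import { connect } from 'undici'

const { statusCode, socket } = await connect('http://localhost:3000')
console.log('CONNECT answered with', statusCode)

// Anything written to the socket now flows directly to the peer.
socket.write('ping')
socket.end()
```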
### `undici.fetch(input[, init]): Promise`
Implements [fetch](https://fetch.spec.whatwg.org/#fetch-method).
* https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch
* https://fetch.spec.whatwg.org/#fetch-method
Basic usage example:
```js
import { fetch } from 'undici'
const res = await fetch('https://example.com')
const json = await res.json()
console.log(json)
```
You can pass an optional dispatcher to `fetch` as:
```js
import { fetch, Agent } from 'undici'
const res = await fetch('https://example.com', {
// Mocks are also supported
dispatcher: new Agent({
keepAliveTimeout: 10,
keepAliveMaxTimeout: 10
})
})
const json = await res.json()
console.log(json)
```
#### `request.body`
A body can be of the following types:
- ArrayBuffer
- ArrayBufferView
- AsyncIterables
- Blob
- Iterables
- String
- URLSearchParams
- FormData
In this implementation of fetch, `request.body` also accepts async iterables, which are not part of the [Fetch Standard](https://fetch.spec.whatwg.org).
```js
import { fetch } from 'undici'
const data = {
async *[Symbol.asyncIterator]() {
yield 'hello'
yield 'world'
},
}
await fetch('https://example.com', { body: data, method: 'POST', duplex: 'half' })
```
Besides text data and buffers, [FormData](https://developer.mozilla.org/en-US/docs/Web/API/FormData) can also carry file data as streams via [Blob](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects:
```js
import { openAsBlob } from 'node:fs'
const file = await openAsBlob('./big.csv')
const body = new FormData()
body.set('file', file, 'big.csv')
await fetch('http://example.com', { method: 'POST', body })
```
#### `request.duplex`
- half
In this implementation of fetch, `request.duplex` must be set if `request.body` is a `ReadableStream` or an async iterable; however, fetch requests are currently always full duplex. For more detail, refer to the [Fetch Standard](https://fetch.spec.whatwg.org/#dom-requestinit-duplex).
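For example, a hedged sketch of sending a `ReadableStream` body (the upload URL is hypothetical; chunks are encoded to bytes before being enqueued):
```js
import { fetch } from 'undici'

const body = new ReadableStream({
  start (controller) {
    const encoder = new TextEncoder()
    controller.enqueue(encoder.encode('hello '))
    controller.enqueue(encoder.encode('world'))
    controller.close()
  }
})

// `duplex: 'half'` is required whenever the body is a ReadableStream.
await fetch('http://localhost:3000/upload', { method: 'POST', body, duplex: 'half' })
```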
#### `response.body`
Node.js has two kinds of streams: [web streams](https://nodejs.org/dist/latest-v16.x/docs/api/webstreams.html), which follow the API of the WHATWG web standard found in browsers, and an older Node-specific [streams API](https://nodejs.org/api/stream.html). `response.body` returns a readable web stream. If you would prefer to work with a Node stream, you can convert the web stream using `Readable.fromWeb()`.
```js
import { fetch } from 'undici'
import { Readable } from 'node:stream'
const response = await fetch('https://example.com')
const readableWebStream = response.body
const readableNodeStream = Readable.fromWeb(readableWebStream)
```
#### Specification Compliance
This section documents parts of the [Fetch Standard](https://fetch.spec.whatwg.org) that Undici does
not support or does not fully implement.
##### Garbage Collection
* https://fetch.spec.whatwg.org/#garbage-collection
The [Fetch Standard](https://fetch.spec.whatwg.org) allows users to skip consuming the response body by relying on
[garbage collection](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Memory_Management#garbage_collection) to release connection resources. Undici does not do the same. Therefore, it is important to always either consume or cancel the response body.
Garbage collection in Node.js is less aggressive and less deterministic than in browsers
(due to the lack of clear idle periods that browsers have through the rendering refresh rate),
which means that leaving the release of connection resources to the garbage collector can lead
to excessive connection usage, reduced performance (due to less connection re-use), and even
stalls or deadlocks when running out of connections.
```js
// Do
const headers = await fetch(url)
.then(async res => {
for await (const chunk of res.body) {
// force consumption of body
}
return res.headers
})
// Do not
const headers = await fetch(url)
.then(res => res.headers)
```
However, if you only need the headers, it is better to use the `HEAD` request method, which obviates the need to consume or cancel the response body. See [MDN - HTTP - HTTP request methods - HEAD](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD) for more details.
```js
const headers = await fetch(url, { method: 'HEAD' })
.then(res => res.headers)
```
##### Forbidden and Safelisted Header Names
* https://fetch.spec.whatwg.org/#cors-safelisted-response-header-name
* https://fetch.spec.whatwg.org/#forbidden-header-name
* https://fetch.spec.whatwg.org/#forbidden-response-header-name
* https://github.com/wintercg/fetch/issues/6
The [Fetch Standard](https://fetch.spec.whatwg.org) requires implementations to exclude certain headers from requests and responses. In browser environments, some headers are forbidden so the user agent remains in full control over them. In Undici, these constraints are removed to give more control to the user.
### `undici.upgrade([url, options]): Promise`
Upgrade to a different protocol. See [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details.
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`UpgradeOptions`](./docs/api/Dispatcher.md#parameter-upgradeoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **maxRedirections** `Integer` - Default: `0`
* **callback** `(error: Error | null, data: UpgradeData) => void` (optional)
Returns a promise with the result of the `Dispatcher.upgrade` method.
Calls `options.dispatcher.upgrade(options)`.
See [Dispatcher.upgrade](./docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details.
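A minimal sketch, assuming a hypothetical local server willing to switch protocols; the resolved value exposes the response headers and the raw socket (the `protocol` name below is a placeholder):
```js
import { upgrade } from 'undici'

const { headers, socket } = await upgrade('http://localhost:3000', {
  protocol: 'foo/1' // hypothetical application protocol
})

console.log('upgraded, server said:', headers)
socket.end()
```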
### `undici.setGlobalDispatcher(dispatcher)`
* dispatcher `Dispatcher`
Sets the global dispatcher used by Common API Methods.
### `undici.getGlobalDispatcher()`
Gets the global dispatcher used by Common API Methods.
Returns: `Dispatcher`
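A small sketch pairing the two (the Agent options and origin are illustrative):
```js
import { setGlobalDispatcher, getGlobalDispatcher, Agent, request } from 'undici'

// Every Common API Method call without an explicit `dispatcher` now goes through this Agent.
setGlobalDispatcher(new Agent({ keepAliveTimeout: 10_000, keepAliveMaxTimeout: 60_000 }))

console.log(getGlobalDispatcher() instanceof Agent) // true
await request('http://localhost:3000') // hypothetical local server, dispatched via the global Agent
```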
### `undici.setGlobalOrigin(origin)`
* origin `string | URL | undefined`
Sets the global origin used in `fetch`.
If `undefined` is passed, the global origin will be reset. This will cause `Response.redirect`, `new Request()`, and `fetch` to throw an error when a relative path is passed.
```js
setGlobalOrigin('http://localhost:3000')
const response = await fetch('/api/ping')
console.log(response.url) // http://localhost:3000/api/ping
```
### `undici.getGlobalOrigin()`
Gets the global origin used in `fetch`.
Returns: `URL`
### `UrlObject`
* **port** `string | number` (optional)
* **path** `string` (optional)
* **pathname** `string` (optional)
* **hostname** `string` (optional)
* **origin** `string` (optional)
* **protocol** `string` (optional)
* **search** `string` (optional)
## Specification Compliance
This section documents parts of the HTTP/1.1 specification that Undici does
not support or does not fully implement.
### Expect
Undici does not support the `Expect` request header field. The request
body is always immediately sent and the `100 Continue` response will be
ignored.
Refs: https://tools.ietf.org/html/rfc7231#section-5.1.1
### Pipelining
Undici will only use pipelining if configured with a `pipelining` factor
greater than `1`.
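For example, a minimal sketch of opting in (origin and factor are illustrative):
```js
import { Client } from 'undici'

// Allow up to two requests in flight on the single connection at a time.
const client = new Client('http://localhost:3000', { pipelining: 2 })
```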
Undici always assumes that connections are persistent and will immediately
pipeline requests, without checking whether the connection is persistent.
Hence, automatic fallback to HTTP/1.0 or HTTP/1.1 without pipelining is
not supported.
Undici will immediately pipeline when retrying requests after a failed
connection. However, Undici will not retry the first remaining requests in
the prior pipeline and instead error the corresponding callback/promise/stream.
Undici will abort all running requests in the pipeline when any of them are
aborted.
* Refs: https://tools.ietf.org/html/rfc2616#section-8.1.2.2
* Refs: https://tools.ietf.org/html/rfc7230#section-6.3.2
### Manual Redirect
Since it is not possible to manually follow an HTTP redirect on the server-side,
Undici returns the actual response instead of an `opaqueredirect` filtered one
when invoked with a `manual` redirect. This aligns `fetch()` with the other
implementations in Deno and Cloudflare Workers.
Refs: https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
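A hedged sketch, assuming a hypothetical local endpoint that answers with a 3xx status:
```js
import { fetch } from 'undici'

// With `redirect: 'manual'` undici hands back the actual redirect response
// instead of an `opaqueredirect` filtered one.
const res = await fetch('http://localhost:3000/old-path', { redirect: 'manual' })
console.log(res.status, res.headers.get('location'))
```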
## Workarounds
### Network address family autoselection
If you experience problems when connecting to a remote server that your DNS resolves to an IPv6 (AAAA record)
address first, your local router or ISP might have trouble connecting to IPv6 networks. In that case
undici will throw an error with code `UND_ERR_CONNECT_TIMEOUT`.
If the target server resolves to both IPv6 and IPv4 (A record) addresses and you are using a compatible Node version
(18.3.0 and above), you can fix the problem by providing the `autoSelectFamily` option (supported by both `undici.request`
and `undici.Agent`), which enables the family autoselection algorithm when establishing the connection.
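A minimal sketch of both forms (the target URL is illustrative):
```js
import { request, Agent, setGlobalDispatcher } from 'undici'

// Per request:
await request('https://example.com', { autoSelectFamily: true })

// Or for every request dispatched through an Agent:
setGlobalDispatcher(new Agent({ autoSelectFamily: true }))
```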
## Collaborators
* [__Daniele Belardi__](https://github.com/dnlup), <https://www.npmjs.com/~dnlup>
* [__Ethan Arrowood__](https://github.com/ethan-arrowood), <https://www.npmjs.com/~ethan_arrowood>
* [__Matteo Collina__](https://github.com/mcollina), <https://www.npmjs.com/~matteo.collina>
* [__Matthew Aitken__](https://github.com/KhafraDev), <https://www.npmjs.com/~khaf>
* [__Robert Nagy__](https://github.com/ronag), <https://www.npmjs.com/~ronag>
* [__Szymon Marczak__](https://github.com/szmarczak), <https://www.npmjs.com/~szmarczak>
* [__Tomas Della Vedova__](https://github.com/delvedor), <https://www.npmjs.com/~delvedor>
### Releasers
* [__Ethan Arrowood__](https://github.com/ethan-arrowood), <https://www.npmjs.com/~ethan_arrowood>
* [__Matteo Collina__](https://github.com/mcollina), <https://www.npmjs.com/~matteo.collina>
* [__Robert Nagy__](https://github.com/ronag), <https://www.npmjs.com/~ronag>
* [__Matthew Aitken__](https://github.com/KhafraDev), <https://www.npmjs.com/~khaf>
## License
MIT

80
node_modules/undici/docs/docs/api/Agent.md generated vendored Normal file

@@ -0,0 +1,80 @@
# Agent
Extends: `undici.Dispatcher`
Agent allows dispatching requests against multiple different origins.
Requests are not guaranteed to be dispatched in order of invocation.
## `new undici.Agent([options])`
Arguments:
* **options** `AgentOptions` (optional)
Returns: `Agent`
### Parameter: `AgentOptions`
Extends: [`PoolOptions`](Pool.md#parameter-pooloptions)
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)`
* **maxRedirections** `Integer` - Default: `0`. The number of HTTP redirections to follow unless otherwise specified in `DispatchOptions`.
* **interceptors** `{ Agent: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time.
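A minimal sketch of constructing an Agent with some of these options and using it for a request (the origin is hypothetical):
```js
import { Agent, request } from 'undici'

const agent = new Agent({
  maxRedirections: 1, // follow at most one redirect unless overridden per dispatch
  keepAliveTimeout: 10_000 // options inherited from PoolOptions/ClientOptions
})

const { statusCode, body } = await request('http://localhost:3000', { dispatcher: agent })
console.log(statusCode, await body.text())
```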
## Instance Properties
### `Agent.closed`
Implements [Client.closed](Client.md#clientclosed)
### `Agent.destroyed`
Implements [Client.destroyed](Client.md#clientdestroyed)
## Instance Methods
### `Agent.close([callback])`
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
### `Agent.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `Agent.dispatch(options: AgentDispatchOptions, handler)`
Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler).
#### Parameter: `AgentDispatchOptions`
Extends: [`DispatchOptions`](Dispatcher.md#parameter-dispatchoptions)
* **origin** `string | URL`
* **maxRedirections** `Integer`.
### `Agent.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
### `Agent.dispatch(options, handler)`
Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler).
### `Agent.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
### `Agent.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
### `Agent.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `Agent.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).

99
node_modules/undici/docs/docs/api/BalancedPool.md generated vendored Normal file

@@ -0,0 +1,99 @@
# Class: BalancedPool
Extends: `undici.Dispatcher`
A pool of [Pool](Pool.md) instances connected to multiple upstreams.
Requests are not guaranteed to be dispatched in order of invocation.
## `new BalancedPool(upstreams [, options])`
Arguments:
* **upstreams** `URL | string | string[]` - It should only include the **protocol, hostname, and port**.
* **options** `BalancedPoolOptions` (optional)
### Parameter: `BalancedPoolOptions`
Extends: [`PoolOptions`](Pool.md#parameter-pooloptions)
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)`
The `PoolOptions` are passed to each of the `Pool` instances being created.
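A minimal sketch (the upstream addresses are hypothetical replicas of the same service):
```js
import { BalancedPool } from 'undici'

const pool = new BalancedPool(['http://127.0.0.1:3001', 'http://127.0.0.1:3002'])
pool.addUpstream('http://127.0.0.1:3003') // upstreams can also be added after construction

// Requests are spread across the upstreams.
const { statusCode, body } = await pool.request({ path: '/', method: 'GET' })
console.log(statusCode, await body.text())

await pool.close()
```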
## Instance Properties
### `BalancedPool.upstreams`
Returns an array of upstreams that were previously added.
### `BalancedPool.closed`
Implements [Client.closed](Client.md#clientclosed)
### `BalancedPool.destroyed`
Implements [Client.destroyed](Client.md#clientdestroyed)
### `BalancedPool.stats`
Returns [`PoolStats`](PoolStats.md) instance for this pool.
## Instance Methods
### `BalancedPool.addUpstream(upstream)`
Add an upstream.
Arguments:
* **upstream** `string` - It should only include the **protocol, hostname, and port**.
### `BalancedPool.removeUpstream(upstream)`
Removes an upstream that was previously added.
### `BalancedPool.close([callback])`
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
### `BalancedPool.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `BalancedPool.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
### `BalancedPool.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler).
### `BalancedPool.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
### `BalancedPool.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
### `BalancedPool.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `BalancedPool.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).
## Instance Events
### Event: `'connect'`
See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect).
### Event: `'disconnect'`
See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect).
### Event: `'drain'`
See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain).

30
node_modules/undici/docs/docs/api/CacheStorage.md generated vendored Normal file

@@ -0,0 +1,30 @@
# CacheStorage
Undici exposes a W3C spec-compliant implementation of [CacheStorage](https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage) and [Cache](https://developer.mozilla.org/en-US/docs/Web/API/Cache).
## Opening a Cache
Undici exports a top-level CacheStorage instance. You can open a new Cache, or duplicate a Cache with an existing name, by using `CacheStorage.prototype.open`. If you open a Cache with the same name as an already-existing Cache, its list of cached Responses will be shared between both instances.
```mjs
import { caches } from 'undici'
const cache_1 = await caches.open('v1')
const cache_2 = await caches.open('v1')
// Although .open() creates a new instance,
assert(cache_1 !== cache_2)
// The same Response is matched in both.
assert.deepStrictEqual(await cache_1.match('/req'), await cache_2.match('/req'))
```
## Deleting a Cache
If a Cache is deleted, the cached Responses/Requests can still be used.
```mjs
const response = await cache_1.match('/req')
await caches.delete('v1')
await response.text() // the Response's body
```

274
node_modules/undici/docs/docs/api/Client.md generated vendored Normal file

@@ -0,0 +1,274 @@
# Class: Client
Extends: `undici.Dispatcher`
A basic HTTP/1.1 client, mapped on top of a single TCP/TLS connection. Pipelining is disabled by default.
Requests are not guaranteed to be dispatched in order of invocation.
## `new Client(url[, options])`
Arguments:
* **url** `URL | string` - Should only include the **protocol, hostname, and port**.
* **options** `ClientOptions` (optional)
Returns: `Client`
### Parameter: `ClientOptions`
> ⚠️ Warning: The `H2` support is experimental.
* **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds.
* **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
* **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Defaults to 10 minutes.
* **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds.
* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `1e3` - A number of milliseconds subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 1 second.
* **maxHeaderSize** `number | null` (optional) - Default: `--max-http-header-size` or `16384` - The maximum length of request headers in bytes. Defaults to Node.js' --max-http-header-size or 16KiB.
* **maxResponseSize** `number | null` (optional) - Default: `-1` - The maximum length of response body in bytes. Set to `-1` to disable.
* **pipelining** `number | null` (optional) - Default: `1` - The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Carefully consider your workload and environment before enabling concurrent requests as pipelining may reduce performance if used incorrectly. Pipelining is sensitive to network stack settings as well as head of line blocking caused by e.g. long running requests. Set to `0` to disable keep-alive connections.
* **connect** `ConnectOptions | Function | null` (optional) - Default: `null`.
* **strictContentLength** `Boolean` (optional) - Default: `true` - Whether to treat request content length mismatches as errors. If true, an error is thrown when the request content-length header doesn't match the length of the request body.
<!-- TODO: Remove once we drop its support -->
* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time. **Note: this is deprecated in favor of [Dispatcher#compose](./Dispatcher.md#dispatcher). Support will be dropped in the next major version.**
* **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version.
* **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details.
* **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation.
* **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame.
#### Parameter: `ConnectOptions`
Every TLS option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback).
Furthermore, the following options can be passed:
* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: 100.
* **timeout** `number | null` (optional) - In milliseconds, Default `10e3`.
* **servername** `string | null` (optional)
* **keepAlive** `boolean | null` (optional) - Default: `true` - TCP keep-alive enabled
* **keepAliveInitialDelay** `number | null` (optional) - Default: `60000` - TCP keep-alive interval for the socket in milliseconds
### Example - Basic Client instantiation
This will instantiate the undici Client, but it will not connect to the origin until something is queued. Consider using `client.connect` to prematurely connect to the origin, or just call `client.request`.
```js
'use strict'
import { Client } from 'undici'
const client = new Client('http://localhost:3000')
```
### Example - Custom connector
This will allow you to perform some additional check on the socket that will be used for the next request.
```js
'use strict'
import { Client, buildConnector } from 'undici'
const connector = buildConnector({ rejectUnauthorized: false })
const client = new Client('https://localhost:3000', {
  connect (opts, cb) {
    connector(opts, (err, socket) => {
      if (err) {
        cb(err)
      } else if (false /* replace with your own check on the socket */) {
        socket.destroy()
        cb(new Error('kaboom'))
      } else {
        cb(null, socket)
      }
    })
  }
})
```
## Instance Methods
### `Client.close([callback])`
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
### `Client.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
Waits until socket is closed before invoking the callback (or returning a promise if no callback is provided).
### `Client.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
### `Client.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler).
### `Client.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
### `Client.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
### `Client.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `Client.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).
## Instance Properties
### `Client.closed`
* `boolean`
`true` after `client.close()` has been called.
### `Client.destroyed`
* `boolean`
`true` after `client.destroy()` has been called or `client.close()` has been called and the client shutdown has completed.
### `Client.pipelining`
* `number`
Property to get and set the pipelining factor.
## Instance Events
### Event: `'connect'`
See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect).
Parameters:
* **origin** `URL`
* **targets** `Array<Dispatcher>`
Emitted when a socket has been created and connected. The client will connect once `client.size > 0`.
#### Example - Client connect event
```js
import { createServer } from 'http'
import { Client } from 'undici'
import { once } from 'events'
const server = createServer((request, response) => {
response.end('Hello, World!')
}).listen()
await once(server, 'listening')
const client = new Client(`http://localhost:${server.address().port}`)
client.on('connect', (origin) => {
console.log(`Connected to ${origin}`) // should print before the request body statement
})
try {
const { body } = await client.request({
path: '/',
method: 'GET'
})
body.setEncoding('utf-8')
body.on('data', console.log)
client.close()
server.close()
} catch (error) {
console.error(error)
client.close()
server.close()
}
```
### Event: `'disconnect'`
See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect).
Parameters:
* **origin** `URL`
* **targets** `Array<Dispatcher>`
* **error** `Error`
Emitted when socket has disconnected. The error argument of the event is the error which caused the socket to disconnect. The client will reconnect if or once `client.size > 0`.
#### Example - Client disconnect event
```js
import { createServer } from 'http'
import { Client } from 'undici'
import { once } from 'events'
const server = createServer((request, response) => {
response.destroy()
}).listen()
await once(server, 'listening')
const client = new Client(`http://localhost:${server.address().port}`)
client.on('disconnect', (origin) => {
console.log(`Disconnected from ${origin}`)
})
try {
await client.request({
path: '/',
method: 'GET'
})
} catch (error) {
console.error(error.message)
client.close()
server.close()
}
```
### Event: `'drain'`
Emitted when pipeline is no longer busy.
See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain).
#### Example - Client drain event
```js
import { createServer } from 'http'
import { Client } from 'undici'
import { once } from 'events'
const server = createServer((request, response) => {
response.end('Hello, World!')
}).listen()
await once(server, 'listening')
const client = new Client(`http://localhost:${server.address().port}`)
client.on('drain', () => {
console.log('drain event')
client.close()
server.close()
})
const requests = [
client.request({ path: '/', method: 'GET' }),
client.request({ path: '/', method: 'GET' }),
client.request({ path: '/', method: 'GET' })
]
await Promise.all(requests)
console.log('requests completed')
```
### Event: `'error'`
Emitted for user errors, such as an exception thrown in the `onError` handler.

115
node_modules/undici/docs/docs/api/Connector.md generated vendored Normal file

@@ -0,0 +1,115 @@
# Connector
Undici creates the underlying socket via the connector builder.
Normally, this happens automatically and you don't need to worry about it,
but if you need to perform additional checks on the socket that will be used,
this is the right place to do so.
If you want to create a custom connector, you must import the `buildConnector` utility.
#### Parameter: `buildConnector.BuildOptions`
Every TLS option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback).
Furthermore, the following options can be passed:
* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: `100`.
* **timeout** `number | null` (optional) - In milliseconds. Default `10e3`.
* **servername** `string | null` (optional)
Once you call `buildConnector`, it will return a connector function, which takes the following parameters.
#### Parameter: `connector.Options`
* **hostname** `string` (required)
* **host** `string` (optional)
* **protocol** `string` (required)
* **port** `string` (required)
* **servername** `string` (optional)
* **localAddress** `string | null` (optional) Local address the socket should connect from.
* **httpSocket** `Socket` (optional) Establish a secure connection on a given socket rather than creating a new socket. It can only be used when upgrading an existing connection to TLS.
### Basic example
```js
'use strict'
import { Client, buildConnector } from 'undici'
const connector = buildConnector({ rejectUnauthorized: false })
const client = new Client('https://localhost:3000', {
  connect (opts, cb) {
    connector(opts, (err, socket) => {
      if (err) {
        cb(err)
      } else if (false /* replace with your own check on the socket */) {
        socket.destroy()
        cb(new Error('kaboom'))
      } else {
        cb(null, socket)
      }
    })
  }
})
```
### Example: validate the CA fingerprint
```js
'use strict'
import { Client, buildConnector } from 'undici'
const caFingerprint = 'FO:OB:AR'
const connector = buildConnector({ rejectUnauthorized: false })
const client = new Client('https://localhost:3000', {
  connect (opts, cb) {
    connector(opts, (err, socket) => {
      if (err) {
        cb(err)
      } else if (getIssuerCertificate(socket).fingerprint256 !== caFingerprint) {
        socket.destroy()
        cb(new Error('Fingerprint does not match or malformed certificate'))
      } else {
        cb(null, socket)
      }
    })
  }
})

client.request({
  path: '/',
  method: 'GET'
}, (err, data) => {
  if (err) throw err

  const bufs = []
  data.body.on('data', (buf) => {
    bufs.push(buf)
  })
  data.body.on('end', () => {
    console.log(Buffer.concat(bufs).toString('utf8'))
    client.close()
  })
})

function getIssuerCertificate (socket) {
  let certificate = socket.getPeerCertificate(true)
  while (certificate && Object.keys(certificate).length > 0) {
    // invalid certificate
    if (certificate.issuerCertificate == null) {
      return null
    }
    // We have reached the root certificate.
    // In case of self-signed certificates, `issuerCertificate` may be a circular reference.
    if (certificate.fingerprint256 === certificate.issuerCertificate.fingerprint256) {
      break
    }
    // continue the loop
    certificate = certificate.issuerCertificate
  }
  return certificate
}
```

57
node_modules/undici/docs/docs/api/ContentType.md generated vendored Normal file

@@ -0,0 +1,57 @@
# MIME Type Parsing
## `MIMEType` interface
* **type** `string`
* **subtype** `string`
* **parameters** `Map<string, string>`
* **essence** `string`
## `parseMIMEType(input)`
Implements [parse a MIME type](https://mimesniff.spec.whatwg.org/#parse-a-mime-type).
Parses a MIME type, returning its type, subtype, and any associated parameters. If the parser can't parse an input it returns the string literal `'failure'`.
```js
import { parseMIMEType } from 'undici'
parseMIMEType('text/html; charset=gbk')
// {
// type: 'text',
// subtype: 'html',
// parameters: Map(1) { 'charset' => 'gbk' },
// essence: 'text/html'
// }
```
Arguments:
* **input** `string`
Returns: `MIMEType|'failure'`
## `serializeAMimeType(input)`
Implements [serialize a MIME type](https://mimesniff.spec.whatwg.org/#serialize-a-mime-type).
Serializes a MIMEType object.
```js
import { serializeAMimeType } from 'undici'
serializeAMimeType({
type: 'text',
subtype: 'html',
parameters: new Map([['charset', 'gbk']]),
essence: 'text/html'
})
// text/html;charset=gbk
```
Arguments:
* **mimeType** `MIMEType`
Returns: `string`

101
node_modules/undici/docs/docs/api/Cookies.md generated vendored Normal file

@@ -0,0 +1,101 @@
# Cookie Handling
## `Cookie` interface
* **name** `string`
* **value** `string`
* **expires** `Date|number` (optional)
* **maxAge** `number` (optional)
* **domain** `string` (optional)
* **path** `string` (optional)
* **secure** `boolean` (optional)
* **httpOnly** `boolean` (optional)
* **sameSite** `'Strict'|'Lax'|'None'` (optional)
* **unparsed** `string[]` (optional) Left over attributes that weren't parsed.
## `deleteCookie(headers, name[, attributes])`
Sets the expiry time of the cookie to the unix epoch, causing browsers to delete it when received.
```js
import { deleteCookie, Headers } from 'undici'
const headers = new Headers()
deleteCookie(headers, 'name')
console.log(headers.get('set-cookie')) // name=; Expires=Thu, 01 Jan 1970 00:00:00 GMT
```
Arguments:
* **headers** `Headers`
* **name** `string`
* **attributes** `{ path?: string, domain?: string }` (optional)
Returns: `void`
## `getCookies(headers)`
Parses the `Cookie` header and returns a list of attributes and values.
```js
import { getCookies, Headers } from 'undici'
const headers = new Headers({
cookie: 'get=cookies; and=attributes'
})
console.log(getCookies(headers)) // { get: 'cookies', and: 'attributes' }
```
Arguments:
* **headers** `Headers`
Returns: `Record<string, string>`
## `getSetCookies(headers)`
Parses all `Set-Cookie` headers.
```js
import { getSetCookies, Headers } from 'undici'
const headers = new Headers({ 'set-cookie': 'undici=getSetCookies; Secure' })
console.log(getSetCookies(headers))
// [
// {
// name: 'undici',
// value: 'getSetCookies',
// secure: true
// }
// ]
```
Arguments:
* **headers** `Headers`
Returns: `Cookie[]`
## `setCookie(headers, cookie)`
Appends a cookie to the `Set-Cookie` header.
```js
import { setCookie, Headers } from 'undici'
const headers = new Headers()
setCookie(headers, { name: 'undici', value: 'setCookie' })
console.log(headers.get('Set-Cookie')) // undici=setCookie
```
Arguments:
* **headers** `Headers`
* **cookie** `Cookie`
Returns: `void`

62
node_modules/undici/docs/docs/api/Debug.md generated vendored Normal file

@@ -0,0 +1,62 @@
# Debug
Undici (and subsequently `fetch` and `websocket`) exposes debug statements that can be enabled by setting `NODE_DEBUG` within the environment.
The available flags are:
## `undici`
This flag enables debug statements for the core undici library.
```sh
NODE_DEBUG=undici node script.js
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: connected to nodejs.org using https:h1
UNDICI 16241: sending request to GET https://nodejs.org//
UNDICI 16241: received response to GET https://nodejs.org// - HTTP 307
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: trailers received from GET https://nodejs.org//
UNDICI 16241: connected to nodejs.org using https:h1
UNDICI 16241: sending request to GET https://nodejs.org//en
UNDICI 16241: received response to GET https://nodejs.org//en - HTTP 200
UNDICI 16241: trailers received from GET https://nodejs.org//en
```
## `fetch`
This flag enables debug statements for the `fetch` API.
> **Note**: statements are pretty similar to the ones in the `undici` flag, but scoped to `fetch`
```sh
NODE_DEBUG=fetch node script.js
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: connected to nodejs.org using https:h1
FETCH 16241: sending request to GET https://nodejs.org//
FETCH 16241: received response to GET https://nodejs.org// - HTTP 307
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: trailers received from GET https://nodejs.org//
FETCH 16241: connected to nodejs.org using https:h1
FETCH 16241: sending request to GET https://nodejs.org//en
FETCH 16241: received response to GET https://nodejs.org//en - HTTP 200
FETCH 16241: trailers received from GET https://nodejs.org//en
```
## `websocket`
This flag enables debug statements for the `WebSocket` API.
> **Note**: statements can overlap with `UNDICI` ones if the `undici` or `fetch` flag has been enabled as well.
```sh
NODE_DEBUG=websocket node script.js
WEBSOCKET 18309: connecting to echo.websocket.org using https:h1
WEBSOCKET 18309: connected to echo.websocket.org using https:h1
WEBSOCKET 18309: sending request to GET https://echo.websocket.org//
WEBSOCKET 18309: connection opened <ip_address>
```

204
node_modules/undici/docs/docs/api/DiagnosticsChannel.md generated vendored Normal file

@@ -0,0 +1,204 @@
# Diagnostics Channel Support
Stability: Experimental.
Undici supports the [`diagnostics_channel`](https://nodejs.org/api/diagnostics_channel.html) (currently available only on Node.js v16+).
It is the preferred way to instrument Undici and retrieve internal information.
The channels available are the following.
## `undici:request:create`
This message is published when a new outgoing request is created.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => {
console.log('origin', request.origin)
console.log('completed', request.completed)
console.log('method', request.method)
console.log('path', request.path)
console.log('headers', request.headers) // array of strings, e.g: ['foo', 'bar']
request.addHeader('hello', 'world')
console.log('headers', request.headers) // e.g. ['foo', 'bar', 'hello', 'world']
})
```
Note: a request is only loosely coupled to a given socket.
## `undici:request:bodySent`
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:bodySent').subscribe(({ request }) => {
// request is the same object undici:request:create
})
```
## `undici:request:headers`
This message is published after the response headers have been received, i.e. the headers section of the response has been completed.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => {
// request is the same object undici:request:create
console.log('statusCode', response.statusCode)
console.log(response.statusText)
// response.headers are buffers.
console.log(response.headers.map((x) => x.toString()))
})
```
## `undici:request:trailers`
This message is published after the response body and trailers have been received, i.e. the response has been completed.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request, trailers }) => {
// request is the same object undici:request:create
console.log('completed', request.completed)
// trailers are buffers.
console.log(trailers.map((x) => x.toString()))
})
```
## `undici:request:error`
This message is published if the request is going to error, but it has not errored yet.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:error').subscribe(({ request, error }) => {
// request is the same object undici:request:create
})
```
## `undici:client:sendHeaders`
This message is published right before the first byte of the request is written to the socket.
*Note*: It will publish the exact headers that will be sent to the server in raw format.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(({ request, headers, socket }) => {
// request is the same object undici:request:create
console.log(`Full headers list ${headers.split('\r\n')}`);
})
```
## `undici:client:beforeConnect`
This message is published before creating a new connection for **any** request.
You can not assume that this event is related to any specific request.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => {
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
})
```
## `undici:client:connected`
This message is published after a connection is established.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:connected').subscribe(({ socket, connectParams, connector }) => {
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
})
```
## `undici:client:connectError`
This message is published if creating a new connection fails.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error, socket, connectParams, connector }) => {
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
console.log(`Connect failed with ${error.message}`)
})
```
## `undici:websocket:open`
This message is published after the client has successfully connected to a server.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:open').subscribe(({ address, protocol, extensions }) => {
console.log(address) // address, family, and port
console.log(protocol) // negotiated subprotocols
console.log(extensions) // negotiated extensions
})
```
## `undici:websocket:close`
This message is published after the connection has closed.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:close').subscribe(({ websocket, code, reason }) => {
console.log(websocket) // the WebSocket object
console.log(code) // the closing status code
console.log(reason) // the closing reason
})
```
## `undici:websocket:socket_error`
This message is published if the socket experiences an error.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:socket_error').subscribe((error) => {
console.log(error)
})
```
## `undici:websocket:ping`
This message is published after the client receives a ping frame, if the connection is not closing.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:ping').subscribe(({ payload }) => {
// a Buffer or undefined, containing the optional application data of the frame
console.log(payload)
})
```
## `undici:websocket:pong`
This message is published after the client receives a pong frame.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:pong').subscribe(({ payload }) => {
// a Buffer or undefined, containing the optional application data of the frame
console.log(payload)
})
```

60
node_modules/undici/docs/docs/api/DispatchInterceptor.md generated vendored Normal file

@@ -0,0 +1,60 @@
# Interface: DispatchInterceptor
Extends: `Function`
A function that can be applied to the `Dispatcher.Dispatch` function before it is invoked with a dispatch request.
This allows one to write logic to intercept both the outgoing request, and the incoming response.
### Parameter: `Dispatcher.Dispatch`
The base dispatch function you are decorating.
### ReturnType: `Dispatcher.Dispatch`
A dispatch function that has been altered to provide additional logic
### Basic Example
Here is an example of an interceptor being used to provide a JWT bearer token
```js
'use strict'
const { Client } = require('undici')

const insertHeaderInterceptor = dispatch => {
  return function InterceptedDispatch (opts, handler) {
    opts.headers.push('Authorization', 'Bearer [Some token]')
    return dispatch(opts, handler)
  }
}

const client = new Client('https://localhost:3000', {
  interceptors: { Client: [insertHeaderInterceptor] }
})
```
### Basic Example 2
Here is a contrived example of an interceptor stripping the headers from a response.
```js
'use strict'
const { Client, DecoratorHandler } = require('undici')

const clearHeadersInterceptor = dispatch => {
  class ResultInterceptor extends DecoratorHandler {
    onHeaders (statusCode, headers, resume) {
      return super.onHeaders(statusCode, [], resume)
    }
  }
  return function InterceptedDispatch (opts, handler) {
    return dispatch(opts, new ResultInterceptor(handler))
  }
}

const client = new Client('https://localhost:3000', {
  interceptors: { Client: [clearHeadersInterceptor] }
})
```

1065
node_modules/undici/docs/docs/api/Dispatcher.md generated vendored Normal file

File diff suppressed because it is too large

48
node_modules/undici/docs/docs/api/Errors.md generated vendored Normal file

@@ -0,0 +1,48 @@
# Errors
Undici exposes a variety of error objects that you can use to enhance your error handling.
You can find all the error objects inside the `errors` key.
```js
import { errors } from 'undici'
```
| Error | Error Codes | Description |
| ------------------------------------ | ------------------------------------- | ------------------------------------------------------------------------- |
| `UndiciError` | `UND_ERR` | all errors below are extended from `UndiciError`. |
| `ConnectTimeoutError` | `UND_ERR_CONNECT_TIMEOUT` | socket is destroyed due to connect timeout. |
| `HeadersTimeoutError` | `UND_ERR_HEADERS_TIMEOUT` | socket is destroyed due to headers timeout. |
| `HeadersOverflowError` | `UND_ERR_HEADERS_OVERFLOW` | socket is destroyed due to headers' max size being exceeded. |
| `BodyTimeoutError` | `UND_ERR_BODY_TIMEOUT` | socket is destroyed due to body timeout. |
| `ResponseStatusCodeError` | `UND_ERR_RESPONSE_STATUS_CODE` | an error is thrown when `throwOnError` is `true` for status codes >= 400. |
| `InvalidArgumentError` | `UND_ERR_INVALID_ARG` | passed an invalid argument. |
| `InvalidReturnValueError` | `UND_ERR_INVALID_RETURN_VALUE` | returned an invalid value. |
| `RequestAbortedError` | `UND_ERR_ABORTED` | the request has been aborted by the user |
| `ClientDestroyedError` | `UND_ERR_DESTROYED` | trying to use a destroyed client. |
| `ClientClosedError` | `UND_ERR_CLOSED` | trying to use a closed client. |
| `SocketError` | `UND_ERR_SOCKET` | there is an error with the socket. |
| `NotSupportedError` | `UND_ERR_NOT_SUPPORTED` | encountered unsupported functionality. |
| `RequestContentLengthMismatchError` | `UND_ERR_REQ_CONTENT_LENGTH_MISMATCH` | request body does not match content-length header |
| `ResponseContentLengthMismatchError` | `UND_ERR_RES_CONTENT_LENGTH_MISMATCH` | response body does not match content-length header |
| `InformationalError` | `UND_ERR_INFO` | expected error with reason |
| `ResponseExceededMaxSizeError` | `UND_ERR_RES_EXCEEDED_MAX_SIZE` | response body exceed the max size allowed |
| `SecureProxyConnectionError` | `UND_ERR_PRX_TLS` | tls connection to a proxy failed |
### `SocketError`
The `SocketError` has a `.socket` property which holds socket metadata:
```ts
interface SocketInfo {
localAddress?: string
localPort?: number
remoteAddress?: string
remotePort?: number
remoteFamily?: string
timeout?: number
bytesWritten?: number
bytesRead?: number
}
```
Be aware that in some cases the `.socket` property can be `null`.
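A hedged sketch of narrowing on these classes and codes (the unreachable origin is hypothetical):
```js
import { request, errors } from 'undici'

try {
  await request('http://localhost:1') // hypothetical origin that refuses connections
} catch (err) {
  if (err instanceof errors.UndiciError) {
    console.log('undici error code:', err.code) // e.g. UND_ERR_SOCKET or UND_ERR_CONNECT_TIMEOUT
    if (err instanceof errors.SocketError && err.socket) {
      console.log('socket info:', err.socket.remoteAddress, err.socket.remotePort)
    }
  } else {
    throw err // not an undici error, rethrow
  }
}
```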

21
node_modules/undici/docs/docs/api/EventSource.md generated vendored Normal file

@@ -0,0 +1,21 @@
# EventSource
Undici exposes a WHATWG spec-compliant implementation of [EventSource](https://developer.mozilla.org/en-US/docs/Web/API/EventSource)
for [Server-Sent Events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events).
## Instantiating EventSource
Undici exports an EventSource class. You can instantiate an EventSource as follows:
```mjs
import { EventSource } from 'undici'
const eventSource = new EventSource('http://localhost:3000')
eventSource.onmessage = (event) => {
console.log(event.data)
}
```
More information about the EventSource API can be found on
[MDN](https://developer.mozilla.org/en-US/docs/Web/API/EventSource).

57
node_modules/undici/docs/docs/api/Fetch.md generated vendored Normal file
View file

@ -0,0 +1,57 @@
# Fetch
Undici exposes a `fetch()` method that starts the process of fetching a resource from the network.
Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch).
## File
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/File)
In Node versions v18.13.0 and above and v19.2.0 and above, undici will default to using Node's [File](https://nodejs.org/api/buffer.html#class-file) class. In versions where it's not available, it will default to the undici one.
## FormData
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData).
If any parameter other than `undefined` is passed to the `FormData` constructor, an error will be thrown.
## Response
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response)
## Request
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request)
## Headers
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
# Body Mixins
`Response` and `Request` bodies inherit the body mixin methods. These methods include the following (a short usage example follows this list):
- [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
- [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata)
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
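As a minimal sketch of consuming a response body with one of these mixins (the URL is a placeholder):
```js
import { fetch } from 'undici'
const response = await fetch('http://localhost:3000/api')
// A body can only be consumed once; pick a single mixin method per response.
const data = await response.json()
console.log(data)
```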
There is an ongoing discussion regarding `.formData()` and its usefulness and performance in server environments. It is recommended to use a dedicated library for parsing `multipart/form-data` bodies, such as [Busboy](https://www.npmjs.com/package/busboy) or [@fastify/busboy](https://www.npmjs.com/package/@fastify/busboy).
These libraries can be interfaced with fetch with the following example code:
```mjs
import { Busboy } from '@fastify/busboy'
import { Readable } from 'node:stream'
const response = await fetch('...')
const busboy = new Busboy({
headers: {
'content-type': response.headers.get('content-type')
}
})
Readable.fromWeb(response.body).pipe(busboy)
```

540
node_modules/undici/docs/docs/api/MockAgent.md generated vendored Normal file
View file

@ -0,0 +1,540 @@
# Class: MockAgent
Extends: `undici.Dispatcher`
A mocked Agent class that implements the Agent API. It allows one to intercept HTTP requests made through undici and return mocked responses instead.
## `new MockAgent([options])`
Arguments:
* **options** `MockAgentOptions` (optional) - It extends the `Agent` options.
Returns: `MockAgent`
### Parameter: `MockAgentOptions`
Extends: [`AgentOptions`](Agent.md#parameter-agentoptions)
* **agent** `Agent` (optional) - Default: `new Agent([options])` - a custom agent encapsulated by the MockAgent.
### Example - Basic MockAgent instantiation
This will instantiate the MockAgent. It will not do anything until registered as the agent to use with requests and mock interceptions are added.
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
```
### Example - Basic MockAgent instantiation with custom agent
```js
import { Agent, MockAgent } from 'undici'
const agent = new Agent()
const mockAgent = new MockAgent({ agent })
```
## Instance Methods
### `MockAgent.get(origin)`
This method creates and retrieves MockPool or MockClient instances which can then be used to intercept HTTP requests. If the number of connections on the mock agent is set to 1, a MockClient instance is returned. Otherwise a MockPool instance is returned.
For subsequent `MockAgent.get` calls on the same origin, the same mock instance will be returned.
Arguments:
* **origin** `string | RegExp | (value) => boolean` - a matcher for the pool origin to be retrieved from the MockAgent.
| Matcher type | Condition to pass |
|:------------:| -------------------------- |
| `string` | Exact match against string |
| `RegExp` | Regex must pass |
| `Function` | Function must return true |
Returns: `MockClient | MockPool`.
| `MockAgentOptions` | Mock instance returned |
| -------------------- | ---------------------- |
| `connections === 1` | `MockClient` |
| `connections` > `1` | `MockPool` |
#### Example - Basic Mocked Request
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const { statusCode, body } = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Mocked Request with local mock agent dispatcher
```js
import { MockAgent, request } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: mockAgent })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Mocked Request with local mock pool dispatcher
```js
import { MockAgent, request } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: mockPool })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Mocked Request with local mock client dispatcher
```js
import { MockAgent, request } from 'undici'
const mockAgent = new MockAgent({ connections: 1 })
const mockClient = mockAgent.get('http://localhost:3000')
mockClient.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: mockClient })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Mocked requests with multiple intercepts
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
mockPool.intercept({ path: '/hello' }).reply(200, 'hello')
const result1 = await request('http://localhost:3000/foo')
console.log('response received', result1.statusCode) // response received 200
for await (const data of result1.body) {
console.log('data', data.toString('utf8')) // data foo
}
const result2 = await request('http://localhost:3000/hello')
console.log('response received', result2.statusCode) // response received 200
for await (const data of result2.body) {
console.log('data', data.toString('utf8')) // data hello
}
```
#### Example - Mock different requests within the same file
```js
const { MockAgent, setGlobalDispatcher } = require('undici');
const agent = new MockAgent();
agent.disableNetConnect();
setGlobalDispatcher(agent);
describe('Test', () => {
it('200', async () => {
const mockAgent = agent.get('http://test.com');
// your test
});
it('200', async () => {
const mockAgent = agent.get('http://testing.com');
// your test
});
});
```
#### Example - Mocked request with query body, headers and trailers
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo?hello=there&see=ya',
method: 'POST',
body: 'form1=data1&form2=data2'
}).reply(200, { foo: 'bar' }, {
headers: { 'content-type': 'application/json' },
trailers: { 'Content-MD5': 'test' }
})
const {
statusCode,
headers,
trailers,
body
} = await request('http://localhost:3000/foo?hello=there&see=ya', {
method: 'POST',
body: 'form1=data1&form2=data2'
})
console.log('response received', statusCode) // response received 200
console.log('headers', headers) // { 'content-type': 'application/json' }
for await (const data of body) {
console.log('data', data.toString('utf8')) // '{"foo":"bar"}'
}
console.log('trailers', trailers) // { 'content-md5': 'test' }
```
#### Example - Mocked request with origin regex
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get(new RegExp('http://localhost:3000'))
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Mocked request with origin function
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get((origin) => origin === 'http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
### `MockAgent.close()`
Closes the mock agent and waits for registered mock pools and clients to also close before resolving.
Returns: `Promise<void>`
#### Example - clean up after tests are complete
```js
import { MockAgent, setGlobalDispatcher } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
await mockAgent.close()
```
### `MockAgent.dispatch(options, handlers)`
Implements [`Agent.dispatch(options, handlers)`](Agent.md#parameter-agentdispatchoptions).
### `MockAgent.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
#### Example - MockAgent request
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await mockAgent.request({
origin: 'http://localhost:3000',
path: '/foo',
method: 'GET'
})
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
### `MockAgent.deactivate()`
This method disables mocking in MockAgent.
Returns: `void`
#### Example - Deactivate Mocking
```js
import { MockAgent, setGlobalDispatcher } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.deactivate()
```
### `MockAgent.activate()`
This method enables mocking in a MockAgent instance. When instantiated, a MockAgent is automatically activated. Therefore, this method is only effective after `MockAgent.deactivate` has been called.
Returns: `void`
#### Example - Activate Mocking
```js
import { MockAgent, setGlobalDispatcher } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.deactivate()
// No mocking will occur
// Later
mockAgent.activate()
```
### `MockAgent.enableNetConnect([host])`
When requests are not matched in a MockAgent intercept, a real HTTP request is attempted. We can control this further through the use of `enableNetConnect`. This is achieved by defining host matchers so only matching requests will be attempted.
When using a string, it should only include the **hostname and optionally, the port**. In addition, calling this method multiple times with a string will allow all HTTP requests that match these values.
Arguments:
* **host** `string | RegExp | (value) => boolean` - (optional)
Returns: `void`
#### Example - Allow all non-matching urls to be dispatched in a real HTTP request
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.enableNetConnect()
await request('http://example.com')
// A real request is made
```
#### Example - Allow requests matching a host string to make real requests
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.enableNetConnect('example-1.com')
mockAgent.enableNetConnect('example-2.com:8080')
await request('http://example-1.com')
// A real request is made
await request('http://example-2.com:8080')
// A real request is made
await request('http://example-3.com')
// Will throw
```
#### Example - Allow requests matching a host regex to make real requests
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.enableNetConnect(new RegExp('example.com'))
await request('http://example.com')
// A real request is made
```
#### Example - Allow requests matching a host function to make real requests
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.enableNetConnect((value) => value === 'example.com')
await request('http://example.com')
// A real request is made
```
### `MockAgent.disableNetConnect()`
This method causes all requests to throw when requests are not matched in a MockAgent intercept.
Returns: `void`
#### Example - Disable all non-matching requests by throwing an error for each
```js
import { MockAgent, request } from 'undici'
const mockAgent = new MockAgent()
mockAgent.disableNetConnect()
await request('http://example.com')
// Will throw
```
### `MockAgent.pendingInterceptors()`
This method returns any pending interceptors registered on a mock agent. A pending interceptor meets one of the following criteria:
- Is registered with neither `.times(<number>)` nor `.persist()`, and has not been invoked;
- Is persistent (i.e., registered with `.persist()`) and has not been invoked;
- Is registered with `.times(<number>)` and has not been invoked `<number>` of times.
Returns: `PendingInterceptor[]` (where `PendingInterceptor` is a `MockDispatch` with an additional `origin: string`)
#### Example - List all pending interceptors
```js
const agent = new MockAgent()
agent.disableNetConnect()
agent
.get('https://example.com')
.intercept({ method: 'GET', path: '/' })
.reply(200)
const pendingInterceptors = agent.pendingInterceptors()
// Returns [
// {
// timesInvoked: 0,
// times: 1,
// persist: false,
// consumed: false,
// pending: true,
// path: '/',
// method: 'GET',
// body: undefined,
// headers: undefined,
// data: {
// error: null,
// statusCode: 200,
// data: '',
// headers: {},
// trailers: {}
// },
// origin: 'https://example.com'
// }
// ]
```
### `MockAgent.assertNoPendingInterceptors([options])`
This method throws if the mock agent has any pending interceptors. A pending interceptor meets one of the following criteria:
- Is registered with neither `.times(<number>)` nor `.persist()`, and has not been invoked;
- Is persistent (i.e., registered with `.persist()`) and has not been invoked;
- Is registered with `.times(<number>)` and has not been invoked `<number>` of times.
#### Example - Check that there are no pending interceptors
```js
const agent = new MockAgent()
agent.disableNetConnect()
agent
.get('https://example.com')
.intercept({ method: 'GET', path: '/' })
.reply(200)
agent.assertNoPendingInterceptors()
// Throws an UndiciError with the following message:
//
// 1 interceptor is pending:
//
// ┌─────────┬────────┬───────────────────────┬──────┬─────────────┬────────────┬─────────────┬───────────┐
// │ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │
// ├─────────┼────────┼───────────────────────┼──────┼─────────────┼────────────┼─────────────┼───────────┤
// │ 0 │ 'GET' │ 'https://example.com' │ '/' │ 200 │ '❌' │ 0 │ 1 │
// └─────────┴────────┴───────────────────────┴──────┴─────────────┴────────────┴─────────────┴───────────┘
```

77
node_modules/undici/docs/docs/api/MockClient.md generated vendored Normal file
View file

@ -0,0 +1,77 @@
# Class: MockClient
Extends: `undici.Client`
A mock client class that implements the same API as [MockPool](MockPool.md).
## `new MockClient(origin, [options])`
Arguments:
* **origin** `string` - It should only include the **protocol, hostname, and port**.
* **options** `MockClientOptions` - It extends the `Client` options.
Returns: `MockClient`
### Parameter: `MockClientOptions`
Extends: `ClientOptions`
* **agent** `Agent` - the agent to associate this MockClient with.
### Example - Basic MockClient instantiation
We can use MockAgent to instantiate a MockClient ready to be used to intercept specified requests. It will not do anything until it is registered as the agent to use and mock requests are registered.
```js
import { MockAgent } from 'undici'
// Connections must be set to 1 to return a MockClient instance
const mockAgent = new MockAgent({ connections: 1 })
const mockClient = mockAgent.get('http://localhost:3000')
```
## Instance Methods
### `MockClient.intercept(options)`
Implements: [`MockPool.intercept(options)`](MockPool.md#mockpoolinterceptoptions)
### `MockClient.close()`
Implements: [`MockPool.close()`](MockPool.md#mockpoolclose)
### `MockClient.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler).
### `MockClient.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
#### Example - MockClient request
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent({ connections: 1 })
const mockClient = mockAgent.get('http://localhost:3000')
mockClient.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await mockClient.request({
origin: 'http://localhost:3000',
path: '/foo',
method: 'GET'
})
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```

12
node_modules/undici/docs/docs/api/MockErrors.md generated vendored Normal file
View file

@ -0,0 +1,12 @@
# MockErrors
Undici exposes a variety of mock error objects that you can use to enhance your mock error handling.
You can find all the mock error objects inside the `mockErrors` key.
```js
import { mockErrors } from 'undici'
```
| Mock Error | Mock Error Codes | Description |
| --------------------- | ------------------------------- | ---------------------------------------------------------- |
| `MockNotMatchedError` | `UND_MOCK_ERR_MOCK_NOT_MATCHED` | The request does not match any registered mock dispatches. |

547
node_modules/undici/docs/docs/api/MockPool.md generated vendored Normal file
View file

@ -0,0 +1,547 @@
# Class: MockPool
Extends: `undici.Pool`
A mock Pool class that implements the Pool API and is used by MockAgent to intercept real requests and return mocked responses.
## `new MockPool(origin, [options])`
Arguments:
* **origin** `string` - It should only include the **protocol, hostname, and port**.
* **options** `MockPoolOptions` - It extends the `Pool` options.
Returns: `MockPool`
### Parameter: `MockPoolOptions`
Extends: `PoolOptions`
* **agent** `Agent` - the agent to associate this MockPool with.
### Example - Basic MockPool instantiation
We can use MockAgent to instantiate a MockPool ready to be used to intercept specified requests. It will not do anything until it is registered as the agent to use and mock requests are registered.
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
```
## Instance Methods
### `MockPool.intercept(options)`
This method defines the interception rules for matching against requests for a MockPool or MockClient. We can intercept multiple times on a single instance, but each intercept is only used once. For example, if you expect to make 2 requests inside a test, you need to call `intercept()` twice. Assuming you use `disableNetConnect()`, you will get a `MockNotMatchedError` on the second request if you only call `intercept()` once.
When defining interception rules, all the rules must pass for a request to be intercepted. If a request is not intercepted, a real request will be attempted.
| Matcher type | Condition to pass |
|:------------:| -------------------------- |
| `string` | Exact match against string |
| `RegExp` | Regex must pass |
| `Function` | Function must return true |
Arguments:
* **options** `MockPoolInterceptOptions` - Interception options.
Returns: `MockInterceptor` corresponding to the input options.
### Parameter: `MockPoolInterceptOptions`
* **path** `string | RegExp | (path: string) => boolean` - a matcher for the HTTP request path. When a `RegExp` or callback is used, it will match against the request path including all query parameters in alphabetical order. When a `string` is provided, the query parameters can be conveniently specified through the `MockPoolInterceptOptions.query` setting.
* **method** `string | RegExp | (method: string) => boolean` - (optional) - a matcher for the HTTP request method. Defaults to `GET`.
* **body** `string | RegExp | (body: string) => boolean` - (optional) - a matcher for the HTTP request body.
* **headers** `Record<string, string | RegExp | (body: string) => boolean>` - (optional) - a matcher for the HTTP request headers. To be intercepted, a request must match all defined headers. Extra headers not defined here may (or may not) be included in the request and do not affect the interception in any way.
* **query** `Record<string, any> | null` - (optional) - a matcher for the HTTP request query string params. Only applies when a `string` was provided for `MockPoolInterceptOptions.path`.
### Return: `MockInterceptor`
We can define the behaviour of an intercepted request with the following options.
* **reply** `(statusCode: number, replyData: string | Buffer | object | MockInterceptor.MockResponseDataHandler, responseOptions?: MockResponseOptions) => MockScope` - define a reply for a matching request. You can define the replyData as a callback to read incoming request data. Default for `responseOptions` is `{}`.
* **reply** `(callback: MockInterceptor.MockReplyOptionsCallback) => MockScope` - define a reply for a matching request, allowing dynamic mocking of all reply options rather than just the data.
* **replyWithError** `(error: Error) => MockScope` - define an error for a matching request to throw.
* **defaultReplyHeaders** `(headers: Record<string, string>) => MockInterceptor` - define default headers to be included in subsequent replies. These are in addition to headers on a specific reply.
* **defaultReplyTrailers** `(trailers: Record<string, string>) => MockInterceptor` - define default trailers to be included in subsequent replies. These are in addition to trailers on a specific reply.
* **replyContentLength** `() => MockInterceptor` - define automatically calculated `content-length` headers to be included in subsequent replies.
The reply data of an intercepted request may either be a string, buffer, or JavaScript object. Objects are converted to JSON while strings and buffers are sent as-is.
By default, `reply` and `replyWithError` define the behaviour for the first matching request only. Subsequent requests will not be affected (this can be changed using the returned `MockScope`).
### Parameter: `MockResponseOptions`
* **headers** `Record<string, string>` - headers to be included on the mocked reply.
* **trailers** `Record<string, string>` - trailers to be included on the mocked reply.
### Return: `MockScope`
A `MockScope` is associated with a single `MockInterceptor`. With this, we can configure the default behaviour of an intercepted reply.
* **delay** `(waitInMs: number) => MockScope` - delay the associated reply by a set amount in ms.
* **persist** `() => MockScope` - any matching request will always reply with the defined response indefinitely.
* **times** `(repeatTimes: number) => MockScope` - any matching request will reply with the defined response a fixed amount of times. This is overridden by **persist**.
#### Example - Basic Mocked Request
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
// MockPool
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Mocked request using reply data callbacks
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/echo',
method: 'GET',
headers: {
'User-Agent': 'undici',
Host: 'example.com'
}
}).reply(200, ({ headers }) => ({ message: headers.get('message') }))
const { statusCode, body, headers } = await request('http://localhost:3000', {
headers: {
message: 'hello world!'
}
})
console.log('response received', statusCode) // response received 200
console.log('headers', headers) // { 'content-type': 'application/json' }
for await (const data of body) {
console.log('data', data.toString('utf8')) // { "message":"hello world!" }
}
```
#### Example - Mocked request using reply options callback
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/echo',
method: 'GET',
headers: {
'User-Agent': 'undici',
Host: 'example.com'
}
}).reply(({ headers }) => ({ statusCode: 200, data: { message: headers.get('message') }}))
const { statusCode, body, headers } = await request('http://localhost:3000', {
headers: {
message: 'hello world!'
}
})
console.log('response received', statusCode) // response received 200
console.log('headers', headers) // { 'content-type': 'application/json' }
for await (const data of body) {
console.log('data', data.toString('utf8')) // { "message":"hello world!" }
}
```
#### Example - Basic Mocked requests with multiple intercepts
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).reply(200, 'foo')
mockPool.intercept({
path: '/hello',
method: 'GET',
}).reply(200, 'hello')
const result1 = await request('http://localhost:3000/foo')
console.log('response received', result1.statusCode) // response received 200
for await (const data of result1.body) {
console.log('data', data.toString('utf8')) // data foo
}
const result2 = await request('http://localhost:3000/hello')
console.log('response received', result2.statusCode) // response received 200
for await (const data of result2.body) {
console.log('data', data.toString('utf8')) // data hello
}
```
#### Example - Mocked request with query body, request headers and response headers and trailers
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo?hello=there&see=ya',
method: 'POST',
body: 'form1=data1&form2=data2',
headers: {
'User-Agent': 'undici',
Host: 'example.com'
}
}).reply(200, { foo: 'bar' }, {
headers: { 'content-type': 'application/json' },
trailers: { 'Content-MD5': 'test' }
})
const {
statusCode,
headers,
trailers,
body
} = await request('http://localhost:3000/foo?hello=there&see=ya', {
method: 'POST',
body: 'form1=data1&form2=data2',
headers: {
foo: 'bar',
'User-Agent': 'undici',
Host: 'example.com'
}
})
console.log('response received', statusCode) // response received 200
console.log('headers', headers) // { 'content-type': 'application/json' }
for await (const data of body) {
console.log('data', data.toString('utf8')) // '{"foo":"bar"}'
}
console.log('trailers', trailers) // { 'content-md5': 'test' }
```
#### Example - Mocked request using different matchers
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: /^GET$/,
body: (value) => value === 'form=data',
headers: {
'User-Agent': 'undici',
Host: /^example.com$/
}
}).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', {
method: 'GET',
body: 'form=data',
headers: {
foo: 'bar',
'User-Agent': 'undici',
Host: 'example.com'
}
})
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Mocked request with reply with a defined error
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).replyWithError(new Error('kaboom'))
try {
await request('http://localhost:3000/foo', {
method: 'GET'
})
} catch (error) {
console.error(error) // Error: kaboom
}
```
#### Example - Mocked request with defaultReplyHeaders
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).defaultReplyHeaders({ foo: 'bar' })
.reply(200, 'foo')
const { headers } = await request('http://localhost:3000/foo')
console.log('headers', headers) // headers { foo: 'bar' }
```
#### Example - Mocked request with defaultReplyTrailers
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).defaultReplyTrailers({ foo: 'bar' })
.reply(200, 'foo')
const { trailers } = await request('http://localhost:3000/foo')
console.log('trailers', trailers) // trailers { foo: 'bar' }
```
#### Example - Mocked request with automatic content-length calculation
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).replyContentLength().reply(200, 'foo')
const { headers } = await request('http://localhost:3000/foo')
console.log('headers', headers) // headers { 'content-length': '3' }
```
#### Example - Mocked request with automatic content-length calculation on an object
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).replyContentLength().reply(200, { foo: 'bar' })
const { headers } = await request('http://localhost:3000/foo')
console.log('headers', headers) // headers { 'content-length': '13' }
```
#### Example - Mocked request with persist enabled
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).reply(200, 'foo').persist()
const result1 = await request('http://localhost:3000/foo')
// Will match and return mocked data
const result2 = await request('http://localhost:3000/foo')
// Will match and return mocked data
// Etc
```
#### Example - Mocked request with times enabled
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).reply(200, 'foo').times(2)
const result1 = await request('http://localhost:3000/foo')
// Will match and return mocked data
const result2 = await request('http://localhost:3000/foo')
// Will match and return mocked data
const result3 = await request('http://localhost:3000/foo')
// Will not match and will attempt a real request
```
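#### Example - Mocked request with delayed reply
A minimal sketch using the `delay` option of `MockScope` documented above; the wait time is arbitrary:
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
  path: '/foo',
  method: 'GET'
}).reply(200, 'foo').delay(100)
const { statusCode } = await request('http://localhost:3000/foo')
// The mocked reply is delivered after roughly 100 ms
console.log('response received', statusCode) // response received 200
```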
#### Example - Mocked request with path callback
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
import querystring from 'querystring'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
const matchPath = requestPath => {
const [pathname, search] = requestPath.split('?')
const requestQuery = querystring.parse(search)
if (!pathname.startsWith('/foo')) {
return false
}
if (!Object.keys(requestQuery).includes('foo') || requestQuery.foo !== 'bar') {
return false
}
return true
}
mockPool.intercept({
path: matchPath,
method: 'GET'
}).reply(200, 'foo')
const result = await request('http://localhost:3000/foo?foo=bar')
// Will match and return mocked data
```
### `MockPool.close()`
Closes the mock pool and de-registers from associated MockAgent.
Returns: `Promise<void>`
#### Example - clean up after tests are complete
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
await mockPool.close()
```
### `MockPool.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler).
### `MockPool.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
#### Example - MockPool request
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET',
}).reply(200, 'foo')
const {
statusCode,
body
} = await mockPool.request({
origin: 'http://localhost:3000',
path: '/foo',
method: 'GET'
})
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```

84
node_modules/undici/docs/docs/api/Pool.md generated vendored Normal file
View file

@ -0,0 +1,84 @@
# Class: Pool
Extends: `undici.Dispatcher`
A pool of [Client](Client.md) instances connected to the same upstream target.
Requests are not guaranteed to be dispatched in order of invocation.
## `new Pool(url[, options])`
Arguments:
* **url** `URL | string` - It should only include the **protocol, hostname, and port**.
* **options** `PoolOptions` (optional)
### Parameter: `PoolOptions`
Extends: [`ClientOptions`](Client.md#parameter-clientoptions)
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Client(origin, opts)`
* **connections** `number | null` (optional) - Default: `null` - The number of `Client` instances to create. When set to `null`, the `Pool` instance will create an unlimited number of `Client` instances.
* **interceptors** `{ Pool: DispatchInterceptor[] }` - Default: `{ Pool: [] }` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching).
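### Example - Basic Pool instantiation
A quick sketch of creating a pool and issuing a request through it; the origin and connection count are placeholders:
```js
import { Pool } from 'undici'
const pool = new Pool('http://localhost:3000', { connections: 10 })
const { statusCode, body } = await pool.request({ path: '/foo', method: 'GET' })
console.log('response received', statusCode)
console.log(await body.text())
await pool.close()
```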
## Instance Properties
### `Pool.closed`
Implements [Client.closed](Client.md#clientclosed)
### `Pool.destroyed`
Implements [Client.destroyed](Client.md#clientdestroyed)
### `Pool.stats`
Returns [`PoolStats`](PoolStats.md) instance for this pool.
## Instance Methods
### `Pool.close([callback])`
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
### `Pool.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `Pool.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
### `Pool.dispatch(options, handler)`
Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler).
### `Pool.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
### `Pool.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
### `Pool.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `Pool.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).
## Instance Events
### Event: `'connect'`
See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect).
### Event: `'disconnect'`
See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect).
### Event: `'drain'`
See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain).

35
node_modules/undici/docs/docs/api/PoolStats.md generated vendored Normal file
View file

@ -0,0 +1,35 @@
# Class: PoolStats
Aggregate stats for a [Pool](Pool.md) or [BalancedPool](BalancedPool.md).
## `new PoolStats(pool)`
Arguments:
* **pool** `Pool` - Pool or BalancedPool from which to return stats.
## Instance Properties
### `PoolStats.connected`
Number of open socket connections in this pool.
### `PoolStats.free`
Number of open socket connections in this pool that do not have an active request.
### `PoolStats.pending`
Number of pending requests across all clients in this pool.
### `PoolStats.queued`
Number of queued requests across all clients in this pool.
### `PoolStats.running`
Number of currently active requests across all clients in this pool.
### `PoolStats.size`
Number of active, pending, or queued requests across all clients in this pool.
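For instance, a minimal sketch reading these properties off a [Pool](Pool.md) via `pool.stats` (the origin is a placeholder):
```js
import { Pool } from 'undici'
const pool = new Pool('http://localhost:3000')
const { connected, free, pending, queued, running, size } = pool.stats
console.log({ connected, free, pending, queued, running, size })
```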

130
node_modules/undici/docs/docs/api/ProxyAgent.md generated vendored Normal file
View file

@ -0,0 +1,130 @@
# Class: ProxyAgent
Extends: `undici.Dispatcher`
A Proxy Agent class that implements the Agent API. It allows connecting through a proxy in a simple way.
## `new ProxyAgent([options])`
Arguments:
* **options** `ProxyAgentOptions` (required) - It extends the `Agent` options.
Returns: `ProxyAgent`
### Parameter: `ProxyAgentOptions`
Extends: [`AgentOptions`](Agent.md#parameter-agentoptions)
* **uri** `string | URL` (required) - The URI of the proxy server. This can be provided as a string, as an instance of the URL class, or as an object with a `uri` property of type string.
If the `uri` is provided as a string or `uri` is an object with an `uri` property of type string, then it will be parsed into a `URL` object according to the [WHATWG URL Specification](https://url.spec.whatwg.org).
For detailed information on the parsing process and potential validation errors, please refer to the ["Writing" section](https://url.spec.whatwg.org/#writing) of the WHATWG URL Specification.
* **token** `string` (optional) - A string token used to authenticate against the proxy.
* **auth** `string` (**deprecated**) - Use token.
* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` (optional) - Default: `(origin, opts) => new Pool(origin, opts)`
* **requestTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the request. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback).
* **proxyTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the proxy server. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback).
Examples:
```js
import { ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
// or
const proxyAgent = new ProxyAgent(new URL('my.proxy.server'))
// or
const proxyAgent = new ProxyAgent({ uri: 'my.proxy.server' })
```
#### Example - Basic ProxyAgent instantiation
This will instantiate the ProxyAgent. It will not do anything until registered as the agent to use with requests.
```js
import { ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
```
#### Example - Basic Proxy Request with global agent dispatcher
```js
import { setGlobalDispatcher, request, ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
setGlobalDispatcher(proxyAgent)
const { statusCode, body } = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Proxy Request with local agent dispatcher
```js
import { ProxyAgent, request } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: proxyAgent })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Proxy Request with authentication
```js
import { setGlobalDispatcher, request, ProxyAgent } from 'undici';
const proxyAgent = new ProxyAgent({
uri: 'my.proxy.server',
// token: 'Bearer xxxx'
token: `Basic ${Buffer.from('username:password').toString('base64')}`
});
setGlobalDispatcher(proxyAgent);
const { statusCode, body } = await request('http://localhost:3000/foo');
console.log('response received', statusCode); // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')); // data foo
}
```
### `ProxyAgent.close()`
Closes the proxy agent and waits for registered pools and clients to also close before resolving.
Returns: `Promise<void>`
#### Example - clean up after tests are complete
```js
import { ProxyAgent, setGlobalDispatcher } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
setGlobalDispatcher(proxyAgent)
await proxyAgent.close()
```
### `ProxyAgent.dispatch(options, handlers)`
Implements [`Agent.dispatch(options, handlers)`](Agent.md#parameter-agentdispatchoptions).
### `ProxyAgent.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).

96
node_modules/undici/docs/docs/api/RedirectHandler.md generated vendored Normal file
View file

@ -0,0 +1,96 @@
# Class: RedirectHandler
A class that handles redirection logic for HTTP requests.
## `new RedirectHandler(dispatch, maxRedirections, opts, handler, redirectionLimitReached)`
Arguments:
- **dispatch** `function` - The dispatch function to be called after every retry.
- **maxRedirections** `number` - Maximum number of redirections allowed.
- **opts** `object` - Options for handling redirection.
- **handler** `object` - An object containing handlers for different stages of the request lifecycle.
- **redirectionLimitReached** `boolean` (default: `false`) - A flag that the implementer can provide to enable or disable the feature. If set to `false`, it indicates that the caller doesn't want to use the feature and prefers the old behavior.
Returns: `RedirectHandler`
### Parameters
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every redirection.
- **maxRedirections** `number` (required) - Maximum number of redirections allowed.
- **opts** `object` (required) - Options for handling redirection.
- **handler** `object` (required) - Handlers for different stages of the request lifecycle.
- **redirectionLimitReached** `boolean` (default: `false`) - A flag that the implementer can provide to enable or disable the feature. If set to `false`, it indicates that the caller doesn't want to use the feature and prefers the old behavior.
### Properties
- **location** `string` - The current redirection location.
- **abort** `function` - The abort function.
- **opts** `object` - The options for handling redirection.
- **maxRedirections** `number` - Maximum number of redirections allowed.
- **handler** `object` - Handlers for different stages of the request lifecycle.
- **history** `Array` - An array representing the history of URLs during redirection.
- **redirectionLimitReached** `boolean` - Indicates whether the redirection limit has been reached.
### Methods
#### `onConnect(abort)`
Called when the connection is established.
Parameters:
- **abort** `function` - The abort function.
#### `onUpgrade(statusCode, headers, socket)`
Called when an upgrade is requested.
Parameters:
- **statusCode** `number` - The HTTP status code.
- **headers** `object` - The headers received in the response.
- **socket** `object` - The socket object.
#### `onError(error)`
Called when an error occurs.
Parameters:
- **error** `Error` - The error that occurred.
#### `onHeaders(statusCode, headers, resume, statusText)`
Called when headers are received.
Parameters:
- **statusCode** `number` - The HTTP status code.
- **headers** `object` - The headers received in the response.
- **resume** `function` - The resume function.
- **statusText** `string` - The status text.
#### `onData(chunk)`
Called when data is received.
Parameters:
- **chunk** `Buffer` - The data chunk received.
#### `onComplete(trailers)`
Called when the request is complete.
Parameters:
- **trailers** `object` - The trailers received.
#### `onBodySent(chunk)`
Called when the request body is sent.
Parameters:
- **chunk** `Buffer` - The chunk of the request body sent.
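As a rough sketch of how the pieces fit together, a `RedirectHandler` can wrap a dispatch call so redirects are followed. The `followRedirects` helper and the target path below are illustrative only, not part of the undici API:
```js
import { Client, RedirectHandler } from 'undici'
// Illustrative helper: wraps a dispatch function so each request is driven
// through a RedirectHandler that follows up to maxRedirections redirects.
function followRedirects (dispatch, maxRedirections = 3) {
  return function redirectingDispatch (opts, handler) {
    return dispatch(opts, new RedirectHandler(dispatch, maxRedirections, opts, handler))
  }
}
const client = new Client('http://localhost:3000')
const dispatchWithRedirects = followRedirects(client.dispatch.bind(client))
dispatchWithRedirects(
  { origin: 'http://localhost:3000', path: '/moved', method: 'GET' },
  {
    onConnect () {},
    onHeaders (statusCode, headers, resume) { return true },
    onData (chunk) { return true },
    onComplete (trailers) { console.log('request complete') },
    onError (err) { console.error(err) }
  }
)
```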

45
node_modules/undici/docs/docs/api/RetryAgent.md generated vendored Normal file
View file

@ -0,0 +1,45 @@
# Class: RetryAgent
Extends: `undici.Dispatcher`
An `undici.Dispatcher` that automatically retries requests.
It wraps an `undici.RetryHandler`.
## `new RetryAgent(dispatcher, [options])`
Arguments:
* **dispatcher** `undici.Dispatcher` (required) - the dispatcher to wrap
* **options** `RetryHandlerOptions` (optional) - the options
Returns: `RetryAgent`
### Parameter: `RetryHandlerOptions`
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', 'UND_ERR_SOCKET']`
**`RetryContext`**
- `state`: `RetryState` - Current retry state. It can be mutated.
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
Example:
```js
import { Agent, RetryAgent } from 'undici'
const agent = new RetryAgent(new Agent())
const res = await agent.request('http://example.com')
console.log(res.statusCode)
console.log(await res.body.text())
```

114
node_modules/undici/docs/docs/api/RetryHandler.md generated vendored Normal file
View file

@ -0,0 +1,114 @@
# Class: RetryHandler
Extends: `undici.DispatcherHandlers`
A handler class that implements the retry logic for a request.
## `new RetryHandler(dispatchOptions, retryHandlers, [retryOptions])`
Arguments:
- **options** `Dispatch.DispatchOptions & RetryOptions` (required) - It is an intersection of `Dispatcher.DispatchOptions` and `RetryOptions`.
- **retryHandlers** `RetryHandlers` (required) - Object containing the `dispatch` to be used on every retry, and `handler` for handling the `dispatch` lifecycle.
Returns: `RetryHandler`
### Parameter: `Dispatch.DispatchOptions & RetryOptions`
Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions).
#### `RetryOptions`
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', 'UND_ERR_SOCKET']`
**`RetryContext`**
- `state`: `RetryState` - Current retry state. It can be mutated.
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
**`RetryState`**
It represents the retry state for a given request.
- `counter`: `number` - Current retry attempt.
### Parameter `RetryHandlers`
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every retry.
- **handler** Extends [`Dispatch.DispatchHandlers`](Dispatcher.md#dispatcherdispatchoptions-handler) (required) - Handler function to be called after the request is successful or the retries are exhausted.
Examples:
```js
const client = new Client(`http://localhost:${server.address().port}`);
const chunks = [];
const handler = new RetryHandler(
{
...dispatchOptions,
retryOptions: {
// custom retry function
retry: function (err, state, callback) {
counter++;
if (err.code && err.code === "UND_ERR_DESTROYED") {
callback(err);
return;
}
if (err.statusCode === 206) {
callback(err);
return;
}
setTimeout(() => callback(null), 1000);
},
},
},
{
dispatch: (...args) => {
return client.dispatch(...args);
},
handler: {
onConnect() {},
onBodySent() {},
onHeaders(status, _rawHeaders, resume, _statusMessage) {
// do something with headers
},
onData(chunk) {
chunks.push(chunk);
return true;
},
onComplete() {},
onError() {
// handle error properly
},
},
}
);
```
#### Example - Basic RetryHandler with defaults
```js
const client = new Client(`http://localhost:${server.address().port}`);
const handler = new RetryHandler(dispatchOptions, {
dispatch: client.dispatch.bind(client),
handler: {
onConnect() {},
onBodySent() {},
onHeaders(status, _rawHeaders, resume, _statusMessage) {},
onData(chunk) {},
onComplete() {},
onError(err) {},
},
});
```

25
node_modules/undici/docs/docs/api/Util.md generated vendored Normal file
View file

@ -0,0 +1,25 @@
# Util
Utility API for third-party implementations of the dispatcher API.
## `parseHeaders(headers, [obj])`
Receives a header object and returns the parsed value.
Arguments:
- **headers** `Record<string, string | string[]> | (Buffer | string | (Buffer | string)[])[]` (required) - Header object.
- **obj** `Record<string, string | string[]>` (optional) - Target object that the parsed values are assigned to. If **headers** is already an object, **obj** is not used.
Returns: `Record<string, string | string[]>` - If **headers** is an object, **headers** itself is returned. Otherwise, if **obj** is specified, **obj** is returned with the parsed values assigned to it.
## `headerNameToString(value)`
Retrieves a header name and returns its lowercase value.
Arguments:
- **value** `string | Buffer` (required) - Header name.
Returns: `string`
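For example, a minimal sketch (assuming these utilities are exposed via the `util` export, and using raw header buffers of the shape produced by `onHeaders`):
```js
import { Buffer } from 'node:buffer'
import { util } from 'undici'
// Raw headers as a flat [name, value, name, value, ...] array of buffers
const rawHeaders = [Buffer.from('Content-Type'), Buffer.from('text/plain')]
console.log(util.parseHeaders(rawHeaders)) // { 'content-type': 'text/plain' }
console.log(util.headerNameToString(Buffer.from('Content-Type'))) // 'content-type'
```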

43
node_modules/undici/docs/docs/api/WebSocket.md generated vendored Normal file
View file

@ -0,0 +1,43 @@
# Class: WebSocket
> ⚠️ Warning: the WebSocket API is experimental.
Extends: [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget)
The WebSocket object provides a way to manage a WebSocket connection to a server, allowing bidirectional communication. The API follows the [WebSocket spec](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) and [RFC 6455](https://datatracker.ietf.org/doc/html/rfc6455).
## `new WebSocket(url[, protocol])`
Arguments:
* **url** `URL | string` - The url's protocol *must* be `ws` or `wss`.
* **protocol** `string | string[] | WebSocketInit` (optional) - Subprotocol(s) to request the server use, or a [`Dispatcher`](./Dispatcher.md).
### Example:
This example will not work in browsers or other platforms that don't allow passing an object.
```mjs
import { WebSocket, ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
const ws = new WebSocket('wss://echo.websocket.events', {
dispatcher: proxyAgent,
protocols: ['echo', 'chat']
})
```
If you do not need a custom Dispatcher, it's recommended to use the following pattern:
```mjs
import { WebSocket } from 'undici'
const ws = new WebSocket('wss://echo.websocket.events', ['echo', 'chat'])
```
## Read More
- [MDN - WebSocket](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket)
- [The WebSocket Specification](https://www.rfc-editor.org/rfc/rfc6455)
- [The WHATWG WebSocket Specification](https://websockets.spec.whatwg.org/)

91
node_modules/undici/docs/docs/api/api-lifecycle.md generated vendored Normal file
View file

@ -0,0 +1,91 @@
# Client Lifecycle
An Undici [Client](Client.md) can be best described as a state machine. The following list is a summary of the various state transitions the `Client` will go through in its lifecycle. This document also contains detailed breakdowns of each state.
> This diagram is not a perfect representation of the undici Client. Since the Client class is not actually implemented as a state-machine, actual execution may deviate slightly from what is described below. Consider this as a general resource for understanding the inner workings of the Undici client rather than some kind of formal specification.
## State Transition Overview
* A `Client` begins in the **idle** state with no socket connection and no requests in queue.
* The *connect* event transitions the `Client` to the **pending** state where requests can be queued prior to processing.
* The *close* and *destroy* events transition the `Client` to the **destroyed** state. Since there are no requests in the queue, the *close* event immediately transitions to the **destroyed** state.
* The **pending** state indicates the underlying socket connection has been successfully established and requests are queueing.
* The *process* event transitions the `Client` to the **processing** state where requests are processed.
* If requests are queued, the *close* event transitions to the **processing** state; otherwise, it transitions to the **destroyed** state.
* The *destroy* event transitions to the **destroyed** state.
* The **processing** state initializes to the **processing.running** state.
* If the current request requires draining, the *needDrain* event transitions the `Client` into the **processing.busy** state which will return to the **processing.running** state with the *drainComplete* event.
* After all queued requests are completed, the *keepalive* event transitions the `Client` back to the **pending** state. If no requests are queued during the timeout, the **close** event transitions the `Client` to the **destroyed** state.
* If the *close* event is fired while the `Client` still has queued requests, the `Client` transitions to the **process.closing** state where it will complete all existing requests before firing the *done* event.
* The *done* event gracefully transitions the `Client` to the **destroyed** state.
* At any point in time, the *destroy* event will transition the `Client` from the **processing** state to the **destroyed** state, destroying any queued requests.
* The **destroyed** state is a final state and the `Client` is no longer functional.
A state diagram representing an Undici Client instance:
```mermaid
stateDiagram-v2
[*] --> idle
idle --> pending : connect
idle --> destroyed : destroy/close
pending --> idle : timeout
pending --> destroyed : destroy
state close_fork <<fork>>
pending --> close_fork : close
close_fork --> processing
close_fork --> destroyed
pending --> processing : process
processing --> pending : keepalive
processing --> destroyed : done
processing --> destroyed : destroy
destroyed --> [*]
state processing {
[*] --> running
running --> closing : close
running --> busy : needDrain
busy --> running : drainComplete
running --> [*] : keepalive
closing --> [*] : done
}
```
## State details
### idle
The **idle** state is the initial state of a `Client` instance. While an `origin` is required for instantiating a `Client` instance, the underlying socket connection will not be established until a request is queued using [`Client.dispatch()`](Client.md#clientdispatchoptions-handlers). By calling `Client.dispatch()` directly or using one of the multiple implementations ([`Client.connect()`](Client.md#clientconnectoptions-callback), [`Client.pipeline()`](Client.md#clientpipelineoptions-handler), [`Client.request()`](Client.md#clientrequestoptions-callback), [`Client.stream()`](Client.md#clientstreamoptions-factory-callback), and [`Client.upgrade()`](Client.md#clientupgradeoptions-callback)), the `Client` instance will transition from **idle** to [**pending**](#pending) and then most likely directly to [**processing**](#processing).
Calling [`Client.close()`](Client.md#clientclosecallback) or [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](#destroyed) state since the `Client` instance will have no queued requests in this state.
### pending
The **pending** state signifies a non-processing `Client`. Upon entering this state, the `Client` establishes a socket connection and emits the [`'connect'`](Client.md#event-connect) event signalling a connection was successfully established with the `origin` provided during `Client` instantiation. The internal queue is initially empty, and requests can start queueing.
Calling [`Client.close()`](Client.md#clientclosecallback) with queued requests, transitions the `Client` to the [**processing**](#processing) state. Without queued requests, it transitions to the [**destroyed**](#destroyed) state.
Calling [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](#destroyed) state regardless of existing requests.
### processing
The **processing** state is a state machine within itself. It initializes to the [**processing.running**](#running) state. The [`Client.dispatch()`](Client.md#clientdispatchoptions-handlers), [`Client.close()`](Client.md#clientclosecallback), and [`Client.destroy()`](Client.md#clientdestroyerror-callback) can be called at any time while the `Client` is in this state. `Client.dispatch()` will add more requests to the queue while existing requests continue to be processed. `Client.close()` will transition to the [**processing.closing**](#closing) state. And `Client.destroy()` will transition to [**destroyed**](#destroyed).
#### running
In the **processing.running** sub-state, queued requests are being processed in a FIFO order. If a request body requires draining, the *needDrain* event transitions to the [**processing.busy**](#busy) sub-state. The *close* event transitions the Client to the [**process.closing**](#closing) sub-state. If all queued requests are processed and neither [`Client.close()`](Client.md#clientclosecallback) nor [`Client.destroy()`](Client.md#clientdestroyerror-callback) are called, then the [**processing**](#processing) machine will trigger a *keepalive* event transitioning the `Client` back to the [**pending**](#pending) state. During this time, the `Client` is waiting for the socket connection to timeout, and once it does, it triggers the *timeout* event and transitions to the [**idle**](#idle) state.
#### busy
This sub-state is only entered when a request body is an instance of [Stream](https://nodejs.org/api/stream.html) and requires draining. The `Client` cannot process additional requests while in this state and must wait until the currently processing request body is completely drained before transitioning back to [**processing.running**](#running).
#### closing
This sub-state is only entered when a `Client` instance has queued requests and the [`Client.close()`](Client.md#clientclosecallback) method is called. In this state, the `Client` instance continues to process requests as usual, with the one exception that no additional requests can be queued. Once all of the queued requests are processed, the `Client` will trigger the *done* event gracefully entering the [**destroyed**](#destroyed) state without an error.
### destroyed
The **destroyed** state is a final state for the `Client` instance. Once in this state, a `Client` is nonfunctional. Calling any other `Client` methods will result in a `ClientDestroyedError`.
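As a quick illustration (the origin below is arbitrary), destroying a `Client` and then attempting another request rejects with that error:
```js
import { Client, errors } from 'undici'

const client = new Client('http://localhost:3000') // arbitrary origin

await client.destroy() // transitions the Client to the destroyed state

try {
  await client.request({ path: '/', method: 'GET' })
} catch (err) {
  console.log(err instanceof errors.ClientDestroyedError) // true
}
```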

View file

@ -0,0 +1,64 @@
# Client certificate
Client certificate authentication can be configured with the `Client`; the required options are passed along through the `connect` option.
The client certificates must be signed by a trusted CA. The Node.js default is to trust the well-known CAs curated by Mozilla.
Setting the server option `requestCert: true` tells the server to request the client certificate.
The server option `rejectUnauthorized: false` allows us to handle any invalid certificate errors in client code. The `authorized` property on the socket of the incoming request will show if the client certificate was valid. The `authorizationError` property will give the reason if the certificate was not valid.
### Client Certificate Authentication
```js
const { readFileSync } = require('node:fs')
const { join } = require('node:path')
const { createServer } = require('node:https')
const { Client } = require('undici')
const serverOptions = {
ca: [
readFileSync(join(__dirname, 'client-ca-crt.pem'), 'utf8')
],
key: readFileSync(join(__dirname, 'server-key.pem'), 'utf8'),
cert: readFileSync(join(__dirname, 'server-crt.pem'), 'utf8'),
requestCert: true,
rejectUnauthorized: false
}
const server = createServer(serverOptions, (req, res) => {
// true if client cert is valid
if (req.client.authorized === true) {
console.log('valid')
} else {
console.error(req.client.authorizationError)
}
res.end()
})
server.listen(0, function () {
const tls = {
ca: [
readFileSync(join(__dirname, 'server-ca-crt.pem'), 'utf8')
],
key: readFileSync(join(__dirname, 'client-key.pem'), 'utf8'),
cert: readFileSync(join(__dirname, 'client-crt.pem'), 'utf8'),
rejectUnauthorized: false,
servername: 'agent1'
}
const client = new Client(`https://localhost:${server.address().port}`, {
connect: tls
})
client.request({
path: '/',
method: 'GET'
}, (err, { body }) => {
body.on('data', (buf) => {})
body.on('end', () => {
client.close()
server.close()
})
})
})
```

View file

@ -0,0 +1,136 @@
# Mocking Request
Undici has its own mocking [utility](../api/MockAgent.md). It allows us to intercept undici HTTP requests and return mocked values instead, which is useful for testing purposes.
Example:
```js
// bank.mjs
import { request } from 'undici'
export async function bankTransfer(recipient, amount) {
const { body } = await request('http://localhost:3000/bank-transfer',
{
method: 'POST',
headers: {
'X-TOKEN-SECRET': 'SuperSecretToken',
},
body: JSON.stringify({
recipient,
amount
})
}
)
return await body.json()
}
```
And this is what the test file looks like:
```js
// index.test.mjs
import { strict as assert } from 'assert'
import { MockAgent, setGlobalDispatcher, } from 'undici'
import { bankTransfer } from './bank.mjs'
const mockAgent = new MockAgent();
setGlobalDispatcher(mockAgent);
// Provide the base url to the request
const mockPool = mockAgent.get('http://localhost:3000');
// intercept the request
mockPool.intercept({
path: '/bank-transfer',
method: 'POST',
headers: {
'X-TOKEN-SECRET': 'SuperSecretToken',
},
body: JSON.stringify({
recipient: '1234567890',
amount: '100'
})
}).reply(200, {
message: 'transaction processed'
})
const success = await bankTransfer('1234567890', '100')
assert.deepEqual(success, { message: 'transaction processed' })
// if you don't want to check whether the body or the headers contain the same value
// just remove it from interceptor
mockPool.intercept({
path: '/bank-transfer',
method: 'POST',
}).reply(400, {
message: 'bank account not found'
})
const badRequest = await bankTransfer('1234567890', '100')
assert.deepEqual(badRequest, { message: 'bank account not found' })
```
Explore other MockAgent functionality [here](../api/MockAgent.md)
## Debug Mock Value
When the interceptor and the request options are not the same, undici will automatically make a real HTTP request. To prevent real requests from being made, use `mockAgent.disableNetConnect()`:
```js
const mockAgent = new MockAgent();
setGlobalDispatcher(mockAgent);
mockAgent.disableNetConnect()
// Provide the base url to the request
const mockPool = mockAgent.get('http://localhost:3000');
mockPool.intercept({
path: '/bank-transfer',
method: 'POST',
}).reply(200, {
message: 'transaction processed'
})
const badRequest = await bankTransfer('1234567890', '100')
// Will throw an error
// MockNotMatchedError: Mock dispatch not matched for path '/bank-transfer':
// subsequent request to origin http://localhost:3000 was not allowed (net.connect disabled)
```
## Reply with data based on request
If the mocked response needs to be dynamically derived from the request parameters, you can provide a function instead of an object to `reply`:
```js
mockPool.intercept({
path: '/bank-transfer',
method: 'POST',
headers: {
'X-TOKEN-SECRET': 'SuperSecretToken',
},
body: JSON.stringify({
recipient: '1234567890',
amount: '100'
})
}).reply(200, (opts) => {
// do something with opts
return { message: 'transaction processed' }
})
```
In this case, `opts` will be:
```
{
method: 'POST',
headers: { 'X-TOKEN-SECRET': 'SuperSecretToken' },
body: '{"recipient":"1234567890","amount":"100"}',
origin: 'http://localhost:3000',
path: '/bank-transfer'
}
```

127
node_modules/undici/docs/docs/best-practices/proxy.md generated vendored Normal file
View file

@ -0,0 +1,127 @@
# Connecting through a proxy
Connecting through a proxy is possible by:
- Using [ProxyAgent](../api/ProxyAgent.md).
- Configuring `Client` or `Pool` constructor.
The proxy url should be passed to the `Client` or `Pool` constructor, while the upstream server url
should be added to every request call in the `path`.
For instance, if you need to send a request to the `/hello` route of your upstream server,
the `path` should be `path: 'http://upstream.server:port/hello?foo=bar'`.
If your proxy requires basic authentication, you can send it via the `proxy-authorization` header.
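For the `ProxyAgent` route, a minimal sketch looks like this (the proxy and upstream addresses are placeholders):
```js
import { ProxyAgent, request } from 'undici'

const proxyAgent = new ProxyAgent('http://localhost:8000') // placeholder proxy address

const { statusCode, body } = await request('http://localhost:3000/hello?foo=bar', {
  dispatcher: proxyAgent
})

console.log(statusCode, await body.json())
```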
### Connect without authentication
```js
import { Client } from 'undici'
import { createServer } from 'http'
import { createProxy } from 'proxy'
const server = await buildServer()
const proxyServer = await buildProxy()
const serverUrl = `http://localhost:${server.address().port}`
const proxyUrl = `http://localhost:${proxyServer.address().port}`
server.on('request', (req, res) => {
console.log(req.url) // '/hello?foo=bar'
res.setHeader('content-type', 'application/json')
res.end(JSON.stringify({ hello: 'world' }))
})
const client = new Client(proxyUrl)
const response = await client.request({
method: 'GET',
path: serverUrl + '/hello?foo=bar'
})
response.body.setEncoding('utf8')
let data = ''
for await (const chunk of response.body) {
data += chunk
}
console.log(response.statusCode) // 200
console.log(JSON.parse(data)) // { hello: 'world' }
server.close()
proxyServer.close()
client.close()
function buildServer () {
return new Promise((resolve, reject) => {
const server = createServer()
server.listen(0, () => resolve(server))
})
}
function buildProxy () {
return new Promise((resolve, reject) => {
const server = createProxy(createServer())
server.listen(0, () => resolve(server))
})
}
```
### Connect with authentication
```js
import { Client } from 'undici'
import { createServer } from 'http'
import { createProxy } from 'proxy'
const server = await buildServer()
const proxyServer = await buildProxy()
const serverUrl = `http://localhost:${server.address().port}`
const proxyUrl = `http://localhost:${proxyServer.address().port}`
proxyServer.authenticate = function (req) {
return req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`
}
server.on('request', (req, res) => {
console.log(req.url) // '/hello?foo=bar'
res.setHeader('content-type', 'application/json')
res.end(JSON.stringify({ hello: 'world' }))
})
const client = new Client(proxyUrl)
const response = await client.request({
method: 'GET',
path: serverUrl + '/hello?foo=bar',
headers: {
'proxy-authorization': `Basic ${Buffer.from('user:pass').toString('base64')}`
}
})
response.body.setEncoding('utf8')
let data = ''
for await (const chunk of response.body) {
data += chunk
}
console.log(response.statusCode) // 200
console.log(JSON.parse(data)) // { hello: 'world' }
server.close()
proxyServer.close()
client.close()
function buildServer () {
return new Promise((resolve, reject) => {
const server = createServer()
server.listen(0, () => resolve(server))
})
}
function buildProxy () {
return new Promise((resolve, reject) => {
const server = createProxy(createServer())
server.listen(0, () => resolve(server))
})
}
```

View file

@ -0,0 +1,20 @@
# Writing tests
Undici is tuned for a production use case and its defaults will keep
a socket open for a few seconds after an HTTP request has completed to
remove the overhead of opening up a new socket. The settings that make
Undici shine in production are not a good fit for using Undici in automated
tests, as they result in longer execution times.
The following are good defaults that will keep the socket open for only 10ms:
```js
import { request, setGlobalDispatcher, Agent } from 'undici'
const agent = new Agent({
keepAliveTimeout: 10, // milliseconds
keepAliveMaxTimeout: 10 // milliseconds
})
setGlobalDispatcher(agent)
```
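With the global dispatcher set, tests can then call `request()` as usual; the endpoint below is hypothetical:
```js
import { request } from 'undici'

const { statusCode, body } = await request('http://localhost:3000/health') // hypothetical endpoint
console.log(statusCode, await body.text())
```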

21
node_modules/undici/index-fetch.js generated vendored Normal file
View file

@ -0,0 +1,21 @@
'use strict'
const fetchImpl = require('./lib/web/fetch').fetch
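// Errors are re-thrown after re-capturing their stack trace at this boundary.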
module.exports.fetch = function fetch (resource, init = undefined) {
return fetchImpl(resource, init).catch((err) => {
if (err && typeof err === 'object') {
Error.captureStackTrace(err, this)
}
throw err
})
}
module.exports.FormData = require('./lib/web/fetch/formdata').FormData
module.exports.Headers = require('./lib/web/fetch/headers').Headers
module.exports.Response = require('./lib/web/fetch/response').Response
module.exports.Request = require('./lib/web/fetch/request').Request
module.exports.WebSocket = require('./lib/web/websocket/websocket').WebSocket
module.exports.MessageEvent = require('./lib/web/websocket/events').MessageEvent
module.exports.EventSource = require('./lib/web/eventsource/eventsource').EventSource

3
node_modules/undici/index.d.ts generated vendored Normal file
View file

@ -0,0 +1,3 @@
export * from './types/index'
import Undici from './types/index'
export default Undici

165
node_modules/undici/index.js generated vendored Normal file
View file

@ -0,0 +1,165 @@
'use strict'
const Client = require('./lib/dispatcher/client')
const Dispatcher = require('./lib/dispatcher/dispatcher')
const Pool = require('./lib/dispatcher/pool')
const BalancedPool = require('./lib/dispatcher/balanced-pool')
const Agent = require('./lib/dispatcher/agent')
const ProxyAgent = require('./lib/dispatcher/proxy-agent')
const RetryAgent = require('./lib/dispatcher/retry-agent')
const errors = require('./lib/core/errors')
const util = require('./lib/core/util')
const { InvalidArgumentError } = errors
const api = require('./lib/api')
const buildConnector = require('./lib/core/connect')
const MockClient = require('./lib/mock/mock-client')
const MockAgent = require('./lib/mock/mock-agent')
const MockPool = require('./lib/mock/mock-pool')
const mockErrors = require('./lib/mock/mock-errors')
const RetryHandler = require('./lib/handler/retry-handler')
const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
const DecoratorHandler = require('./lib/handler/decorator-handler')
const RedirectHandler = require('./lib/handler/redirect-handler')
const createRedirectInterceptor = require('./lib/interceptor/redirect-interceptor')
Object.assign(Dispatcher.prototype, api)
module.exports.Dispatcher = Dispatcher
module.exports.Client = Client
module.exports.Pool = Pool
module.exports.BalancedPool = BalancedPool
module.exports.Agent = Agent
module.exports.ProxyAgent = ProxyAgent
module.exports.RetryAgent = RetryAgent
module.exports.RetryHandler = RetryHandler
module.exports.DecoratorHandler = DecoratorHandler
module.exports.RedirectHandler = RedirectHandler
module.exports.createRedirectInterceptor = createRedirectInterceptor
module.exports.interceptors = {
redirect: require('./lib/interceptor/redirect'),
retry: require('./lib/interceptor/retry')
}
module.exports.buildConnector = buildConnector
module.exports.errors = errors
module.exports.util = {
parseHeaders: util.parseHeaders,
headerNameToString: util.headerNameToString
}
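// Wraps an api method (request, stream, pipeline, connect, upgrade) as a standalone function:
// normalizes the url/opts arguments, resolves the target origin and path, and dispatches on
// opts.dispatcher, falling back to the global dispatcher.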
function makeDispatcher (fn) {
return (url, opts, handler) => {
if (typeof opts === 'function') {
handler = opts
opts = null
}
if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {
throw new InvalidArgumentError('invalid url')
}
if (opts != null && typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
if (opts && opts.path != null) {
if (typeof opts.path !== 'string') {
throw new InvalidArgumentError('invalid opts.path')
}
let path = opts.path
if (!opts.path.startsWith('/')) {
path = `/${path}`
}
url = new URL(util.parseOrigin(url).origin + path)
} else {
if (!opts) {
opts = typeof url === 'object' ? url : {}
}
url = util.parseURL(url)
}
const { agent, dispatcher = getGlobalDispatcher() } = opts
if (agent) {
throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')
}
return fn.call(dispatcher, {
...opts,
origin: url.origin,
path: url.search ? `${url.pathname}${url.search}` : url.pathname,
method: opts.method || (opts.body ? 'PUT' : 'GET')
}, handler)
}
}
module.exports.setGlobalDispatcher = setGlobalDispatcher
module.exports.getGlobalDispatcher = getGlobalDispatcher
const fetchImpl = require('./lib/web/fetch').fetch
module.exports.fetch = async function fetch (init, options = undefined) {
try {
return await fetchImpl(init, options)
} catch (err) {
if (err && typeof err === 'object') {
Error.captureStackTrace(err, this)
}
throw err
}
}
module.exports.Headers = require('./lib/web/fetch/headers').Headers
module.exports.Response = require('./lib/web/fetch/response').Response
module.exports.Request = require('./lib/web/fetch/request').Request
module.exports.FormData = require('./lib/web/fetch/formdata').FormData
module.exports.File = require('./lib/web/fetch/file').File
module.exports.FileReader = require('./lib/web/fileapi/filereader').FileReader
const { setGlobalOrigin, getGlobalOrigin } = require('./lib/web/fetch/global')
module.exports.setGlobalOrigin = setGlobalOrigin
module.exports.getGlobalOrigin = getGlobalOrigin
const { CacheStorage } = require('./lib/web/cache/cachestorage')
const { kConstruct } = require('./lib/web/cache/symbols')
// Cache & CacheStorage are tightly coupled with fetch. Even if it may run
// in an older version of Node, it doesn't have any use without fetch.
module.exports.caches = new CacheStorage(kConstruct)
const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/web/cookies')
module.exports.deleteCookie = deleteCookie
module.exports.getCookies = getCookies
module.exports.getSetCookies = getSetCookies
module.exports.setCookie = setCookie
const { parseMIMEType, serializeAMimeType } = require('./lib/web/fetch/data-url')
module.exports.parseMIMEType = parseMIMEType
module.exports.serializeAMimeType = serializeAMimeType
const { CloseEvent, ErrorEvent, MessageEvent } = require('./lib/web/websocket/events')
module.exports.WebSocket = require('./lib/web/websocket/websocket').WebSocket
module.exports.CloseEvent = CloseEvent
module.exports.ErrorEvent = ErrorEvent
module.exports.MessageEvent = MessageEvent
module.exports.request = makeDispatcher(api.request)
module.exports.stream = makeDispatcher(api.stream)
module.exports.pipeline = makeDispatcher(api.pipeline)
module.exports.connect = makeDispatcher(api.connect)
module.exports.upgrade = makeDispatcher(api.upgrade)
module.exports.MockClient = MockClient
module.exports.MockPool = MockPool
module.exports.MockAgent = MockAgent
module.exports.mockErrors = mockErrors
const { EventSource } = require('./lib/web/eventsource/eventsource')
module.exports.EventSource = EventSource

57
node_modules/undici/lib/api/abort-signal.js generated vendored Normal file
View file

@ -0,0 +1,57 @@
const { addAbortListener } = require('../core/util')
const { RequestAbortedError } = require('../core/errors')
const kListener = Symbol('kListener')
const kSignal = Symbol('kSignal')
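// addSignal wires an AbortSignal (or EventEmitter-style signal) to a handler so that the
// in-flight request is aborted when the signal fires; removeSignal detaches the listener again.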
function abort (self) {
if (self.abort) {
self.abort(self[kSignal]?.reason)
} else {
self.reason = self[kSignal]?.reason ?? new RequestAbortedError()
}
removeSignal(self)
}
function addSignal (self, signal) {
self.reason = null
self[kSignal] = null
self[kListener] = null
if (!signal) {
return
}
if (signal.aborted) {
abort(self)
return
}
self[kSignal] = signal
self[kListener] = () => {
abort(self)
}
addAbortListener(self[kSignal], self[kListener])
}
function removeSignal (self) {
if (!self[kSignal]) {
return
}
if ('removeEventListener' in self[kSignal]) {
self[kSignal].removeEventListener('abort', self[kListener])
} else {
self[kSignal].removeListener('abort', self[kListener])
}
self[kSignal] = null
self[kListener] = null
}
module.exports = {
addSignal,
removeSignal
}

108
node_modules/undici/lib/api/api-connect.js generated vendored Normal file
View file

@ -0,0 +1,108 @@
'use strict'
const assert = require('node:assert')
const { AsyncResource } = require('node:async_hooks')
const { InvalidArgumentError, SocketError } = require('../core/errors')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
class ConnectHandler extends AsyncResource {
constructor (opts, callback) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
const { signal, opaque, responseHeaders } = opts
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
super('UNDICI_CONNECT')
this.opaque = opaque || null
this.responseHeaders = responseHeaders || null
this.callback = callback
this.abort = null
addSignal(this, signal)
}
onConnect (abort, context) {
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
onHeaders () {
throw new SocketError('bad connect', null)
}
onUpgrade (statusCode, rawHeaders, socket) {
const { callback, opaque, context } = this
removeSignal(this)
this.callback = null
let headers = rawHeaders
// A null value indicates an HTTP2Session
if (headers != null) {
headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
}
this.runInAsyncScope(callback, null, null, {
statusCode,
headers,
socket,
opaque,
context
})
}
onError (err) {
const { callback, opaque } = this
removeSignal(this)
if (callback) {
this.callback = null
queueMicrotask(() => {
this.runInAsyncScope(callback, null, err, { opaque })
})
}
}
}
function connect (opts, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
connect.call(this, opts, (err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
try {
const connectHandler = new ConnectHandler(opts, callback)
this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)
} catch (err) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}
module.exports = connect

251
node_modules/undici/lib/api/api-pipeline.js generated vendored Normal file
View file

@ -0,0 +1,251 @@
'use strict'
const {
Readable,
Duplex,
PassThrough
} = require('node:stream')
const {
InvalidArgumentError,
InvalidReturnValueError,
RequestAbortedError
} = require('../core/errors')
const util = require('../core/util')
const { AsyncResource } = require('node:async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
const assert = require('node:assert')
const kResume = Symbol('resume')
class PipelineRequest extends Readable {
constructor () {
super({ autoDestroy: true })
this[kResume] = null
}
_read () {
const { [kResume]: resume } = this
if (resume) {
this[kResume] = null
resume()
}
}
_destroy (err, callback) {
this._read()
callback(err)
}
}
class PipelineResponse extends Readable {
constructor (resume) {
super({ autoDestroy: true })
this[kResume] = resume
}
_read () {
this[kResume]()
}
_destroy (err, callback) {
if (!err && !this._readableState.endEmitted) {
err = new RequestAbortedError()
}
callback(err)
}
}
class PipelineHandler extends AsyncResource {
constructor (opts, handler) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
if (typeof handler !== 'function') {
throw new InvalidArgumentError('invalid handler')
}
const { signal, method, opaque, onInfo, responseHeaders } = opts
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
if (method === 'CONNECT') {
throw new InvalidArgumentError('invalid method')
}
if (onInfo && typeof onInfo !== 'function') {
throw new InvalidArgumentError('invalid onInfo callback')
}
super('UNDICI_PIPELINE')
this.opaque = opaque || null
this.responseHeaders = responseHeaders || null
this.handler = handler
this.abort = null
this.context = null
this.onInfo = onInfo || null
this.req = new PipelineRequest().on('error', util.nop)
this.ret = new Duplex({
readableObjectMode: opts.objectMode,
autoDestroy: true,
read: () => {
const { body } = this
if (body?.resume) {
body.resume()
}
},
write: (chunk, encoding, callback) => {
const { req } = this
if (req.push(chunk, encoding) || req._readableState.destroyed) {
callback()
} else {
req[kResume] = callback
}
},
destroy: (err, callback) => {
const { body, req, res, ret, abort } = this
if (!err && !ret._readableState.endEmitted) {
err = new RequestAbortedError()
}
if (abort && err) {
abort()
}
util.destroy(body, err)
util.destroy(req, err)
util.destroy(res, err)
removeSignal(this)
callback(err)
}
}).on('prefinish', () => {
const { req } = this
// Node < 15 does not call _final in same tick.
req.push(null)
})
this.res = null
addSignal(this, signal)
}
onConnect (abort, context) {
const { ret, res } = this
if (this.reason) {
abort(this.reason)
return
}
assert(!res, 'pipeline cannot be retried')
assert(!ret.destroyed)
this.abort = abort
this.context = context
}
onHeaders (statusCode, rawHeaders, resume) {
const { opaque, handler, context } = this
if (statusCode < 200) {
if (this.onInfo) {
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
this.onInfo({ statusCode, headers })
}
return
}
this.res = new PipelineResponse(resume)
let body
try {
this.handler = null
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
body = this.runInAsyncScope(handler, null, {
statusCode,
headers,
opaque,
body: this.res,
context
})
} catch (err) {
this.res.on('error', util.nop)
throw err
}
if (!body || typeof body.on !== 'function') {
throw new InvalidReturnValueError('expected Readable')
}
body
.on('data', (chunk) => {
const { ret, body } = this
if (!ret.push(chunk) && body.pause) {
body.pause()
}
})
.on('error', (err) => {
const { ret } = this
util.destroy(ret, err)
})
.on('end', () => {
const { ret } = this
ret.push(null)
})
.on('close', () => {
const { ret } = this
if (!ret._readableState.ended) {
util.destroy(ret, new RequestAbortedError())
}
})
this.body = body
}
onData (chunk) {
const { res } = this
return res.push(chunk)
}
onComplete (trailers) {
const { res } = this
res.push(null)
}
onError (err) {
const { ret } = this
this.handler = null
util.destroy(ret, err)
}
}
function pipeline (opts, handler) {
try {
const pipelineHandler = new PipelineHandler(opts, handler)
this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)
return pipelineHandler.ret
} catch (err) {
return new PassThrough().destroy(err)
}
}
module.exports = pipeline

182
node_modules/undici/lib/api/api-request.js generated vendored Normal file
View file

@ -0,0 +1,182 @@
'use strict'
const assert = require('node:assert')
const { Readable } = require('./readable')
const { InvalidArgumentError } = require('../core/errors')
const util = require('../core/util')
const { getResolveErrorBodyCallback } = require('./util')
const { AsyncResource } = require('node:async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
class RequestHandler extends AsyncResource {
constructor (opts, callback) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts
try {
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
throw new InvalidArgumentError('invalid highWaterMark')
}
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
if (method === 'CONNECT') {
throw new InvalidArgumentError('invalid method')
}
if (onInfo && typeof onInfo !== 'function') {
throw new InvalidArgumentError('invalid onInfo callback')
}
super('UNDICI_REQUEST')
} catch (err) {
if (util.isStream(body)) {
util.destroy(body.on('error', util.nop), err)
}
throw err
}
this.responseHeaders = responseHeaders || null
this.opaque = opaque || null
this.callback = callback
this.res = null
this.abort = null
this.body = body
this.trailers = {}
this.context = null
this.onInfo = onInfo || null
this.throwOnError = throwOnError
this.highWaterMark = highWaterMark
if (util.isStream(body)) {
body.on('error', (err) => {
this.onError(err)
})
}
addSignal(this, signal)
}
onConnect (abort, context) {
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
if (statusCode < 200) {
if (this.onInfo) {
this.onInfo({ statusCode, headers })
}
return
}
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
const contentType = parsedHeaders['content-type']
const contentLength = parsedHeaders['content-length']
const body = new Readable({ resume, abort, contentType, contentLength, highWaterMark })
this.callback = null
this.res = body
if (callback !== null) {
if (this.throwOnError && statusCode >= 400) {
this.runInAsyncScope(getResolveErrorBodyCallback, null,
{ callback, body, contentType, statusCode, statusMessage, headers }
)
} else {
this.runInAsyncScope(callback, null, null, {
statusCode,
headers,
trailers: this.trailers,
opaque,
body,
context
})
}
}
}
onData (chunk) {
const { res } = this
return res.push(chunk)
}
onComplete (trailers) {
const { res } = this
removeSignal(this)
util.parseHeaders(trailers, this.trailers)
res.push(null)
}
onError (err) {
const { res, callback, body, opaque } = this
removeSignal(this)
if (callback) {
// TODO: Does this need queueMicrotask?
this.callback = null
queueMicrotask(() => {
this.runInAsyncScope(callback, null, err, { opaque })
})
}
if (res) {
this.res = null
// Ensure all queued handlers are invoked before destroying res.
queueMicrotask(() => {
util.destroy(res, err)
})
}
if (body) {
this.body = null
util.destroy(body, err)
}
}
}
function request (opts, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
request.call(this, opts, (err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
try {
this.dispatch(opts, new RequestHandler(opts, callback))
} catch (err) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}
module.exports = request
module.exports.RequestHandler = RequestHandler

220
node_modules/undici/lib/api/api-stream.js generated vendored Normal file
View file

@ -0,0 +1,220 @@
'use strict'
const assert = require('node:assert')
const { finished, PassThrough } = require('node:stream')
const { InvalidArgumentError, InvalidReturnValueError } = require('../core/errors')
const util = require('../core/util')
const { getResolveErrorBodyCallback } = require('./util')
const { AsyncResource } = require('node:async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
class StreamHandler extends AsyncResource {
constructor (opts, factory, callback) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
try {
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (typeof factory !== 'function') {
throw new InvalidArgumentError('invalid factory')
}
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
if (method === 'CONNECT') {
throw new InvalidArgumentError('invalid method')
}
if (onInfo && typeof onInfo !== 'function') {
throw new InvalidArgumentError('invalid onInfo callback')
}
super('UNDICI_STREAM')
} catch (err) {
if (util.isStream(body)) {
util.destroy(body.on('error', util.nop), err)
}
throw err
}
this.responseHeaders = responseHeaders || null
this.opaque = opaque || null
this.factory = factory
this.callback = callback
this.res = null
this.abort = null
this.context = null
this.trailers = null
this.body = body
this.onInfo = onInfo || null
this.throwOnError = throwOnError || false
if (util.isStream(body)) {
body.on('error', (err) => {
this.onError(err)
})
}
addSignal(this, signal)
}
onConnect (abort, context) {
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
const { factory, opaque, context, callback, responseHeaders } = this
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
if (statusCode < 200) {
if (this.onInfo) {
this.onInfo({ statusCode, headers })
}
return
}
this.factory = null
let res
if (this.throwOnError && statusCode >= 400) {
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
const contentType = parsedHeaders['content-type']
res = new PassThrough()
this.callback = null
this.runInAsyncScope(getResolveErrorBodyCallback, null,
{ callback, body: res, contentType, statusCode, statusMessage, headers }
)
} else {
if (factory === null) {
return
}
res = this.runInAsyncScope(factory, null, {
statusCode,
headers,
opaque,
context
})
if (
!res ||
typeof res.write !== 'function' ||
typeof res.end !== 'function' ||
typeof res.on !== 'function'
) {
throw new InvalidReturnValueError('expected Writable')
}
// TODO: Avoid finished. It registers an unnecessary amount of listeners.
finished(res, { readable: false }, (err) => {
const { callback, res, opaque, trailers, abort } = this
this.res = null
if (err || !res.readable) {
util.destroy(res, err)
}
this.callback = null
this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
if (err) {
abort()
}
})
}
res.on('drain', resume)
this.res = res
const needDrain = res.writableNeedDrain !== undefined
? res.writableNeedDrain
: res._writableState?.needDrain
return needDrain !== true
}
onData (chunk) {
const { res } = this
return res ? res.write(chunk) : true
}
onComplete (trailers) {
const { res } = this
removeSignal(this)
if (!res) {
return
}
this.trailers = util.parseHeaders(trailers)
res.end()
}
onError (err) {
const { res, callback, opaque, body } = this
removeSignal(this)
this.factory = null
if (res) {
this.res = null
util.destroy(res, err)
} else if (callback) {
this.callback = null
queueMicrotask(() => {
this.runInAsyncScope(callback, null, err, { opaque })
})
}
if (body) {
this.body = null
util.destroy(body, err)
}
}
}
function stream (opts, factory, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
stream.call(this, opts, factory, (err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
try {
this.dispatch(opts, new StreamHandler(opts, factory, callback))
} catch (err) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}
module.exports = stream

108
node_modules/undici/lib/api/api-upgrade.js generated vendored Normal file
View file

@ -0,0 +1,108 @@
'use strict'
const { InvalidArgumentError, SocketError } = require('../core/errors')
const { AsyncResource } = require('node:async_hooks')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
const assert = require('node:assert')
class UpgradeHandler extends AsyncResource {
constructor (opts, callback) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
const { signal, opaque, responseHeaders } = opts
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
super('UNDICI_UPGRADE')
this.responseHeaders = responseHeaders || null
this.opaque = opaque || null
this.callback = callback
this.abort = null
this.context = null
addSignal(this, signal)
}
onConnect (abort, context) {
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = null
}
onHeaders () {
throw new SocketError('bad upgrade', null)
}
onUpgrade (statusCode, rawHeaders, socket) {
const { callback, opaque, context } = this
assert.strictEqual(statusCode, 101)
removeSignal(this)
this.callback = null
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
this.runInAsyncScope(callback, null, null, {
headers,
socket,
opaque,
context
})
}
onError (err) {
const { callback, opaque } = this
removeSignal(this)
if (callback) {
this.callback = null
queueMicrotask(() => {
this.runInAsyncScope(callback, null, err, { opaque })
})
}
}
}
function upgrade (opts, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
upgrade.call(this, opts, (err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
try {
const upgradeHandler = new UpgradeHandler(opts, callback)
this.dispatch({
...opts,
method: opts.method || 'GET',
upgrade: opts.protocol || 'Websocket'
}, upgradeHandler)
} catch (err) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}
module.exports = upgrade

7
node_modules/undici/lib/api/index.js generated vendored Normal file
View file

@ -0,0 +1,7 @@
'use strict'
module.exports.request = require('./api-request')
module.exports.stream = require('./api-stream')
module.exports.pipeline = require('./api-pipeline')
module.exports.upgrade = require('./api-upgrade')
module.exports.connect = require('./api-connect')

357
node_modules/undici/lib/api/readable.js generated vendored Normal file
View file

@ -0,0 +1,357 @@
// Ported from https://github.com/nodejs/undici/pull/907
'use strict'
const assert = require('node:assert')
const { Readable } = require('node:stream')
const { RequestAbortedError, NotSupportedError, InvalidArgumentError, AbortError } = require('../core/errors')
const util = require('../core/util')
const { ReadableStreamFrom } = require('../core/util')
const kConsume = Symbol('kConsume')
const kReading = Symbol('kReading')
const kBody = Symbol('kBody')
const kAbort = Symbol('kAbort')
const kContentType = Symbol('kContentType')
const kContentLength = Symbol('kContentLength')
const noop = () => {}
class BodyReadable extends Readable {
constructor ({
resume,
abort,
contentType = '',
contentLength,
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
}) {
super({
autoDestroy: true,
read: resume,
highWaterMark
})
this._readableState.dataEmitted = false
this[kAbort] = abort
this[kConsume] = null
this[kBody] = null
this[kContentType] = contentType
this[kContentLength] = contentLength
// Is stream being consumed through Readable API?
// This is an optimization so that we avoid checking
// for 'data' and 'readable' listeners in the hot path
// inside push().
this[kReading] = false
}
destroy (err) {
if (!err && !this._readableState.endEmitted) {
err = new RequestAbortedError()
}
if (err) {
this[kAbort]()
}
return super.destroy(err)
}
_destroy (err, callback) {
// Workaround for Node "bug". If the stream is destroyed in the same
// tick as it is created, then a user who is waiting for a
// promise (i.e. a microtask) before installing an 'error' listener will
// never get a chance and will always encounter an unhandled exception.
setImmediate(() => {
callback(err)
})
}
on (ev, ...args) {
if (ev === 'data' || ev === 'readable') {
this[kReading] = true
}
return super.on(ev, ...args)
}
addListener (ev, ...args) {
return this.on(ev, ...args)
}
off (ev, ...args) {
const ret = super.off(ev, ...args)
if (ev === 'data' || ev === 'readable') {
this[kReading] = (
this.listenerCount('data') > 0 ||
this.listenerCount('readable') > 0
)
}
return ret
}
removeListener (ev, ...args) {
return this.off(ev, ...args)
}
push (chunk) {
if (this[kConsume] && chunk !== null) {
consumePush(this[kConsume], chunk)
return this[kReading] ? super.push(chunk) : true
}
return super.push(chunk)
}
// https://fetch.spec.whatwg.org/#dom-body-text
async text () {
return consume(this, 'text')
}
// https://fetch.spec.whatwg.org/#dom-body-json
async json () {
return consume(this, 'json')
}
// https://fetch.spec.whatwg.org/#dom-body-blob
async blob () {
return consume(this, 'blob')
}
// https://fetch.spec.whatwg.org/#dom-body-arraybuffer
async arrayBuffer () {
return consume(this, 'arrayBuffer')
}
// https://fetch.spec.whatwg.org/#dom-body-formdata
async formData () {
// TODO: Implement.
throw new NotSupportedError()
}
// https://fetch.spec.whatwg.org/#dom-body-bodyused
get bodyUsed () {
return util.isDisturbed(this)
}
// https://fetch.spec.whatwg.org/#dom-body-body
get body () {
if (!this[kBody]) {
this[kBody] = ReadableStreamFrom(this)
if (this[kConsume]) {
// TODO: Is this the best way to force a lock?
this[kBody].getReader() // Ensure stream is locked.
assert(this[kBody].locked)
}
}
return this[kBody]
}
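// Drain and discard the body, up to opts.limit bytes (default 128 KiB); the stream is
// destroyed once the limit is exceeded or the provided signal aborts.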
async dump (opts) {
let limit = Number.isFinite(opts?.limit) ? opts.limit : 128 * 1024
const signal = opts?.signal
if (signal != null && (typeof signal !== 'object' || !('aborted' in signal))) {
throw new InvalidArgumentError('signal must be an AbortSignal')
}
signal?.throwIfAborted()
if (this._readableState.closeEmitted) {
return null
}
return await new Promise((resolve, reject) => {
if (this[kContentLength] > limit) {
this.destroy(new AbortError())
}
const onAbort = () => {
this.destroy(signal.reason ?? new AbortError())
}
signal?.addEventListener('abort', onAbort)
this
.on('close', function () {
signal?.removeEventListener('abort', onAbort)
if (signal?.aborted) {
reject(signal.reason ?? new AbortError())
} else {
resolve(null)
}
})
.on('error', noop)
.on('data', function (chunk) {
limit -= chunk.length
if (limit <= 0) {
this.destroy()
}
})
.resume()
})
}
}
// https://streams.spec.whatwg.org/#readablestream-locked
function isLocked (self) {
// Consume is an implicit lock.
return (self[kBody] && self[kBody].locked === true) || self[kConsume]
}
// https://fetch.spec.whatwg.org/#body-unusable
function isUnusable (self) {
return util.isDisturbed(self) || isLocked(self)
}
async function consume (stream, type) {
assert(!stream[kConsume])
return new Promise((resolve, reject) => {
if (isUnusable(stream)) {
const rState = stream._readableState
if (rState.destroyed && rState.closeEmitted === false) {
stream
.on('error', err => {
reject(err)
})
.on('close', () => {
reject(new TypeError('unusable'))
})
} else {
reject(rState.errored ?? new TypeError('unusable'))
}
} else {
queueMicrotask(() => {
stream[kConsume] = {
type,
stream,
resolve,
reject,
length: 0,
body: []
}
stream
.on('error', function (err) {
consumeFinish(this[kConsume], err)
})
.on('close', function () {
if (this[kConsume].body !== null) {
consumeFinish(this[kConsume], new RequestAbortedError())
}
})
consumeStart(stream[kConsume])
})
}
})
}
function consumeStart (consume) {
if (consume.body === null) {
return
}
const { _readableState: state } = consume.stream
if (state.bufferIndex) {
const start = state.bufferIndex
const end = state.buffer.length
for (let n = start; n < end; n++) {
consumePush(consume, state.buffer[n])
}
} else {
for (const chunk of state.buffer) {
consumePush(consume, chunk)
}
}
if (state.endEmitted) {
consumeEnd(consume)
} else {
consume.stream.on('end', function () {
consumeEnd(this[kConsume])
})
}
consume.stream.resume()
while (consume.stream.read() != null) {
// Loop
}
}
/**
* @param {Buffer[]} chunks
* @param {number} length
*/
function chunksDecode (chunks, length) {
if (chunks.length === 0 || length === 0) {
return ''
}
const buffer = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks, length)
const bufferLength = buffer.length
// Skip BOM.
const start =
bufferLength > 2 &&
buffer[0] === 0xef &&
buffer[1] === 0xbb &&
buffer[2] === 0xbf
? 3
: 0
return buffer.utf8Slice(start, bufferLength)
}
function consumeEnd (consume) {
const { type, body, resolve, stream, length } = consume
try {
if (type === 'text') {
resolve(chunksDecode(body, length))
} else if (type === 'json') {
resolve(JSON.parse(chunksDecode(body, length)))
} else if (type === 'arrayBuffer') {
const dst = new Uint8Array(length)
let pos = 0
for (const buf of body) {
dst.set(buf, pos)
pos += buf.byteLength
}
resolve(dst.buffer)
} else if (type === 'blob') {
resolve(new Blob(body, { type: stream[kContentType] }))
}
consumeFinish(consume)
} catch (err) {
stream.destroy(err)
}
}
function consumePush (consume, chunk) {
consume.length += chunk.length
consume.body.push(chunk)
}
function consumeFinish (consume, err) {
if (consume.body === null) {
return
}
if (err) {
consume.reject(err)
} else {
consume.resolve()
}
consume.type = null
consume.stream = null
consume.resolve = null
consume.reject = null
consume.length = 0
consume.body = null
}
module.exports = { Readable: BodyReadable, chunksDecode }

86
node_modules/undici/lib/api/util.js generated vendored Normal file
View file

@ -0,0 +1,86 @@
const assert = require('node:assert')
const {
ResponseStatusCodeError
} = require('../core/errors')
const { chunksDecode } = require('./readable')
const CHUNK_LIMIT = 128 * 1024
async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
assert(body)
let chunks = []
let length = 0
for await (const chunk of body) {
chunks.push(chunk)
length += chunk.length
if (length > CHUNK_LIMIT) {
chunks = null
break
}
}
const message = `Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`
if (statusCode === 204 || !contentType || !chunks) {
queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers)))
return
}
const stackTraceLimit = Error.stackTraceLimit
Error.stackTraceLimit = 0
let payload
try {
if (isContentTypeApplicationJson(contentType)) {
payload = JSON.parse(chunksDecode(chunks, length))
} else if (isContentTypeText(contentType)) {
payload = chunksDecode(chunks, length)
}
} catch {
// process in a callback to avoid throwing in the microtask queue
} finally {
Error.stackTraceLimit = stackTraceLimit
}
queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers, payload)))
}
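// Checks whether the content type starts with 'application/json' using direct character comparison.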
const isContentTypeApplicationJson = (contentType) => {
return (
contentType.length > 15 &&
contentType[11] === '/' &&
contentType[0] === 'a' &&
contentType[1] === 'p' &&
contentType[2] === 'p' &&
contentType[3] === 'l' &&
contentType[4] === 'i' &&
contentType[5] === 'c' &&
contentType[6] === 'a' &&
contentType[7] === 't' &&
contentType[8] === 'i' &&
contentType[9] === 'o' &&
contentType[10] === 'n' &&
contentType[12] === 'j' &&
contentType[13] === 's' &&
contentType[14] === 'o' &&
contentType[15] === 'n'
)
}
const isContentTypeText = (contentType) => {
return (
contentType.length > 4 &&
contentType[4] === '/' &&
contentType[0] === 't' &&
contentType[1] === 'e' &&
contentType[2] === 'x' &&
contentType[3] === 't'
)
}
module.exports = {
getResolveErrorBodyCallback,
isContentTypeApplicationJson,
isContentTypeText
}

193
node_modules/undici/lib/core/connect.js generated vendored Normal file
View file

@ -0,0 +1,193 @@
'use strict'
const net = require('node:net')
const assert = require('node:assert')
const util = require('./util')
const { InvalidArgumentError, ConnectTimeoutError } = require('./errors')
let tls // include tls conditionally since it is not always available
// TODO: session re-use does not wait for the first
// connection to resolve the session and might therefore
// resolve the same servername multiple times even when
// re-use is enabled.
let SessionCache
// FIXME: remove workaround when the Node bug is fixed
// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
if (global.FinalizationRegistry && !(process.env.NODE_V8_COVERAGE || process.env.UNDICI_NO_FG)) {
SessionCache = class WeakSessionCache {
constructor (maxCachedSessions) {
this._maxCachedSessions = maxCachedSessions
this._sessionCache = new Map()
this._sessionRegistry = new global.FinalizationRegistry((key) => {
if (this._sessionCache.size < this._maxCachedSessions) {
return
}
const ref = this._sessionCache.get(key)
if (ref !== undefined && ref.deref() === undefined) {
this._sessionCache.delete(key)
}
})
}
get (sessionKey) {
const ref = this._sessionCache.get(sessionKey)
return ref ? ref.deref() : null
}
set (sessionKey, session) {
if (this._maxCachedSessions === 0) {
return
}
this._sessionCache.set(sessionKey, new WeakRef(session))
this._sessionRegistry.register(session, sessionKey)
}
}
} else {
SessionCache = class SimpleSessionCache {
constructor (maxCachedSessions) {
this._maxCachedSessions = maxCachedSessions
this._sessionCache = new Map()
}
get (sessionKey) {
return this._sessionCache.get(sessionKey)
}
set (sessionKey, session) {
if (this._maxCachedSessions === 0) {
return
}
if (this._sessionCache.size >= this._maxCachedSessions) {
// remove the oldest session
const { value: oldestKey } = this._sessionCache.keys().next()
this._sessionCache.delete(oldestKey)
}
this._sessionCache.set(sessionKey, session)
}
}
}
function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
}
const options = { path: socketPath, ...opts }
const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
timeout = timeout == null ? 10e3 : timeout
allowH2 = allowH2 != null ? allowH2 : false
return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
let socket
if (protocol === 'https:') {
if (!tls) {
tls = require('node:tls')
}
servername = servername || options.servername || util.getServerName(host) || null
const sessionKey = servername || hostname
const session = sessionCache.get(sessionKey) || null
assert(sessionKey)
socket = tls.connect({
highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
...options,
servername,
session,
localAddress,
// TODO(HTTP/2): Add support for h2c
ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
socket: httpSocket, // upgrade socket connection
port: port || 443,
host: hostname
})
socket
.on('session', function (session) {
// TODO (fix): Can a session become invalid once established? Don't think so?
sessionCache.set(sessionKey, session)
})
} else {
assert(!httpSocket, 'httpSocket can only be sent on TLS update')
socket = net.connect({
highWaterMark: 64 * 1024, // Same as nodejs fs streams.
...options,
localAddress,
port: port || 80,
host: hostname
})
}
// Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
if (options.keepAlive == null || options.keepAlive) {
const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
socket.setKeepAlive(true, keepAliveInitialDelay)
}
const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
socket
.setNoDelay(true)
.once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
cancelTimeout()
if (callback) {
const cb = callback
callback = null
cb(null, this)
}
})
.on('error', function (err) {
cancelTimeout()
if (callback) {
const cb = callback
callback = null
cb(err)
}
})
return socket
}
}
function setupTimeout (onConnectTimeout, timeout) {
if (!timeout) {
return () => {}
}
let s1 = null
let s2 = null
const timeoutId = setTimeout(() => {
// setImmediate is added to make sure that we prioritise socket error events over timeouts
s1 = setImmediate(() => {
if (process.platform === 'win32') {
// Windows needs an extra setImmediate probably due to implementation differences in the socket logic
s2 = setImmediate(() => onConnectTimeout())
} else {
onConnectTimeout()
}
})
}, timeout)
return () => {
clearTimeout(timeoutId)
clearImmediate(s1)
clearImmediate(s2)
}
}
function onConnectTimeout (socket) {
let message = 'Connect Timeout Error'
if (Array.isArray(socket.autoSelectFamilyAttemptedAddresses)) {
message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')})`
}
util.destroy(socket, new ConnectTimeoutError(message))
}
module.exports = buildConnector

118
node_modules/undici/lib/core/constants.js generated vendored Normal file
View file

@ -0,0 +1,118 @@
'use strict'
/** @type {Record<string, string | undefined>} */
const headerNameLowerCasedRecord = {}
// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
'Accept',
'Accept-Encoding',
'Accept-Language',
'Accept-Ranges',
'Access-Control-Allow-Credentials',
'Access-Control-Allow-Headers',
'Access-Control-Allow-Methods',
'Access-Control-Allow-Origin',
'Access-Control-Expose-Headers',
'Access-Control-Max-Age',
'Access-Control-Request-Headers',
'Access-Control-Request-Method',
'Age',
'Allow',
'Alt-Svc',
'Alt-Used',
'Authorization',
'Cache-Control',
'Clear-Site-Data',
'Connection',
'Content-Disposition',
'Content-Encoding',
'Content-Language',
'Content-Length',
'Content-Location',
'Content-Range',
'Content-Security-Policy',
'Content-Security-Policy-Report-Only',
'Content-Type',
'Cookie',
'Cross-Origin-Embedder-Policy',
'Cross-Origin-Opener-Policy',
'Cross-Origin-Resource-Policy',
'Date',
'Device-Memory',
'Downlink',
'ECT',
'ETag',
'Expect',
'Expect-CT',
'Expires',
'Forwarded',
'From',
'Host',
'If-Match',
'If-Modified-Since',
'If-None-Match',
'If-Range',
'If-Unmodified-Since',
'Keep-Alive',
'Last-Modified',
'Link',
'Location',
'Max-Forwards',
'Origin',
'Permissions-Policy',
'Pragma',
'Proxy-Authenticate',
'Proxy-Authorization',
'RTT',
'Range',
'Referer',
'Referrer-Policy',
'Refresh',
'Retry-After',
'Sec-WebSocket-Accept',
'Sec-WebSocket-Extensions',
'Sec-WebSocket-Key',
'Sec-WebSocket-Protocol',
'Sec-WebSocket-Version',
'Server',
'Server-Timing',
'Service-Worker-Allowed',
'Service-Worker-Navigation-Preload',
'Set-Cookie',
'SourceMap',
'Strict-Transport-Security',
'Supports-Loading-Mode',
'TE',
'Timing-Allow-Origin',
'Trailer',
'Transfer-Encoding',
'Upgrade',
'Upgrade-Insecure-Requests',
'User-Agent',
'Vary',
'Via',
'WWW-Authenticate',
'X-Content-Type-Options',
'X-DNS-Prefetch-Control',
'X-Frame-Options',
'X-Permitted-Cross-Domain-Policies',
'X-Powered-By',
'X-Requested-With',
'X-XSS-Protection'
]
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = wellknownHeaderNames[i]
const lowerCasedKey = key.toLowerCase()
headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
lowerCasedKey
}
// Note: properties inherited from Object.prototype (e.g. `Object#hasOwnProperty`) must not be reachable through this record.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)
module.exports = {
wellknownHeaderNames,
headerNameLowerCasedRecord
}
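// Quick illustration of the record built above: both the canonical and the already
// lowercased spellings map to the lowercased name, while unknown header names stay
// undefined because the record has a null prototype.
console.assert(headerNameLowerCasedRecord['Content-Type'] === 'content-type')
console.assert(headerNameLowerCasedRecord['content-type'] === 'content-type')
console.assert(headerNameLowerCasedRecord['X-Custom-Header'] === undefined)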

202
node_modules/undici/lib/core/diagnostics.js generated vendored Normal file

@@ -0,0 +1,202 @@
'use strict'
const diagnosticsChannel = require('node:diagnostics_channel')
const util = require('node:util')
const undiciDebugLog = util.debuglog('undici')
const fetchDebuglog = util.debuglog('fetch')
const websocketDebuglog = util.debuglog('websocket')
let isClientSet = false
const channels = {
// Client
beforeConnect: diagnosticsChannel.channel('undici:client:beforeConnect'),
connected: diagnosticsChannel.channel('undici:client:connected'),
connectError: diagnosticsChannel.channel('undici:client:connectError'),
sendHeaders: diagnosticsChannel.channel('undici:client:sendHeaders'),
// Request
create: diagnosticsChannel.channel('undici:request:create'),
bodySent: diagnosticsChannel.channel('undici:request:bodySent'),
headers: diagnosticsChannel.channel('undici:request:headers'),
trailers: diagnosticsChannel.channel('undici:request:trailers'),
error: diagnosticsChannel.channel('undici:request:error'),
// WebSocket
open: diagnosticsChannel.channel('undici:websocket:open'),
close: diagnosticsChannel.channel('undici:websocket:close'),
socketError: diagnosticsChannel.channel('undici:websocket:socket_error'),
ping: diagnosticsChannel.channel('undici:websocket:ping'),
pong: diagnosticsChannel.channel('undici:websocket:pong')
}
if (undiciDebugLog.enabled || fetchDebuglog.enabled) {
const debuglog = fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog
// Track all Client events
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debuglog(
'connecting to %s using %s%s',
`${host}${port ? `:${port}` : ''}`,
protocol,
version
)
})
diagnosticsChannel.channel('undici:client:connected').subscribe(evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debuglog(
'connected to %s using %s%s',
`${host}${port ? `:${port}` : ''}`,
protocol,
version
)
})
diagnosticsChannel.channel('undici:client:connectError').subscribe(evt => {
const {
connectParams: { version, protocol, port, host },
error
} = evt
debuglog(
'connection to %s using %s%s errored - %s',
`${host}${port ? `:${port}` : ''}`,
protocol,
version,
error.message
)
})
diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(evt => {
const {
request: { method, path, origin }
} = evt
debuglog('sending request to %s %s/%s', method, origin, path)
})
// Track Request events
diagnosticsChannel.channel('undici:request:headers').subscribe(evt => {
const {
request: { method, path, origin },
response: { statusCode }
} = evt
debuglog(
'received response to %s %s/%s - HTTP %d',
method,
origin,
path,
statusCode
)
})
diagnosticsChannel.channel('undici:request:trailers').subscribe(evt => {
const {
request: { method, path, origin }
} = evt
debuglog('trailers received from %s %s/%s', method, origin, path)
})
diagnosticsChannel.channel('undici:request:error').subscribe(evt => {
const {
request: { method, path, origin },
error
} = evt
debuglog(
'request to %s %s/%s errored - %s',
method,
origin,
path,
error.message
)
})
isClientSet = true
}
if (websocketDebuglog.enabled) {
if (!isClientSet) {
const debuglog = undiciDebugLog.enabled ? undiciDebugLog : websocketDebuglog
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debuglog(
'connecting to %s%s using %s%s',
host,
port ? `:${port}` : '',
protocol,
version
)
})
diagnosticsChannel.channel('undici:client:connected').subscribe(evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debuglog(
'connected to %s%s using %s%s',
host,
port ? `:${port}` : '',
protocol,
version
)
})
diagnosticsChannel.channel('undici:client:connectError').subscribe(evt => {
const {
connectParams: { version, protocol, port, host },
error
} = evt
debuglog(
'connection to %s%s using %s%s errored - %s',
host,
port ? `:${port}` : '',
protocol,
version,
error.message
)
})
diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(evt => {
const {
request: { method, path, origin }
} = evt
debuglog('sending request to %s %s/%s', method, origin, path)
})
}
// Track all WebSocket events
diagnosticsChannel.channel('undici:websocket:open').subscribe(evt => {
const {
address: { address, port }
} = evt
websocketDebuglog('connection opened %s%s', address, port ? `:${port}` : '')
})
diagnosticsChannel.channel('undici:websocket:close').subscribe(evt => {
const { websocket, code, reason } = evt
websocketDebuglog(
'closed connection to %s - %s %s',
websocket.url,
code,
reason
)
})
diagnosticsChannel.channel('undici:websocket:socket_error').subscribe(err => {
websocketDebuglog('connection errored - %s', err.message)
})
diagnosticsChannel.channel('undici:websocket:ping').subscribe(evt => {
websocketDebuglog('ping received')
})
diagnosticsChannel.channel('undici:websocket:pong').subscribe(evt => {
websocketDebuglog('pong received')
})
}
module.exports = {
channels
}
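// The same channels can be consumed from application code with Node's diagnostics_channel
// API; the payload shape below matches what the publishers in this bundle send (for example
// Request publishes `{ request: this }` on 'undici:request:create'). The debuglog branches
// above are enabled via NODE_DEBUG=undici, NODE_DEBUG=fetch or NODE_DEBUG=websocket.
const dc = require('node:diagnostics_channel')
dc.channel('undici:request:create').subscribe(({ request }) => {
  console.log('undici request created: %s %s%s', request.method, request.origin, request.path)
})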

231
node_modules/undici/lib/core/errors.js generated vendored Normal file

@@ -0,0 +1,231 @@
'use strict'
class UndiciError extends Error {
constructor (message) {
super(message)
this.name = 'UndiciError'
this.code = 'UND_ERR'
}
}
class ConnectTimeoutError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ConnectTimeoutError'
this.message = message || 'Connect Timeout Error'
this.code = 'UND_ERR_CONNECT_TIMEOUT'
}
}
class HeadersTimeoutError extends UndiciError {
constructor (message) {
super(message)
this.name = 'HeadersTimeoutError'
this.message = message || 'Headers Timeout Error'
this.code = 'UND_ERR_HEADERS_TIMEOUT'
}
}
class HeadersOverflowError extends UndiciError {
constructor (message) {
super(message)
this.name = 'HeadersOverflowError'
this.message = message || 'Headers Overflow Error'
this.code = 'UND_ERR_HEADERS_OVERFLOW'
}
}
class BodyTimeoutError extends UndiciError {
constructor (message) {
super(message)
this.name = 'BodyTimeoutError'
this.message = message || 'Body Timeout Error'
this.code = 'UND_ERR_BODY_TIMEOUT'
}
}
class ResponseStatusCodeError extends UndiciError {
constructor (message, statusCode, headers, body) {
super(message)
this.name = 'ResponseStatusCodeError'
this.message = message || 'Response Status Code Error'
this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
this.body = body
this.status = statusCode
this.statusCode = statusCode
this.headers = headers
}
}
class InvalidArgumentError extends UndiciError {
constructor (message) {
super(message)
this.name = 'InvalidArgumentError'
this.message = message || 'Invalid Argument Error'
this.code = 'UND_ERR_INVALID_ARG'
}
}
class InvalidReturnValueError extends UndiciError {
constructor (message) {
super(message)
this.name = 'InvalidReturnValueError'
this.message = message || 'Invalid Return Value Error'
this.code = 'UND_ERR_INVALID_RETURN_VALUE'
}
}
class AbortError extends UndiciError {
constructor (message) {
super(message)
this.name = 'AbortError'
this.message = message || 'The operation was aborted'
}
}
class RequestAbortedError extends AbortError {
constructor (message) {
super(message)
this.name = 'AbortError'
this.message = message || 'Request aborted'
this.code = 'UND_ERR_ABORTED'
}
}
class InformationalError extends UndiciError {
constructor (message) {
super(message)
this.name = 'InformationalError'
this.message = message || 'Request information'
this.code = 'UND_ERR_INFO'
}
}
class RequestContentLengthMismatchError extends UndiciError {
constructor (message) {
super(message)
this.name = 'RequestContentLengthMismatchError'
this.message = message || 'Request body length does not match content-length header'
this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
}
}
class ResponseContentLengthMismatchError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ResponseContentLengthMismatchError'
this.message = message || 'Response body length does not match content-length header'
this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
}
}
class ClientDestroyedError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ClientDestroyedError'
this.message = message || 'The client is destroyed'
this.code = 'UND_ERR_DESTROYED'
}
}
class ClientClosedError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ClientClosedError'
this.message = message || 'The client is closed'
this.code = 'UND_ERR_CLOSED'
}
}
class SocketError extends UndiciError {
constructor (message, socket) {
super(message)
this.name = 'SocketError'
this.message = message || 'Socket error'
this.code = 'UND_ERR_SOCKET'
this.socket = socket
}
}
class NotSupportedError extends UndiciError {
constructor (message) {
super(message)
this.name = 'NotSupportedError'
this.message = message || 'Not supported error'
this.code = 'UND_ERR_NOT_SUPPORTED'
}
}
class BalancedPoolMissingUpstreamError extends UndiciError {
constructor (message) {
super(message)
this.name = 'MissingUpstreamError'
this.message = message || 'No upstream has been added to the BalancedPool'
this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
}
}
class HTTPParserError extends Error {
constructor (message, code, data) {
super(message)
this.name = 'HTTPParserError'
this.code = code ? `HPE_${code}` : undefined
this.data = data ? data.toString() : undefined
}
}
class ResponseExceededMaxSizeError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ResponseExceededMaxSizeError'
this.message = message || 'Response content exceeded max size'
this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
}
}
class RequestRetryError extends UndiciError {
constructor (message, code, { headers, data }) {
super(message)
this.name = 'RequestRetryError'
this.message = message || 'Request retry error'
this.code = 'UND_ERR_REQ_RETRY'
this.statusCode = code
this.data = data
this.headers = headers
}
}
class SecureProxyConnectionError extends UndiciError {
constructor (cause, message, options) {
super(message, { cause, ...(options ?? {}) })
this.name = 'SecureProxyConnectionError'
this.message = message || 'Secure Proxy Connection failed'
this.code = 'UND_ERR_PRX_TLS'
this.cause = cause
}
}
module.exports = {
AbortError,
HTTPParserError,
UndiciError,
HeadersTimeoutError,
HeadersOverflowError,
BodyTimeoutError,
RequestContentLengthMismatchError,
ConnectTimeoutError,
ResponseStatusCodeError,
InvalidArgumentError,
InvalidReturnValueError,
RequestAbortedError,
ClientDestroyedError,
ClientClosedError,
InformationalError,
SocketError,
NotSupportedError,
ResponseContentLengthMismatchError,
BalancedPoolMissingUpstreamError,
ResponseExceededMaxSizeError,
RequestRetryError,
SecureProxyConnectionError
}
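// Callers generally branch on the stable `code` property rather than on the class; a small
// sketch, assuming the promise-based `request` helper exported from the package root:
async function requestWithConnectTimeoutRetry (url, attempts = 2) {
  const { request } = require('undici')
  for (let i = 0; i < attempts; i++) {
    try {
      return await request(url)
    } catch (err) {
      // ConnectTimeoutError (defined above) carries the code UND_ERR_CONNECT_TIMEOUT
      if (err.code !== 'UND_ERR_CONNECT_TIMEOUT' || i === attempts - 1) {
        throw err
      }
    }
  }
}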

396
node_modules/undici/lib/core/request.js generated vendored Normal file

@@ -0,0 +1,396 @@
'use strict'
const {
InvalidArgumentError,
NotSupportedError
} = require('./errors')
const assert = require('node:assert')
const {
isValidHTTPToken,
isValidHeaderChar,
isStream,
destroy,
isBuffer,
isFormDataLike,
isIterable,
isBlobLike,
buildURL,
validateHandler,
getServerName
} = require('./util')
const { channels } = require('./diagnostics.js')
const { headerNameLowerCasedRecord } = require('./constants')
// Verifies that a given path is valid and does not contain control chars \x00 to \x20
const invalidPathRegex = /[^\u0021-\u00ff]/
const kHandler = Symbol('handler')
class Request {
constructor (origin, {
path,
method,
body,
headers,
query,
idempotent,
blocking,
upgrade,
headersTimeout,
bodyTimeout,
reset,
throwOnError,
expectContinue,
servername
}, handler) {
if (typeof path !== 'string') {
throw new InvalidArgumentError('path must be a string')
} else if (
path[0] !== '/' &&
!(path.startsWith('http://') || path.startsWith('https://')) &&
method !== 'CONNECT'
) {
throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
} else if (invalidPathRegex.exec(path) !== null) {
throw new InvalidArgumentError('invalid request path')
}
if (typeof method !== 'string') {
throw new InvalidArgumentError('method must be a string')
} else if (!isValidHTTPToken(method)) {
throw new InvalidArgumentError('invalid request method')
}
if (upgrade && typeof upgrade !== 'string') {
throw new InvalidArgumentError('upgrade must be a string')
}
if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
throw new InvalidArgumentError('invalid headersTimeout')
}
if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
throw new InvalidArgumentError('invalid bodyTimeout')
}
if (reset != null && typeof reset !== 'boolean') {
throw new InvalidArgumentError('invalid reset')
}
if (expectContinue != null && typeof expectContinue !== 'boolean') {
throw new InvalidArgumentError('invalid expectContinue')
}
this.headersTimeout = headersTimeout
this.bodyTimeout = bodyTimeout
this.throwOnError = throwOnError === true
this.method = method
this.abort = null
if (body == null) {
this.body = null
} else if (isStream(body)) {
this.body = body
const rState = this.body._readableState
if (!rState || !rState.autoDestroy) {
this.endHandler = function autoDestroy () {
destroy(this)
}
this.body.on('end', this.endHandler)
}
this.errorHandler = err => {
if (this.abort) {
this.abort(err)
} else {
this.error = err
}
}
this.body.on('error', this.errorHandler)
} else if (isBuffer(body)) {
this.body = body.byteLength ? body : null
} else if (ArrayBuffer.isView(body)) {
this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
} else if (body instanceof ArrayBuffer) {
this.body = body.byteLength ? Buffer.from(body) : null
} else if (typeof body === 'string') {
this.body = body.length ? Buffer.from(body) : null
} else if (isFormDataLike(body) || isIterable(body) || isBlobLike(body)) {
this.body = body
} else {
throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
}
this.completed = false
this.aborted = false
this.upgrade = upgrade || null
this.path = query ? buildURL(path, query) : path
this.origin = origin
this.idempotent = idempotent == null
? method === 'HEAD' || method === 'GET'
: idempotent
this.blocking = blocking == null ? false : blocking
this.reset = reset == null ? null : reset
this.host = null
this.contentLength = null
this.contentType = null
this.headers = []
// Only for H2
this.expectContinue = expectContinue != null ? expectContinue : false
if (Array.isArray(headers)) {
if (headers.length % 2 !== 0) {
throw new InvalidArgumentError('headers array must be even')
}
for (let i = 0; i < headers.length; i += 2) {
processHeader(this, headers[i], headers[i + 1])
}
} else if (headers && typeof headers === 'object') {
if (headers[Symbol.iterator]) {
for (const header of headers) {
if (!Array.isArray(header) || header.length !== 2) {
throw new InvalidArgumentError('headers must be in key-value pair format')
}
processHeader(this, header[0], header[1])
}
} else {
const keys = Object.keys(headers)
for (let i = 0; i < keys.length; ++i) {
processHeader(this, keys[i], headers[keys[i]])
}
}
} else if (headers != null) {
throw new InvalidArgumentError('headers must be an object or an array')
}
validateHandler(handler, method, upgrade)
this.servername = servername || getServerName(this.host)
this[kHandler] = handler
if (channels.create.hasSubscribers) {
channels.create.publish({ request: this })
}
}
onBodySent (chunk) {
if (this[kHandler].onBodySent) {
try {
return this[kHandler].onBodySent(chunk)
} catch (err) {
this.abort(err)
}
}
}
onRequestSent () {
if (channels.bodySent.hasSubscribers) {
channels.bodySent.publish({ request: this })
}
if (this[kHandler].onRequestSent) {
try {
return this[kHandler].onRequestSent()
} catch (err) {
this.abort(err)
}
}
}
onConnect (abort) {
assert(!this.aborted)
assert(!this.completed)
if (this.error) {
abort(this.error)
} else {
this.abort = abort
return this[kHandler].onConnect(abort)
}
}
onResponseStarted () {
return this[kHandler].onResponseStarted?.()
}
onHeaders (statusCode, headers, resume, statusText) {
assert(!this.aborted)
assert(!this.completed)
if (channels.headers.hasSubscribers) {
channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
}
try {
return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
} catch (err) {
this.abort(err)
}
}
onData (chunk) {
assert(!this.aborted)
assert(!this.completed)
try {
return this[kHandler].onData(chunk)
} catch (err) {
this.abort(err)
return false
}
}
onUpgrade (statusCode, headers, socket) {
assert(!this.aborted)
assert(!this.completed)
return this[kHandler].onUpgrade(statusCode, headers, socket)
}
onComplete (trailers) {
this.onFinally()
assert(!this.aborted)
this.completed = true
if (channels.trailers.hasSubscribers) {
channels.trailers.publish({ request: this, trailers })
}
try {
return this[kHandler].onComplete(trailers)
} catch (err) {
// TODO (fix): This might be a bad idea?
this.onError(err)
}
}
onError (error) {
this.onFinally()
if (channels.error.hasSubscribers) {
channels.error.publish({ request: this, error })
}
if (this.aborted) {
return
}
this.aborted = true
return this[kHandler].onError(error)
}
onFinally () {
if (this.errorHandler) {
this.body.off('error', this.errorHandler)
this.errorHandler = null
}
if (this.endHandler) {
this.body.off('end', this.endHandler)
this.endHandler = null
}
}
addHeader (key, value) {
processHeader(this, key, value)
return this
}
}
function processHeader (request, key, val) {
if (val && (typeof val === 'object' && !Array.isArray(val))) {
throw new InvalidArgumentError(`invalid ${key} header`)
} else if (val === undefined) {
return
}
let headerName = headerNameLowerCasedRecord[key]
if (headerName === undefined) {
headerName = key.toLowerCase()
if (headerNameLowerCasedRecord[headerName] === undefined && !isValidHTTPToken(headerName)) {
throw new InvalidArgumentError('invalid header key')
}
}
if (Array.isArray(val)) {
const arr = []
for (let i = 0; i < val.length; i++) {
if (typeof val[i] === 'string') {
if (!isValidHeaderChar(val[i])) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
arr.push(val[i])
} else if (val[i] === null) {
arr.push('')
} else if (typeof val[i] === 'object') {
throw new InvalidArgumentError(`invalid ${key} header`)
} else {
arr.push(`${val[i]}`)
}
}
val = arr
} else if (typeof val === 'string') {
if (!isValidHeaderChar(val)) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
} else if (val === null) {
val = ''
} else if (typeof val === 'object') {
throw new InvalidArgumentError(`invalid ${key} header`)
} else {
val = `${val}`
}
if (request.host === null && headerName === 'host') {
if (typeof val !== 'string') {
throw new InvalidArgumentError('invalid host header')
}
// Consumed by Client
request.host = val
} else if (request.contentLength === null && headerName === 'content-length') {
request.contentLength = parseInt(val, 10)
if (!Number.isFinite(request.contentLength)) {
throw new InvalidArgumentError('invalid content-length header')
}
} else if (request.contentType === null && headerName === 'content-type') {
request.contentType = val
request.headers.push(key, val)
} else if (headerName === 'transfer-encoding' || headerName === 'keep-alive' || headerName === 'upgrade') {
throw new InvalidArgumentError(`invalid ${headerName} header`)
} else if (headerName === 'connection') {
const value = typeof val === 'string' ? val.toLowerCase() : null
if (value !== 'close' && value !== 'keep-alive') {
throw new InvalidArgumentError('invalid connection header')
}
if (value === 'close') {
request.reset = true
}
} else if (headerName === 'expect') {
throw new NotSupportedError('expect header not supported')
} else {
request.headers.push(key, val)
}
}
module.exports = Request
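// The `headers` option accepted by the constructor above can be a flat [name, value, ...]
// array, a plain object, or an iterable of [name, value] pairs; all three are fed through
// processHeader into the flat `this.headers` array. Equivalent inputs, as a sketch:
const flatHeaders = ['accept', 'application/json', 'x-trace-id', 'abc123']
const objectHeaders = { accept: 'application/json', 'x-trace-id': 'abc123' }
const iterableHeaders = new Map([['accept', 'application/json'], ['x-trace-id', 'abc123']])
// The flat form must hold an even number of entries, as checked in the constructor.
console.assert(flatHeaders.length % 2 === 0 && objectHeaders && iterableHeaders.size === 2)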

64
node_modules/undici/lib/core/symbols.js generated vendored Normal file

@@ -0,0 +1,64 @@
module.exports = {
kClose: Symbol('close'),
kDestroy: Symbol('destroy'),
kDispatch: Symbol('dispatch'),
kUrl: Symbol('url'),
kWriting: Symbol('writing'),
kResuming: Symbol('resuming'),
kQueue: Symbol('queue'),
kConnect: Symbol('connect'),
kConnecting: Symbol('connecting'),
kHeadersList: Symbol('headers list'),
kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
kKeepAliveTimeoutValue: Symbol('keep alive timeout'),
kKeepAlive: Symbol('keep alive'),
kHeadersTimeout: Symbol('headers timeout'),
kBodyTimeout: Symbol('body timeout'),
kServerName: Symbol('server name'),
kLocalAddress: Symbol('local address'),
kHost: Symbol('host'),
kNoRef: Symbol('no ref'),
kBodyUsed: Symbol('used'),
kRunning: Symbol('running'),
kBlocking: Symbol('blocking'),
kPending: Symbol('pending'),
kSize: Symbol('size'),
kBusy: Symbol('busy'),
kQueued: Symbol('queued'),
kFree: Symbol('free'),
kConnected: Symbol('connected'),
kClosed: Symbol('closed'),
kNeedDrain: Symbol('need drain'),
kReset: Symbol('reset'),
kDestroyed: Symbol.for('nodejs.stream.destroyed'),
kResume: Symbol('resume'),
kOnError: Symbol('on error'),
kMaxHeadersSize: Symbol('max headers size'),
kRunningIdx: Symbol('running index'),
kPendingIdx: Symbol('pending index'),
kError: Symbol('error'),
kClients: Symbol('clients'),
kClient: Symbol('client'),
kParser: Symbol('parser'),
kOnDestroyed: Symbol('destroy callbacks'),
kPipelining: Symbol('pipelining'),
kSocket: Symbol('socket'),
kHostHeader: Symbol('host header'),
kConnector: Symbol('connector'),
kStrictContentLength: Symbol('strict content length'),
kMaxRedirections: Symbol('maxRedirections'),
kMaxRequests: Symbol('maxRequestsPerClient'),
kProxy: Symbol('proxy agent options'),
kCounter: Symbol('socket request counter'),
kInterceptors: Symbol('dispatch interceptors'),
kMaxResponseSize: Symbol('max response size'),
kHTTP2Session: Symbol('http2Session'),
kHTTP2SessionState: Symbol('http2Session state'),
kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
kConstruct: Symbol('constructable'),
kListeners: Symbol('listeners'),
kHTTPContext: Symbol('http context'),
kMaxConcurrentStreams: Symbol('max concurrent streams')
}

152
node_modules/undici/lib/core/tree.js generated vendored Normal file

@@ -0,0 +1,152 @@
'use strict'
const {
wellknownHeaderNames,
headerNameLowerCasedRecord
} = require('./constants')
class TstNode {
/** @type {any} */
value = null
/** @type {null | TstNode} */
left = null
/** @type {null | TstNode} */
middle = null
/** @type {null | TstNode} */
right = null
/** @type {number} */
code
/**
* @param {string} key
* @param {any} value
* @param {number} index
*/
constructor (key, value, index) {
if (index === undefined || index >= key.length) {
throw new TypeError('Unreachable')
}
const code = this.code = key.charCodeAt(index)
// check code is ascii string
if (code > 0x7F) {
throw new TypeError('key must be ascii string')
}
if (key.length !== ++index) {
this.middle = new TstNode(key, value, index)
} else {
this.value = value
}
}
/**
* @param {string} key
* @param {any} value
*/
add (key, value) {
const length = key.length
if (length === 0) {
throw new TypeError('Unreachable')
}
let index = 0
let node = this
while (true) {
const code = key.charCodeAt(index)
// check code is ascii string
if (code > 0x7F) {
throw new TypeError('key must be ascii string')
}
if (node.code === code) {
if (length === ++index) {
node.value = value
break
} else if (node.middle !== null) {
node = node.middle
} else {
node.middle = new TstNode(key, value, index)
break
}
} else if (node.code < code) {
if (node.left !== null) {
node = node.left
} else {
node.left = new TstNode(key, value, index)
break
}
} else if (node.right !== null) {
node = node.right
} else {
node.right = new TstNode(key, value, index)
break
}
}
}
/**
* @param {Uint8Array} key
* @return {TstNode | null}
*/
search (key) {
const keylength = key.length
let index = 0
let node = this
while (node !== null && index < keylength) {
let code = key[index]
// A-Z
// First check if it is bigger than 0x5a.
// Lowercase letters have higher char codes than uppercase ones.
// Also we assume that headers will mostly contain lowercase characters.
if (code <= 0x5a && code >= 0x41) {
// Lowercase for uppercase.
code |= 32
}
while (node !== null) {
if (code === node.code) {
if (keylength === ++index) {
// Returns Node since it is the last key.
return node
}
node = node.middle
break
}
node = node.code < code ? node.left : node.right
}
}
return null
}
}
class TernarySearchTree {
/** @type {TstNode | null} */
node = null
/**
* @param {string} key
* @param {any} value
* */
insert (key, value) {
if (this.node === null) {
this.node = new TstNode(key, value, 0)
} else {
this.node.add(key, value)
}
}
/**
* @param {Uint8Array} key
* @return {any}
*/
lookup (key) {
return this.node?.search(key)?.value ?? null
}
}
const tree = new TernarySearchTree()
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = headerNameLowerCasedRecord[wellknownHeaderNames[i]]
tree.insert(key, key)
}
module.exports = {
TernarySearchTree,
tree
}
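// The tree is queried with the raw header-name bytes; uppercase ASCII is folded to
// lowercase during the search, so looking up a wire-format name yields the canonical
// lowercased string, and unknown names yield null.
console.assert(tree.lookup(Buffer.from('Content-Type')) === 'content-type')
console.assert(tree.lookup(Buffer.from('X-Not-Well-Known')) === null)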

613
node_modules/undici/lib/core/util.js generated vendored Normal file

@@ -0,0 +1,613 @@
'use strict'
const assert = require('node:assert')
const { kDestroyed, kBodyUsed, kListeners } = require('./symbols')
const { IncomingMessage } = require('node:http')
const stream = require('node:stream')
const net = require('node:net')
const { InvalidArgumentError } = require('./errors')
const { Blob } = require('node:buffer')
const nodeUtil = require('node:util')
const { stringify } = require('node:querystring')
const { headerNameLowerCasedRecord } = require('./constants')
const { tree } = require('./tree')
const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
function nop () {}
function isStream (obj) {
return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
}
// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
function isBlobLike (object) {
if (object === null) {
return false
} else if (object instanceof Blob) {
return true
} else if (typeof object !== 'object') {
return false
} else {
const sTag = object[Symbol.toStringTag]
return (sTag === 'Blob' || sTag === 'File') && (
('stream' in object && typeof object.stream === 'function') ||
('arrayBuffer' in object && typeof object.arrayBuffer === 'function')
)
}
}
function buildURL (url, queryParams) {
if (url.includes('?') || url.includes('#')) {
throw new Error('Query params cannot be passed when url already contains "?" or "#".')
}
const stringified = stringify(queryParams)
if (stringified) {
url += '?' + stringified
}
return url
}
function parseURL (url) {
if (typeof url === 'string') {
url = new URL(url)
if (!/^https?:/.test(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
return url
}
if (!url || typeof url !== 'object') {
throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
}
if (!/^https?:/.test(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
if (!(url instanceof URL)) {
if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
}
if (url.path != null && typeof url.path !== 'string') {
throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
}
if (url.pathname != null && typeof url.pathname !== 'string') {
throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
}
if (url.hostname != null && typeof url.hostname !== 'string') {
throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
}
if (url.origin != null && typeof url.origin !== 'string') {
throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
}
const port = url.port != null
? url.port
: (url.protocol === 'https:' ? 443 : 80)
let origin = url.origin != null
? url.origin
: `${url.protocol}//${url.hostname}:${port}`
let path = url.path != null
? url.path
: `${url.pathname || ''}${url.search || ''}`
if (origin.endsWith('/')) {
origin = origin.substring(0, origin.length - 1)
}
if (path && !path.startsWith('/')) {
path = `/${path}`
}
// new URL(path, origin) is unsafe when `path` contains an absolute URL
// From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
// If first parameter is a relative URL, second param is required, and will be used as the base URL.
// If first parameter is an absolute URL, a given second param will be ignored.
url = new URL(origin + path)
}
return url
}
function parseOrigin (url) {
url = parseURL(url)
if (url.pathname !== '/' || url.search || url.hash) {
throw new InvalidArgumentError('invalid url')
}
return url
}
function getHostname (host) {
if (host[0] === '[') {
const idx = host.indexOf(']')
assert(idx !== -1)
return host.substring(1, idx)
}
const idx = host.indexOf(':')
if (idx === -1) return host
return host.substring(0, idx)
}
// IP addresses are not valid server names per RFC6066
// > Currently, the only server names supported are DNS hostnames
function getServerName (host) {
if (!host) {
return null
}
assert.strictEqual(typeof host, 'string')
const servername = getHostname(host)
if (net.isIP(servername)) {
return ''
}
return servername
}
function deepClone (obj) {
return JSON.parse(JSON.stringify(obj))
}
function isAsyncIterable (obj) {
return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
}
function isIterable (obj) {
return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))
}
function bodyLength (body) {
if (body == null) {
return 0
} else if (isStream(body)) {
const state = body._readableState
return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
? state.length
: null
} else if (isBlobLike(body)) {
return body.size != null ? body.size : null
} else if (isBuffer(body)) {
return body.byteLength
}
return null
}
function isDestroyed (body) {
return body && !!(body.destroyed || body[kDestroyed] || (stream.isDestroyed?.(body)))
}
function isReadableAborted (stream) {
const state = stream?._readableState
return isDestroyed(stream) && state && !state.endEmitted
}
function destroy (stream, err) {
if (stream == null || !isStream(stream) || isDestroyed(stream)) {
return
}
if (typeof stream.destroy === 'function') {
if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {
// See: https://github.com/nodejs/node/pull/38505/files
stream.socket = null
}
stream.destroy(err)
} else if (err) {
queueMicrotask(() => {
stream.emit('error', err)
})
}
if (stream.destroyed !== true) {
stream[kDestroyed] = true
}
}
const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
function parseKeepAliveTimeout (val) {
const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)
return m ? parseInt(m[1], 10) * 1000 : null
}
/**
* Retrieves a header name and returns its lowercase value.
* @param {string | Buffer} value Header name
* @returns {string}
*/
function headerNameToString (value) {
return typeof value === 'string'
? headerNameLowerCasedRecord[value] ?? value.toLowerCase()
: tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}
/**
* Receive the buffer as a string and return its lowercase value.
* @param {Buffer} value Header name
* @returns {string}
*/
function bufferToLowerCasedHeaderName (value) {
return tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}
/**
* @param {Record<string, string | string[]> | (Buffer | string | (Buffer | string)[])[]} headers
* @param {Record<string, string | string[]>} [obj]
* @returns {Record<string, string | string[]>}
*/
function parseHeaders (headers, obj) {
if (obj === undefined) obj = {}
for (let i = 0; i < headers.length; i += 2) {
const key = headerNameToString(headers[i])
let val = obj[key]
if (val) {
if (typeof val === 'string') {
val = [val]
obj[key] = val
}
val.push(headers[i + 1].toString('utf8'))
} else {
const headersValue = headers[i + 1]
if (typeof headersValue === 'string') {
obj[key] = headersValue
} else {
obj[key] = Array.isArray(headersValue) ? headersValue.map(x => x.toString('utf8')) : headersValue.toString('utf8')
}
}
}
// See https://github.com/nodejs/node/pull/46528
if ('content-length' in obj && 'content-disposition' in obj) {
obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
}
return obj
}
function parseRawHeaders (headers) {
const len = headers.length
const ret = new Array(len)
let hasContentLength = false
let contentDispositionIdx = -1
let key
let val
let kLen = 0
for (let n = 0; n < headers.length; n += 2) {
key = headers[n]
val = headers[n + 1]
typeof key !== 'string' && (key = key.toString())
typeof val !== 'string' && (val = val.toString('utf8'))
kLen = key.length
if (kLen === 14 && key[7] === '-' && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
hasContentLength = true
} else if (kLen === 19 && key[7] === '-' && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
contentDispositionIdx = n + 1
}
ret[n] = key
ret[n + 1] = val
}
// See https://github.com/nodejs/node/pull/46528
if (hasContentLength && contentDispositionIdx !== -1) {
ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
}
return ret
}
function isBuffer (buffer) {
// See, https://github.com/mcollina/undici/pull/319
return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
}
function validateHandler (handler, method, upgrade) {
if (!handler || typeof handler !== 'object') {
throw new InvalidArgumentError('handler must be an object')
}
if (typeof handler.onConnect !== 'function') {
throw new InvalidArgumentError('invalid onConnect method')
}
if (typeof handler.onError !== 'function') {
throw new InvalidArgumentError('invalid onError method')
}
if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
throw new InvalidArgumentError('invalid onBodySent method')
}
if (upgrade || method === 'CONNECT') {
if (typeof handler.onUpgrade !== 'function') {
throw new InvalidArgumentError('invalid onUpgrade method')
}
} else {
if (typeof handler.onHeaders !== 'function') {
throw new InvalidArgumentError('invalid onHeaders method')
}
if (typeof handler.onData !== 'function') {
throw new InvalidArgumentError('invalid onData method')
}
if (typeof handler.onComplete !== 'function') {
throw new InvalidArgumentError('invalid onComplete method')
}
}
}
// A body is disturbed if it has been read from and it cannot
// be re-used without losing state or data.
function isDisturbed (body) {
// TODO (fix): Why is body[kBodyUsed] needed?
return !!(body && (stream.isDisturbed(body) || body[kBodyUsed]))
}
function isErrored (body) {
return !!(body && stream.isErrored(body))
}
function isReadable (body) {
return !!(body && stream.isReadable(body))
}
function getSocketInfo (socket) {
return {
localAddress: socket.localAddress,
localPort: socket.localPort,
remoteAddress: socket.remoteAddress,
remotePort: socket.remotePort,
remoteFamily: socket.remoteFamily,
timeout: socket.timeout,
bytesWritten: socket.bytesWritten,
bytesRead: socket.bytesRead
}
}
/** @type {globalThis['ReadableStream']} */
function ReadableStreamFrom (iterable) {
// We cannot use ReadableStream.from here because it does not return a byte stream.
let iterator
return new ReadableStream(
{
async start () {
iterator = iterable[Symbol.asyncIterator]()
},
async pull (controller) {
const { done, value } = await iterator.next()
if (done) {
queueMicrotask(() => {
controller.close()
controller.byobRequest?.respond(0)
})
} else {
const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
if (buf.byteLength) {
controller.enqueue(new Uint8Array(buf))
}
}
return controller.desiredSize > 0
},
async cancel (reason) {
await iterator.return()
},
type: 'bytes'
}
)
}
// The chunk should be a FormData instance and contains
// all the required methods.
function isFormDataLike (object) {
return (
object &&
typeof object === 'object' &&
typeof object.append === 'function' &&
typeof object.delete === 'function' &&
typeof object.get === 'function' &&
typeof object.getAll === 'function' &&
typeof object.has === 'function' &&
typeof object.set === 'function' &&
object[Symbol.toStringTag] === 'FormData'
)
}
function addAbortListener (signal, listener) {
if ('addEventListener' in signal) {
signal.addEventListener('abort', listener, { once: true })
return () => signal.removeEventListener('abort', listener)
}
signal.addListener('abort', listener)
return () => signal.removeListener('abort', listener)
}
const hasToWellFormed = typeof String.prototype.toWellFormed === 'function'
const hasIsWellFormed = typeof String.prototype.isWellFormed === 'function'
/**
* @param {string} val
*/
function toUSVString (val) {
return hasToWellFormed ? `${val}`.toWellFormed() : nodeUtil.toUSVString(val)
}
/**
* @param {string} val
*/
// TODO: move this to webidl
function isUSVString (val) {
return hasIsWellFormed ? `${val}`.isWellFormed() : toUSVString(val) === `${val}`
}
/**
* @see https://tools.ietf.org/html/rfc7230#section-3.2.6
* @param {number} c
*/
function isTokenCharCode (c) {
switch (c) {
case 0x22:
case 0x28:
case 0x29:
case 0x2c:
case 0x2f:
case 0x3a:
case 0x3b:
case 0x3c:
case 0x3d:
case 0x3e:
case 0x3f:
case 0x40:
case 0x5b:
case 0x5c:
case 0x5d:
case 0x7b:
case 0x7d:
// DQUOTE and "(),/:;<=>?@[\]{}"
return false
default:
// VCHAR %x21-7E
return c >= 0x21 && c <= 0x7e
}
}
/**
* @param {string} characters
*/
function isValidHTTPToken (characters) {
if (characters.length === 0) {
return false
}
for (let i = 0; i < characters.length; ++i) {
if (!isTokenCharCode(characters.charCodeAt(i))) {
return false
}
}
return true
}
// headerCharRegex have been lifted from
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
/**
* Matches if val contains an invalid field-vchar
* field-value = *( field-content / obs-fold )
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
* field-vchar = VCHAR / obs-text
*/
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
/**
* @param {string} characters
*/
function isValidHeaderChar (characters) {
return !headerCharRegex.test(characters)
}
// Parsed accordingly to RFC 9110
// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
function parseRangeHeader (range) {
if (range == null || range === '') return { start: 0, end: null, size: null }
const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null
return m
? {
start: parseInt(m[1]),
end: m[2] ? parseInt(m[2]) : null,
size: m[3] ? parseInt(m[3]) : null
}
: null
}
function addListener (obj, name, listener) {
const listeners = (obj[kListeners] ??= [])
listeners.push([name, listener])
obj.on(name, listener)
return obj
}
function removeAllListeners (obj) {
for (const [name, listener] of obj[kListeners] ?? []) {
obj.removeListener(name, listener)
}
obj[kListeners] = null
}
function errorRequest (client, request, err) {
try {
request.onError(err)
assert(request.aborted)
} catch (err) {
client.emit('error', err)
}
}
const kEnumerableProperty = Object.create(null)
kEnumerableProperty.enumerable = true
module.exports = {
kEnumerableProperty,
nop,
isDisturbed,
isErrored,
isReadable,
toUSVString,
isUSVString,
isReadableAborted,
isBlobLike,
parseOrigin,
parseURL,
getServerName,
isStream,
isIterable,
isAsyncIterable,
isDestroyed,
headerNameToString,
bufferToLowerCasedHeaderName,
addListener,
removeAllListeners,
errorRequest,
parseRawHeaders,
parseHeaders,
parseKeepAliveTimeout,
destroy,
bodyLength,
deepClone,
ReadableStreamFrom,
isBuffer,
validateHandler,
getSocketInfo,
isFormDataLike,
buildURL,
addAbortListener,
isValidHTTPToken,
isValidHeaderChar,
isTokenCharCode,
parseRangeHeader,
nodeMajor,
nodeMinor,
nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),
safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']
}
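// A few of the helpers above on representative inputs, as a quick sanity sketch:
console.assert(headerNameToString(Buffer.from('Content-Length')) === 'content-length')
console.assert(parseKeepAliveTimeout('timeout=5, max=1000') === 5000)
console.assert(getServerName('example.com:8080') === 'example.com')
const exampleRange = parseRangeHeader('bytes 0-499/1234')
console.assert(exampleRange.start === 0 && exampleRange.end === 499 && exampleRange.size === 1234)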

129
node_modules/undici/lib/dispatcher/agent.js generated vendored Normal file

@@ -0,0 +1,129 @@
'use strict'
const { InvalidArgumentError } = require('../core/errors')
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('../core/symbols')
const DispatcherBase = require('./dispatcher-base')
const Pool = require('./pool')
const Client = require('./client')
const util = require('../core/util')
const createRedirectInterceptor = require('../interceptor/redirect-interceptor')
const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kMaxRedirections = Symbol('maxRedirections')
const kOnDrain = Symbol('onDrain')
const kFactory = Symbol('factory')
const kOptions = Symbol('options')
function defaultFactory (origin, opts) {
return opts && opts.connections === 1
? new Client(origin, opts)
: new Pool(origin, opts)
}
class Agent extends DispatcherBase {
constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {
super()
if (typeof factory !== 'function') {
throw new InvalidArgumentError('factory must be a function.')
}
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
throw new InvalidArgumentError('connect must be a function or an object')
}
if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {
throw new InvalidArgumentError('maxRedirections must be a positive number')
}
if (connect && typeof connect !== 'function') {
connect = { ...connect }
}
this[kInterceptors] = options.interceptors?.Agent && Array.isArray(options.interceptors.Agent)
? options.interceptors.Agent
: [createRedirectInterceptor({ maxRedirections })]
this[kOptions] = { ...util.deepClone(options), connect }
this[kOptions].interceptors = options.interceptors
? { ...options.interceptors }
: undefined
this[kMaxRedirections] = maxRedirections
this[kFactory] = factory
this[kClients] = new Map()
this[kOnDrain] = (origin, targets) => {
this.emit('drain', origin, [this, ...targets])
}
this[kOnConnect] = (origin, targets) => {
this.emit('connect', origin, [this, ...targets])
}
this[kOnDisconnect] = (origin, targets, err) => {
this.emit('disconnect', origin, [this, ...targets], err)
}
this[kOnConnectionError] = (origin, targets, err) => {
this.emit('connectionError', origin, [this, ...targets], err)
}
}
get [kRunning] () {
let ret = 0
for (const client of this[kClients].values()) {
ret += client[kRunning]
}
return ret
}
[kDispatch] (opts, handler) {
let key
if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
key = String(opts.origin)
} else {
throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
}
let dispatcher = this[kClients].get(key)
if (!dispatcher) {
dispatcher = this[kFactory](opts.origin, this[kOptions])
.on('drain', this[kOnDrain])
.on('connect', this[kOnConnect])
.on('disconnect', this[kOnDisconnect])
.on('connectionError', this[kOnConnectionError])
// This introduces a tiny memory leak, as dispatchers are never removed from the map.
      // TODO(mcollina): remove the timer when the client/pool do not have any more
// active connections.
this[kClients].set(key, dispatcher)
}
return dispatcher.dispatch(opts, handler)
}
async [kClose] () {
const closePromises = []
for (const client of this[kClients].values()) {
closePromises.push(client.close())
}
this[kClients].clear()
await Promise.all(closePromises)
}
async [kDestroy] (err) {
const destroyPromises = []
for (const client of this[kClients].values()) {
destroyPromises.push(client.destroy(err))
}
this[kClients].clear()
await Promise.all(destroyPromises)
}
}
module.exports = Agent
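// Usage sketch: an Agent lazily creates one Client or Pool per origin and routes each
// dispatch to it (the public dispatch wrapper is assumed to come from DispatcherBase).
// The handler shape below is the one enforced by validateHandler in ../core/util.js,
// and the opts mirror what Request in ../core/request.js expects.
const exampleAgent = new Agent({ connections: 10, maxRedirections: 3 })
exampleAgent.dispatch(
  { origin: 'https://example.com', path: '/', method: 'GET' },
  {
    onConnect () {},
    onHeaders (statusCode, headers, resume) { return true },
    onData (chunk) { return true },
    onComplete (trailers) {},
    onError (err) { console.error('request failed', err) }
  }
)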

190
node_modules/undici/lib/dispatcher/balanced-pool.js generated vendored Normal file

@@ -0,0 +1,190 @@
'use strict'
const {
BalancedPoolMissingUpstreamError,
InvalidArgumentError
} = require('../core/errors')
const {
PoolBase,
kClients,
kNeedDrain,
kAddClient,
kRemoveClient,
kGetDispatcher
} = require('./pool-base')
const Pool = require('./pool')
const { kUrl, kInterceptors } = require('../core/symbols')
const { parseOrigin } = require('../core/util')
const kFactory = Symbol('factory')
const kOptions = Symbol('options')
const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
const kCurrentWeight = Symbol('kCurrentWeight')
const kIndex = Symbol('kIndex')
const kWeight = Symbol('kWeight')
const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
const kErrorPenalty = Symbol('kErrorPenalty')
function getGreatestCommonDivisor (a, b) {
if (b === 0) return a
return getGreatestCommonDivisor(b, a % b)
}
function defaultFactory (origin, opts) {
return new Pool(origin, opts)
}
class BalancedPool extends PoolBase {
constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
super()
this[kOptions] = opts
this[kIndex] = -1
this[kCurrentWeight] = 0
this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
this[kErrorPenalty] = this[kOptions].errorPenalty || 15
if (!Array.isArray(upstreams)) {
upstreams = [upstreams]
}
if (typeof factory !== 'function') {
throw new InvalidArgumentError('factory must be a function.')
}
this[kInterceptors] = opts.interceptors?.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
? opts.interceptors.BalancedPool
: []
this[kFactory] = factory
for (const upstream of upstreams) {
this.addUpstream(upstream)
}
this._updateBalancedPoolStats()
}
addUpstream (upstream) {
const upstreamOrigin = parseOrigin(upstream).origin
if (this[kClients].find((pool) => (
pool[kUrl].origin === upstreamOrigin &&
pool.closed !== true &&
pool.destroyed !== true
))) {
return this
}
const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))
this[kAddClient](pool)
pool.on('connect', () => {
pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
})
pool.on('connectionError', () => {
pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
this._updateBalancedPoolStats()
})
pool.on('disconnect', (...args) => {
const err = args[2]
if (err && err.code === 'UND_ERR_SOCKET') {
// decrease the weight of the pool.
pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
this._updateBalancedPoolStats()
}
})
for (const client of this[kClients]) {
client[kWeight] = this[kMaxWeightPerServer]
}
this._updateBalancedPoolStats()
return this
}
_updateBalancedPoolStats () {
this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
}
removeUpstream (upstream) {
const upstreamOrigin = parseOrigin(upstream).origin
const pool = this[kClients].find((pool) => (
pool[kUrl].origin === upstreamOrigin &&
pool.closed !== true &&
pool.destroyed !== true
))
if (pool) {
this[kRemoveClient](pool)
}
return this
}
get upstreams () {
return this[kClients]
.filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
.map((p) => p[kUrl].origin)
}
[kGetDispatcher] () {
    // We validate that the number of pools is greater than 0,
// otherwise we would have to wait until an upstream
// is added, which might never happen.
if (this[kClients].length === 0) {
throw new BalancedPoolMissingUpstreamError()
}
const dispatcher = this[kClients].find(dispatcher => (
!dispatcher[kNeedDrain] &&
dispatcher.closed !== true &&
dispatcher.destroyed !== true
))
if (!dispatcher) {
return
}
const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)
if (allClientsBusy) {
return
}
let counter = 0
let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])
while (counter++ < this[kClients].length) {
this[kIndex] = (this[kIndex] + 1) % this[kClients].length
const pool = this[kClients][this[kIndex]]
// find pool index with the largest weight
if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
maxWeightIndex = this[kIndex]
}
      // decrease the current weight every `this[kClients].length` iterations.
if (this[kIndex] === 0) {
// Set the current weight to the next lower weight.
this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]
if (this[kCurrentWeight] <= 0) {
this[kCurrentWeight] = this[kMaxWeightPerServer]
}
}
if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
return pool
}
}
this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
this[kIndex] = maxWeightIndex
return this[kClients][maxWeightIndex]
}
}
module.exports = BalancedPool
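// Usage sketch: upstreams share a weighted round-robin; a pool's weight drops by
// errorPenalty on socket errors and recovers on successful connects, capped at
// maxWeightPerServer.
const exampleBalancedPool = new BalancedPool([
  'http://10.0.0.1:3000',
  'http://10.0.0.2:3000'
], { maxWeightPerServer: 100, errorPenalty: 15 })
exampleBalancedPool.addUpstream('http://10.0.0.3:3000')
console.assert(exampleBalancedPool.upstreams.length === 3)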

1347
node_modules/undici/lib/dispatcher/client-h1.js generated vendored Normal file

File diff suppressed because it is too large

695
node_modules/undici/lib/dispatcher/client-h2.js generated vendored Normal file

@@ -0,0 +1,695 @@
'use strict'
const assert = require('node:assert')
const { pipeline } = require('node:stream')
const util = require('../core/util.js')
const {
RequestContentLengthMismatchError,
RequestAbortedError,
SocketError,
InformationalError
} = require('../core/errors.js')
const {
kUrl,
kReset,
kClient,
kRunning,
kPending,
kQueue,
kPendingIdx,
kRunningIdx,
kError,
kSocket,
kStrictContentLength,
kOnError,
kMaxConcurrentStreams,
kHTTP2Session,
kResume
} = require('../core/symbols.js')
const kOpenStreams = Symbol('open streams')
// Experimental
let h2ExperimentalWarned = false
/** @type {import('http2')} */
let http2
try {
http2 = require('node:http2')
} catch {
// @ts-ignore
http2 = { constants: {} }
}
const {
constants: {
HTTP2_HEADER_AUTHORITY,
HTTP2_HEADER_METHOD,
HTTP2_HEADER_PATH,
HTTP2_HEADER_SCHEME,
HTTP2_HEADER_CONTENT_LENGTH,
HTTP2_HEADER_EXPECT,
HTTP2_HEADER_STATUS
}
} = http2
function parseH2Headers (headers) {
const result = []
for (const [name, value] of Object.entries(headers)) {
    // h2 may represent a header's value as an array of values,
// e.g. Set-Cookie
if (Array.isArray(value)) {
for (const subvalue of value) {
        // we need to push each value of the header name separately
        // because the headers handler expects name-value pairs
result.push(Buffer.from(name), Buffer.from(subvalue))
}
} else {
result.push(Buffer.from(name), Buffer.from(value))
}
}
return result
}
async function connectH2 (client, socket) {
client[kSocket] = socket
if (!h2ExperimentalWarned) {
h2ExperimentalWarned = true
process.emitWarning('H2 support is experimental, expect them to change at any time.', {
code: 'UNDICI-H2'
})
}
const session = http2.connect(client[kUrl], {
createConnection: () => socket,
peerMaxConcurrentStreams: client[kMaxConcurrentStreams]
})
session[kOpenStreams] = 0
session[kClient] = client
session[kSocket] = socket
util.addListener(session, 'error', onHttp2SessionError)
util.addListener(session, 'frameError', onHttp2FrameError)
util.addListener(session, 'end', onHttp2SessionEnd)
util.addListener(session, 'goaway', onHTTP2GoAway)
util.addListener(session, 'close', function () {
const { [kClient]: client } = this
const { [kSocket]: socket } = client
const err = this[kSocket][kError] || this[kError] || new SocketError('closed', util.getSocketInfo(socket))
client[kHTTP2Session] = null
if (client.destroyed) {
assert(client[kPending] === 0)
// Fail entire queue.
const requests = client[kQueue].splice(client[kRunningIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(client, request, err)
}
}
})
session.unref()
client[kHTTP2Session] = session
socket[kHTTP2Session] = session
util.addListener(socket, 'error', function (err) {
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
this[kError] = err
this[kClient][kOnError](err)
})
util.addListener(socket, 'end', function () {
util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
})
util.addListener(socket, 'close', function () {
const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))
client[kSocket] = null
if (this[kHTTP2Session] != null) {
this[kHTTP2Session].destroy(err)
}
client[kPendingIdx] = client[kRunningIdx]
assert(client[kRunning] === 0)
client.emit('disconnect', client[kUrl], [client], err)
client[kResume]()
})
let closed = false
socket.on('close', () => {
closed = true
})
return {
version: 'h2',
defaultPipelining: Infinity,
write (...args) {
// TODO (fix): return
writeH2(client, ...args)
},
resume () {
},
destroy (err, callback) {
if (closed) {
queueMicrotask(callback)
} else {
// Destroying the socket will trigger the session close
socket.destroy(err).on('close', callback)
}
},
get destroyed () {
return socket.destroyed
},
busy () {
return false
}
}
}
function onHttp2SessionError (err) {
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
this[kSocket][kError] = err
this[kClient][kOnError](err)
}
function onHttp2FrameError (type, code, id) {
if (id === 0) {
const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
this[kSocket][kError] = err
this[kClient][kOnError](err)
}
}
function onHttp2SessionEnd () {
const err = new SocketError('other side closed', util.getSocketInfo(this[kSocket]))
this.destroy(err)
util.destroy(this[kSocket], err)
}
/**
* This is the root cause of #3011
* We need to handle GOAWAY frames properly, and trigger the session close
* along with the socket right away
* Find a way to trigger the close cycle from here on.
*/
function onHTTP2GoAway (code) {
const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`)
// We need to trigger the close cycle right away
// We need to destroy the session and the socket
// Requests should be failed with the error after the current one is handled
this[kSocket][kError] = err
this[kClient][kOnError](err)
this.unref()
// We send the GOAWAY frame response as no error
this.destroy()
util.destroy(this[kSocket], err)
}
// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
function shouldSendContentLength (method) {
return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'
}
function writeH2 (client, request) {
const session = client[kHTTP2Session]
const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
if (upgrade) {
util.errorRequest(client, request, new Error('Upgrade not supported for H2'))
return false
}
if (request.aborted) {
return false
}
const headers = {}
for (let n = 0; n < reqHeaders.length; n += 2) {
const key = reqHeaders[n + 0]
const val = reqHeaders[n + 1]
if (Array.isArray(val)) {
for (let i = 0; i < val.length; i++) {
if (headers[key]) {
headers[key] += `,${val[i]}`
} else {
headers[key] = val[i]
}
}
} else {
headers[key] = val
}
}
/** @type {import('node:http2').ClientHttp2Stream} */
let stream
const { hostname, port } = client[kUrl]
headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? `:${port}` : ''}`
headers[HTTP2_HEADER_METHOD] = method
const abort = (err) => {
if (request.aborted || request.completed) {
return
}
err = err || new RequestAbortedError()
util.errorRequest(client, request, err)
if (stream != null) {
util.destroy(stream, err)
}
// We do not destroy the socket, as we can continue using the session:
// the stream gets destroyed and the session remains available to create new streams
util.destroy(body, err)
}
try {
// We are already connected, streams are pending.
// We can call onConnect and wait for abort
request.onConnect(abort)
} catch (err) {
util.errorRequest(client, request, err)
}
if (method === 'CONNECT') {
session.ref()
// We are already connected, streams are pending; the first request
// will create a new stream. We trigger a request to create the stream and wait until
// the `ready` event is emitted.
// We disable endStream to allow the user to write to the stream.
stream = session.request(headers, { endStream: false, signal })
if (stream.id && !stream.pending) {
request.onUpgrade(null, null, stream)
++session[kOpenStreams]
} else {
stream.once('ready', () => {
request.onUpgrade(null, null, stream)
++session[kOpenStreams]
})
}
stream.once('close', () => {
session[kOpenStreams] -= 1
if (session[kOpenStreams] === 0) session.unref()
})
return true
}
// https://tools.ietf.org/html/rfc7540#section-8.3
// :path and :scheme headers must be omitted when sending CONNECT
headers[HTTP2_HEADER_PATH] = path
headers[HTTP2_HEADER_SCHEME] = 'https'
// https://tools.ietf.org/html/rfc7231#section-4.3.1
// https://tools.ietf.org/html/rfc7231#section-4.3.2
// https://tools.ietf.org/html/rfc7231#section-4.3.5
// Sending a payload body on a request that does not
// expect it can cause undefined behavior on some
// servers and corrupt connection state. Do not
// re-use the connection for further requests.
const expectsPayload = (
method === 'PUT' ||
method === 'POST' ||
method === 'PATCH'
)
if (body && typeof body.read === 'function') {
// Try to read EOF in order to get length.
body.read(0)
}
let contentLength = util.bodyLength(body)
if (contentLength == null) {
contentLength = request.contentLength
}
if (contentLength === 0 || !expectsPayload) {
// https://tools.ietf.org/html/rfc7230#section-3.3.2
// A user agent SHOULD NOT send a Content-Length header field when
// the request message does not contain a payload body and the method
// semantics do not anticipate such a body.
contentLength = null
}
// https://github.com/nodejs/undici/issues/2046
// A user agent may send a Content-Length header with a 0 value; this should be allowed.
if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
if (client[kStrictContentLength]) {
util.errorRequest(client, request, new RequestContentLengthMismatchError())
return false
}
process.emitWarning(new RequestContentLengthMismatchError())
}
if (contentLength != null) {
assert(body, 'no body must not have content length')
headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
}
session.ref()
const shouldEndStream = method === 'GET' || method === 'HEAD' || body === null
if (expectContinue) {
headers[HTTP2_HEADER_EXPECT] = '100-continue'
stream = session.request(headers, { endStream: shouldEndStream, signal })
stream.once('continue', writeBodyH2)
} else {
stream = session.request(headers, {
endStream: shouldEndStream,
signal
})
writeBodyH2()
}
// Increment counter as we have new streams open
++session[kOpenStreams]
stream.once('response', headers => {
const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
request.onResponseStarted()
// Due to the stream's nature, it is possible we face a race condition
// where the stream has been assigned but the request has already been aborted:
// the request remains in-flight and headers haven't been received yet.
// For those scenarios, the best effort is to destroy the stream immediately,
// as there's no value in keeping it open.
if (request.aborted) {
const err = new RequestAbortedError()
util.errorRequest(client, request, err)
util.destroy(stream, err)
return
}
if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), '') === false) {
stream.pause()
}
stream.on('data', (chunk) => {
if (request.onData(chunk) === false) {
stream.pause()
}
})
})
stream.once('end', () => {
// When state is null, it means we haven't consumed the body and the stream still does not
// have a state.
// This happens especially when using pipeline or stream.
if (stream.state?.state == null || stream.state.state < 6) {
request.onComplete([])
return
}
// Stream is closed or half-closed-remote (6): decrement the counter and clean up.
// It does not make sense to continue working with the stream, as we do not
// yet have RST_STREAM support on the client side.
if (session[kOpenStreams] === 0) {
session.unref()
}
abort(new InformationalError('HTTP/2: stream half-closed (remote)'))
})
stream.once('close', () => {
session[kOpenStreams] -= 1
if (session[kOpenStreams] === 0) {
session.unref()
}
})
stream.once('error', function (err) {
abort(err)
})
stream.once('frameError', (type, code) => {
abort(new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`))
})
// stream.on('aborted', () => {
// // TODO(HTTP/2): Support aborted
// })
// stream.on('timeout', () => {
// // TODO(HTTP/2): Support timeout
// })
// stream.on('push', headers => {
// // TODO(HTTP/2): Support push
// })
// stream.on('trailers', headers => {
// // TODO(HTTP/2): Support trailers
// })
return true
function writeBodyH2 () {
/* istanbul ignore else: assertion */
if (!body || contentLength === 0) {
writeBuffer({
abort,
client,
request,
contentLength,
expectsPayload,
h2stream: stream,
body: null,
socket: client[kSocket]
})
} else if (util.isBuffer(body)) {
writeBuffer({
abort,
client,
request,
contentLength,
body,
expectsPayload,
h2stream: stream,
socket: client[kSocket]
})
} else if (util.isBlobLike(body)) {
if (typeof body.stream === 'function') {
writeIterable({
abort,
client,
request,
contentLength,
expectsPayload,
h2stream: stream,
body: body.stream(),
socket: client[kSocket]
})
} else {
writeBlob({
abort,
body,
client,
request,
contentLength,
expectsPayload,
h2stream: stream,
socket: client[kSocket]
})
}
} else if (util.isStream(body)) {
writeStream({
abort,
body,
client,
request,
contentLength,
expectsPayload,
socket: client[kSocket],
h2stream: stream,
header: ''
})
} else if (util.isIterable(body)) {
writeIterable({
abort,
body,
client,
request,
contentLength,
expectsPayload,
header: '',
h2stream: stream,
socket: client[kSocket]
})
} else {
assert(false)
}
}
}
function writeBuffer ({ abort, h2stream, body, client, request, socket, contentLength, expectsPayload }) {
try {
if (body != null && util.isBuffer(body)) {
assert(contentLength === body.byteLength, 'buffer body must have content length')
h2stream.cork()
h2stream.write(body)
h2stream.uncork()
h2stream.end()
request.onBodySent(body)
}
if (!expectsPayload) {
socket[kReset] = true
}
request.onRequestSent()
client[kResume]()
} catch (error) {
abort(error)
}
}
function writeStream ({ abort, socket, expectsPayload, h2stream, body, client, request, contentLength }) {
assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')
// For HTTP/2, it is enough to pipe the body into the stream
const pipe = pipeline(
body,
h2stream,
(err) => {
if (err) {
util.destroy(pipe, err)
abort(err)
} else {
util.removeAllListeners(pipe)
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
}
}
)
util.addListener(pipe, 'data', onPipeData)
function onPipeData (chunk) {
request.onBodySent(chunk)
}
}
async function writeBlob ({ abort, h2stream, body, client, request, socket, contentLength, expectsPayload }) {
assert(contentLength === body.size, 'blob body must have content length')
try {
if (contentLength != null && contentLength !== body.size) {
throw new RequestContentLengthMismatchError()
}
const buffer = Buffer.from(await body.arrayBuffer())
h2stream.cork()
h2stream.write(buffer)
h2stream.uncork()
h2stream.end()
request.onBodySent(buffer)
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
} catch (err) {
abort(err)
}
}
async function writeIterable ({ abort, h2stream, body, client, request, socket, contentLength, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
let callback = null
function onDrain () {
if (callback) {
const cb = callback
callback = null
cb()
}
}
const waitForDrain = () => new Promise((resolve, reject) => {
assert(callback === null)
if (socket[kError]) {
reject(socket[kError])
} else {
callback = resolve
}
})
h2stream
.on('close', onDrain)
.on('drain', onDrain)
try {
// It's up to the user to somehow abort the async iterable.
for await (const chunk of body) {
if (socket[kError]) {
throw socket[kError]
}
const res = h2stream.write(chunk)
request.onBodySent(chunk)
if (!res) {
await waitForDrain()
}
}
h2stream.end()
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
} catch (err) {
abort(err)
} finally {
h2stream
.off('close', onDrain)
.off('drain', onDrain)
}
}
module.exports = connectH2
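/*
 * Usage sketch (illustrative only, not part of the vendored module): the
 * object returned by connectH2 is the HTTP context the Client stores in
 * kHTTPContext and drives from its resume loop. A minimal consumer would look
 * roughly like this, assuming `client`, `socket` and `request` come from the
 * connect path shown in client.js below:
 *
 *   const context = await connectH2(client, socket)
 *   console.log(context.version)            // 'h2'
 *   console.log(context.defaultPipelining)  // Infinity
 *   if (!context.destroyed && !context.busy()) {
 *     context.write(request)                // enqueues the request on a new h2 stream
 *   }
 *   context.destroy(null, () => console.log('session closed'))
 */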

615
node_modules/undici/lib/dispatcher/client.js generated vendored Normal file
View file

@ -0,0 +1,615 @@
// @ts-check
'use strict'
const assert = require('node:assert')
const net = require('node:net')
const http = require('node:http')
const util = require('../core/util.js')
const { channels } = require('../core/diagnostics.js')
const Request = require('../core/request.js')
const DispatcherBase = require('./dispatcher-base')
const {
InvalidArgumentError,
InformationalError,
ClientDestroyedError
} = require('../core/errors.js')
const buildConnector = require('../core/connect.js')
const {
kUrl,
kServerName,
kClient,
kBusy,
kConnect,
kResuming,
kRunning,
kPending,
kSize,
kQueue,
kConnected,
kConnecting,
kNeedDrain,
kKeepAliveDefaultTimeout,
kHostHeader,
kPendingIdx,
kRunningIdx,
kError,
kPipelining,
kKeepAliveTimeoutValue,
kMaxHeadersSize,
kKeepAliveMaxTimeout,
kKeepAliveTimeoutThreshold,
kHeadersTimeout,
kBodyTimeout,
kStrictContentLength,
kConnector,
kMaxRedirections,
kMaxRequests,
kCounter,
kClose,
kDestroy,
kDispatch,
kInterceptors,
kLocalAddress,
kMaxResponseSize,
kOnError,
kHTTPContext,
kMaxConcurrentStreams,
kResume
} = require('../core/symbols.js')
const connectH1 = require('./client-h1.js')
const connectH2 = require('./client-h2.js')
let deprecatedInterceptorWarned = false
const kClosedResolve = Symbol('kClosedResolve')
function getPipelining (client) {
return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1
}
/**
* @type {import('../../types/client.js').default}
*/
class Client extends DispatcherBase {
/**
*
* @param {string|URL} url
* @param {import('../../types/client.js').Client.Options} options
*/
constructor (url, {
interceptors,
maxHeaderSize,
headersTimeout,
socketTimeout,
requestTimeout,
connectTimeout,
bodyTimeout,
idleTimeout,
keepAlive,
keepAliveTimeout,
maxKeepAliveTimeout,
keepAliveMaxTimeout,
keepAliveTimeoutThreshold,
socketPath,
pipelining,
tls,
strictContentLength,
maxCachedSessions,
maxRedirections,
connect,
maxRequestsPerClient,
localAddress,
maxResponseSize,
autoSelectFamily,
autoSelectFamilyAttemptTimeout,
// h2
maxConcurrentStreams,
allowH2
} = {}) {
super()
if (keepAlive !== undefined) {
throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
}
if (socketTimeout !== undefined) {
throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
}
if (requestTimeout !== undefined) {
throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
}
if (idleTimeout !== undefined) {
throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
}
if (maxKeepAliveTimeout !== undefined) {
throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
}
if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {
throw new InvalidArgumentError('invalid maxHeaderSize')
}
if (socketPath != null && typeof socketPath !== 'string') {
throw new InvalidArgumentError('invalid socketPath')
}
if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
throw new InvalidArgumentError('invalid connectTimeout')
}
if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
throw new InvalidArgumentError('invalid keepAliveTimeout')
}
if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
}
if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
}
if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
}
if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
}
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
throw new InvalidArgumentError('connect must be a function or an object')
}
if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
throw new InvalidArgumentError('maxRedirections must be a positive number')
}
if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
}
if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
throw new InvalidArgumentError('localAddress must be valid string IP address')
}
if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
throw new InvalidArgumentError('maxResponseSize must be a positive number')
}
if (
autoSelectFamilyAttemptTimeout != null &&
(!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
) {
throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
}
// h2
if (allowH2 != null && typeof allowH2 !== 'boolean') {
throw new InvalidArgumentError('allowH2 must be a valid boolean value')
}
if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
}
if (typeof connect !== 'function') {
connect = buildConnector({
...tls,
maxCachedSessions,
allowH2,
socketPath,
timeout: connectTimeout,
...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...connect
})
}
if (interceptors?.Client && Array.isArray(interceptors.Client)) {
this[kInterceptors] = interceptors.Client
if (!deprecatedInterceptorWarned) {
deprecatedInterceptorWarned = true
process.emitWarning('Client.Options#interceptor is deprecated. Use Dispatcher#compose instead.', {
code: 'UNDICI-CLIENT-INTERCEPTOR-DEPRECATED'
})
}
} else {
this[kInterceptors] = [createRedirectInterceptor({ maxRedirections })]
}
this[kUrl] = util.parseOrigin(url)
this[kConnector] = connect
this[kPipelining] = pipelining != null ? pipelining : 1
this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize
this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold
this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
this[kServerName] = null
this[kLocalAddress] = localAddress != null ? localAddress : null
this[kResuming] = 0 // 0: idle, 1: scheduled, 2: resuming
this[kNeedDrain] = 0 // 0: idle, 1: scheduled, 2: resuming
this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
this[kMaxRedirections] = maxRedirections
this[kMaxRequests] = maxRequestsPerClient
this[kClosedResolve] = null
this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
this[kMaxConcurrentStreams] = maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
this[kHTTPContext] = null
// kQueue is built up of 3 sections separated by
// the kRunningIdx and kPendingIdx indices.
// | complete | running | pending |
// ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
// kRunningIdx points to the first running element.
// kPendingIdx points to the first pending element.
// This implements a fast queue with an amortized
// time of O(1).
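// Illustrative example (hypothetical state, not produced by this code):
// with kQueue = [r0, r1, r2, r3, r4], kRunningIdx = 1 and kPendingIdx = 3,
// r0 is complete, r1 and r2 are running, and r3 and r4 are pending, so
// kRunning === 2, kPending === 2 and kSize === 4 (see the getters below).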
this[kQueue] = []
this[kRunningIdx] = 0
this[kPendingIdx] = 0
this[kResume] = (sync) => resume(this, sync)
this[kOnError] = (err) => onError(this, err)
}
get pipelining () {
return this[kPipelining]
}
set pipelining (value) {
this[kPipelining] = value
this[kResume](true)
}
get [kPending] () {
return this[kQueue].length - this[kPendingIdx]
}
get [kRunning] () {
return this[kPendingIdx] - this[kRunningIdx]
}
get [kSize] () {
return this[kQueue].length - this[kRunningIdx]
}
get [kConnected] () {
return !!this[kHTTPContext] && !this[kConnecting] && !this[kHTTPContext].destroyed
}
get [kBusy] () {
return Boolean(
this[kHTTPContext]?.busy(null) ||
(this[kSize] >= (getPipelining(this) || 1)) ||
this[kPending] > 0
)
}
/* istanbul ignore: only used for test */
[kConnect] (cb) {
connect(this)
this.once('connect', cb)
}
[kDispatch] (opts, handler) {
const origin = opts.origin || this[kUrl].origin
const request = new Request(origin, opts, handler)
this[kQueue].push(request)
if (this[kResuming]) {
// Do nothing.
} else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
// Wait a tick in case stream/iterator is ended in the same tick.
this[kResuming] = 1
queueMicrotask(() => resume(this))
} else {
this[kResume](true)
}
if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
this[kNeedDrain] = 2
}
return this[kNeedDrain] < 2
}
async [kClose] () {
// TODO: for H2 we need to gracefully flush the remaining enqueued
// request and close each stream.
return new Promise((resolve) => {
if (this[kSize]) {
this[kClosedResolve] = resolve
} else {
resolve(null)
}
})
}
async [kDestroy] (err) {
return new Promise((resolve) => {
const requests = this[kQueue].splice(this[kPendingIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(this, request, err)
}
const callback = () => {
if (this[kClosedResolve]) {
// TODO (fix): Should we error here with ClientDestroyedError?
this[kClosedResolve]()
this[kClosedResolve] = null
}
resolve(null)
}
if (this[kHTTPContext]) {
this[kHTTPContext].destroy(err, callback)
this[kHTTPContext] = null
} else {
queueMicrotask(callback)
}
this[kResume]()
})
}
}
const createRedirectInterceptor = require('../interceptor/redirect-interceptor.js')
function onError (client, err) {
if (
client[kRunning] === 0 &&
err.code !== 'UND_ERR_INFO' &&
err.code !== 'UND_ERR_SOCKET'
) {
// The error was not caused by a running request and is not a recoverable
// socket error.
assert(client[kPendingIdx] === client[kRunningIdx])
const requests = client[kQueue].splice(client[kRunningIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(client, request, err)
}
assert(client[kSize] === 0)
}
}
async function connect (client) {
assert(!client[kConnecting])
assert(!client[kHTTPContext])
let { host, hostname, protocol, port } = client[kUrl]
// Resolve ipv6
if (hostname[0] === '[') {
const idx = hostname.indexOf(']')
assert(idx !== -1)
const ip = hostname.substring(1, idx)
assert(net.isIP(ip))
hostname = ip
}
client[kConnecting] = true
if (channels.beforeConnect.hasSubscribers) {
channels.beforeConnect.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector]
})
}
try {
const socket = await new Promise((resolve, reject) => {
client[kConnector]({
host,
hostname,
protocol,
port,
servername: client[kServerName],
localAddress: client[kLocalAddress]
}, (err, socket) => {
if (err) {
reject(err)
} else {
resolve(socket)
}
})
})
if (client.destroyed) {
util.destroy(socket.on('error', () => {}), new ClientDestroyedError())
return
}
assert(socket)
try {
client[kHTTPContext] = socket.alpnProtocol === 'h2'
? await connectH2(client, socket)
: await connectH1(client, socket)
} catch (err) {
socket.destroy().on('error', () => {})
throw err
}
client[kConnecting] = false
socket[kCounter] = 0
socket[kMaxRequests] = client[kMaxRequests]
socket[kClient] = client
socket[kError] = null
if (channels.connected.hasSubscribers) {
channels.connected.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector],
socket
})
}
client.emit('connect', client[kUrl], [client])
} catch (err) {
if (client.destroyed) {
return
}
client[kConnecting] = false
if (channels.connectError.hasSubscribers) {
channels.connectError.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector],
error: err
})
}
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
assert(client[kRunning] === 0)
while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
const request = client[kQueue][client[kPendingIdx]++]
util.errorRequest(client, request, err)
}
} else {
onError(client, err)
}
client.emit('connectionError', client[kUrl], [client], err)
}
client[kResume]()
}
function emitDrain (client) {
client[kNeedDrain] = 0
client.emit('drain', client[kUrl], [client])
}
function resume (client, sync) {
if (client[kResuming] === 2) {
return
}
client[kResuming] = 2
_resume(client, sync)
client[kResuming] = 0
if (client[kRunningIdx] > 256) {
client[kQueue].splice(0, client[kRunningIdx])
client[kPendingIdx] -= client[kRunningIdx]
client[kRunningIdx] = 0
}
}
function _resume (client, sync) {
while (true) {
if (client.destroyed) {
assert(client[kPending] === 0)
return
}
if (client[kClosedResolve] && !client[kSize]) {
client[kClosedResolve]()
client[kClosedResolve] = null
return
}
if (client[kHTTPContext]) {
client[kHTTPContext].resume()
}
if (client[kBusy]) {
client[kNeedDrain] = 2
} else if (client[kNeedDrain] === 2) {
if (sync) {
client[kNeedDrain] = 1
queueMicrotask(() => emitDrain(client))
} else {
emitDrain(client)
}
continue
}
if (client[kPending] === 0) {
return
}
if (client[kRunning] >= (getPipelining(client) || 1)) {
return
}
const request = client[kQueue][client[kPendingIdx]]
if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
if (client[kRunning] > 0) {
return
}
client[kServerName] = request.servername
client[kHTTPContext]?.destroy(new InformationalError('servername changed'), () => {
client[kHTTPContext] = null
resume(client)
})
}
if (client[kConnecting]) {
return
}
if (!client[kHTTPContext]) {
connect(client)
return
}
if (client[kHTTPContext].destroyed) {
return
}
if (client[kHTTPContext].busy(request)) {
return
}
if (!request.aborted && client[kHTTPContext].write(request)) {
client[kPendingIdx]++
} else {
client[kQueue].splice(client[kPendingIdx], 1)
}
}
}
module.exports = Client
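/*
 * Usage sketch (illustrative only, not part of the vendored module). The
 * Client exposes the low-level Dispatcher API; the origin and the handler
 * object below are assumptions for the example:
 *
 *   const Client = require('./client')
 *   const client = new Client('http://localhost:3000')
 *   client.dispatch(
 *     { path: '/', method: 'GET' },
 *     {
 *       onConnect (abort) {},
 *       onHeaders (statusCode, headers, resume) { console.log(statusCode) },
 *       onData (chunk) {},
 *       onComplete (trailers) { client.close() },
 *       onError (err) { console.error(err) }
 *     }
 *   )
 */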

192
node_modules/undici/lib/dispatcher/dispatcher-base.js generated vendored Normal file
View file

@ -0,0 +1,192 @@
'use strict'
const Dispatcher = require('./dispatcher')
const {
ClientDestroyedError,
ClientClosedError,
InvalidArgumentError
} = require('../core/errors')
const { kDestroy, kClose, kDispatch, kInterceptors } = require('../core/symbols')
const kDestroyed = Symbol('destroyed')
const kClosed = Symbol('closed')
const kOnDestroyed = Symbol('onDestroyed')
const kOnClosed = Symbol('onClosed')
const kInterceptedDispatch = Symbol('Intercepted Dispatch')
class DispatcherBase extends Dispatcher {
constructor () {
super()
this[kDestroyed] = false
this[kOnDestroyed] = null
this[kClosed] = false
this[kOnClosed] = []
}
get destroyed () {
return this[kDestroyed]
}
get closed () {
return this[kClosed]
}
get interceptors () {
return this[kInterceptors]
}
set interceptors (newInterceptors) {
if (newInterceptors) {
for (let i = newInterceptors.length - 1; i >= 0; i--) {
const interceptor = newInterceptors[i]
if (typeof interceptor !== 'function') {
throw new InvalidArgumentError('interceptor must be a function')
}
}
}
this[kInterceptors] = newInterceptors
}
close (callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
this.close((err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (this[kDestroyed]) {
queueMicrotask(() => callback(new ClientDestroyedError(), null))
return
}
if (this[kClosed]) {
if (this[kOnClosed]) {
this[kOnClosed].push(callback)
} else {
queueMicrotask(() => callback(null, null))
}
return
}
this[kClosed] = true
this[kOnClosed].push(callback)
const onClosed = () => {
const callbacks = this[kOnClosed]
this[kOnClosed] = null
for (let i = 0; i < callbacks.length; i++) {
callbacks[i](null, null)
}
}
// Should not error.
this[kClose]()
.then(() => this.destroy())
.then(() => {
queueMicrotask(onClosed)
})
}
destroy (err, callback) {
if (typeof err === 'function') {
callback = err
err = null
}
if (callback === undefined) {
return new Promise((resolve, reject) => {
this.destroy(err, (err, data) => {
return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
})
})
}
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (this[kDestroyed]) {
if (this[kOnDestroyed]) {
this[kOnDestroyed].push(callback)
} else {
queueMicrotask(() => callback(null, null))
}
return
}
if (!err) {
err = new ClientDestroyedError()
}
this[kDestroyed] = true
this[kOnDestroyed] = this[kOnDestroyed] || []
this[kOnDestroyed].push(callback)
const onDestroyed = () => {
const callbacks = this[kOnDestroyed]
this[kOnDestroyed] = null
for (let i = 0; i < callbacks.length; i++) {
callbacks[i](null, null)
}
}
// Should not error.
this[kDestroy](err).then(() => {
queueMicrotask(onDestroyed)
})
}
[kInterceptedDispatch] (opts, handler) {
if (!this[kInterceptors] || this[kInterceptors].length === 0) {
this[kInterceptedDispatch] = this[kDispatch]
return this[kDispatch](opts, handler)
}
let dispatch = this[kDispatch].bind(this)
for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
dispatch = this[kInterceptors][i](dispatch)
}
this[kInterceptedDispatch] = dispatch
return dispatch(opts, handler)
}
dispatch (opts, handler) {
if (!handler || typeof handler !== 'object') {
throw new InvalidArgumentError('handler must be an object')
}
try {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('opts must be an object.')
}
if (this[kDestroyed] || this[kOnDestroyed]) {
throw new ClientDestroyedError()
}
if (this[kClosed]) {
throw new ClientClosedError()
}
return this[kInterceptedDispatch](opts, handler)
} catch (err) {
if (typeof handler.onError !== 'function') {
throw new InvalidArgumentError('invalid onError method')
}
handler.onError(err)
return false
}
}
}
module.exports = DispatcherBase
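/*
 * Usage note (illustrative only): close() and destroy() support both callback
 * and promise styles, as implemented above. With any subclass instance
 * `dispatcher` (hypothetical variable):
 *
 *   await dispatcher.close()                              // promise form
 *   dispatcher.destroy(new Error('teardown'), () => {})   // callback form
 */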

65
node_modules/undici/lib/dispatcher/dispatcher.js generated vendored Normal file
View file

@ -0,0 +1,65 @@
'use strict'
const EventEmitter = require('node:events')
class Dispatcher extends EventEmitter {
dispatch () {
throw new Error('not implemented')
}
close () {
throw new Error('not implemented')
}
destroy () {
throw new Error('not implemented')
}
compose (...args) {
// So we handle [interceptor1, interceptor2] or interceptor1, interceptor2, ...
const interceptors = Array.isArray(args[0]) ? args[0] : args
let dispatch = this.dispatch.bind(this)
for (const interceptor of interceptors) {
if (interceptor == null) {
continue
}
if (typeof interceptor !== 'function') {
throw new TypeError(`invalid interceptor, expected function received ${typeof interceptor}`)
}
dispatch = interceptor(dispatch)
if (dispatch == null || typeof dispatch !== 'function' || dispatch.length !== 2) {
throw new TypeError('invalid interceptor')
}
}
return new ComposedDispatcher(this, dispatch)
}
}
class ComposedDispatcher extends Dispatcher {
#dispatcher = null
#dispatch = null
constructor (dispatcher, dispatch) {
super()
this.#dispatcher = dispatcher
this.#dispatch = dispatch
}
dispatch (...args) {
this.#dispatch(...args)
}
close (...args) {
return this.#dispatcher.close(...args)
}
destroy (...args) {
return this.#dispatcher.destroy(...args)
}
}
module.exports = Dispatcher
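/*
 * Usage sketch for compose() (illustrative only; `dispatcher` and `handler`
 * are assumptions). Interceptors are functions that take a dispatch function
 * and return a new one with the same (opts, handler) arity, as enforced above:
 *
 *   const logging = (dispatch) => (opts, handler) => {
 *     console.log('dispatching', opts.method, opts.path)
 *     return dispatch(opts, handler)
 *   }
 *   const composed = dispatcher.compose(logging)
 *   composed.dispatch({ path: '/', method: 'GET' }, handler)
 */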

117
node_modules/undici/lib/dispatcher/fixed-queue.js generated vendored Normal file
View file

@ -0,0 +1,117 @@
/* eslint-disable */
'use strict'
// Extracted from node/lib/internal/fixed_queue.js
// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.
const kSize = 2048;
const kMask = kSize - 1;
// The FixedQueue is implemented as a singly-linked list of fixed-size
// circular buffers. It looks something like this:
//
// head tail
// | |
// v v
// +-----------+ <-----\ +-----------+ <------\ +-----------+
// | [null] | \----- | next | \------- | next |
// +-----------+ +-----------+ +-----------+
// | item | <-- bottom | item | <-- bottom | [empty] |
// | item | | item | | [empty] |
// | item | | item | | [empty] |
// | item | | item | | [empty] |
// | item | | item | bottom --> | item |
// | item | | item | | item |
// | ... | | ... | | ... |
// | item | | item | | item |
// | item | | item | | item |
// | [empty] | <-- top | item | | item |
// | [empty] | | item | | item |
// | [empty] | | [empty] | <-- top top --> | [empty] |
// +-----------+ +-----------+ +-----------+
//
// Or, if there is only one circular buffer, it looks something
// like either of these:
//
// head tail head tail
// | | | |
// v v v v
// +-----------+ +-----------+
// | [null] | | [null] |
// +-----------+ +-----------+
// | [empty] | | item |
// | [empty] | | item |
// | item | <-- bottom top --> | [empty] |
// | item | | [empty] |
// | [empty] | <-- top bottom --> | item |
// | [empty] | | item |
// +-----------+ +-----------+
//
// Adding a value means moving `top` forward by one, removing means
// moving `bottom` forward by one. After reaching the end, the queue
// wraps around.
//
// When `top === bottom` the current queue is empty and when
// `top + 1 === bottom` it's full. This wastes a single space of storage
// but allows much quicker checks.
class FixedCircularBuffer {
constructor() {
this.bottom = 0;
this.top = 0;
this.list = new Array(kSize);
this.next = null;
}
isEmpty() {
return this.top === this.bottom;
}
isFull() {
return ((this.top + 1) & kMask) === this.bottom;
}
push(data) {
this.list[this.top] = data;
this.top = (this.top + 1) & kMask;
}
shift() {
const nextItem = this.list[this.bottom];
if (nextItem === undefined)
return null;
this.list[this.bottom] = undefined;
this.bottom = (this.bottom + 1) & kMask;
return nextItem;
}
}
module.exports = class FixedQueue {
constructor() {
this.head = this.tail = new FixedCircularBuffer();
}
isEmpty() {
return this.head.isEmpty();
}
push(data) {
if (this.head.isFull()) {
// Head is full: Creates a new queue, sets the old queue's `.next` to it,
// and sets it as the new main queue.
this.head = this.head.next = new FixedCircularBuffer();
}
this.head.push(data);
}
shift() {
const tail = this.tail;
const next = tail.shift();
if (tail.isEmpty() && tail.next !== null) {
// If there is another queue, it forms the new tail.
this.tail = tail.next;
}
return next;
}
};
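/*
 * Usage sketch (illustrative only):
 *
 *   const FixedQueue = require('./fixed-queue')
 *   const queue = new FixedQueue()
 *   queue.push('a')
 *   queue.push('b')
 *   console.log(queue.shift())    // 'a'
 *   console.log(queue.isEmpty())  // false ('b' is still queued)
 */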

194
node_modules/undici/lib/dispatcher/pool-base.js generated vendored Normal file
View file

@ -0,0 +1,194 @@
'use strict'
const DispatcherBase = require('./dispatcher-base')
const FixedQueue = require('./fixed-queue')
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('../core/symbols')
const PoolStats = require('./pool-stats')
const kClients = Symbol('clients')
const kNeedDrain = Symbol('needDrain')
const kQueue = Symbol('queue')
const kClosedResolve = Symbol('closed resolve')
const kOnDrain = Symbol('onDrain')
const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kGetDispatcher = Symbol('get dispatcher')
const kAddClient = Symbol('add client')
const kRemoveClient = Symbol('remove client')
const kStats = Symbol('stats')
class PoolBase extends DispatcherBase {
constructor () {
super()
this[kQueue] = new FixedQueue()
this[kClients] = []
this[kQueued] = 0
const pool = this
this[kOnDrain] = function onDrain (origin, targets) {
const queue = pool[kQueue]
let needDrain = false
while (!needDrain) {
const item = queue.shift()
if (!item) {
break
}
pool[kQueued]--
needDrain = !this.dispatch(item.opts, item.handler)
}
this[kNeedDrain] = needDrain
if (!this[kNeedDrain] && pool[kNeedDrain]) {
pool[kNeedDrain] = false
pool.emit('drain', origin, [pool, ...targets])
}
if (pool[kClosedResolve] && queue.isEmpty()) {
Promise
.all(pool[kClients].map(c => c.close()))
.then(pool[kClosedResolve])
}
}
this[kOnConnect] = (origin, targets) => {
pool.emit('connect', origin, [pool, ...targets])
}
this[kOnDisconnect] = (origin, targets, err) => {
pool.emit('disconnect', origin, [pool, ...targets], err)
}
this[kOnConnectionError] = (origin, targets, err) => {
pool.emit('connectionError', origin, [pool, ...targets], err)
}
this[kStats] = new PoolStats(this)
}
get [kBusy] () {
return this[kNeedDrain]
}
get [kConnected] () {
return this[kClients].filter(client => client[kConnected]).length
}
get [kFree] () {
return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length
}
get [kPending] () {
let ret = this[kQueued]
for (const { [kPending]: pending } of this[kClients]) {
ret += pending
}
return ret
}
get [kRunning] () {
let ret = 0
for (const { [kRunning]: running } of this[kClients]) {
ret += running
}
return ret
}
get [kSize] () {
let ret = this[kQueued]
for (const { [kSize]: size } of this[kClients]) {
ret += size
}
return ret
}
get stats () {
return this[kStats]
}
async [kClose] () {
if (this[kQueue].isEmpty()) {
return Promise.all(this[kClients].map(c => c.close()))
} else {
return new Promise((resolve) => {
this[kClosedResolve] = resolve
})
}
}
async [kDestroy] (err) {
while (true) {
const item = this[kQueue].shift()
if (!item) {
break
}
item.handler.onError(err)
}
return Promise.all(this[kClients].map(c => c.destroy(err)))
}
[kDispatch] (opts, handler) {
const dispatcher = this[kGetDispatcher]()
if (!dispatcher) {
this[kNeedDrain] = true
this[kQueue].push({ opts, handler })
this[kQueued]++
} else if (!dispatcher.dispatch(opts, handler)) {
dispatcher[kNeedDrain] = true
this[kNeedDrain] = !this[kGetDispatcher]()
}
return !this[kNeedDrain]
}
[kAddClient] (client) {
client
.on('drain', this[kOnDrain])
.on('connect', this[kOnConnect])
.on('disconnect', this[kOnDisconnect])
.on('connectionError', this[kOnConnectionError])
this[kClients].push(client)
if (this[kNeedDrain]) {
queueMicrotask(() => {
if (this[kNeedDrain]) {
this[kOnDrain](client[kUrl], [this, client])
}
})
}
return this
}
[kRemoveClient] (client) {
client.close(() => {
const idx = this[kClients].indexOf(client)
if (idx !== -1) {
this[kClients].splice(idx, 1)
}
})
this[kNeedDrain] = this[kClients].some(dispatcher => (
!dispatcher[kNeedDrain] &&
dispatcher.closed !== true &&
dispatcher.destroyed !== true
))
}
}
module.exports = {
PoolBase,
kClients,
kNeedDrain,
kAddClient,
kRemoveClient,
kGetDispatcher
}

34
node_modules/undici/lib/dispatcher/pool-stats.js generated vendored Normal file
View file

@ -0,0 +1,34 @@
const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('../core/symbols')
const kPool = Symbol('pool')
class PoolStats {
constructor (pool) {
this[kPool] = pool
}
get connected () {
return this[kPool][kConnected]
}
get free () {
return this[kPool][kFree]
}
get pending () {
return this[kPool][kPending]
}
get queued () {
return this[kPool][kQueued]
}
get running () {
return this[kPool][kRunning]
}
get size () {
return this[kPool][kSize]
}
}
module.exports = PoolStats

93
node_modules/undici/lib/dispatcher/pool.js generated vendored Normal file
View file

@ -0,0 +1,93 @@
'use strict'
const {
PoolBase,
kClients,
kNeedDrain,
kAddClient,
kGetDispatcher
} = require('./pool-base')
const Client = require('./client')
const {
InvalidArgumentError
} = require('../core/errors')
const util = require('../core/util')
const { kUrl, kInterceptors } = require('../core/symbols')
const buildConnector = require('../core/connect')
const kOptions = Symbol('options')
const kConnections = Symbol('connections')
const kFactory = Symbol('factory')
function defaultFactory (origin, opts) {
return new Client(origin, opts)
}
class Pool extends PoolBase {
constructor (origin, {
connections,
factory = defaultFactory,
connect,
connectTimeout,
tls,
maxCachedSessions,
socketPath,
autoSelectFamily,
autoSelectFamilyAttemptTimeout,
allowH2,
...options
} = {}) {
super()
if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
throw new InvalidArgumentError('invalid connections')
}
if (typeof factory !== 'function') {
throw new InvalidArgumentError('factory must be a function.')
}
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
throw new InvalidArgumentError('connect must be a function or an object')
}
if (typeof connect !== 'function') {
connect = buildConnector({
...tls,
maxCachedSessions,
allowH2,
socketPath,
timeout: connectTimeout,
...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...connect
})
}
this[kInterceptors] = options.interceptors?.Pool && Array.isArray(options.interceptors.Pool)
? options.interceptors.Pool
: []
this[kConnections] = connections || null
this[kUrl] = util.parseOrigin(origin)
this[kOptions] = { ...util.deepClone(options), connect, allowH2 }
this[kOptions].interceptors = options.interceptors
? { ...options.interceptors }
: undefined
this[kFactory] = factory
}
[kGetDispatcher] () {
for (const client of this[kClients]) {
if (!client[kNeedDrain]) {
return client
}
}
if (!this[kConnections] || this[kClients].length < this[kConnections]) {
const dispatcher = this[kFactory](this[kUrl], this[kOptions])
this[kAddClient](dispatcher)
return dispatcher
}
}
}
module.exports = Pool
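/*
 * Usage sketch (illustrative only; the origin and connection count are
 * assumptions). A Pool lazily creates up to `connections` Clients via
 * kGetDispatcher and reuses any client that is not draining:
 *
 *   const Pool = require('./pool')
 *   const pool = new Pool('http://localhost:3000', { connections: 4 })
 *   pool.dispatch({ path: '/', method: 'GET' }, handler) // handler as sketched in client.js above
 *   await pool.close()
 */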

190
node_modules/undici/lib/dispatcher/proxy-agent.js generated vendored Normal file
View file

@ -0,0 +1,190 @@
'use strict'
const { kProxy, kClose, kDestroy, kInterceptors } = require('../core/symbols')
const { URL } = require('node:url')
const Agent = require('./agent')
const Pool = require('./pool')
const DispatcherBase = require('./dispatcher-base')
const { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require('../core/errors')
const buildConnector = require('../core/connect')
const kAgent = Symbol('proxy agent')
const kClient = Symbol('proxy client')
const kProxyHeaders = Symbol('proxy headers')
const kRequestTls = Symbol('request tls settings')
const kProxyTls = Symbol('proxy tls settings')
const kConnectEndpoint = Symbol('connect endpoint function')
function defaultProtocolPort (protocol) {
return protocol === 'https:' ? 443 : 80
}
function defaultFactory (origin, opts) {
return new Pool(origin, opts)
}
class ProxyAgent extends DispatcherBase {
constructor (opts) {
super()
if (!opts || (typeof opts === 'object' && !(opts instanceof URL) && !opts.uri)) {
throw new InvalidArgumentError('Proxy uri is mandatory')
}
const { clientFactory = defaultFactory } = opts
if (typeof clientFactory !== 'function') {
throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
}
const url = this.#getUrl(opts)
const { href, origin, port, protocol, username, password, hostname: proxyHostname } = url
this[kProxy] = { uri: href, protocol }
this[kInterceptors] = opts.interceptors?.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
? opts.interceptors.ProxyAgent
: []
this[kRequestTls] = opts.requestTls
this[kProxyTls] = opts.proxyTls
this[kProxyHeaders] = opts.headers || {}
if (opts.auth && opts.token) {
throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
} else if (opts.auth) {
/* @deprecated in favour of opts.token */
this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
} else if (opts.token) {
this[kProxyHeaders]['proxy-authorization'] = opts.token
} else if (username && password) {
this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
}
const connect = buildConnector({ ...opts.proxyTls })
this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
this[kClient] = clientFactory(url, { connect })
this[kAgent] = new Agent({
...opts,
connect: async (opts, callback) => {
let requestedPath = opts.host
if (!opts.port) {
requestedPath += `:${defaultProtocolPort(opts.protocol)}`
}
try {
const { socket, statusCode } = await this[kClient].connect({
origin,
port,
path: requestedPath,
signal: opts.signal,
headers: {
...this[kProxyHeaders],
host: opts.host
},
servername: this[kProxyTls]?.servername || proxyHostname
})
if (statusCode !== 200) {
socket.on('error', () => {}).destroy()
callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
}
if (opts.protocol !== 'https:') {
callback(null, socket)
return
}
let servername
if (this[kRequestTls]) {
servername = this[kRequestTls].servername
} else {
servername = opts.servername
}
this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
} catch (err) {
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
// Throw a custom error to avoid loop in client.js#connect
callback(new SecureProxyConnectionError(err))
} else {
callback(err)
}
}
}
})
}
dispatch (opts, handler) {
const headers = buildHeaders(opts.headers)
throwIfProxyAuthIsSent(headers)
if (headers && !('host' in headers) && !('Host' in headers)) {
const { host } = new URL(opts.origin)
headers.host = host
}
return this[kAgent].dispatch(
{
...opts,
headers
},
handler
)
}
/**
* @param {import('../types/proxy-agent').ProxyAgent.Options | string | URL} opts
* @returns {URL}
*/
#getUrl (opts) {
if (typeof opts === 'string') {
return new URL(opts)
} else if (opts instanceof URL) {
return opts
} else {
return new URL(opts.uri)
}
}
async [kClose] () {
await this[kAgent].close()
await this[kClient].close()
}
async [kDestroy] () {
await this[kAgent].destroy()
await this[kClient].destroy()
}
}
/**
* @param {string[] | Record<string, string>} headers
* @returns {Record<string, string>}
*/
function buildHeaders (headers) {
// When using undici.fetch, the headers list is stored
// as an array.
if (Array.isArray(headers)) {
/** @type {Record<string, string>} */
const headersPair = {}
for (let i = 0; i < headers.length; i += 2) {
headersPair[headers[i]] = headers[i + 1]
}
return headersPair
}
return headers
}
/**
* @param {Record<string, string>} headers
*
 * Previous versions of ProxyAgent allowed Proxy-Authorization to be set in request headers.
 * This was changed, and this check was created to avoid a security vulnerability for end users.
 * It should be removed in the next major version for performance reasons.
*/
function throwIfProxyAuthIsSent (headers) {
const existProxyAuth = headers && Object.keys(headers)
.find((key) => key.toLowerCase() === 'proxy-authorization')
if (existProxyAuth) {
throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor')
}
}
module.exports = ProxyAgent
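/*
 * Usage sketch (illustrative only; the proxy URI, token and target origin are
 * assumptions):
 *
 *   const ProxyAgent = require('./proxy-agent')
 *   const agent = new ProxyAgent({
 *     uri: 'http://proxy.example.com:8080',
 *     token: 'Bearer example-token' // forwarded as the proxy-authorization header
 *   })
 *   agent.dispatch({ origin: 'http://example.com', path: '/', method: 'GET' }, handler)
 */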

35
node_modules/undici/lib/dispatcher/retry-agent.js generated vendored Normal file
View file

@ -0,0 +1,35 @@
'use strict'
const Dispatcher = require('./dispatcher')
const RetryHandler = require('../handler/retry-handler')
class RetryAgent extends Dispatcher {
#agent = null
#options = null
constructor (agent, options = {}) {
super(options)
this.#agent = agent
this.#options = options
}
dispatch (opts, handler) {
const retry = new RetryHandler({
...opts,
retryOptions: this.#options
}, {
dispatch: this.#agent.dispatch.bind(this.#agent),
handler
})
return this.#agent.dispatch(opts, retry)
}
close () {
return this.#agent.close()
}
destroy () {
return this.#agent.destroy()
}
}
module.exports = RetryAgent
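/*
 * Usage sketch (illustrative only; the wrapped agent and retry options are
 * assumptions). RetryAgent simply wraps another dispatcher's dispatch with a
 * RetryHandler:
 *
 *   const Agent = require('./agent')
 *   const RetryAgent = require('./retry-agent')
 *   const agent = new RetryAgent(new Agent(), { maxRetries: 3, minTimeout: 250 })
 *   agent.dispatch({ origin: 'http://localhost:3000', path: '/', method: 'GET' }, handler)
 */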

32
node_modules/undici/lib/global.js generated vendored Normal file
View file

@ -0,0 +1,32 @@
'use strict'
// We include a version number for the Dispatcher API. In case of breaking changes,
// this version number must be increased to avoid conflicts.
const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
const { InvalidArgumentError } = require('./core/errors')
const Agent = require('./dispatcher/agent')
if (getGlobalDispatcher() === undefined) {
setGlobalDispatcher(new Agent())
}
function setGlobalDispatcher (agent) {
if (!agent || typeof agent.dispatch !== 'function') {
throw new InvalidArgumentError('Argument agent must implement Agent')
}
Object.defineProperty(globalThis, globalDispatcher, {
value: agent,
writable: true,
enumerable: false,
configurable: false
})
}
function getGlobalDispatcher () {
return globalThis[globalDispatcher]
}
module.exports = {
setGlobalDispatcher,
getGlobalDispatcher
}
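/*
 * Usage sketch (illustrative only; the Agent options are assumptions):
 * replacing the process-wide dispatcher that undici's helpers pick up.
 *
 *   const { setGlobalDispatcher, getGlobalDispatcher } = require('./global')
 *   const Agent = require('./dispatcher/agent')
 *   setGlobalDispatcher(new Agent({ connections: 10 }))
 *   console.log(typeof getGlobalDispatcher().dispatch)  // 'function'
 */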

44
node_modules/undici/lib/handler/decorator-handler.js generated vendored Normal file
View file

@ -0,0 +1,44 @@
'use strict'
module.exports = class DecoratorHandler {
#handler
constructor (handler) {
if (typeof handler !== 'object' || handler === null) {
throw new TypeError('handler must be an object')
}
this.#handler = handler
}
onConnect (...args) {
return this.#handler.onConnect?.(...args)
}
onError (...args) {
return this.#handler.onError?.(...args)
}
onUpgrade (...args) {
return this.#handler.onUpgrade?.(...args)
}
onResponseStarted (...args) {
return this.#handler.onResponseStarted?.(...args)
}
onHeaders (...args) {
return this.#handler.onHeaders?.(...args)
}
onData (...args) {
return this.#handler.onData?.(...args)
}
onComplete (...args) {
return this.#handler.onComplete?.(...args)
}
onBodySent (...args) {
return this.#handler.onBodySent?.(...args)
}
}
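/*
 * Usage sketch (illustrative only): a handler decorator that forwards every
 * hook to the wrapped handler and additionally logs response status codes.
 * `dispatcher`, `opts` and the wrapped `handler` are assumptions:
 *
 *   const DecoratorHandler = require('./decorator-handler')
 *   class StatusLoggingHandler extends DecoratorHandler {
 *     onHeaders (statusCode, headers, resume, statusText) {
 *       console.log('status:', statusCode)
 *       return super.onHeaders(statusCode, headers, resume, statusText)
 *     }
 *   }
 *   dispatcher.dispatch(opts, new StatusLoggingHandler(handler))
 */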

232
node_modules/undici/lib/handler/redirect-handler.js generated vendored Normal file
View file

@ -0,0 +1,232 @@
'use strict'
const util = require('../core/util')
const { kBodyUsed } = require('../core/symbols')
const assert = require('node:assert')
const { InvalidArgumentError } = require('../core/errors')
const EE = require('node:events')
const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]
const kBody = Symbol('body')
class BodyAsyncIterable {
constructor (body) {
this[kBody] = body
this[kBodyUsed] = false
}
async * [Symbol.asyncIterator] () {
assert(!this[kBodyUsed], 'disturbed')
this[kBodyUsed] = true
yield * this[kBody]
}
}
class RedirectHandler {
constructor (dispatch, maxRedirections, opts, handler) {
if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
throw new InvalidArgumentError('maxRedirections must be a positive number')
}
util.validateHandler(handler, opts.method, opts.upgrade)
this.dispatch = dispatch
this.location = null
this.abort = null
this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy
this.maxRedirections = maxRedirections
this.handler = handler
this.history = []
this.redirectionLimitReached = false
if (util.isStream(this.opts.body)) {
// TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
// so that it can be dispatched again?
// TODO (fix): Do we need 100-expect support to provide a way to do this properly?
if (util.bodyLength(this.opts.body) === 0) {
this.opts.body
.on('data', function () {
assert(false)
})
}
if (typeof this.opts.body.readableDidRead !== 'boolean') {
this.opts.body[kBodyUsed] = false
EE.prototype.on.call(this.opts.body, 'data', function () {
this[kBodyUsed] = true
})
}
} else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {
// TODO (fix): We can't access ReadableStream internal state
// to determine whether or not it has been disturbed. This is just
// a workaround.
this.opts.body = new BodyAsyncIterable(this.opts.body)
} else if (
this.opts.body &&
typeof this.opts.body !== 'string' &&
!ArrayBuffer.isView(this.opts.body) &&
util.isIterable(this.opts.body)
) {
// TODO: Should we allow re-using iterable if !this.opts.idempotent
// or through some other flag?
this.opts.body = new BodyAsyncIterable(this.opts.body)
}
}
onConnect (abort) {
this.abort = abort
this.handler.onConnect(abort, { history: this.history })
}
onUpgrade (statusCode, headers, socket) {
this.handler.onUpgrade(statusCode, headers, socket)
}
onError (error) {
this.handler.onError(error)
}
onHeaders (statusCode, headers, resume, statusText) {
this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)
? null
: parseLocation(statusCode, headers)
if (this.opts.throwOnMaxRedirect && this.history.length >= this.maxRedirections) {
if (this.request) {
this.request.abort(new Error('max redirects'))
}
this.redirectionLimitReached = true
this.abort(new Error('max redirects'))
return
}
if (this.opts.origin) {
this.history.push(new URL(this.opts.path, this.opts.origin))
}
if (!this.location) {
return this.handler.onHeaders(statusCode, headers, resume, statusText)
}
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
const path = search ? `${pathname}${search}` : pathname
// Remove headers referring to the original URL.
// By default it is Host only, unless it's a 303 (see below), which also removes all Content-* headers.
// https://tools.ietf.org/html/rfc7231#section-6.4
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)
this.opts.path = path
this.opts.origin = origin
this.opts.maxRedirections = 0
this.opts.query = null
// https://tools.ietf.org/html/rfc7231#section-6.4.4
// In case of HTTP 303, always replace method to be either HEAD or GET
if (statusCode === 303 && this.opts.method !== 'HEAD') {
this.opts.method = 'GET'
this.opts.body = null
}
}
onData (chunk) {
if (this.location) {
/*
https://tools.ietf.org/html/rfc7231#section-6.4
TLDR: undici always ignores 3xx response bodies.
Redirection is used to serve the requested resource from another URL, so it is assumed that
no body is generated (and thus can be ignored), even though generating a body is not prohibited.
For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually
(which means it's optional and not mandated) contains just a hyperlink to the value of
the Location response header, so the body can be ignored safely.
For status 300, which is "Multiple Choices", the spec mentions both generating a Location
response header AND a response body with the other possible locations to follow.
Since the spec explicitly chooses not to specify a format for such a body and leaves it to
server and browser implementors, we ignore the body as there is no specified way to parse it.
*/
} else {
return this.handler.onData(chunk)
}
}
onComplete (trailers) {
if (this.location) {
/*
https://tools.ietf.org/html/rfc7231#section-6.4
TLDR: undici always ignores 3xx response trailers, as they are not expected in case of redirections
and are not useful if present.
See comment on onData method above for more detailed information.
*/
this.location = null
this.abort = null
this.dispatch(this.opts, this)
} else {
this.handler.onComplete(trailers)
}
}
onBodySent (chunk) {
if (this.handler.onBodySent) {
this.handler.onBodySent(chunk)
}
}
}
function parseLocation (statusCode, headers) {
if (redirectableStatusCodes.indexOf(statusCode) === -1) {
return null
}
for (let i = 0; i < headers.length; i += 2) {
if (headers[i].length === 8 && util.headerNameToString(headers[i]) === 'location') {
return headers[i + 1]
}
}
}
// https://tools.ietf.org/html/rfc7231#section-6.4.4
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
if (header.length === 4) {
return util.headerNameToString(header) === 'host'
}
if (removeContent && util.headerNameToString(header).startsWith('content-')) {
return true
}
if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
const name = util.headerNameToString(header)
return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
}
return false
}
// https://tools.ietf.org/html/rfc7231#section-6.4
function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
const ret = []
if (Array.isArray(headers)) {
for (let i = 0; i < headers.length; i += 2) {
if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) {
ret.push(headers[i], headers[i + 1])
}
}
} else if (headers && typeof headers === 'object') {
for (const key of Object.keys(headers)) {
if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {
ret.push(key, headers[key])
}
}
} else {
assert(headers == null, 'headers must be an object or an array')
}
return ret
}
module.exports = RedirectHandler
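/*
 * Usage sketch (illustrative only, assuming a `dispatch` function, an `opts`
 * object and a terminal `handler` as sketched in client.js above). This
 * roughly mirrors how a redirect interceptor would wire the handler in,
 * re-dispatching on each 3xx until maxRedirections is exhausted:
 *
 *   const RedirectHandler = require('./redirect-handler')
 *   const maxRedirections = 3
 *   dispatch(opts, new RedirectHandler(dispatch, maxRedirections, opts, handler))
 */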

350
node_modules/undici/lib/handler/retry-handler.js generated vendored Normal file
View file

@ -0,0 +1,350 @@
'use strict'
const assert = require('node:assert')
const { kRetryHandlerDefaultRetry } = require('../core/symbols')
const { RequestRetryError } = require('../core/errors')
const { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util')
function calculateRetryAfterHeader (retryAfter) {
const current = Date.now()
const diff = new Date(retryAfter).getTime() - current
return diff
}
class RetryHandler {
constructor (opts, handlers) {
const { retryOptions, ...dispatchOpts } = opts
const {
// Retry scoped
retry: retryFn,
maxRetries,
maxTimeout,
minTimeout,
timeoutFactor,
// Response scoped
methods,
errorCodes,
retryAfter,
statusCodes
} = retryOptions ?? {}
this.dispatch = handlers.dispatch
this.handler = handlers.handler
this.opts = dispatchOpts
this.abort = null
this.aborted = false
this.retryOpts = {
retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
retryAfter: retryAfter ?? true,
maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
minTimeout: minTimeout ?? 500, // .5s
timeoutFactor: timeoutFactor ?? 2,
maxRetries: maxRetries ?? 5,
// Which HTTP methods we should retry
methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
// Which response status codes we should retry
statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
// Which error codes we should retry
errorCodes: errorCodes ?? [
'ECONNRESET',
'ECONNREFUSED',
'ENOTFOUND',
'ENETDOWN',
'ENETUNREACH',
'EHOSTDOWN',
'EHOSTUNREACH',
'EPIPE',
'UND_ERR_SOCKET'
]
}
this.retryCount = 0
this.retryCountCheckpoint = 0
this.start = 0
this.end = null
this.etag = null
this.resume = null
// Handle possible onConnect duplication
this.handler.onConnect(reason => {
this.aborted = true
if (this.abort) {
this.abort(reason)
} else {
this.reason = reason
}
})
}
onRequestSent () {
if (this.handler.onRequestSent) {
this.handler.onRequestSent()
}
}
onUpgrade (statusCode, headers, socket) {
if (this.handler.onUpgrade) {
this.handler.onUpgrade(statusCode, headers, socket)
}
}
onConnect (abort) {
if (this.aborted) {
abort(this.reason)
} else {
this.abort = abort
}
}
onBodySent (chunk) {
if (this.handler.onBodySent) return this.handler.onBodySent(chunk)
}
static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
const { statusCode, code, headers } = err
const { method, retryOptions } = opts
const {
maxRetries,
minTimeout,
maxTimeout,
timeoutFactor,
statusCodes,
errorCodes,
methods
} = retryOptions
const { counter } = state
// Any error code that did not originate from undici and is not in the allowed retry list
if (
code &&
code !== 'UND_ERR_REQ_RETRY' &&
!errorCodes.includes(code)
) {
cb(err)
return
}
// If a set of methods is provided and the current method is not in the list
if (Array.isArray(methods) && !methods.includes(method)) {
cb(err)
return
}
// If a set of status codes is provided and the current status code is not in the list
if (
statusCode != null &&
Array.isArray(statusCodes) &&
!statusCodes.includes(statusCode)
) {
cb(err)
return
}
// If we reached the max number of retries
if (counter > maxRetries) {
cb(err)
return
}
let retryAfterHeader = headers?.['retry-after']
if (retryAfterHeader) {
retryAfterHeader = Number(retryAfterHeader)
retryAfterHeader = Number.isNaN(retryAfterHeader)
? calculateRetryAfterHeader(retryAfterHeader)
: retryAfterHeader * 1e3 // Retry-After is in seconds
}
const retryTimeout =
retryAfterHeader > 0
? Math.min(retryAfterHeader, maxTimeout)
: Math.min(minTimeout * timeoutFactor ** (counter - 1), maxTimeout)
setTimeout(() => cb(null), retryTimeout)
}
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
const headers = parseHeaders(rawHeaders)
this.retryCount += 1
if (statusCode >= 300) {
if (this.retryOpts.statusCodes.includes(statusCode) === false) {
return this.handler.onHeaders(
statusCode,
rawHeaders,
resume,
statusMessage
)
} else {
this.abort(
new RequestRetryError('Request failed', statusCode, {
headers,
count: this.retryCount
})
)
return false
}
}
// Resume from the checkpoint where we left off
if (this.resume != null) {
this.resume = null
if (statusCode !== 206) {
return true
}
const contentRange = parseRangeHeader(headers['content-range'])
// If no content range
if (!contentRange) {
this.abort(
new RequestRetryError('Content-Range mismatch', statusCode, {
headers,
count: this.retryCount
})
)
return false
}
// Let's start with a weak etag check
if (this.etag != null && this.etag !== headers.etag) {
this.abort(
new RequestRetryError('ETag mismatch', statusCode, {
headers,
count: this.retryCount
})
)
return false
}
const { start, size, end = size } = contentRange
assert(this.start === start, 'content-range mismatch')
assert(this.end == null || this.end === end, 'content-range mismatch')
this.resume = resume
return true
}
if (this.end == null) {
if (statusCode === 206) {
// First time we receive 206
const range = parseRangeHeader(headers['content-range'])
if (range == null) {
return this.handler.onHeaders(
statusCode,
rawHeaders,
resume,
statusMessage
)
}
const { start, size, end = size } = range
assert(
start != null && Number.isFinite(start),
'content-range mismatch'
)
assert(
end != null && Number.isFinite(end),
'invalid content-length'
)
this.start = start
this.end = end
}
// We do our best to checkpoint the body so later range requests can resume from it
if (this.end == null) {
const contentLength = headers['content-length']
this.end = contentLength != null ? Number(contentLength) : null
}
assert(Number.isFinite(this.start))
assert(
this.end == null || Number.isFinite(this.end),
'invalid content-length'
)
this.resume = resume
this.etag = headers.etag != null ? headers.etag : null
return this.handler.onHeaders(
statusCode,
rawHeaders,
resume,
statusMessage
)
}
const err = new RequestRetryError('Request failed', statusCode, {
headers,
count: this.retryCount
})
this.abort(err)
return false
}
onData (chunk) {
this.start += chunk.length
return this.handler.onData(chunk)
}
onComplete (rawTrailers) {
this.retryCount = 0
return this.handler.onComplete(rawTrailers)
}
onError (err) {
if (this.aborted || isDisturbed(this.opts.body)) {
return this.handler.onError(err)
}
// Reconcile the counter in case of a mix of network errors
// and server error responses
if (this.retryCount - this.retryCountCheckpoint > 0) {
// Keep the retries already counted since the last checkpoint
this.retryCount = this.retryCountCheckpoint + (this.retryCount - this.retryCountCheckpoint)
} else {
this.retryCount += 1
}
this.retryOpts.retry(
err,
{
state: { counter: this.retryCount },
opts: { retryOptions: this.retryOpts, ...this.opts }
},
onRetry.bind(this)
)
function onRetry (err) {
if (err != null || this.aborted || isDisturbed(this.opts.body)) {
return this.handler.onError(err)
}
if (this.start !== 0) {
this.opts = {
...this.opts,
headers: {
...this.opts.headers,
range: `bytes=${this.start}-${this.end ?? ''}`
}
}
}
try {
this.retryCountCheckpoint = this.retryCount
this.dispatch(this.opts, this)
} catch (err) {
this.handler.onError(err)
}
}
}
}
module.exports = RetryHandler
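For orientation, here is a minimal sketch of driving `RetryHandler` by hand around a `Client`, inferred only from the constructor shape above (`new RetryHandler(opts, { dispatch, handler })`); the origin, path, retry values and downstream handler are illustrative assumptions, not part of this file:

```js
'use strict'
// Hypothetical wiring; assumes the usual top-level undici exports.
const { Client, RetryHandler } = require('undici')

const client = new Client('http://localhost:3000') // placeholder origin
const dispatchOpts = { method: 'GET', path: '/resource' }

const retryingHandler = new RetryHandler(
  { ...dispatchOpts, retryOptions: { maxRetries: 3, minTimeout: 500, timeoutFactor: 2 } },
  {
    dispatch: client.dispatch.bind(client),
    // Bare-bones downstream handler; a real one would buffer the body.
    handler: {
      onConnect () {},
      onHeaders (statusCode, rawHeaders, resume) { console.log('status', statusCode); return true },
      onData (chunk) { return true },
      onComplete () { client.close() },
      onError (err) { console.error('request failed after retries', err) }
    }
  }
)

client.dispatch(dispatchOpts, retryingHandler)
```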

View file

@ -0,0 +1,21 @@
'use strict'
const RedirectHandler = require('../handler/redirect-handler')
function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {
return (dispatch) => {
return function Intercept (opts, handler) {
const { maxRedirections = defaultMaxRedirections } = opts
if (!maxRedirections) {
return dispatch(opts, handler)
}
const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler)
opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
return dispatch(opts, redirectHandler)
}
}
}
module.exports = createRedirectInterceptor

24
node_modules/undici/lib/interceptor/redirect.js generated vendored Normal file
View file

@ -0,0 +1,24 @@
'use strict'
const RedirectHandler = require('../handler/redirect-handler')
module.exports = opts => {
const globalMaxRedirections = opts?.maxRedirections
return dispatch => {
return function redirectInterceptor (opts, handler) {
const { maxRedirections = globalMaxRedirections, ...baseOpts } = opts
if (!maxRedirections) {
return dispatch(opts, handler)
}
const redirectHandler = new RedirectHandler(
dispatch,
maxRedirections,
opts,
handler
)
return dispatch(baseOpts, redirectHandler)
}
}
}
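A rough sketch of applying this factory by hand, assuming only the curried shape shown above (`opts => dispatch => interceptor`); the `Agent`, origin and handler below are illustrative:

```js
'use strict'
const { Agent } = require('undici')
const redirect = require('./redirect') // i.e. this module

const agent = new Agent()
// Interceptor that follows up to 3 redirects unless a request overrides it.
const dispatchWithRedirects = redirect({ maxRedirections: 3 })(agent.dispatch.bind(agent))

dispatchWithRedirects(
  { origin: 'http://example.test', method: 'GET', path: '/old-location' },
  {
    onConnect () {},
    onHeaders (statusCode) { console.log('final status', statusCode); return true },
    onData () { return true },
    onComplete () {},
    onError (err) { console.error(err) }
  }
)
```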

19
node_modules/undici/lib/interceptor/retry.js generated vendored Normal file
View file

@ -0,0 +1,19 @@
'use strict'
const RetryHandler = require('../handler/retry-handler')
module.exports = globalOpts => {
return dispatch => {
return function retryInterceptor (opts, handler) {
return dispatch(
opts,
new RetryHandler(
{ ...opts, retryOptions: { ...globalOpts, ...opts.retryOptions } },
{
handler,
dispatch
}
)
)
}
}
}
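On undici builds that expose the `interceptors` entry point and `Dispatcher#compose` (an assumption about the surrounding release, not something this file guarantees), the two curried interceptors are typically attached like this:

```js
'use strict'
const { Agent, interceptors, request } = require('undici')

// Compose a retrying, redirect-following dispatcher.
const agent = new Agent().compose(
  interceptors.retry({ maxRetries: 2, minTimeout: 250 }),
  interceptors.redirect({ maxRedirections: 3 })
)

request('http://example.test/resource', { dispatcher: agent })
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)
```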

199
node_modules/undici/lib/llhttp/constants.d.ts generated vendored Normal file
View file

@ -0,0 +1,199 @@
import { IEnumMap } from './utils';
export declare type HTTPMode = 'loose' | 'strict';
export declare enum ERROR {
OK = 0,
INTERNAL = 1,
STRICT = 2,
LF_EXPECTED = 3,
UNEXPECTED_CONTENT_LENGTH = 4,
CLOSED_CONNECTION = 5,
INVALID_METHOD = 6,
INVALID_URL = 7,
INVALID_CONSTANT = 8,
INVALID_VERSION = 9,
INVALID_HEADER_TOKEN = 10,
INVALID_CONTENT_LENGTH = 11,
INVALID_CHUNK_SIZE = 12,
INVALID_STATUS = 13,
INVALID_EOF_STATE = 14,
INVALID_TRANSFER_ENCODING = 15,
CB_MESSAGE_BEGIN = 16,
CB_HEADERS_COMPLETE = 17,
CB_MESSAGE_COMPLETE = 18,
CB_CHUNK_HEADER = 19,
CB_CHUNK_COMPLETE = 20,
PAUSED = 21,
PAUSED_UPGRADE = 22,
PAUSED_H2_UPGRADE = 23,
USER = 24
}
export declare enum TYPE {
BOTH = 0,
REQUEST = 1,
RESPONSE = 2
}
export declare enum FLAGS {
CONNECTION_KEEP_ALIVE = 1,
CONNECTION_CLOSE = 2,
CONNECTION_UPGRADE = 4,
CHUNKED = 8,
UPGRADE = 16,
CONTENT_LENGTH = 32,
SKIPBODY = 64,
TRAILING = 128,
TRANSFER_ENCODING = 512
}
export declare enum LENIENT_FLAGS {
HEADERS = 1,
CHUNKED_LENGTH = 2,
KEEP_ALIVE = 4
}
export declare enum METHODS {
DELETE = 0,
GET = 1,
HEAD = 2,
POST = 3,
PUT = 4,
CONNECT = 5,
OPTIONS = 6,
TRACE = 7,
COPY = 8,
LOCK = 9,
MKCOL = 10,
MOVE = 11,
PROPFIND = 12,
PROPPATCH = 13,
SEARCH = 14,
UNLOCK = 15,
BIND = 16,
REBIND = 17,
UNBIND = 18,
ACL = 19,
REPORT = 20,
MKACTIVITY = 21,
CHECKOUT = 22,
MERGE = 23,
'M-SEARCH' = 24,
NOTIFY = 25,
SUBSCRIBE = 26,
UNSUBSCRIBE = 27,
PATCH = 28,
PURGE = 29,
MKCALENDAR = 30,
LINK = 31,
UNLINK = 32,
SOURCE = 33,
PRI = 34,
DESCRIBE = 35,
ANNOUNCE = 36,
SETUP = 37,
PLAY = 38,
PAUSE = 39,
TEARDOWN = 40,
GET_PARAMETER = 41,
SET_PARAMETER = 42,
REDIRECT = 43,
RECORD = 44,
FLUSH = 45
}
export declare const METHODS_HTTP: METHODS[];
export declare const METHODS_ICE: METHODS[];
export declare const METHODS_RTSP: METHODS[];
export declare const METHOD_MAP: IEnumMap;
export declare const H_METHOD_MAP: IEnumMap;
export declare enum FINISH {
SAFE = 0,
SAFE_WITH_CB = 1,
UNSAFE = 2
}
export declare type CharList = Array<string | number>;
export declare const ALPHA: CharList;
export declare const NUM_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare const HEX_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
A: number;
B: number;
C: number;
D: number;
E: number;
F: number;
a: number;
b: number;
c: number;
d: number;
e: number;
f: number;
};
export declare const NUM: CharList;
export declare const ALPHANUM: CharList;
export declare const MARK: CharList;
export declare const USERINFO_CHARS: CharList;
export declare const STRICT_URL_CHAR: CharList;
export declare const URL_CHAR: CharList;
export declare const HEX: CharList;
export declare const STRICT_TOKEN: CharList;
export declare const TOKEN: CharList;
export declare const HEADER_CHARS: CharList;
export declare const CONNECTION_TOKEN_CHARS: CharList;
export declare const MAJOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare const MINOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare enum HEADER_STATE {
GENERAL = 0,
CONNECTION = 1,
CONTENT_LENGTH = 2,
TRANSFER_ENCODING = 3,
UPGRADE = 4,
CONNECTION_KEEP_ALIVE = 5,
CONNECTION_CLOSE = 6,
CONNECTION_UPGRADE = 7,
TRANSFER_ENCODING_CHUNKED = 8
}
export declare const SPECIAL_HEADERS: {
connection: HEADER_STATE;
'content-length': HEADER_STATE;
'proxy-connection': HEADER_STATE;
'transfer-encoding': HEADER_STATE;
upgrade: HEADER_STATE;
};

278
node_modules/undici/lib/llhttp/constants.js generated vendored Normal file
View file

@ -0,0 +1,278 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
const utils_1 = require("./utils");
// C headers
var ERROR;
(function (ERROR) {
ERROR[ERROR["OK"] = 0] = "OK";
ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL";
ERROR[ERROR["STRICT"] = 2] = "STRICT";
ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED";
ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH";
ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION";
ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD";
ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL";
ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT";
ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION";
ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN";
ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH";
ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE";
ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS";
ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE";
ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING";
ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN";
ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE";
ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE";
ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER";
ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE";
ERROR[ERROR["PAUSED"] = 21] = "PAUSED";
ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE";
ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE";
ERROR[ERROR["USER"] = 24] = "USER";
})(ERROR = exports.ERROR || (exports.ERROR = {}));
var TYPE;
(function (TYPE) {
TYPE[TYPE["BOTH"] = 0] = "BOTH";
TYPE[TYPE["REQUEST"] = 1] = "REQUEST";
TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE";
})(TYPE = exports.TYPE || (exports.TYPE = {}));
var FLAGS;
(function (FLAGS) {
FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE";
FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE";
FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE";
FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED";
FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE";
FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH";
FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY";
FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING";
// 1 << 8 is unused
FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING";
})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));
var LENIENT_FLAGS;
(function (LENIENT_FLAGS) {
LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS";
LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH";
LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE";
})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));
var METHODS;
(function (METHODS) {
METHODS[METHODS["DELETE"] = 0] = "DELETE";
METHODS[METHODS["GET"] = 1] = "GET";
METHODS[METHODS["HEAD"] = 2] = "HEAD";
METHODS[METHODS["POST"] = 3] = "POST";
METHODS[METHODS["PUT"] = 4] = "PUT";
/* pathological */
METHODS[METHODS["CONNECT"] = 5] = "CONNECT";
METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS";
METHODS[METHODS["TRACE"] = 7] = "TRACE";
/* WebDAV */
METHODS[METHODS["COPY"] = 8] = "COPY";
METHODS[METHODS["LOCK"] = 9] = "LOCK";
METHODS[METHODS["MKCOL"] = 10] = "MKCOL";
METHODS[METHODS["MOVE"] = 11] = "MOVE";
METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND";
METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH";
METHODS[METHODS["SEARCH"] = 14] = "SEARCH";
METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK";
METHODS[METHODS["BIND"] = 16] = "BIND";
METHODS[METHODS["REBIND"] = 17] = "REBIND";
METHODS[METHODS["UNBIND"] = 18] = "UNBIND";
METHODS[METHODS["ACL"] = 19] = "ACL";
/* subversion */
METHODS[METHODS["REPORT"] = 20] = "REPORT";
METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY";
METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT";
METHODS[METHODS["MERGE"] = 23] = "MERGE";
/* upnp */
METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH";
METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY";
METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE";
METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE";
/* RFC-5789 */
METHODS[METHODS["PATCH"] = 28] = "PATCH";
METHODS[METHODS["PURGE"] = 29] = "PURGE";
/* CalDAV */
METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR";
/* RFC-2068, section 19.6.1.2 */
METHODS[METHODS["LINK"] = 31] = "LINK";
METHODS[METHODS["UNLINK"] = 32] = "UNLINK";
/* icecast */
METHODS[METHODS["SOURCE"] = 33] = "SOURCE";
/* RFC-7540, section 11.6 */
METHODS[METHODS["PRI"] = 34] = "PRI";
/* RFC-2326 RTSP */
METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE";
METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE";
METHODS[METHODS["SETUP"] = 37] = "SETUP";
METHODS[METHODS["PLAY"] = 38] = "PLAY";
METHODS[METHODS["PAUSE"] = 39] = "PAUSE";
METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN";
METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER";
METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER";
METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT";
METHODS[METHODS["RECORD"] = 44] = "RECORD";
/* RAOP */
METHODS[METHODS["FLUSH"] = 45] = "FLUSH";
})(METHODS = exports.METHODS || (exports.METHODS = {}));
exports.METHODS_HTTP = [
METHODS.DELETE,
METHODS.GET,
METHODS.HEAD,
METHODS.POST,
METHODS.PUT,
METHODS.CONNECT,
METHODS.OPTIONS,
METHODS.TRACE,
METHODS.COPY,
METHODS.LOCK,
METHODS.MKCOL,
METHODS.MOVE,
METHODS.PROPFIND,
METHODS.PROPPATCH,
METHODS.SEARCH,
METHODS.UNLOCK,
METHODS.BIND,
METHODS.REBIND,
METHODS.UNBIND,
METHODS.ACL,
METHODS.REPORT,
METHODS.MKACTIVITY,
METHODS.CHECKOUT,
METHODS.MERGE,
METHODS['M-SEARCH'],
METHODS.NOTIFY,
METHODS.SUBSCRIBE,
METHODS.UNSUBSCRIBE,
METHODS.PATCH,
METHODS.PURGE,
METHODS.MKCALENDAR,
METHODS.LINK,
METHODS.UNLINK,
METHODS.PRI,
// TODO(indutny): should we allow it with HTTP?
METHODS.SOURCE,
];
exports.METHODS_ICE = [
METHODS.SOURCE,
];
exports.METHODS_RTSP = [
METHODS.OPTIONS,
METHODS.DESCRIBE,
METHODS.ANNOUNCE,
METHODS.SETUP,
METHODS.PLAY,
METHODS.PAUSE,
METHODS.TEARDOWN,
METHODS.GET_PARAMETER,
METHODS.SET_PARAMETER,
METHODS.REDIRECT,
METHODS.RECORD,
METHODS.FLUSH,
// For AirPlay
METHODS.GET,
METHODS.POST,
];
exports.METHOD_MAP = utils_1.enumToMap(METHODS);
exports.H_METHOD_MAP = {};
Object.keys(exports.METHOD_MAP).forEach((key) => {
if (/^H/.test(key)) {
exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];
}
});
var FINISH;
(function (FINISH) {
FINISH[FINISH["SAFE"] = 0] = "SAFE";
FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB";
FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE";
})(FINISH = exports.FINISH || (exports.FINISH = {}));
exports.ALPHA = [];
for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
// Upper case
exports.ALPHA.push(String.fromCharCode(i));
// Lower case
exports.ALPHA.push(String.fromCharCode(i + 0x20));
}
exports.NUM_MAP = {
0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
};
exports.HEX_MAP = {
0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,
a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,
};
exports.NUM = [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
];
exports.ALPHANUM = exports.ALPHA.concat(exports.NUM);
exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')'];
exports.USERINFO_CHARS = exports.ALPHANUM
.concat(exports.MARK)
.concat(['%', ';', ':', '&', '=', '+', '$', ',']);
// TODO(indutny): use RFC
exports.STRICT_URL_CHAR = [
'!', '"', '$', '%', '&', '\'',
'(', ')', '*', '+', ',', '-', '.', '/',
':', ';', '<', '=', '>',
'@', '[', '\\', ']', '^', '_',
'`',
'{', '|', '}', '~',
].concat(exports.ALPHANUM);
exports.URL_CHAR = exports.STRICT_URL_CHAR
.concat(['\t', '\f']);
// All characters with 0x80 bit set to 1
for (let i = 0x80; i <= 0xff; i++) {
exports.URL_CHAR.push(i);
}
exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
/* Tokens as defined by rfc 2616. Also lowercases them.
* token = 1*<any CHAR except CTLs or separators>
* separators = "(" | ")" | "<" | ">" | "@"
* | "," | ";" | ":" | "\" | <">
* | "/" | "[" | "]" | "?" | "="
* | "{" | "}" | SP | HT
*/
exports.STRICT_TOKEN = [
'!', '#', '$', '%', '&', '\'',
'*', '+', '-', '.',
'^', '_', '`',
'|', '~',
].concat(exports.ALPHANUM);
exports.TOKEN = exports.STRICT_TOKEN.concat([' ']);
/*
* Verify that a char is a valid visible (printable) US-ASCII
* character or %x80-FF
*/
exports.HEADER_CHARS = ['\t'];
for (let i = 32; i <= 255; i++) {
if (i !== 127) {
exports.HEADER_CHARS.push(i);
}
}
// ',' has char code 44 (0x2c)
exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);
exports.MAJOR = exports.NUM_MAP;
exports.MINOR = exports.MAJOR;
var HEADER_STATE;
(function (HEADER_STATE) {
HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL";
HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION";
HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH";
HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING";
HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE";
HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE";
HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE";
HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE";
HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED";
})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {}));
exports.SPECIAL_HEADERS = {
'connection': HEADER_STATE.CONNECTION,
'content-length': HEADER_STATE.CONTENT_LENGTH,
'proxy-connection': HEADER_STATE.CONNECTION,
'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING,
'upgrade': HEADER_STATE.UPGRADE,
};
//# sourceMappingURL=constants.js.map

1
node_modules/undici/lib/llhttp/constants.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

3
node_modules/undici/lib/llhttp/llhttp-wasm.js generated vendored Normal file

File diff suppressed because one or more lines are too long

3
node_modules/undici/lib/llhttp/llhttp_simd-wasm.js generated vendored Normal file

File diff suppressed because one or more lines are too long

4
node_modules/undici/lib/llhttp/utils.d.ts generated vendored Normal file
View file

@ -0,0 +1,4 @@
export interface IEnumMap {
[key: string]: number;
}
export declare function enumToMap(obj: any): IEnumMap;

15
node_modules/undici/lib/llhttp/utils.js generated vendored Normal file
View file

@ -0,0 +1,15 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.enumToMap = void 0;
function enumToMap(obj) {
const res = {};
Object.keys(obj).forEach((key) => {
const value = obj[key];
if (typeof value === 'number') {
res[key] = value;
}
});
return res;
}
exports.enumToMap = enumToMap;
//# sourceMappingURL=utils.js.map
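A small illustration of what `enumToMap` keeps, using a compiled TypeScript-style enum object (the sample enum is made up; the filtering behaviour follows directly from the code above):

```js
'use strict'
const { enumToMap } = require('./utils')

// A compiled TS enum object carries both name->number and number->name
// entries; enumToMap keeps only the keys whose values are numbers.
const TYPE = { BOTH: 0, REQUEST: 1, RESPONSE: 2, 0: 'BOTH', 1: 'REQUEST', 2: 'RESPONSE' }

console.log(enumToMap(TYPE)) // { BOTH: 0, REQUEST: 1, RESPONSE: 2 }
```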

1
node_modules/undici/lib/llhttp/utils.js.map generated vendored Normal file
View file

@ -0,0 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/llhttp/utils.ts"],"names":[],"mappings":";;;AAIA,SAAgB,SAAS,CAAC,GAAQ;IAChC,MAAM,GAAG,GAAa,EAAE,CAAC;IAEzB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,EAAE;QAC/B,MAAM,KAAK,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;QACvB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SAClB;IACH,CAAC,CAAC,CAAC;IAEH,OAAO,GAAG,CAAC;AACb,CAAC;AAXD,8BAWC"}

32
node_modules/undici/lib/llhttp/wasm_build_env.txt generated vendored Normal file
View file

@ -0,0 +1,32 @@
alpine-baselayout-data-3.4.0-r0
musl-1.2.3-r4
busybox-1.35.0-r29
busybox-binsh-1.35.0-r29
alpine-baselayout-3.4.0-r0
alpine-keys-2.4-r1
ca-certificates-bundle-20220614-r4
libcrypto3-3.0.8-r3
libssl3-3.0.8-r3
ssl_client-1.35.0-r29
zlib-1.2.13-r0
apk-tools-2.12.10-r1
scanelf-1.3.5-r1
musl-utils-1.2.3-r4
libc-utils-0.7.2-r3
libgcc-12.2.1_git20220924-r4
libstdc++-12.2.1_git20220924-r4
libffi-3.4.4-r0
xz-libs-5.2.9-r0
libxml2-2.10.4-r0
zstd-libs-1.5.5-r0
llvm15-libs-15.0.7-r0
clang15-libs-15.0.7-r0
libstdc++-dev-12.2.1_git20220924-r4
clang15-15.0.7-r0
lld-libs-15.0.7-r0
lld-15.0.7-r0
wasi-libc-0.20220525-r1
wasi-libcxx-15.0.7-r0
wasi-libcxxabi-15.0.7-r0
wasi-compiler-rt-15.0.7-r0
wasi-sdk-16-r0

160
node_modules/undici/lib/mock/mock-agent.js generated vendored Normal file
View file

@ -0,0 +1,160 @@
'use strict'
const { kClients } = require('../core/symbols')
const Agent = require('../dispatcher/agent')
const {
kAgent,
kMockAgentSet,
kMockAgentGet,
kDispatches,
kIsMockActive,
kNetConnect,
kGetNetConnect,
kOptions,
kFactory
} = require('./mock-symbols')
const MockClient = require('./mock-client')
const MockPool = require('./mock-pool')
const { matchValue, buildMockOptions } = require('./mock-utils')
const { InvalidArgumentError, UndiciError } = require('../core/errors')
const Dispatcher = require('../dispatcher/dispatcher')
const Pluralizer = require('./pluralizer')
const PendingInterceptorsFormatter = require('./pending-interceptors-formatter')
class MockAgent extends Dispatcher {
constructor (opts) {
super(opts)
this[kNetConnect] = true
this[kIsMockActive] = true
// Instantiate Agent and encapsulate
if ((opts?.agent && typeof opts.agent.dispatch !== 'function')) {
throw new InvalidArgumentError('Argument opts.agent must implement Agent')
}
const agent = opts?.agent ? opts.agent : new Agent(opts)
this[kAgent] = agent
this[kClients] = agent[kClients]
this[kOptions] = buildMockOptions(opts)
}
get (origin) {
let dispatcher = this[kMockAgentGet](origin)
if (!dispatcher) {
dispatcher = this[kFactory](origin)
this[kMockAgentSet](origin, dispatcher)
}
return dispatcher
}
dispatch (opts, handler) {
// Call MockAgent.get to perform additional setup before dispatching as normal
this.get(opts.origin)
return this[kAgent].dispatch(opts, handler)
}
async close () {
await this[kAgent].close()
this[kClients].clear()
}
deactivate () {
this[kIsMockActive] = false
}
activate () {
this[kIsMockActive] = true
}
enableNetConnect (matcher) {
if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {
if (Array.isArray(this[kNetConnect])) {
this[kNetConnect].push(matcher)
} else {
this[kNetConnect] = [matcher]
}
} else if (typeof matcher === 'undefined') {
this[kNetConnect] = true
} else {
throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')
}
}
disableNetConnect () {
this[kNetConnect] = false
}
// This is required to bypass issues caused by using global symbols - see:
// https://github.com/nodejs/undici/issues/1447
get isMockActive () {
return this[kIsMockActive]
}
[kMockAgentSet] (origin, dispatcher) {
this[kClients].set(origin, dispatcher)
}
[kFactory] (origin) {
const mockOptions = Object.assign({ agent: this }, this[kOptions])
return this[kOptions] && this[kOptions].connections === 1
? new MockClient(origin, mockOptions)
: new MockPool(origin, mockOptions)
}
[kMockAgentGet] (origin) {
// First check if we can immediately find it
const client = this[kClients].get(origin)
if (client) {
return client
}
// If the origin is not a string, create a dummy parent pool and return it to the user
if (typeof origin !== 'string') {
const dispatcher = this[kFactory]('http://localhost:9999')
this[kMockAgentSet](origin, dispatcher)
return dispatcher
}
// If we match, create a pool and assign the same dispatches
for (const [keyMatcher, nonExplicitDispatcher] of Array.from(this[kClients])) {
if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
const dispatcher = this[kFactory](origin)
this[kMockAgentSet](origin, dispatcher)
dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches]
return dispatcher
}
}
}
[kGetNetConnect] () {
return this[kNetConnect]
}
pendingInterceptors () {
const mockAgentClients = this[kClients]
return Array.from(mockAgentClients.entries())
.flatMap(([origin, scope]) => scope[kDispatches].map(dispatch => ({ ...dispatch, origin })))
.filter(({ pending }) => pending)
}
assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {
const pending = this.pendingInterceptors()
if (pending.length === 0) {
return
}
const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length)
throw new UndiciError(`
${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending:
${pendingInterceptorsFormatter.format(pending)}
`.trim())
}
}
module.exports = MockAgent
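A typical test-time flow for `MockAgent`, sketched from the methods above; the origin, path and payload are made up, and `setGlobalDispatcher`/`request` are assumed to be available from the package root:

```js
'use strict'
const { MockAgent, setGlobalDispatcher, request } = require('undici')

const mockAgent = new MockAgent()
mockAgent.disableNetConnect() // unmatched requests now fail instead of hitting the network
setGlobalDispatcher(mockAgent)

mockAgent
  .get('http://example.test')                     // returns (and caches) a mock pool
  .intercept({ path: '/users/1', method: 'GET' }) // registers a mock dispatch
  .reply(200, { id: 1 }, { headers: { 'content-type': 'application/json' } })

async function main () {
  const { statusCode, body } = await request('http://example.test/users/1')
  console.log(statusCode, await body.json()) // 200 { id: 1 }
}

main().catch(console.error)
```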

59
node_modules/undici/lib/mock/mock-client.js generated vendored Normal file
View file

@ -0,0 +1,59 @@
'use strict'
const { promisify } = require('node:util')
const Client = require('../dispatcher/client')
const { buildMockDispatch } = require('./mock-utils')
const {
kDispatches,
kMockAgent,
kClose,
kOriginalClose,
kOrigin,
kOriginalDispatch,
kConnected
} = require('./mock-symbols')
const { MockInterceptor } = require('./mock-interceptor')
const Symbols = require('../core/symbols')
const { InvalidArgumentError } = require('../core/errors')
/**
* MockClient provides an API that extends the Client to influence the mockDispatches.
*/
class MockClient extends Client {
constructor (origin, opts) {
super(origin, opts)
if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
throw new InvalidArgumentError('Argument opts.agent must implement Agent')
}
this[kMockAgent] = opts.agent
this[kOrigin] = origin
this[kDispatches] = []
this[kConnected] = 1
this[kOriginalDispatch] = this.dispatch
this[kOriginalClose] = this.close.bind(this)
this.dispatch = buildMockDispatch.call(this)
this.close = this[kClose]
}
get [Symbols.kConnected] () {
return this[kConnected]
}
/**
* Sets up the base interceptor for mocking replies from undici.
*/
intercept (opts) {
return new MockInterceptor(opts, this[kDispatches])
}
async [kClose] () {
await promisify(this[kOriginalClose])()
this[kConnected] = 0
this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
}
}
module.exports = MockClient

17
node_modules/undici/lib/mock/mock-errors.js generated vendored Normal file
View file

@ -0,0 +1,17 @@
'use strict'
const { UndiciError } = require('../core/errors')
class MockNotMatchedError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, MockNotMatchedError)
this.name = 'MockNotMatchedError'
this.message = message || 'The request does not match any registered mock dispatches'
this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
}
}
module.exports = {
MockNotMatchedError
}

207
node_modules/undici/lib/mock/mock-interceptor.js generated vendored Normal file
View file

@ -0,0 +1,207 @@
'use strict'
const { getResponseData, buildKey, addMockDispatch } = require('./mock-utils')
const {
kDispatches,
kDispatchKey,
kDefaultHeaders,
kDefaultTrailers,
kContentLength,
kMockDispatch
} = require('./mock-symbols')
const { InvalidArgumentError } = require('../core/errors')
const { buildURL } = require('../core/util')
/**
* Defines the scope API for an interceptor reply
*/
class MockScope {
constructor (mockDispatch) {
this[kMockDispatch] = mockDispatch
}
/**
* Delay a reply by a set amount in ms.
*/
delay (waitInMs) {
if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) {
throw new InvalidArgumentError('waitInMs must be a valid integer > 0')
}
this[kMockDispatch].delay = waitInMs
return this
}
/**
* For a defined reply, never mark as consumed.
*/
persist () {
this[kMockDispatch].persist = true
return this
}
/**
* Allow one to define a reply for a set amount of matching requests.
*/
times (repeatTimes) {
if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) {
throw new InvalidArgumentError('repeatTimes must be a valid integer > 0')
}
this[kMockDispatch].times = repeatTimes
return this
}
}
/**
* Defines an interceptor for a Mock
*/
class MockInterceptor {
constructor (opts, mockDispatches) {
if (typeof opts !== 'object') {
throw new InvalidArgumentError('opts must be an object')
}
if (typeof opts.path === 'undefined') {
throw new InvalidArgumentError('opts.path must be defined')
}
if (typeof opts.method === 'undefined') {
opts.method = 'GET'
}
// See https://github.com/nodejs/undici/issues/1245
// As per RFC 3986, clients are not supposed to send URI
// fragments to servers when they retrieve a document.
if (typeof opts.path === 'string') {
if (opts.query) {
opts.path = buildURL(opts.path, opts.query)
} else {
// Matches https://github.com/nodejs/undici/blob/main/lib/web/fetch/index.js#L1811
const parsedURL = new URL(opts.path, 'data://')
opts.path = parsedURL.pathname + parsedURL.search
}
}
if (typeof opts.method === 'string') {
opts.method = opts.method.toUpperCase()
}
this[kDispatchKey] = buildKey(opts)
this[kDispatches] = mockDispatches
this[kDefaultHeaders] = {}
this[kDefaultTrailers] = {}
this[kContentLength] = false
}
createMockScopeDispatchData ({ statusCode, data, responseOptions }) {
const responseData = getResponseData(data)
const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {}
const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }
const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }
return { statusCode, data, headers, trailers }
}
validateReplyParameters (replyParameters) {
if (typeof replyParameters.statusCode === 'undefined') {
throw new InvalidArgumentError('statusCode must be defined')
}
if (typeof replyParameters.responseOptions !== 'object' || replyParameters.responseOptions === null) {
throw new InvalidArgumentError('responseOptions must be an object')
}
}
/**
* Mock an undici request with a defined reply.
*/
reply (replyOptionsCallbackOrStatusCode) {
// When a callback is passed, the reply values are not available yet;
// they only become available when the callback is invoked.
if (typeof replyOptionsCallbackOrStatusCode === 'function') {
// Wrap the provided callback in another function that resolves
// the reply data when it is invoked.
const wrappedDefaultsCallback = (opts) => {
// The reply options callback must resolve to an object with statusCode, data and responseOptions.
const resolvedData = replyOptionsCallbackOrStatusCode(opts)
// Check if it is in the right format
if (typeof resolvedData !== 'object' || resolvedData === null) {
throw new InvalidArgumentError('reply options callback must return an object')
}
const replyParameters = { data: '', responseOptions: {}, ...resolvedData }
this.validateReplyParameters(replyParameters)
// Since the values can be obtained immediately we return them
// from this higher order function that will be resolved later.
return {
...this.createMockScopeDispatchData(replyParameters)
}
}
// Add the usual dispatch data, but set the data parameter to a function that will eventually provide the data.
const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback)
return new MockScope(newMockDispatch)
}
// If we get here, the reply was not given as a callback, so we have
// 1-3 positional parameters. We read them off `arguments`, since the
// first parameter is always the status code.
const replyParameters = {
statusCode: replyOptionsCallbackOrStatusCode,
data: arguments[1] === undefined ? '' : arguments[1],
responseOptions: arguments[2] === undefined ? {} : arguments[2]
}
this.validateReplyParameters(replyParameters)
// Send in-already provided data like usual
const dispatchData = this.createMockScopeDispatchData(replyParameters)
const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)
return new MockScope(newMockDispatch)
}
/**
* Mock an undici request with a defined error.
*/
replyWithError (error) {
if (typeof error === 'undefined') {
throw new InvalidArgumentError('error must be defined')
}
const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error })
return new MockScope(newMockDispatch)
}
/**
* Set default reply headers on the interceptor for subsequent replies
*/
defaultReplyHeaders (headers) {
if (typeof headers === 'undefined') {
throw new InvalidArgumentError('headers must be defined')
}
this[kDefaultHeaders] = headers
return this
}
/**
* Set default reply trailers on the interceptor for subsequent replies
*/
defaultReplyTrailers (trailers) {
if (typeof trailers === 'undefined') {
throw new InvalidArgumentError('trailers must be defined')
}
this[kDefaultTrailers] = trailers
return this
}
/**
* Set reply content length header for replies on the interceptor
*/
replyContentLength () {
this[kContentLength] = true
return this
}
}
module.exports.MockInterceptor = MockInterceptor
module.exports.MockScope = MockScope
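To show how the scope returned by `reply`/`replyWithError` composes with the interceptor options above, a brief sketch (origin and paths are placeholders):

```js
'use strict'
const { MockAgent } = require('undici')

const mockAgent = new MockAgent()
const pool = mockAgent.get('http://example.test')

// Serve only the first two matching GETs, hold each reply back by 50ms,
// and derive a content-length header from the payload.
pool
  .intercept({ path: '/ping', method: 'GET' })
  .defaultReplyHeaders({ 'cache-control': 'no-store' })
  .replyContentLength()
  .reply(200, 'pong')
  .delay(50)
  .times(2)

// A persistent error mock: every POST to /boom fails until the agent closes.
pool
  .intercept({ path: '/boom', method: 'POST' })
  .replyWithError(new Error('kaboom'))
  .persist()
```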

59
node_modules/undici/lib/mock/mock-pool.js generated vendored Normal file
View file

@ -0,0 +1,59 @@
'use strict'
const { promisify } = require('node:util')
const Pool = require('../dispatcher/pool')
const { buildMockDispatch } = require('./mock-utils')
const {
kDispatches,
kMockAgent,
kClose,
kOriginalClose,
kOrigin,
kOriginalDispatch,
kConnected
} = require('./mock-symbols')
const { MockInterceptor } = require('./mock-interceptor')
const Symbols = require('../core/symbols')
const { InvalidArgumentError } = require('../core/errors')
/**
* MockPool provides an API that extends the Pool to influence the mockDispatches.
*/
class MockPool extends Pool {
constructor (origin, opts) {
super(origin, opts)
if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
throw new InvalidArgumentError('Argument opts.agent must implement Agent')
}
this[kMockAgent] = opts.agent
this[kOrigin] = origin
this[kDispatches] = []
this[kConnected] = 1
this[kOriginalDispatch] = this.dispatch
this[kOriginalClose] = this.close.bind(this)
this.dispatch = buildMockDispatch.call(this)
this.close = this[kClose]
}
get [Symbols.kConnected] () {
return this[kConnected]
}
/**
* Sets up the base interceptor for mocking replies from undici.
*/
intercept (opts) {
return new MockInterceptor(opts, this[kDispatches])
}
async [kClose] () {
await promisify(this[kOriginalClose])()
this[kConnected] = 0
this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
}
}
module.exports = MockPool

23
node_modules/undici/lib/mock/mock-symbols.js generated vendored Normal file
View file

@ -0,0 +1,23 @@
'use strict'
module.exports = {
kAgent: Symbol('agent'),
kOptions: Symbol('options'),
kFactory: Symbol('factory'),
kDispatches: Symbol('dispatches'),
kDispatchKey: Symbol('dispatch key'),
kDefaultHeaders: Symbol('default headers'),
kDefaultTrailers: Symbol('default trailers'),
kContentLength: Symbol('content length'),
kMockAgent: Symbol('mock agent'),
kMockAgentSet: Symbol('mock agent set'),
kMockAgentGet: Symbol('mock agent get'),
kMockDispatch: Symbol('mock dispatch'),
kClose: Symbol('close'),
kOriginalClose: Symbol('original agent close'),
kOrigin: Symbol('origin'),
kIsMockActive: Symbol('is mock active'),
kNetConnect: Symbol('net connect'),
kGetNetConnect: Symbol('get net connect'),
kConnected: Symbol('connected')
}

363
node_modules/undici/lib/mock/mock-utils.js generated vendored Normal file
View file

@ -0,0 +1,363 @@
'use strict'
const { MockNotMatchedError } = require('./mock-errors')
const {
kDispatches,
kMockAgent,
kOriginalDispatch,
kOrigin,
kGetNetConnect
} = require('./mock-symbols')
const { buildURL } = require('../core/util')
const { STATUS_CODES } = require('node:http')
const {
types: {
isPromise
}
} = require('node:util')
function matchValue (match, value) {
if (typeof match === 'string') {
return match === value
}
if (match instanceof RegExp) {
return match.test(value)
}
if (typeof match === 'function') {
return match(value) === true
}
return false
}
function lowerCaseEntries (headers) {
return Object.fromEntries(
Object.entries(headers).map(([headerName, headerValue]) => {
return [headerName.toLocaleLowerCase(), headerValue]
})
)
}
/**
* @param {import('../../index').Headers|string[]|Record<string, string>} headers
* @param {string} key
*/
function getHeaderByName (headers, key) {
if (Array.isArray(headers)) {
for (let i = 0; i < headers.length; i += 2) {
if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) {
return headers[i + 1]
}
}
return undefined
} else if (typeof headers.get === 'function') {
return headers.get(key)
} else {
return lowerCaseEntries(headers)[key.toLocaleLowerCase()]
}
}
/** @param {string[]} headers */
function buildHeadersFromArray (headers) { // fetch HeadersList
const clone = headers.slice()
const entries = []
for (let index = 0; index < clone.length; index += 2) {
entries.push([clone[index], clone[index + 1]])
}
return Object.fromEntries(entries)
}
function matchHeaders (mockDispatch, headers) {
if (typeof mockDispatch.headers === 'function') {
if (Array.isArray(headers)) { // fetch HeadersList
headers = buildHeadersFromArray(headers)
}
return mockDispatch.headers(headers ? lowerCaseEntries(headers) : {})
}
if (typeof mockDispatch.headers === 'undefined') {
return true
}
if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') {
return false
}
for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) {
const headerValue = getHeaderByName(headers, matchHeaderName)
if (!matchValue(matchHeaderValue, headerValue)) {
return false
}
}
return true
}
function safeUrl (path) {
if (typeof path !== 'string') {
return path
}
const pathSegments = path.split('?')
if (pathSegments.length !== 2) {
return path
}
const qp = new URLSearchParams(pathSegments.pop())
qp.sort()
return [...pathSegments, qp.toString()].join('?')
}
function matchKey (mockDispatch, { path, method, body, headers }) {
const pathMatch = matchValue(mockDispatch.path, path)
const methodMatch = matchValue(mockDispatch.method, method)
const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true
const headersMatch = matchHeaders(mockDispatch, headers)
return pathMatch && methodMatch && bodyMatch && headersMatch
}
function getResponseData (data) {
if (Buffer.isBuffer(data)) {
return data
} else if (typeof data === 'object') {
return JSON.stringify(data)
} else {
return data.toString()
}
}
function getMockDispatch (mockDispatches, key) {
const basePath = key.query ? buildURL(key.path, key.query) : key.path
const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath
// Match path
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))
if (matchedMockDispatches.length === 0) {
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
}
// Match method
matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))
if (matchedMockDispatches.length === 0) {
throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}' on path '${resolvedPath}'`)
}
// Match body
matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
if (matchedMockDispatches.length === 0) {
throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}' on path '${resolvedPath}'`)
}
// Match headers
matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
if (matchedMockDispatches.length === 0) {
const headers = typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers
throw new MockNotMatchedError(`Mock dispatch not matched for headers '${headers}' on path '${resolvedPath}'`)
}
return matchedMockDispatches[0]
}
function addMockDispatch (mockDispatches, key, data) {
const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false }
const replyData = typeof data === 'function' ? { callback: data } : { ...data }
const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } }
mockDispatches.push(newMockDispatch)
return newMockDispatch
}
function deleteMockDispatch (mockDispatches, key) {
const index = mockDispatches.findIndex(dispatch => {
if (!dispatch.consumed) {
return false
}
return matchKey(dispatch, key)
})
if (index !== -1) {
mockDispatches.splice(index, 1)
}
}
function buildKey (opts) {
const { path, method, body, headers, query } = opts
return {
path,
method,
body,
headers,
query
}
}
function generateKeyValues (data) {
const keys = Object.keys(data)
const result = []
for (let i = 0; i < keys.length; ++i) {
const key = keys[i]
const value = data[key]
const name = Buffer.from(`${key}`)
if (Array.isArray(value)) {
for (let j = 0; j < value.length; ++j) {
result.push(name, Buffer.from(`${value[j]}`))
}
} else {
result.push(name, Buffer.from(`${value}`))
}
}
return result
}
/**
* @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
* @param {number} statusCode
*/
function getStatusText (statusCode) {
return STATUS_CODES[statusCode] || 'unknown'
}
async function getResponse (body) {
const buffers = []
for await (const data of body) {
buffers.push(data)
}
return Buffer.concat(buffers).toString('utf8')
}
/**
* Mock dispatch function used to simulate undici dispatches
*/
function mockDispatch (opts, handler) {
// Get mock dispatch from built key
const key = buildKey(opts)
const mockDispatch = getMockDispatch(this[kDispatches], key)
mockDispatch.timesInvoked++
// Here's where we resolve a callback if a callback is present for the dispatch data.
if (mockDispatch.data.callback) {
mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) }
}
// Parse mockDispatch data
const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch
const { timesInvoked, times } = mockDispatch
// If it's used up and not persistent, mark as consumed
mockDispatch.consumed = !persist && timesInvoked >= times
mockDispatch.pending = timesInvoked < times
// If specified, trigger dispatch error
if (error !== null) {
deleteMockDispatch(this[kDispatches], key)
handler.onError(error)
return true
}
// Handle the request with a delay if necessary
if (typeof delay === 'number' && delay > 0) {
setTimeout(() => {
handleReply(this[kDispatches])
}, delay)
} else {
handleReply(this[kDispatches])
}
function handleReply (mockDispatches, _data = data) {
// fetch's HeadersList is a 1D string array
const optsHeaders = Array.isArray(opts.headers)
? buildHeadersFromArray(opts.headers)
: opts.headers
const body = typeof _data === 'function'
? _data({ ...opts, headers: optsHeaders })
: _data
// util.types.isPromise is likely needed for jest.
if (isPromise(body)) {
// If handleReply is asynchronous, throwing an error
// in the callback will reject the promise, rather than
// synchronously throw the error, which breaks some tests.
// Rather, we wait for the callback to resolve if it is a
// promise, and then re-run handleReply with the new body.
body.then((newData) => handleReply(mockDispatches, newData))
return
}
const responseData = getResponseData(body)
const responseHeaders = generateKeyValues(headers)
const responseTrailers = generateKeyValues(trailers)
handler.onConnect?.(err => handler.onError(err), null)
handler.onHeaders?.(statusCode, responseHeaders, resume, getStatusText(statusCode))
handler.onData?.(Buffer.from(responseData))
handler.onComplete?.(responseTrailers)
deleteMockDispatch(mockDispatches, key)
}
function resume () {}
return true
}
function buildMockDispatch () {
const agent = this[kMockAgent]
const origin = this[kOrigin]
const originalDispatch = this[kOriginalDispatch]
return function dispatch (opts, handler) {
if (agent.isMockActive) {
try {
mockDispatch.call(this, opts, handler)
} catch (error) {
if (error instanceof MockNotMatchedError) {
const netConnect = agent[kGetNetConnect]()
if (netConnect === false) {
throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)
}
if (checkNetConnect(netConnect, origin)) {
originalDispatch.call(this, opts, handler)
} else {
throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)
}
} else {
throw error
}
}
} else {
originalDispatch.call(this, opts, handler)
}
}
}
function checkNetConnect (netConnect, origin) {
const url = new URL(origin)
if (netConnect === true) {
return true
} else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) {
return true
}
return false
}
function buildMockOptions (opts) {
if (opts) {
const { agent, ...mockOptions } = opts
return mockOptions
}
}
module.exports = {
getResponseData,
getMockDispatch,
addMockDispatch,
deleteMockDispatch,
buildKey,
generateKeyValues,
matchValue,
getResponse,
getStatusText,
mockDispatch,
buildMockDispatch,
checkNetConnect,
buildMockOptions,
getHeaderByName,
buildHeadersFromArray
}
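Two of the exported helpers in isolation, to make the matching and serialisation rules concrete (the sample values are illustrative):

```js
'use strict'
const { matchValue, getResponseData } = require('./mock-utils')

// matchValue accepts exact strings, RegExps or predicate functions.
console.log(matchValue('/users', '/users'))                       // true
console.log(matchValue(/^\/users/, '/users/42'))                  // true
console.log(matchValue(path => path.endsWith('42'), '/users/42')) // true

// getResponseData passes Buffers through and JSON-serialises objects.
console.log(getResponseData({ ok: true }))                        // '{"ok":true}'
console.log(Buffer.isBuffer(getResponseData(Buffer.from('hi'))))  // true
```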

View file

@ -0,0 +1,43 @@
'use strict'
const { Transform } = require('node:stream')
const { Console } = require('node:console')
const PERSISTENT = process.versions.icu ? '✅' : 'Y '
const NOT_PERSISTENT = process.versions.icu ? '❌' : 'N '
/**
* Gets the output of `console.table(…)` as a string.
*/
module.exports = class PendingInterceptorsFormatter {
constructor ({ disableColors } = {}) {
this.transform = new Transform({
transform (chunk, _enc, cb) {
cb(null, chunk)
}
})
this.logger = new Console({
stdout: this.transform,
inspectOptions: {
colors: !disableColors && !process.env.CI
}
})
}
format (pendingInterceptors) {
const withPrettyHeaders = pendingInterceptors.map(
({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
Method: method,
Origin: origin,
Path: path,
'Status code': statusCode,
Persistent: persist ? PERSISTENT : NOT_PERSISTENT,
Invocations: timesInvoked,
Remaining: persist ? Infinity : times - timesInvoked
}))
this.logger.table(withPrettyHeaders)
return this.transform.read().toString()
}
}
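In practice this class is consumed through `MockAgent.assertNoPendingInterceptors()` (shown in mock-agent.js above); a short, hypothetical end-of-test check:

```js
'use strict'
const { MockAgent } = require('undici')

const mockAgent = new MockAgent()
mockAgent
  .get('http://example.test')
  .intercept({ path: '/never-called', method: 'GET' })
  .reply(200, 'ok')

// Throws an UndiciError whose message embeds the console.table rendering
// produced by PendingInterceptorsFormatter.
try {
  mockAgent.assertNoPendingInterceptors()
} catch (err) {
  console.error(err.message)
}
```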

29
node_modules/undici/lib/mock/pluralizer.js generated vendored Normal file
View file

@ -0,0 +1,29 @@
'use strict'
const singulars = {
pronoun: 'it',
is: 'is',
was: 'was',
this: 'this'
}
const plurals = {
pronoun: 'they',
is: 'are',
was: 'were',
this: 'these'
}
module.exports = class Pluralizer {
constructor (singular, plural) {
this.singular = singular
this.plural = plural
}
pluralize (count) {
const one = count === 1
const keys = one ? singulars : plurals
const noun = one ? this.singular : this.plural
return { ...keys, count, noun }
}
}
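The return shape of `pluralize`, for reference:

```js
'use strict'
const Pluralizer = require('./pluralizer')

const pluralizer = new Pluralizer('interceptor', 'interceptors')

console.log(pluralizer.pluralize(1))
// { pronoun: 'it', is: 'is', was: 'was', this: 'this', count: 1, noun: 'interceptor' }
console.log(pluralizer.pluralize(3))
// { pronoun: 'they', is: 'are', was: 'were', this: 'these', count: 3, noun: 'interceptors' }
```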

97
node_modules/undici/lib/util/timers.js generated vendored Normal file
View file

@ -0,0 +1,97 @@
'use strict'
let fastNow = Date.now()
let fastNowTimeout
const fastTimers = []
function onTimeout () {
fastNow = Date.now()
let len = fastTimers.length
let idx = 0
while (idx < len) {
const timer = fastTimers[idx]
if (timer.state === 0) {
timer.state = fastNow + timer.delay
} else if (timer.state > 0 && fastNow >= timer.state) {
timer.state = -1
timer.callback(timer.opaque)
}
if (timer.state === -1) {
timer.state = -2
if (idx !== len - 1) {
fastTimers[idx] = fastTimers.pop()
} else {
fastTimers.pop()
}
len -= 1
} else {
idx += 1
}
}
if (fastTimers.length > 0) {
refreshTimeout()
}
}
function refreshTimeout () {
if (fastNowTimeout?.refresh) {
fastNowTimeout.refresh()
} else {
clearTimeout(fastNowTimeout)
fastNowTimeout = setTimeout(onTimeout, 1e3)
if (fastNowTimeout.unref) {
fastNowTimeout.unref()
}
}
}
class Timeout {
constructor (callback, delay, opaque) {
this.callback = callback
this.delay = delay
this.opaque = opaque
// -2 not in timer list
// -1 in timer list but inactive
//  0 in timer list, waiting for the next tick to set an expiry
// > 0 in timer list, waiting for the expiry time to pass
this.state = -2
this.refresh()
}
refresh () {
if (this.state === -2) {
fastTimers.push(this)
if (!fastNowTimeout || fastTimers.length === 1) {
refreshTimeout()
}
}
this.state = 0
}
clear () {
this.state = -1
}
}
module.exports = {
setTimeout (callback, delay, opaque) {
return delay < 1e3
? setTimeout(callback, delay, opaque)
: new Timeout(callback, delay, opaque)
},
clearTimeout (timeout) {
if (timeout instanceof Timeout) {
timeout.clear()
} else {
clearTimeout(timeout)
}
}
}
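A small usage sketch of the exported wrappers; the key property (grounded in the code above) is that sub-second delays fall through to the native `setTimeout`, while longer delays share the single 1-second tick and may fire up to roughly a second late:

```js
'use strict'
const timers = require('./timers')

// < 1s: native setTimeout, millisecond precision.
const short = timers.setTimeout(opaque => console.log('short fired', opaque), 500, 'a')

// >= 1s: coarse Timeout driven by the shared 1s refresh loop.
const long = timers.setTimeout(opaque => console.log('long fired', opaque), 5000, 'b')

// clearTimeout handles both flavours transparently.
timers.clearTimeout(short)
timers.clearTimeout(long)
```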

846
node_modules/undici/lib/web/cache/cache.js generated vendored Normal file
View file

@ -0,0 +1,846 @@
'use strict'
const { kConstruct } = require('./symbols')
const { urlEquals, getFieldValues } = require('./util')
const { kEnumerableProperty, isDisturbed } = require('../../core/util')
const { webidl } = require('../fetch/webidl')
const { Response, cloneResponse, fromInnerResponse } = require('../fetch/response')
const { Request, fromInnerRequest } = require('../fetch/request')
const { kState } = require('../fetch/symbols')
const { fetching } = require('../fetch/index')
const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')
const assert = require('node:assert')
/**
* @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
* @typedef {Object} CacheBatchOperation
* @property {'delete' | 'put'} type
* @property {any} request
* @property {any} response
* @property {import('../../types/cache').CacheQueryOptions} options
*/
/**
* @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list
* @typedef {[any, any][]} requestResponseList
*/
class Cache {
/**
* @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
* @type {requestResponseList}
*/
#relevantRequestResponseList
constructor () {
if (arguments[0] !== kConstruct) {
webidl.illegalConstructor()
}
this.#relevantRequestResponseList = arguments[1]
}
async match (request, options = {}) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })
request = webidl.converters.RequestInfo(request)
options = webidl.converters.CacheQueryOptions(options)
const p = this.#internalMatchAll(request, options, 1)
if (p.length === 0) {
return
}
return p[0]
}
async matchAll (request = undefined, options = {}) {
webidl.brandCheck(this, Cache)
if (request !== undefined) request = webidl.converters.RequestInfo(request)
options = webidl.converters.CacheQueryOptions(options)
return this.#internalMatchAll(request, options)
}
async add (request) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })
request = webidl.converters.RequestInfo(request)
// 1.
const requests = [request]
// 2.
const responseArrayPromise = this.addAll(requests)
// 3.
return await responseArrayPromise
}
async addAll (requests) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })
// 1.
const responsePromises = []
// 2.
const requestList = []
// 3.
for (let request of requests) {
if (request === undefined) {
throw webidl.errors.conversionFailed({
prefix: 'Cache.addAll',
argument: 'Argument 1',
types: ['undefined is not allowed']
})
}
request = webidl.converters.RequestInfo(request)
if (typeof request === 'string') {
continue
}
// 3.1
const r = request[kState]
// 3.2
if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
throw webidl.errors.exception({
header: 'Cache.addAll',
message: 'Expected http/s scheme when method is not GET.'
})
}
}
// 4.
/** @type {ReturnType<typeof fetching>[]} */
const fetchControllers = []
// 5.
for (const request of requests) {
// 5.1
const r = new Request(request)[kState]
// 5.2
if (!urlIsHttpHttpsScheme(r.url)) {
throw webidl.errors.exception({
header: 'Cache.addAll',
message: 'Expected http/s scheme.'
})
}
// 5.4
r.initiator = 'fetch'
r.destination = 'subresource'
// 5.5
requestList.push(r)
// 5.6
const responsePromise = createDeferredPromise()
// 5.7
fetchControllers.push(fetching({
request: r,
processResponse (response) {
// 1.
if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
responsePromise.reject(webidl.errors.exception({
header: 'Cache.addAll',
message: 'Received an invalid status code or the request failed.'
}))
} else if (response.headersList.contains('vary')) { // 2.
// 2.1
const fieldValues = getFieldValues(response.headersList.get('vary'))
// 2.2
for (const fieldValue of fieldValues) {
// 2.2.1
if (fieldValue === '*') {
responsePromise.reject(webidl.errors.exception({
header: 'Cache.addAll',
message: 'invalid vary field value'
}))
for (const controller of fetchControllers) {
controller.abort()
}
return
}
}
}
},
processResponseEndOfBody (response) {
// 1.
if (response.aborted) {
responsePromise.reject(new DOMException('aborted', 'AbortError'))
return
}
// 2.
responsePromise.resolve(response)
}
}))
// 5.8
responsePromises.push(responsePromise.promise)
}
// 6.
const p = Promise.all(responsePromises)
// 7.
const responses = await p
// 7.1
const operations = []
// 7.2
let index = 0
// 7.3
for (const response of responses) {
// 7.3.1
/** @type {CacheBatchOperation} */
const operation = {
type: 'put', // 7.3.2
request: requestList[index], // 7.3.3
response // 7.3.4
}
operations.push(operation) // 7.3.5
index++ // 7.3.6
}
// 7.5
const cacheJobPromise = createDeferredPromise()
// 7.6.1
let errorData = null
// 7.6.2
try {
this.#batchCacheOperations(operations)
} catch (e) {
errorData = e
}
// 7.6.3
queueMicrotask(() => {
// 7.6.3.1
if (errorData === null) {
cacheJobPromise.resolve(undefined)
} else {
// 7.6.3.2
cacheJobPromise.reject(errorData)
}
})
// 7.7
return cacheJobPromise.promise
}
async put (request, response) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })
request = webidl.converters.RequestInfo(request)
response = webidl.converters.Response(response)
// 1.
let innerRequest = null
// 2.
if (request instanceof Request) {
innerRequest = request[kState]
} else { // 3.
innerRequest = new Request(request)[kState]
}
// 4.
if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
throw webidl.errors.exception({
header: 'Cache.put',
message: 'Expected an http/s scheme when method is not GET'
})
}
// 5.
const innerResponse = response[kState]
// 6.
if (innerResponse.status === 206) {
throw webidl.errors.exception({
header: 'Cache.put',
message: 'Got 206 status'
})
}
// 7.
if (innerResponse.headersList.contains('vary')) {
// 7.1.
const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))
// 7.2.
for (const fieldValue of fieldValues) {
// 7.2.1
if (fieldValue === '*') {
throw webidl.errors.exception({
header: 'Cache.put',
message: 'Got * vary field value'
})
}
}
}
// 8.
if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
throw webidl.errors.exception({
header: 'Cache.put',
message: 'Response body is locked or disturbed'
})
}
// 9.
const clonedResponse = cloneResponse(innerResponse)
// 10.
const bodyReadPromise = createDeferredPromise()
// 11.
if (innerResponse.body != null) {
// 11.1
const stream = innerResponse.body.stream
// 11.2
const reader = stream.getReader()
// 11.3
readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
} else {
bodyReadPromise.resolve(undefined)
}
// 12.
/** @type {CacheBatchOperation[]} */
const operations = []
// 13.
/** @type {CacheBatchOperation} */
const operation = {
type: 'put', // 14.
request: innerRequest, // 15.
response: clonedResponse // 16.
}
// 17.
operations.push(operation)
// 19.
const bytes = await bodyReadPromise.promise
if (clonedResponse.body != null) {
clonedResponse.body.source = bytes
}
// 19.1
const cacheJobPromise = createDeferredPromise()
// 19.2.1
let errorData = null
// 19.2.2
try {
this.#batchCacheOperations(operations)
} catch (e) {
errorData = e
}
// 19.2.3
queueMicrotask(() => {
// 19.2.3.1
if (errorData === null) {
cacheJobPromise.resolve()
} else { // 19.2.3.2
cacheJobPromise.reject(errorData)
}
})
return cacheJobPromise.promise
}
async delete (request, options = {}) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })
request = webidl.converters.RequestInfo(request)
options = webidl.converters.CacheQueryOptions(options)
/**
* @type {Request}
*/
let r = null
if (request instanceof Request) {
r = request[kState]
if (r.method !== 'GET' && !options.ignoreMethod) {
return false
}
} else {
assert(typeof request === 'string')
r = new Request(request)[kState]
}
/** @type {CacheBatchOperation[]} */
const operations = []
/** @type {CacheBatchOperation} */
const operation = {
type: 'delete',
request: r,
options
}
operations.push(operation)
const cacheJobPromise = createDeferredPromise()
let errorData = null
let requestResponses
try {
requestResponses = this.#batchCacheOperations(operations)
} catch (e) {
errorData = e
}
queueMicrotask(() => {
if (errorData === null) {
cacheJobPromise.resolve(!!requestResponses?.length)
} else {
cacheJobPromise.reject(errorData)
}
})
return cacheJobPromise.promise
}
/**
* @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
* @param {any} request
* @param {import('../../types/cache').CacheQueryOptions} options
* @returns {Promise<readonly Request[]>}
*/
async keys (request = undefined, options = {}) {
webidl.brandCheck(this, Cache)
if (request !== undefined) request = webidl.converters.RequestInfo(request)
options = webidl.converters.CacheQueryOptions(options)
// 1.
let r = null
// 2.
if (request !== undefined) {
// 2.1
if (request instanceof Request) {
// 2.1.1
r = request[kState]
// 2.1.2
if (r.method !== 'GET' && !options.ignoreMethod) {
return []
}
} else if (typeof request === 'string') { // 2.2
r = new Request(request)[kState]
}
}
// 4.
const promise = createDeferredPromise()
// 5.
// 5.1
const requests = []
// 5.2
if (request === undefined) {
// 5.2.1
for (const requestResponse of this.#relevantRequestResponseList) {
// 5.2.1.1
requests.push(requestResponse[0])
}
} else { // 5.3
// 5.3.1
const requestResponses = this.#queryCache(r, options)
// 5.3.2
for (const requestResponse of requestResponses) {
// 5.3.2.1
requests.push(requestResponse[0])
}
}
// 5.4
queueMicrotask(() => {
// 5.4.1
const requestList = []
// 5.4.2
for (const request of requests) {
const requestObject = fromInnerRequest(
request,
new AbortController().signal,
'immutable',
{ settingsObject: request.client }
)
// 5.4.2.1
requestList.push(requestObject)
}
// 5.4.3
promise.resolve(Object.freeze(requestList))
})
return promise.promise
}
/**
* @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
* @param {CacheBatchOperation[]} operations
* @returns {requestResponseList}
*/
#batchCacheOperations (operations) {
// 1.
const cache = this.#relevantRequestResponseList
// 2.
const backupCache = [...cache]
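// The batch is applied atomically: if any operation below throws, the catch
// block at the end of this method restores the cache from this backup before
// re-throwing.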
// 3.
const addedItems = []
// 4.1
const resultList = []
try {
// 4.2
for (const operation of operations) {
// 4.2.1
if (operation.type !== 'delete' && operation.type !== 'put') {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'operation type does not match "delete" or "put"'
})
}
// 4.2.2
if (operation.type === 'delete' && operation.response != null) {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'delete operation should not have an associated response'
})
}
// 4.2.3
if (this.#queryCache(operation.request, operation.options, addedItems).length) {
throw new DOMException('the request matches an entry already added in this batch', 'InvalidStateError')
}
// 4.2.4
let requestResponses
// 4.2.5
if (operation.type === 'delete') {
// 4.2.5.1
requestResponses = this.#queryCache(operation.request, operation.options)
// TODO: the spec is wrong, this is needed to pass WPTs
if (requestResponses.length === 0) {
return []
}
// 4.2.5.2
for (const requestResponse of requestResponses) {
const idx = cache.indexOf(requestResponse)
assert(idx !== -1)
// 4.2.5.2.1
cache.splice(idx, 1)
}
} else if (operation.type === 'put') { // 4.2.6
// 4.2.6.1
if (operation.response == null) {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'put operation should have an associated response'
})
}
// 4.2.6.2
const r = operation.request
// 4.2.6.3
if (!urlIsHttpHttpsScheme(r.url)) {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'expected http or https scheme'
})
}
// 4.2.6.4
if (r.method !== 'GET') {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'expected a GET method'
})
}
// 4.2.6.5
if (operation.options != null) {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'options must not be defined'
})
}
// 4.2.6.6
requestResponses = this.#queryCache(operation.request)
// 4.2.6.7
for (const requestResponse of requestResponses) {
const idx = cache.indexOf(requestResponse)
assert(idx !== -1)
// 4.2.6.7.1
cache.splice(idx, 1)
}
// 4.2.6.8
cache.push([operation.request, operation.response])
// 4.2.6.10
addedItems.push([operation.request, operation.response])
}
// 4.2.7
resultList.push([operation.request, operation.response])
}
// 4.3
return resultList
} catch (e) { // 5.
// 5.1
this.#relevantRequestResponseList.length = 0
// 5.2
this.#relevantRequestResponseList = backupCache
// 5.3
throw e
}
}
/**
* @see https://w3c.github.io/ServiceWorker/#query-cache
* @param {any} requestQuery
* @param {import('../../types/cache').CacheQueryOptions} options
* @param {requestResponseList} targetStorage
* @returns {requestResponseList}
*/
#queryCache (requestQuery, options, targetStorage) {
/** @type {requestResponseList} */
const resultList = []
const storage = targetStorage ?? this.#relevantRequestResponseList
for (const requestResponse of storage) {
const [cachedRequest, cachedResponse] = requestResponse
if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
resultList.push(requestResponse)
}
}
return resultList
}
/**
* @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
* @param {any} requestQuery
* @param {any} request
* @param {any | null} response
* @param {import('../../types/cache').CacheQueryOptions | undefined} options
* @returns {boolean}
*/
#requestMatchesCachedItem (requestQuery, request, response = null, options) {
// if (options?.ignoreMethod === false && request.method === 'GET') {
// return false
// }
const queryURL = new URL(requestQuery.url)
const cachedURL = new URL(request.url)
if (options?.ignoreSearch) {
cachedURL.search = ''
queryURL.search = ''
}
if (!urlEquals(queryURL, cachedURL, true)) {
return false
}
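// Vary handling: when the cached response carries a Vary header, every header
// name it lists must have the same value on the cached request and on the
// query request; '*' never matches, and a header present on only one side
// compares unequal and fails the match.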
if (
response == null ||
options?.ignoreVary ||
!response.headersList.contains('vary')
) {
return true
}
const fieldValues = getFieldValues(response.headersList.get('vary'))
for (const fieldValue of fieldValues) {
if (fieldValue === '*') {
return false
}
const requestValue = request.headersList.get(fieldValue)
const queryValue = requestQuery.headersList.get(fieldValue)
// If one has the header and the other doesn't, or one has
// a different value than the other, return false
if (requestValue !== queryValue) {
return false
}
}
return true
}
#internalMatchAll (request, options, maxResponses = Infinity) {
// 1.
let r = null
// 2.
if (request !== undefined) {
if (request instanceof Request) {
// 2.1.1
r = request[kState]
// 2.1.2
if (r.method !== 'GET' && !options.ignoreMethod) {
return []
}
} else if (typeof request === 'string') {
// 2.2.1
r = new Request(request)[kState]
}
}
// 5.
// 5.1
const responses = []
// 5.2
if (request === undefined) {
// 5.2.1
for (const requestResponse of this.#relevantRequestResponseList) {
responses.push(requestResponse[1])
}
} else { // 5.3
// 5.3.1
const requestResponses = this.#queryCache(r, options)
// 5.3.2
for (const requestResponse of requestResponses) {
responses.push(requestResponse[1])
}
}
// 5.4
// We don't implement CORS, so we don't need to loop over the responses, yay!
// 5.5.1
const responseList = []
// 5.5.2
for (const response of responses) {
// 5.5.2.1
const responseObject = fromInnerResponse(response, 'immutable', { settingsObject: {} })
responseList.push(responseObject.clone())
if (responseList.length >= maxResponses) {
break
}
}
// 6.
return Object.freeze(responseList)
}
}
Object.defineProperties(Cache.prototype, {
[Symbol.toStringTag]: {
value: 'Cache',
configurable: true
},
match: kEnumerableProperty,
matchAll: kEnumerableProperty,
add: kEnumerableProperty,
addAll: kEnumerableProperty,
put: kEnumerableProperty,
delete: kEnumerableProperty,
keys: kEnumerableProperty
})
const cacheQueryOptionConverters = [
{
key: 'ignoreSearch',
converter: webidl.converters.boolean,
defaultValue: false
},
{
key: 'ignoreMethod',
converter: webidl.converters.boolean,
defaultValue: false
},
{
key: 'ignoreVary',
converter: webidl.converters.boolean,
defaultValue: false
}
]
webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)
webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
...cacheQueryOptionConverters,
{
key: 'cacheName',
converter: webidl.converters.DOMString
}
])
webidl.converters.Response = webidl.interfaceConverter(Response)
webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
webidl.converters.RequestInfo
)
module.exports = {
Cache
}
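For orientation, a minimal usage sketch of the Cache class defined above. It assumes the main undici entry point re-exports `caches` (a CacheStorage instance) and `Response`, as undici's documentation describes; the URL and payload are only illustrative.

```js
'use strict'

// Assumption: `caches` and `Response` are re-exported from the main undici entry.
const { caches, Response } = require('undici')

async function demo () {
  const cache = await caches.open('v1')

  // put() stores a GET request/response pair; non-GET methods or
  // non-http(s) schemes make it throw.
  await cache.put('https://example.com/data.json', new Response('{"ok":true}'))

  // match() resolves with a clone of the stored response, or undefined.
  const hit = await cache.match('https://example.com/data.json')
  console.log(hit && await hit.text())

  // keys() lists the stored requests; delete() removes matching entries.
  console.log((await cache.keys()).map((r) => r.url))
  console.log(await cache.delete('https://example.com/data.json'))
}

demo()
```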

144
node_modules/undici/lib/web/cache/cachestorage.js generated vendored Normal file

@ -0,0 +1,144 @@
'use strict'
const { kConstruct } = require('./symbols')
const { Cache } = require('./cache')
const { webidl } = require('../fetch/webidl')
const { kEnumerableProperty } = require('../../core/util')
class CacheStorage {
/**
* @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
* @type {Map<string, import('./cache').requestResponseList>}
*/
#caches = new Map()
constructor () {
if (arguments[0] !== kConstruct) {
webidl.illegalConstructor()
}
}
async match (request, options = {}) {
webidl.brandCheck(this, CacheStorage)
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })
request = webidl.converters.RequestInfo(request)
options = webidl.converters.MultiCacheQueryOptions(options)
// 1.
if (options.cacheName != null) {
// 1.1.1.1
if (this.#caches.has(options.cacheName)) {
// 1.1.1.1.1
const cacheList = this.#caches.get(options.cacheName)
const cache = new Cache(kConstruct, cacheList)
return await cache.match(request, options)
}
} else { // 2.
// 2.2
for (const cacheList of this.#caches.values()) {
const cache = new Cache(kConstruct, cacheList)
// 2.2.1.2
const response = await cache.match(request, options)
if (response !== undefined) {
return response
}
}
}
}
/**
* @see https://w3c.github.io/ServiceWorker/#cache-storage-has
* @param {string} cacheName
* @returns {Promise<boolean>}
*/
async has (cacheName) {
webidl.brandCheck(this, CacheStorage)
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })
cacheName = webidl.converters.DOMString(cacheName)
// 2.1.1
// 2.2
return this.#caches.has(cacheName)
}
/**
* @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
* @param {string} cacheName
* @returns {Promise<Cache>}
*/
async open (cacheName) {
webidl.brandCheck(this, CacheStorage)
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })
cacheName = webidl.converters.DOMString(cacheName)
// 2.1
if (this.#caches.has(cacheName)) {
// await caches.open('v1') !== await caches.open('v1')
// 2.1.1
const cache = this.#caches.get(cacheName)
// 2.1.1.1
return new Cache(kConstruct, cache)
}
// 2.2
const cache = []
// 2.3
this.#caches.set(cacheName, cache)
// 2.4
return new Cache(kConstruct, cache)
}
/**
* @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
* @param {string} cacheName
* @returns {Promise<boolean>}
*/
async delete (cacheName) {
webidl.brandCheck(this, CacheStorage)
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })
cacheName = webidl.converters.DOMString(cacheName)
return this.#caches.delete(cacheName)
}
/**
* @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
* @returns {Promise<string[]>}
*/
async keys () {
webidl.brandCheck(this, CacheStorage)
// 2.1
const keys = this.#caches.keys()
// 2.2
return [...keys]
}
}
Object.defineProperties(CacheStorage.prototype, {
[Symbol.toStringTag]: {
value: 'CacheStorage',
configurable: true
},
match: kEnumerableProperty,
has: kEnumerableProperty,
open: kEnumerableProperty,
delete: kEnumerableProperty,
keys: kEnumerableProperty
})
module.exports = {
CacheStorage
}
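A companion sketch for the CacheStorage class above, under the same assumption that the main undici entry point exposes an instance of it as `caches`:

```js
'use strict'

// Assumption: `caches` (a CacheStorage instance) and `Response` come from the
// main undici entry point.
const { caches, Response } = require('undici')

async function demo () {
  const v1 = await caches.open('v1')
  await v1.put('https://example.com/a', new Response('a'))

  console.log(await caches.has('v1')) // true
  console.log(await caches.keys()) // [ 'v1' ]

  // Without cacheName, match() consults every cache in insertion order.
  const res = await caches.match('https://example.com/a')
  console.log(res && await res.text()) // 'a'

  // With cacheName, only that cache is consulted; an unknown name resolves
  // to undefined.
  console.log(await caches.match('https://example.com/a', { cacheName: 'v2' }))

  console.log(await caches.delete('v1')) // true
}

demo()
```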

5
node_modules/undici/lib/web/cache/symbols.js generated vendored Normal file

@ -0,0 +1,5 @@
'use strict'
module.exports = {
kConstruct: require('../../core/symbols').kConstruct
}

45
node_modules/undici/lib/web/cache/util.js generated vendored Normal file

@ -0,0 +1,45 @@
'use strict'
const assert = require('node:assert')
const { URLSerializer } = require('../fetch/data-url')
const { isValidHeaderName } = require('../fetch/util')
/**
* @see https://url.spec.whatwg.org/#concept-url-equals
* @param {URL} A
* @param {URL} B
* @param {boolean | undefined} excludeFragment
* @returns {boolean}
*/
function urlEquals (A, B, excludeFragment = false) {
const serializedA = URLSerializer(A, excludeFragment)
const serializedB = URLSerializer(B, excludeFragment)
return serializedA === serializedB
}
/**
* @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
* @param {string} header
*/
function getFieldValues (header) {
assert(header !== null)
const values = []
for (let value of header.split(',')) {
value = value.trim()
if (isValidHeaderName(value)) {
values.push(value)
}
}
return values
}
module.exports = {
urlEquals,
getFieldValues
}
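A small sketch of the two helpers above; the relative require path is an assumption about where this vendored file sits relative to the script.

```js
'use strict'

// Assumption: the script lives next to node_modules, so this relative path resolves.
const { urlEquals, getFieldValues } = require('./node_modules/undici/lib/web/cache/util')

// Vary values are split on commas, trimmed, and invalid header names dropped.
console.log(getFieldValues('Accept-Encoding, User-Agent'))
// -> [ 'Accept-Encoding', 'User-Agent' ]

// urlEquals() compares serialized URLs; the third argument excludes the fragment.
console.log(urlEquals(new URL('https://example.com/a#x'), new URL('https://example.com/a'), true)) // true
console.log(urlEquals(new URL('https://example.com/a?q=1'), new URL('https://example.com/a'))) // false
```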

12
node_modules/undici/lib/web/cookies/constants.js generated vendored Normal file

@ -0,0 +1,12 @@
'use strict'
// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
const maxAttributeValueSize = 1024
// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
const maxNameValuePairSize = 4096
module.exports = {
maxAttributeValueSize,
maxNameValuePairSize
}
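These limits come from the cookie-store draft; a quick sketch of how the 4096-octet pair limit surfaces through the parser in lib/web/cookies/parse.js (shown further below). The relative require paths are assumptions.

```js
'use strict'

// Assumption: the script lives next to node_modules, so these paths resolve.
const { maxNameValuePairSize } = require('./node_modules/undici/lib/web/cookies/constants')
const { parseSetCookie } = require('./node_modules/undici/lib/web/cookies/parse')

// A name/value pair longer than 4096 octets makes the parser reject the header.
const oversized = 'a=' + 'x'.repeat(maxNameValuePairSize)
console.log(parseSetCookie(oversized)) // -> null
```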

184
node_modules/undici/lib/web/cookies/index.js generated vendored Normal file

@ -0,0 +1,184 @@
'use strict'
const { parseSetCookie } = require('./parse')
const { stringify, getHeadersList } = require('./util')
const { webidl } = require('../fetch/webidl')
const { Headers } = require('../fetch/headers')
/**
* @typedef {Object} Cookie
* @property {string} name
* @property {string} value
* @property {Date|number|undefined} expires
* @property {number|undefined} maxAge
* @property {string|undefined} domain
* @property {string|undefined} path
* @property {boolean|undefined} secure
* @property {boolean|undefined} httpOnly
* @property {'Strict'|'Lax'|'None'} sameSite
* @property {string[]} unparsed
*/
/**
* @param {Headers} headers
* @returns {Record<string, string>}
*/
function getCookies (headers) {
webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })
webidl.brandCheck(headers, Headers, { strict: false })
const cookie = headers.get('cookie')
const out = {}
if (!cookie) {
return out
}
for (const piece of cookie.split(';')) {
const [name, ...value] = piece.split('=')
out[name.trim()] = value.join('=')
}
return out
}
/**
* @param {Headers} headers
* @param {string} name
* @param {{ path?: string, domain?: string }|undefined} attributes
* @returns {void}
*/
function deleteCookie (headers, name, attributes) {
webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })
webidl.brandCheck(headers, Headers, { strict: false })
name = webidl.converters.DOMString(name)
attributes = webidl.converters.DeleteCookieAttributes(attributes)
// Matches behavior of
// https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
setCookie(headers, {
name,
value: '',
expires: new Date(0),
...attributes
})
}
/**
* @param {Headers} headers
* @returns {Cookie[]}
*/
function getSetCookies (headers) {
webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })
webidl.brandCheck(headers, Headers, { strict: false })
const cookies = getHeadersList(headers).cookies
if (!cookies) {
return []
}
// In older versions of undici, cookies is a list of name:value.
return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
}
/**
* @param {Headers} headers
* @param {Cookie} cookie
* @returns {void}
*/
function setCookie (headers, cookie) {
webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })
webidl.brandCheck(headers, Headers, { strict: false })
cookie = webidl.converters.Cookie(cookie)
const str = stringify(cookie)
if (str) {
headers.append('Set-Cookie', str)
}
}
webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([
{
converter: webidl.nullableConverter(webidl.converters.DOMString),
key: 'path',
defaultValue: null
},
{
converter: webidl.nullableConverter(webidl.converters.DOMString),
key: 'domain',
defaultValue: null
}
])
webidl.converters.Cookie = webidl.dictionaryConverter([
{
converter: webidl.converters.DOMString,
key: 'name'
},
{
converter: webidl.converters.DOMString,
key: 'value'
},
{
converter: webidl.nullableConverter((value) => {
if (typeof value === 'number') {
return webidl.converters['unsigned long long'](value)
}
return new Date(value)
}),
key: 'expires',
defaultValue: null
},
{
converter: webidl.nullableConverter(webidl.converters['long long']),
key: 'maxAge',
defaultValue: null
},
{
converter: webidl.nullableConverter(webidl.converters.DOMString),
key: 'domain',
defaultValue: null
},
{
converter: webidl.nullableConverter(webidl.converters.DOMString),
key: 'path',
defaultValue: null
},
{
converter: webidl.nullableConverter(webidl.converters.boolean),
key: 'secure',
defaultValue: null
},
{
converter: webidl.nullableConverter(webidl.converters.boolean),
key: 'httpOnly',
defaultValue: null
},
{
converter: webidl.converters.USVString,
key: 'sameSite',
allowedValues: ['Strict', 'Lax', 'None']
},
{
converter: webidl.sequenceConverter(webidl.converters.DOMString),
key: 'unparsed',
defaultValue: []
}
])
module.exports = {
getCookies,
deleteCookie,
getSetCookies,
setCookie
}
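A short sketch of the public cookie helpers defined above, assuming they are re-exported from the main undici entry point alongside `Headers`, as undici's documentation describes:

```js
'use strict'

// Assumption: these helpers and Headers are re-exported from the main undici entry.
const { Headers, getCookies, getSetCookies, setCookie, deleteCookie } = require('undici')

const headers = new Headers()

// setCookie() validates the fields and appends a Set-Cookie header.
setCookie(headers, { name: 'session', value: 'abc123', path: '/', httpOnly: true })
console.log(getSetCookies(headers))
// -> roughly [ { name: 'session', value: 'abc123', httpOnly: true, path: '/' } ]

// getCookies() parses a request-side Cookie header into a name/value record.
console.log(getCookies(new Headers({ cookie: 'a=1; b=2' })))
// -> { a: '1', b: '2' }

// deleteCookie() appends an already-expired Set-Cookie entry for the given name.
deleteCookie(headers, 'session', { path: '/' })
```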

317
node_modules/undici/lib/web/cookies/parse.js generated vendored Normal file

@ -0,0 +1,317 @@
'use strict'
const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
const { isCTLExcludingHtab } = require('./util')
const { collectASequenceOfCodePointsFast } = require('../fetch/data-url')
const assert = require('node:assert')
/**
* @description Parses the field-value attributes of a set-cookie header string.
* @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
* @param {string} header
* @returns {object|null} the parsed cookie, or null if the header is invalid
*/
function parseSetCookie (header) {
// 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F
// character (CTL characters excluding HTAB): Abort these steps and
// ignore the set-cookie-string entirely.
if (isCTLExcludingHtab(header)) {
return null
}
let nameValuePair = ''
let unparsedAttributes = ''
let name = ''
let value = ''
// 2. If the set-cookie-string contains a %x3B (";") character:
if (header.includes(';')) {
// 1. The name-value-pair string consists of the characters up to,
// but not including, the first %x3B (";"), and the unparsed-
// attributes consist of the remainder of the set-cookie-string
// (including the %x3B (";") in question).
const position = { position: 0 }
nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
unparsedAttributes = header.slice(position.position)
} else {
// Otherwise:
// 1. The name-value-pair string consists of all the characters
// contained in the set-cookie-string, and the unparsed-
// attributes is the empty string.
nameValuePair = header
}
// 3. If the name-value-pair string lacks a %x3D ("=") character, then
// the name string is empty, and the value string is the value of
// name-value-pair.
if (!nameValuePair.includes('=')) {
value = nameValuePair
} else {
// Otherwise, the name string consists of the characters up to, but
// not including, the first %x3D ("=") character, and the (possibly
// empty) value string consists of the characters after the first
// %x3D ("=") character.
const position = { position: 0 }
name = collectASequenceOfCodePointsFast(
'=',
nameValuePair,
position
)
value = nameValuePair.slice(position.position + 1)
}
// 4. Remove any leading or trailing WSP characters from the name
// string and the value string.
name = name.trim()
value = value.trim()
// 5. If the sum of the lengths of the name string and the value string
// is more than 4096 octets, abort these steps and ignore the set-
// cookie-string entirely.
if (name.length + value.length > maxNameValuePairSize) {
return null
}
// 6. The cookie-name is the name string, and the cookie-value is the
// value string.
return {
name, value, ...parseUnparsedAttributes(unparsedAttributes)
}
}
/**
* Parses the remaining attributes of a set-cookie header
* @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
* @param {string} unparsedAttributes
* @param {Object.<string, unknown>} [cookieAttributeList={}]
*/
function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
// 1. If the unparsed-attributes string is empty, skip the rest of
// these steps.
if (unparsedAttributes.length === 0) {
return cookieAttributeList
}
// 2. Discard the first character of the unparsed-attributes (which
// will be a %x3B (";") character).
assert(unparsedAttributes[0] === ';')
unparsedAttributes = unparsedAttributes.slice(1)
let cookieAv = ''
// 3. If the remaining unparsed-attributes contains a %x3B (";")
// character:
if (unparsedAttributes.includes(';')) {
// 1. Consume the characters of the unparsed-attributes up to, but
// not including, the first %x3B (";") character.
cookieAv = collectASequenceOfCodePointsFast(
';',
unparsedAttributes,
{ position: 0 }
)
unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
} else {
// Otherwise:
// 1. Consume the remainder of the unparsed-attributes.
cookieAv = unparsedAttributes
unparsedAttributes = ''
}
// Let the cookie-av string be the characters consumed in this step.
let attributeName = ''
let attributeValue = ''
// 4. If the cookie-av string contains a %x3D ("=") character:
if (cookieAv.includes('=')) {
// 1. The (possibly empty) attribute-name string consists of the
// characters up to, but not including, the first %x3D ("=")
// character, and the (possibly empty) attribute-value string
// consists of the characters after the first %x3D ("=")
// character.
const position = { position: 0 }
attributeName = collectASequenceOfCodePointsFast(
'=',
cookieAv,
position
)
attributeValue = cookieAv.slice(position.position + 1)
} else {
// Otherwise:
// 1. The attribute-name string consists of the entire cookie-av
// string, and the attribute-value string is empty.
attributeName = cookieAv
}
// 5. Remove any leading or trailing WSP characters from the attribute-
// name string and the attribute-value string.
attributeName = attributeName.trim()
attributeValue = attributeValue.trim()
// 6. If the attribute-value is longer than 1024 octets, ignore the
// cookie-av string and return to Step 1 of this algorithm.
if (attributeValue.length > maxAttributeValueSize) {
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
}
// 7. Process the attribute-name and attribute-value according to the
// requirements in the following subsections. (Notice that
// attributes with unrecognized attribute-names are ignored.)
const attributeNameLowercase = attributeName.toLowerCase()
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
// If the attribute-name case-insensitively matches the string
// "Expires", the user agent MUST process the cookie-av as follows.
if (attributeNameLowercase === 'expires') {
// 1. Let the expiry-time be the result of parsing the attribute-value
// as cookie-date (see Section 5.1.1).
const expiryTime = new Date(attributeValue)
// 2. If the attribute-value failed to parse as a cookie date, ignore
// the cookie-av.
cookieAttributeList.expires = expiryTime
} else if (attributeNameLowercase === 'max-age') {
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
// If the attribute-name case-insensitively matches the string "Max-
// Age", the user agent MUST process the cookie-av as follows.
// 1. If the first character of the attribute-value is not a DIGIT or a
// "-" character, ignore the cookie-av.
const charCode = attributeValue.charCodeAt(0)
if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
}
// 2. If the remainder of attribute-value contains a non-DIGIT
// character, ignore the cookie-av.
if (!/^\d+$/.test(attributeValue)) {
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
}
// 3. Let delta-seconds be the attribute-value converted to an integer.
const deltaSeconds = Number(attributeValue)
// 4. Let cookie-age-limit be the maximum age of the cookie (which
// SHOULD be 400 days or less, see Section 4.1.2.2).
// 5. Set delta-seconds to the smaller of its present value and cookie-
// age-limit.
// deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)
// 6. If delta-seconds is less than or equal to zero (0), let expiry-
// time be the earliest representable date and time. Otherwise, let
// the expiry-time be the current date and time plus delta-seconds
// seconds.
// const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds
// 7. Append an attribute to the cookie-attribute-list with an
// attribute-name of Max-Age and an attribute-value of expiry-time.
cookieAttributeList.maxAge = deltaSeconds
} else if (attributeNameLowercase === 'domain') {
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
// If the attribute-name case-insensitively matches the string "Domain",
// the user agent MUST process the cookie-av as follows.
// 1. Let cookie-domain be the attribute-value.
let cookieDomain = attributeValue
// 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
// cookie-domain without its leading %x2E (".").
if (cookieDomain[0] === '.') {
cookieDomain = cookieDomain.slice(1)
}
// 3. Convert the cookie-domain to lower case.
cookieDomain = cookieDomain.toLowerCase()
// 4. Append an attribute to the cookie-attribute-list with an
// attribute-name of Domain and an attribute-value of cookie-domain.
cookieAttributeList.domain = cookieDomain
} else if (attributeNameLowercase === 'path') {
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
// If the attribute-name case-insensitively matches the string "Path",
// the user agent MUST process the cookie-av as follows.
// 1. If the attribute-value is empty or if the first character of the
// attribute-value is not %x2F ("/"):
let cookiePath = ''
if (attributeValue.length === 0 || attributeValue[0] !== '/') {
// 1. Let cookie-path be the default-path.
cookiePath = '/'
} else {
// Otherwise:
// 1. Let cookie-path be the attribute-value.
cookiePath = attributeValue
}
// 2. Append an attribute to the cookie-attribute-list with an
// attribute-name of Path and an attribute-value of cookie-path.
cookieAttributeList.path = cookiePath
} else if (attributeNameLowercase === 'secure') {
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
// If the attribute-name case-insensitively matches the string "Secure",
// the user agent MUST append an attribute to the cookie-attribute-list
// with an attribute-name of Secure and an empty attribute-value.
cookieAttributeList.secure = true
} else if (attributeNameLowercase === 'httponly') {
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
// If the attribute-name case-insensitively matches the string
// "HttpOnly", the user agent MUST append an attribute to the cookie-
// attribute-list with an attribute-name of HttpOnly and an empty
// attribute-value.
cookieAttributeList.httpOnly = true
} else if (attributeNameLowercase === 'samesite') {
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
// If the attribute-name case-insensitively matches the string
// "SameSite", the user agent MUST process the cookie-av as follows:
// 1. Let enforcement be "Default".
let enforcement = 'Default'
const attributeValueLowercase = attributeValue.toLowerCase()
// 2. If cookie-av's attribute-value is a case-insensitive match for
// "None", set enforcement to "None".
if (attributeValueLowercase.includes('none')) {
enforcement = 'None'
}
// 3. If cookie-av's attribute-value is a case-insensitive match for
// "Strict", set enforcement to "Strict".
if (attributeValueLowercase.includes('strict')) {
enforcement = 'Strict'
}
// 4. If cookie-av's attribute-value is a case-insensitive match for
// "Lax", set enforcement to "Lax".
if (attributeValueLowercase.includes('lax')) {
enforcement = 'Lax'
}
// 5. Append an attribute to the cookie-attribute-list with an
// attribute-name of "SameSite" and an attribute-value of
// enforcement.
cookieAttributeList.sameSite = enforcement
} else {
cookieAttributeList.unparsed ??= []
cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
}
// 8. Return to Step 1 of this algorithm.
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
}
module.exports = {
parseSetCookie,
parseUnparsedAttributes
}
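A worked example of parseSetCookie on a realistic header; the relative require path is an assumption.

```js
'use strict'

// Assumption: the script lives next to node_modules, so this path resolves.
const { parseSetCookie } = require('./node_modules/undici/lib/web/cookies/parse')

console.log(parseSetCookie(
  'id=a3fWa; Max-Age=2592000; Path=/; Domain=.Example.com; Secure; HttpOnly; SameSite=Lax; X-Custom=1'
))
// Roughly:
// {
//   name: 'id',
//   value: 'a3fWa',
//   maxAge: 2592000,
//   path: '/',
//   domain: 'example.com',   // leading '.' stripped and lowercased
//   secure: true,
//   httpOnly: true,
//   sameSite: 'Lax',
//   unparsed: [ 'X-Custom=1' ] // unrecognized attributes are kept verbatim
// }
```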

307
node_modules/undici/lib/web/cookies/util.js generated vendored Normal file

@ -0,0 +1,307 @@
'use strict'
const assert = require('node:assert')
const { kHeadersList } = require('../../core/symbols')
/**
* @param {string} value
* @returns {boolean}
*/
function isCTLExcludingHtab (value) {
for (let i = 0; i < value.length; ++i) {
const code = value.charCodeAt(i)
if (
(code >= 0x00 && code <= 0x08) ||
(code >= 0x0A && code <= 0x1F) ||
code === 0x7F
) {
return true
}
}
return false
}
/**
CHAR = <any US-ASCII character (octets 0 - 127)>
token = 1*<any CHAR except CTLs or separators>
separators = "(" | ")" | "<" | ">" | "@"
| "," | ";" | ":" | "\" | <">
| "/" | "[" | "]" | "?" | "="
| "{" | "}" | SP | HT
* @param {string} name
*/
function validateCookieName (name) {
for (let i = 0; i < name.length; ++i) {
const code = name.charCodeAt(i)
if (
code < 0x21 || // exclude CTLs (0-31), SP and HT
code > 0x7E || // exclude non-ascii and DEL
code === 0x22 || // "
code === 0x28 || // (
code === 0x29 || // )
code === 0x3C || // <
code === 0x3E || // >
code === 0x40 || // @
code === 0x2C || // ,
code === 0x3B || // ;
code === 0x3A || // :
code === 0x5C || // \
code === 0x2F || // /
code === 0x5B || // [
code === 0x5D || // ]
code === 0x3F || // ?
code === 0x3D || // =
code === 0x7B || // {
code === 0x7D // }
) {
throw new Error('Invalid cookie name')
}
}
}
/**
cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
; US-ASCII characters excluding CTLs,
; whitespace DQUOTE, comma, semicolon,
; and backslash
* @param {string} value
*/
function validateCookieValue (value) {
let len = value.length
let i = 0
// if the value is wrapped in DQUOTE
if (value[0] === '"') {
if (len === 1 || value[len - 1] !== '"') {
throw new Error('Invalid cookie value')
}
--len
++i
}
while (i < len) {
const code = value.charCodeAt(i++)
if (
code < 0x21 || // exclude CTLs (0-31)
code > 0x7E || // non-ascii and DEL (127)
code === 0x22 || // "
code === 0x2C || // ,
code === 0x3B || // ;
code === 0x5C // \
) {
throw new Error('Invalid cookie value')
}
}
}
/**
* path-value = <any CHAR except CTLs or ";">
* @param {string} path
*/
function validateCookiePath (path) {
for (let i = 0; i < path.length; ++i) {
const code = path.charCodeAt(i)
if (
code < 0x20 || // exclude CTLs (0-31)
code === 0x7F || // DEL
code === 0x3B // ;
) {
throw new Error('Invalid cookie path')
}
}
}
/**
* To be honest, I have no idea why these values aren't allowed,
* but Deno tests these. - Khafra
* @param {string} domain
*/
function validateCookieDomain (domain) {
if (
domain.startsWith('-') ||
domain.endsWith('.') ||
domain.endsWith('-')
) {
throw new Error('Invalid cookie domain')
}
}
const IMFDays = [
'Sun', 'Mon', 'Tue', 'Wed',
'Thu', 'Fri', 'Sat'
]
const IMFMonths = [
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
]
const IMFPaddedNumbers = Array(61).fill(0).map((_, i) => i.toString().padStart(2, '0'))
/**
* @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
* @param {number|Date} date
IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT
; fixed length/zone/capitalization subset of the format
; see Section 3.3 of [RFC5322]
day-name = %x4D.6F.6E ; "Mon", case-sensitive
/ %x54.75.65 ; "Tue", case-sensitive
/ %x57.65.64 ; "Wed", case-sensitive
/ %x54.68.75 ; "Thu", case-sensitive
/ %x46.72.69 ; "Fri", case-sensitive
/ %x53.61.74 ; "Sat", case-sensitive
/ %x53.75.6E ; "Sun", case-sensitive
date1 = day SP month SP year
; e.g., 02 Jun 1982
day = 2DIGIT
month = %x4A.61.6E ; "Jan", case-sensitive
/ %x46.65.62 ; "Feb", case-sensitive
/ %x4D.61.72 ; "Mar", case-sensitive
/ %x41.70.72 ; "Apr", case-sensitive
/ %x4D.61.79 ; "May", case-sensitive
/ %x4A.75.6E ; "Jun", case-sensitive
/ %x4A.75.6C ; "Jul", case-sensitive
/ %x41.75.67 ; "Aug", case-sensitive
/ %x53.65.70 ; "Sep", case-sensitive
/ %x4F.63.74 ; "Oct", case-sensitive
/ %x4E.6F.76 ; "Nov", case-sensitive
/ %x44.65.63 ; "Dec", case-sensitive
year = 4DIGIT
GMT = %x47.4D.54 ; "GMT", case-sensitive
time-of-day = hour ":" minute ":" second
; 00:00:00 - 23:59:60 (leap second)
hour = 2DIGIT
minute = 2DIGIT
second = 2DIGIT
*/
function toIMFDate (date) {
if (typeof date === 'number') {
date = new Date(date)
}
return `${IMFDays[date.getUTCDay()]}, ${IMFPaddedNumbers[date.getUTCDate()]} ${IMFMonths[date.getUTCMonth()]} ${date.getUTCFullYear()} ${IMFPaddedNumbers[date.getUTCHours()]}:${IMFPaddedNumbers[date.getUTCMinutes()]}:${IMFPaddedNumbers[date.getUTCSeconds()]} GMT`
}
/**
max-age-av = "Max-Age=" non-zero-digit *DIGIT
; In practice, both expires-av and max-age-av
; are limited to dates representable by the
; user agent.
* @param {number} maxAge
*/
function validateCookieMaxAge (maxAge) {
if (maxAge < 0) {
throw new Error('Invalid cookie max-age')
}
}
/**
* @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
* @param {import('./index').Cookie} cookie
*/
function stringify (cookie) {
if (cookie.name.length === 0) {
return null
}
validateCookieName(cookie.name)
validateCookieValue(cookie.value)
const out = [`${cookie.name}=${cookie.value}`]
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
if (cookie.name.startsWith('__Secure-')) {
cookie.secure = true
}
if (cookie.name.startsWith('__Host-')) {
cookie.secure = true
cookie.domain = null
cookie.path = '/'
}
if (cookie.secure) {
out.push('Secure')
}
if (cookie.httpOnly) {
out.push('HttpOnly')
}
if (typeof cookie.maxAge === 'number') {
validateCookieMaxAge(cookie.maxAge)
out.push(`Max-Age=${cookie.maxAge}`)
}
if (cookie.domain) {
validateCookieDomain(cookie.domain)
out.push(`Domain=${cookie.domain}`)
}
if (cookie.path) {
validateCookiePath(cookie.path)
out.push(`Path=${cookie.path}`)
}
if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
out.push(`Expires=${toIMFDate(cookie.expires)}`)
}
if (cookie.sameSite) {
out.push(`SameSite=${cookie.sameSite}`)
}
for (const part of cookie.unparsed) {
if (!part.includes('=')) {
throw new Error('Invalid unparsed')
}
const [key, ...value] = part.split('=')
out.push(`${key.trim()}=${value.join('=')}`)
}
return out.join('; ')
}
let kHeadersListNode
function getHeadersList (headers) {
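// Prefer the shared kHeadersList symbol; otherwise fall back to scanning the
// instance's own symbols for the one whose description is 'headers list',
// which is how the fetch Headers implementation keys its internal list.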
if (headers[kHeadersList]) {
return headers[kHeadersList]
}
if (!kHeadersListNode) {
kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
(symbol) => symbol.description === 'headers list'
)
assert(kHeadersListNode, 'Headers cannot be parsed')
}
const headersList = headers[kHeadersListNode]
assert(headersList)
return headersList
}
module.exports = {
isCTLExcludingHtab,
validateCookieName,
validateCookiePath,
validateCookieValue,
toIMFDate,
stringify,
getHeadersList
}
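Finally, a sketch of stringify and toIMFDate from the module above; the relative require path is an assumption.

```js
'use strict'

// Assumption: the script lives next to node_modules, so this path resolves.
const { stringify, toIMFDate } = require('./node_modules/undici/lib/web/cookies/util')

// toIMFDate() renders the fixed-length IMF-fixdate format used by Expires.
console.log(toIMFDate(new Date(Date.UTC(2024, 0, 2, 3, 4, 5))))
// -> 'Tue, 02 Jan 2024 03:04:05 GMT'

// stringify() validates each field and joins the attributes with '; '.
// The __Host- prefix forces Secure and Path=/ per the cookie-prefixes draft.
console.log(stringify({
  name: '__Host-session',
  value: 'abc123',
  httpOnly: true,
  maxAge: 3600,
  sameSite: 'Strict',
  unparsed: []
}))
// -> '__Host-session=abc123; Secure; HttpOnly; Max-Age=3600; Path=/; SameSite=Strict'
```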

Some files were not shown because too many files have changed in this diff.