Diffstat (limited to 'lib')
-rw-r--r--  lib/agent.js | 148
-rw-r--r--  lib/api/abort-signal.js | 54
-rw-r--r--  lib/api/api-connect.js | 104
-rw-r--r--  lib/api/api-pipeline.js | 249
-rw-r--r--  lib/api/api-request.js | 180
-rw-r--r--  lib/api/api-stream.js | 220
-rw-r--r--  lib/api/api-upgrade.js | 105
-rw-r--r--  lib/api/index.js | 7
-rw-r--r--  lib/api/readable.js | 322
-rw-r--r--  lib/api/util.js | 46
-rw-r--r--  lib/balanced-pool.js | 190
-rw-r--r--  lib/cache/cache.js | 838
-rw-r--r--  lib/cache/cachestorage.js | 144
-rw-r--r--  lib/cache/symbols.js | 5
-rw-r--r--  lib/cache/util.js | 49
-rw-r--r--  lib/client.js | 2283
-rw-r--r--  lib/compat/dispatcher-weakref.js | 48
-rw-r--r--  lib/cookies/constants.js | 12
-rw-r--r--  lib/cookies/index.js | 184
-rw-r--r--  lib/cookies/parse.js | 317
-rw-r--r--  lib/cookies/util.js | 291
-rw-r--r--  lib/core/connect.js | 189
-rw-r--r--  lib/core/errors.js | 230
-rw-r--r--  lib/core/request.js | 499
-rw-r--r--  lib/core/symbols.js | 63
-rw-r--r--  lib/core/util.js | 511
-rw-r--r--  lib/dispatcher-base.js | 192
-rw-r--r--  lib/dispatcher.js | 19
-rw-r--r--  lib/fetch/LICENSE | 21
-rw-r--r--  lib/fetch/body.js | 605
-rw-r--r--  lib/fetch/constants.js | 151
-rw-r--r--  lib/fetch/dataURL.js | 627
-rw-r--r--  lib/fetch/file.js | 344
-rw-r--r--  lib/fetch/formdata.js | 265
-rw-r--r--  lib/fetch/global.js | 40
-rw-r--r--  lib/fetch/headers.js | 589
-rw-r--r--  lib/fetch/index.js | 2145
-rw-r--r--  lib/fetch/request.js | 946
-rw-r--r--  lib/fetch/response.js | 571
-rw-r--r--  lib/fetch/symbols.js | 10
-rw-r--r--  lib/fetch/util.js | 1071
-rw-r--r--  lib/fetch/webidl.js | 646
-rw-r--r--  lib/fileapi/encoding.js | 290
-rw-r--r--  lib/fileapi/filereader.js | 344
-rw-r--r--  lib/fileapi/progressevent.js | 78
-rw-r--r--  lib/fileapi/symbols.js | 10
-rw-r--r--  lib/fileapi/util.js | 392
-rw-r--r--  lib/global.js | 32
-rw-r--r--  lib/handler/DecoratorHandler.js | 35
-rw-r--r--  lib/handler/RedirectHandler.js | 216
-rw-r--r--  lib/handler/RetryHandler.js | 336
-rw-r--r--  lib/interceptor/redirectInterceptor.js | 21
-rw-r--r--  lib/llhttp/constants.d.ts | 199
-rw-r--r--  lib/llhttp/constants.js | 278
-rw-r--r--  lib/llhttp/utils.d.ts | 4
-rw-r--r--  lib/llhttp/utils.js | 15
-rw-r--r--  lib/llhttp/wasm_build_env.txt | 32
-rw-r--r--  lib/mock/mock-agent.js | 171
-rw-r--r--  lib/mock/mock-client.js | 59
-rw-r--r--  lib/mock/mock-errors.js | 17
-rw-r--r--  lib/mock/mock-interceptor.js | 206
-rw-r--r--  lib/mock/mock-pool.js | 59
-rw-r--r--  lib/mock/mock-symbols.js | 23
-rw-r--r--  lib/mock/mock-utils.js | 351
-rw-r--r--  lib/mock/pending-interceptors-formatter.js | 40
-rw-r--r--  lib/mock/pluralizer.js | 29
-rw-r--r--  lib/node/fixed-queue.js | 117
-rw-r--r--  lib/pool-base.js | 194
-rw-r--r--  lib/pool-stats.js | 34
-rw-r--r--  lib/pool.js | 94
-rw-r--r--  lib/proxy-agent.js | 189
-rw-r--r--  lib/timers.js | 97
-rw-r--r--  lib/websocket/connection.js | 291
-rw-r--r--  lib/websocket/constants.js | 51
-rw-r--r--  lib/websocket/events.js | 303
-rw-r--r--  lib/websocket/frame.js | 73
-rw-r--r--  lib/websocket/receiver.js | 344
-rw-r--r--  lib/websocket/symbols.js | 12
-rw-r--r--  lib/websocket/util.js | 200
-rw-r--r--  lib/websocket/websocket.js | 641
80 files changed, 21407 insertions(+), 0 deletions(-)
diff --git a/lib/agent.js b/lib/agent.js
new file mode 100644
index 0000000..0b18f2a
--- /dev/null
+++ b/lib/agent.js
@@ -0,0 +1,148 @@
+'use strict'
+
+const { InvalidArgumentError } = require('./core/errors')
+const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols')
+const DispatcherBase = require('./dispatcher-base')
+const Pool = require('./pool')
+const Client = require('./client')
+const util = require('./core/util')
+const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
+const { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')()
+
+const kOnConnect = Symbol('onConnect')
+const kOnDisconnect = Symbol('onDisconnect')
+const kOnConnectionError = Symbol('onConnectionError')
+const kMaxRedirections = Symbol('maxRedirections')
+const kOnDrain = Symbol('onDrain')
+const kFactory = Symbol('factory')
+const kFinalizer = Symbol('finalizer')
+const kOptions = Symbol('options')
+
+function defaultFactory (origin, opts) {
+ return opts && opts.connections === 1
+ ? new Client(origin, opts)
+ : new Pool(origin, opts)
+}
+
+class Agent extends DispatcherBase {
+ constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {
+ super()
+
+ if (typeof factory !== 'function') {
+ throw new InvalidArgumentError('factory must be a function.')
+ }
+
+ if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+ throw new InvalidArgumentError('connect must be a function or an object')
+ }
+
+ if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {
+ throw new InvalidArgumentError('maxRedirections must be a positive number')
+ }
+
+ if (connect && typeof connect !== 'function') {
+ connect = { ...connect }
+ }
+
+ this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)
+ ? options.interceptors.Agent
+ : [createRedirectInterceptor({ maxRedirections })]
+
+ this[kOptions] = { ...util.deepClone(options), connect }
+ this[kOptions].interceptors = options.interceptors
+ ? { ...options.interceptors }
+ : undefined
+ this[kMaxRedirections] = maxRedirections
+ this[kFactory] = factory
+ this[kClients] = new Map()
+    this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is nondeterministic */ key => {
+ const ref = this[kClients].get(key)
+ if (ref !== undefined && ref.deref() === undefined) {
+ this[kClients].delete(key)
+ }
+ })
+
+ const agent = this
+
+ this[kOnDrain] = (origin, targets) => {
+ agent.emit('drain', origin, [agent, ...targets])
+ }
+
+ this[kOnConnect] = (origin, targets) => {
+ agent.emit('connect', origin, [agent, ...targets])
+ }
+
+ this[kOnDisconnect] = (origin, targets, err) => {
+ agent.emit('disconnect', origin, [agent, ...targets], err)
+ }
+
+ this[kOnConnectionError] = (origin, targets, err) => {
+ agent.emit('connectionError', origin, [agent, ...targets], err)
+ }
+ }
+
+ get [kRunning] () {
+ let ret = 0
+ for (const ref of this[kClients].values()) {
+ const client = ref.deref()
+      /* istanbul ignore next: gc is nondeterministic */
+ if (client) {
+ ret += client[kRunning]
+ }
+ }
+ return ret
+ }
+
+ [kDispatch] (opts, handler) {
+ let key
+ if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
+ key = String(opts.origin)
+ } else {
+ throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
+ }
+
+ const ref = this[kClients].get(key)
+
+ let dispatcher = ref ? ref.deref() : null
+ if (!dispatcher) {
+ dispatcher = this[kFactory](opts.origin, this[kOptions])
+ .on('drain', this[kOnDrain])
+ .on('connect', this[kOnConnect])
+ .on('disconnect', this[kOnDisconnect])
+ .on('connectionError', this[kOnConnectionError])
+
+ this[kClients].set(key, new WeakRef(dispatcher))
+ this[kFinalizer].register(dispatcher, key)
+ }
+
+ return dispatcher.dispatch(opts, handler)
+ }
+
+ async [kClose] () {
+ const closePromises = []
+ for (const ref of this[kClients].values()) {
+ const client = ref.deref()
+      /* istanbul ignore else: gc is nondeterministic */
+ if (client) {
+ closePromises.push(client.close())
+ }
+ }
+
+ await Promise.all(closePromises)
+ }
+
+ async [kDestroy] (err) {
+ const destroyPromises = []
+ for (const ref of this[kClients].values()) {
+ const client = ref.deref()
+      /* istanbul ignore else: gc is nondeterministic */
+ if (client) {
+ destroyPromises.push(client.destroy(err))
+ }
+ }
+
+ await Promise.all(destroyPromises)
+ }
+}
+
+module.exports = Agent
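
A minimal usage sketch for the Agent above, assuming the in-tree path './lib/agent' and a placeholder origin. Each distinct origin lazily gets its own Pool (or a Client when connections === 1), held behind a WeakRef so idle dispatchers can be garbage collected:

'use strict'

// Hedged sketch, not a definitive example: the module path and origin are assumptions.
const Agent = require('./lib/agent')

const agent = new Agent({ connections: 10, maxRedirections: 3 })

// dispatch() routes by opts.origin; the handler below implements the minimal
// dispatcher handler interface used throughout this patch.
agent.dispatch(
  { origin: 'https://example.com', path: '/', method: 'GET' },
  {
    onConnect () {},
    onHeaders (statusCode) {
      console.log('status', statusCode)
      return true // keep reading the body
    },
    onData () { return true },
    onComplete () { agent.close().catch(console.error) },
    onError (err) { console.error(err) }
  }
)
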
diff --git a/lib/api/abort-signal.js b/lib/api/abort-signal.js
new file mode 100644
index 0000000..2985c1e
--- /dev/null
+++ b/lib/api/abort-signal.js
@@ -0,0 +1,54 @@
+const { addAbortListener } = require('../core/util')
+const { RequestAbortedError } = require('../core/errors')
+
+const kListener = Symbol('kListener')
+const kSignal = Symbol('kSignal')
+
+function abort (self) {
+ if (self.abort) {
+ self.abort()
+ } else {
+ self.onError(new RequestAbortedError())
+ }
+}
+
+function addSignal (self, signal) {
+ self[kSignal] = null
+ self[kListener] = null
+
+ if (!signal) {
+ return
+ }
+
+ if (signal.aborted) {
+ abort(self)
+ return
+ }
+
+ self[kSignal] = signal
+ self[kListener] = () => {
+ abort(self)
+ }
+
+ addAbortListener(self[kSignal], self[kListener])
+}
+
+function removeSignal (self) {
+ if (!self[kSignal]) {
+ return
+ }
+
+ if ('removeEventListener' in self[kSignal]) {
+ self[kSignal].removeEventListener('abort', self[kListener])
+ } else {
+ self[kSignal].removeListener('abort', self[kListener])
+ }
+
+ self[kSignal] = null
+ self[kListener] = null
+}
+
+module.exports = {
+ addSignal,
+ removeSignal
+}
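
addSignal/removeSignal are the glue used by the api-* handlers below to tie an AbortSignal (or any EventEmitter that emits 'abort') to a request's lifecycle. A rough sketch of that contract, using a hypothetical handler object rather than a real dispatch:

'use strict'

// Hedged sketch; `fakeHandler` is hypothetical and only mimics the shape the
// real api-* handlers have (an `abort` slot plus an onError fallback).
const { addSignal, removeSignal } = require('./lib/api/abort-signal')

const controller = new AbortController()

const fakeHandler = {
  abort: null, // the real handlers set this in onConnect
  onError (err) { console.error('aborted before connect:', err.message) }
}

addSignal(fakeHandler, controller.signal)

// Aborting calls fakeHandler.abort() when set, otherwise onError(new RequestAbortedError()).
controller.abort()

// Handlers call removeSignal once the request settles so the listener is dropped.
removeSignal(fakeHandler)
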
diff --git a/lib/api/api-connect.js b/lib/api/api-connect.js
new file mode 100644
index 0000000..fd2b6ad
--- /dev/null
+++ b/lib/api/api-connect.js
@@ -0,0 +1,104 @@
+'use strict'
+
+const { AsyncResource } = require('async_hooks')
+const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
+const util = require('../core/util')
+const { addSignal, removeSignal } = require('./abort-signal')
+
+class ConnectHandler extends AsyncResource {
+ constructor (opts, callback) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ const { signal, opaque, responseHeaders } = opts
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ super('UNDICI_CONNECT')
+
+ this.opaque = opaque || null
+ this.responseHeaders = responseHeaders || null
+ this.callback = callback
+ this.abort = null
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ if (!this.callback) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = context
+ }
+
+ onHeaders () {
+ throw new SocketError('bad connect', null)
+ }
+
+ onUpgrade (statusCode, rawHeaders, socket) {
+ const { callback, opaque, context } = this
+
+ removeSignal(this)
+
+ this.callback = null
+
+ let headers = rawHeaders
+    // rawHeaders is null when the request went over an HTTP2Session, so only parse when present.
+ if (headers != null) {
+ headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ }
+
+ this.runInAsyncScope(callback, null, null, {
+ statusCode,
+ headers,
+ socket,
+ opaque,
+ context
+ })
+ }
+
+ onError (err) {
+ const { callback, opaque } = this
+
+ removeSignal(this)
+
+ if (callback) {
+ this.callback = null
+ queueMicrotask(() => {
+ this.runInAsyncScope(callback, null, err, { opaque })
+ })
+ }
+ }
+}
+
+function connect (opts, callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ connect.call(this, opts, (err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ try {
+ const connectHandler = new ConnectHandler(opts, callback)
+ this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)
+ } catch (err) {
+ if (typeof callback !== 'function') {
+ throw err
+ }
+ const opaque = opts && opts.opaque
+ queueMicrotask(() => callback(err, { opaque }))
+ }
+}
+
+module.exports = connect
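
connect() issues an HTTP CONNECT and hands back the raw socket, returning a promise when no callback is given. A sketch of calling it directly with a Client as `this` (in the published package these helpers are mixed into the Dispatcher prototype); proxy and target origins are placeholders:

'use strict'

// Hedged sketch: origins are placeholders, and the helper is .call()ed with a
// Client so `this.dispatch` resolves, mirroring how the package wires it up.
const Client = require('./lib/client')
const connect = require('./lib/api/api-connect')

async function main () {
  const client = new Client('http://proxy.example:3128')

  // For CONNECT the path is the authority-form target (host:port).
  const { statusCode, socket } = await connect.call(client, {
    path: 'upstream.example:443'
  })

  console.log('CONNECT status', statusCode)
  socket.destroy()
  await client.close()
}

main().catch(console.error)
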
diff --git a/lib/api/api-pipeline.js b/lib/api/api-pipeline.js
new file mode 100644
index 0000000..af4a180
--- /dev/null
+++ b/lib/api/api-pipeline.js
@@ -0,0 +1,249 @@
+'use strict'
+
+const {
+ Readable,
+ Duplex,
+ PassThrough
+} = require('stream')
+const {
+ InvalidArgumentError,
+ InvalidReturnValueError,
+ RequestAbortedError
+} = require('../core/errors')
+const util = require('../core/util')
+const { AsyncResource } = require('async_hooks')
+const { addSignal, removeSignal } = require('./abort-signal')
+const assert = require('assert')
+
+const kResume = Symbol('resume')
+
+class PipelineRequest extends Readable {
+ constructor () {
+ super({ autoDestroy: true })
+
+ this[kResume] = null
+ }
+
+ _read () {
+ const { [kResume]: resume } = this
+
+ if (resume) {
+ this[kResume] = null
+ resume()
+ }
+ }
+
+ _destroy (err, callback) {
+ this._read()
+
+ callback(err)
+ }
+}
+
+class PipelineResponse extends Readable {
+ constructor (resume) {
+ super({ autoDestroy: true })
+ this[kResume] = resume
+ }
+
+ _read () {
+ this[kResume]()
+ }
+
+ _destroy (err, callback) {
+ if (!err && !this._readableState.endEmitted) {
+ err = new RequestAbortedError()
+ }
+
+ callback(err)
+ }
+}
+
+class PipelineHandler extends AsyncResource {
+ constructor (opts, handler) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ if (typeof handler !== 'function') {
+ throw new InvalidArgumentError('invalid handler')
+ }
+
+ const { signal, method, opaque, onInfo, responseHeaders } = opts
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ if (method === 'CONNECT') {
+ throw new InvalidArgumentError('invalid method')
+ }
+
+ if (onInfo && typeof onInfo !== 'function') {
+ throw new InvalidArgumentError('invalid onInfo callback')
+ }
+
+ super('UNDICI_PIPELINE')
+
+ this.opaque = opaque || null
+ this.responseHeaders = responseHeaders || null
+ this.handler = handler
+ this.abort = null
+ this.context = null
+ this.onInfo = onInfo || null
+
+ this.req = new PipelineRequest().on('error', util.nop)
+
+ this.ret = new Duplex({
+ readableObjectMode: opts.objectMode,
+ autoDestroy: true,
+ read: () => {
+ const { body } = this
+
+ if (body && body.resume) {
+ body.resume()
+ }
+ },
+ write: (chunk, encoding, callback) => {
+ const { req } = this
+
+ if (req.push(chunk, encoding) || req._readableState.destroyed) {
+ callback()
+ } else {
+ req[kResume] = callback
+ }
+ },
+ destroy: (err, callback) => {
+ const { body, req, res, ret, abort } = this
+
+ if (!err && !ret._readableState.endEmitted) {
+ err = new RequestAbortedError()
+ }
+
+ if (abort && err) {
+ abort()
+ }
+
+ util.destroy(body, err)
+ util.destroy(req, err)
+ util.destroy(res, err)
+
+ removeSignal(this)
+
+ callback(err)
+ }
+ }).on('prefinish', () => {
+ const { req } = this
+
+      // Node < 15 does not call _final in the same tick.
+ req.push(null)
+ })
+
+ this.res = null
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ const { ret, res } = this
+
+ assert(!res, 'pipeline cannot be retried')
+
+ if (ret.destroyed) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = context
+ }
+
+ onHeaders (statusCode, rawHeaders, resume) {
+ const { opaque, handler, context } = this
+
+ if (statusCode < 200) {
+ if (this.onInfo) {
+ const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ this.onInfo({ statusCode, headers })
+ }
+ return
+ }
+
+ this.res = new PipelineResponse(resume)
+
+ let body
+ try {
+ this.handler = null
+ const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ body = this.runInAsyncScope(handler, null, {
+ statusCode,
+ headers,
+ opaque,
+ body: this.res,
+ context
+ })
+ } catch (err) {
+ this.res.on('error', util.nop)
+ throw err
+ }
+
+ if (!body || typeof body.on !== 'function') {
+ throw new InvalidReturnValueError('expected Readable')
+ }
+
+ body
+ .on('data', (chunk) => {
+ const { ret, body } = this
+
+ if (!ret.push(chunk) && body.pause) {
+ body.pause()
+ }
+ })
+ .on('error', (err) => {
+ const { ret } = this
+
+ util.destroy(ret, err)
+ })
+ .on('end', () => {
+ const { ret } = this
+
+ ret.push(null)
+ })
+ .on('close', () => {
+ const { ret } = this
+
+ if (!ret._readableState.ended) {
+ util.destroy(ret, new RequestAbortedError())
+ }
+ })
+
+ this.body = body
+ }
+
+ onData (chunk) {
+ const { res } = this
+ return res.push(chunk)
+ }
+
+ onComplete (trailers) {
+ const { res } = this
+ res.push(null)
+ }
+
+ onError (err) {
+ const { ret } = this
+ this.handler = null
+ util.destroy(ret, err)
+ }
+}
+
+function pipeline (opts, handler) {
+ try {
+ const pipelineHandler = new PipelineHandler(opts, handler)
+ this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)
+ return pipelineHandler.ret
+ } catch (err) {
+ return new PassThrough().destroy(err)
+ }
+}
+
+module.exports = pipeline
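
pipeline() exposes a request as a Duplex: data written to it becomes the request body, and whatever Readable the handler returns becomes the readable side. A sketch wiring it with stream.pipeline; origin and path are placeholders:

'use strict'

// Hedged sketch: origin/path are placeholders and the helper is .call()ed with
// a Client, as in the other sketches.
const { pipeline: streamPipeline, Readable } = require('stream')
const Client = require('./lib/client')
const undiciPipeline = require('./lib/api/api-pipeline')

const client = new Client('http://upstream.example')

streamPipeline(
  Readable.from(['{"hello":"world"}']),
  undiciPipeline.call(client, { path: '/echo', method: 'POST' }, ({ statusCode, body }) => {
    console.log('status', statusCode)
    return body // must be a Readable; it becomes the output side of the Duplex
  }),
  process.stdout,
  (err) => {
    if (err) console.error(err)
    client.close().catch(console.error)
  }
)
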
diff --git a/lib/api/api-request.js b/lib/api/api-request.js
new file mode 100644
index 0000000..d4281ce
--- /dev/null
+++ b/lib/api/api-request.js
@@ -0,0 +1,180 @@
+'use strict'
+
+const Readable = require('./readable')
+const {
+ InvalidArgumentError,
+ RequestAbortedError
+} = require('../core/errors')
+const util = require('../core/util')
+const { getResolveErrorBodyCallback } = require('./util')
+const { AsyncResource } = require('async_hooks')
+const { addSignal, removeSignal } = require('./abort-signal')
+
+class RequestHandler extends AsyncResource {
+ constructor (opts, callback) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts
+
+ try {
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
+ throw new InvalidArgumentError('invalid highWaterMark')
+ }
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ if (method === 'CONNECT') {
+ throw new InvalidArgumentError('invalid method')
+ }
+
+ if (onInfo && typeof onInfo !== 'function') {
+ throw new InvalidArgumentError('invalid onInfo callback')
+ }
+
+ super('UNDICI_REQUEST')
+ } catch (err) {
+ if (util.isStream(body)) {
+ util.destroy(body.on('error', util.nop), err)
+ }
+ throw err
+ }
+
+ this.responseHeaders = responseHeaders || null
+ this.opaque = opaque || null
+ this.callback = callback
+ this.res = null
+ this.abort = null
+ this.body = body
+ this.trailers = {}
+ this.context = null
+ this.onInfo = onInfo || null
+ this.throwOnError = throwOnError
+ this.highWaterMark = highWaterMark
+
+ if (util.isStream(body)) {
+ body.on('error', (err) => {
+ this.onError(err)
+ })
+ }
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ if (!this.callback) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = context
+ }
+
+ onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+ const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this
+
+ const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+
+ if (statusCode < 200) {
+ if (this.onInfo) {
+ this.onInfo({ statusCode, headers })
+ }
+ return
+ }
+
+ const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
+ const contentType = parsedHeaders['content-type']
+ const body = new Readable({ resume, abort, contentType, highWaterMark })
+
+ this.callback = null
+ this.res = body
+ if (callback !== null) {
+ if (this.throwOnError && statusCode >= 400) {
+ this.runInAsyncScope(getResolveErrorBodyCallback, null,
+ { callback, body, contentType, statusCode, statusMessage, headers }
+ )
+ } else {
+ this.runInAsyncScope(callback, null, null, {
+ statusCode,
+ headers,
+ trailers: this.trailers,
+ opaque,
+ body,
+ context
+ })
+ }
+ }
+ }
+
+ onData (chunk) {
+ const { res } = this
+ return res.push(chunk)
+ }
+
+ onComplete (trailers) {
+ const { res } = this
+
+ removeSignal(this)
+
+ util.parseHeaders(trailers, this.trailers)
+
+ res.push(null)
+ }
+
+ onError (err) {
+ const { res, callback, body, opaque } = this
+
+ removeSignal(this)
+
+ if (callback) {
+ // TODO: Does this need queueMicrotask?
+ this.callback = null
+ queueMicrotask(() => {
+ this.runInAsyncScope(callback, null, err, { opaque })
+ })
+ }
+
+ if (res) {
+ this.res = null
+ // Ensure all queued handlers are invoked before destroying res.
+ queueMicrotask(() => {
+ util.destroy(res, err)
+ })
+ }
+
+ if (body) {
+ this.body = null
+ util.destroy(body, err)
+ }
+ }
+}
+
+function request (opts, callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ request.call(this, opts, (err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ try {
+ this.dispatch(opts, new RequestHandler(opts, callback))
+ } catch (err) {
+ if (typeof callback !== 'function') {
+ throw err
+ }
+ const opaque = opts && opts.opaque
+ queueMicrotask(() => callback(err, { opaque }))
+ }
+}
+
+module.exports = request
+module.exports.RequestHandler = RequestHandler
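
request() resolves as soon as the headers arrive, handing back the status, parsed headers, a trailers object that is filled in on completion, and a BodyReadable (see lib/api/readable.js below). A sketch with a placeholder origin:

'use strict'

// Hedged sketch: origin and path are placeholders; request is .call()ed with a
// Client standing in for any dispatcher.
const Client = require('./lib/client')
const request = require('./lib/api/api-request')

async function main () {
  const client = new Client('http://upstream.example')

  const { statusCode, headers, body } = await request.call(client, {
    path: '/resource',
    method: 'GET',
    throwOnError: true // rejects with ResponseStatusCodeError on status >= 400
  })

  console.log(statusCode, headers['content-type'])
  console.log(await body.text()) // BodyReadable adds text()/json()/blob()/arrayBuffer()

  await client.close()
}

main().catch(console.error)
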
diff --git a/lib/api/api-stream.js b/lib/api/api-stream.js
new file mode 100644
index 0000000..c571a6f
--- /dev/null
+++ b/lib/api/api-stream.js
@@ -0,0 +1,220 @@
+'use strict'
+
+const { finished, PassThrough } = require('stream')
+const {
+ InvalidArgumentError,
+ InvalidReturnValueError,
+ RequestAbortedError
+} = require('../core/errors')
+const util = require('../core/util')
+const { getResolveErrorBodyCallback } = require('./util')
+const { AsyncResource } = require('async_hooks')
+const { addSignal, removeSignal } = require('./abort-signal')
+
+class StreamHandler extends AsyncResource {
+ constructor (opts, factory, callback) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
+
+ try {
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ if (typeof factory !== 'function') {
+ throw new InvalidArgumentError('invalid factory')
+ }
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ if (method === 'CONNECT') {
+ throw new InvalidArgumentError('invalid method')
+ }
+
+ if (onInfo && typeof onInfo !== 'function') {
+ throw new InvalidArgumentError('invalid onInfo callback')
+ }
+
+ super('UNDICI_STREAM')
+ } catch (err) {
+ if (util.isStream(body)) {
+ util.destroy(body.on('error', util.nop), err)
+ }
+ throw err
+ }
+
+ this.responseHeaders = responseHeaders || null
+ this.opaque = opaque || null
+ this.factory = factory
+ this.callback = callback
+ this.res = null
+ this.abort = null
+ this.context = null
+ this.trailers = null
+ this.body = body
+ this.onInfo = onInfo || null
+ this.throwOnError = throwOnError || false
+
+ if (util.isStream(body)) {
+ body.on('error', (err) => {
+ this.onError(err)
+ })
+ }
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ if (!this.callback) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = context
+ }
+
+ onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+ const { factory, opaque, context, callback, responseHeaders } = this
+
+ const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+
+ if (statusCode < 200) {
+ if (this.onInfo) {
+ this.onInfo({ statusCode, headers })
+ }
+ return
+ }
+
+ this.factory = null
+
+ let res
+
+ if (this.throwOnError && statusCode >= 400) {
+ const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
+ const contentType = parsedHeaders['content-type']
+ res = new PassThrough()
+
+ this.callback = null
+ this.runInAsyncScope(getResolveErrorBodyCallback, null,
+ { callback, body: res, contentType, statusCode, statusMessage, headers }
+ )
+ } else {
+ if (factory === null) {
+ return
+ }
+
+ res = this.runInAsyncScope(factory, null, {
+ statusCode,
+ headers,
+ opaque,
+ context
+ })
+
+ if (
+ !res ||
+ typeof res.write !== 'function' ||
+ typeof res.end !== 'function' ||
+ typeof res.on !== 'function'
+ ) {
+ throw new InvalidReturnValueError('expected Writable')
+ }
+
+ // TODO: Avoid finished. It registers an unnecessary amount of listeners.
+ finished(res, { readable: false }, (err) => {
+ const { callback, res, opaque, trailers, abort } = this
+
+ this.res = null
+ if (err || !res.readable) {
+ util.destroy(res, err)
+ }
+
+ this.callback = null
+ this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
+
+ if (err) {
+ abort()
+ }
+ })
+ }
+
+ res.on('drain', resume)
+
+ this.res = res
+
+ const needDrain = res.writableNeedDrain !== undefined
+ ? res.writableNeedDrain
+ : res._writableState && res._writableState.needDrain
+
+ return needDrain !== true
+ }
+
+ onData (chunk) {
+ const { res } = this
+
+ return res ? res.write(chunk) : true
+ }
+
+ onComplete (trailers) {
+ const { res } = this
+
+ removeSignal(this)
+
+ if (!res) {
+ return
+ }
+
+ this.trailers = util.parseHeaders(trailers)
+
+ res.end()
+ }
+
+ onError (err) {
+ const { res, callback, opaque, body } = this
+
+ removeSignal(this)
+
+ this.factory = null
+
+ if (res) {
+ this.res = null
+ util.destroy(res, err)
+ } else if (callback) {
+ this.callback = null
+ queueMicrotask(() => {
+ this.runInAsyncScope(callback, null, err, { opaque })
+ })
+ }
+
+ if (body) {
+ this.body = null
+ util.destroy(body, err)
+ }
+ }
+}
+
+function stream (opts, factory, callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ stream.call(this, opts, factory, (err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ try {
+ this.dispatch(opts, new StreamHandler(opts, factory, callback))
+ } catch (err) {
+ if (typeof callback !== 'function') {
+ throw err
+ }
+ const opaque = opts && opts.opaque
+ queueMicrotask(() => callback(err, { opaque }))
+ }
+}
+
+module.exports = stream
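
stream() skips the intermediate BodyReadable: the factory maps the response metadata to a Writable and the body is piped straight into it. A sketch that writes the body to a file; origin and file name are placeholders:

'use strict'

// Hedged sketch: origin and file name are placeholders.
const { createWriteStream } = require('fs')
const Client = require('./lib/client')
const stream = require('./lib/api/api-stream')

async function main () {
  const client = new Client('http://upstream.example')

  const { trailers } = await stream.call(
    client,
    { path: '/large-file', method: 'GET', opaque: 'download.bin' },
    ({ statusCode, opaque }) => {
      console.log('status', statusCode)
      return createWriteStream(opaque) // any Writable is accepted
    }
  )

  console.log('trailers', trailers)
  await client.close()
}

main().catch(console.error)
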
diff --git a/lib/api/api-upgrade.js b/lib/api/api-upgrade.js
new file mode 100644
index 0000000..ef783e8
--- /dev/null
+++ b/lib/api/api-upgrade.js
@@ -0,0 +1,105 @@
+'use strict'
+
+const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
+const { AsyncResource } = require('async_hooks')
+const util = require('../core/util')
+const { addSignal, removeSignal } = require('./abort-signal')
+const assert = require('assert')
+
+class UpgradeHandler extends AsyncResource {
+ constructor (opts, callback) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ const { signal, opaque, responseHeaders } = opts
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ super('UNDICI_UPGRADE')
+
+ this.responseHeaders = responseHeaders || null
+ this.opaque = opaque || null
+ this.callback = callback
+ this.abort = null
+ this.context = null
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ if (!this.callback) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = null
+ }
+
+ onHeaders () {
+ throw new SocketError('bad upgrade', null)
+ }
+
+ onUpgrade (statusCode, rawHeaders, socket) {
+ const { callback, opaque, context } = this
+
+ assert.strictEqual(statusCode, 101)
+
+ removeSignal(this)
+
+ this.callback = null
+ const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ this.runInAsyncScope(callback, null, null, {
+ headers,
+ socket,
+ opaque,
+ context
+ })
+ }
+
+ onError (err) {
+ const { callback, opaque } = this
+
+ removeSignal(this)
+
+ if (callback) {
+ this.callback = null
+ queueMicrotask(() => {
+ this.runInAsyncScope(callback, null, err, { opaque })
+ })
+ }
+ }
+}
+
+function upgrade (opts, callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ upgrade.call(this, opts, (err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ try {
+ const upgradeHandler = new UpgradeHandler(opts, callback)
+ this.dispatch({
+ ...opts,
+ method: opts.method || 'GET',
+ upgrade: opts.protocol || 'Websocket'
+ }, upgradeHandler)
+ } catch (err) {
+ if (typeof callback !== 'function') {
+ throw err
+ }
+ const opaque = opts && opts.opaque
+ queueMicrotask(() => callback(err, { opaque }))
+ }
+}
+
+module.exports = upgrade
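
upgrade() sends an Upgrade request (defaulting the token to 'Websocket', as the code above shows) and resolves with the detached socket once the server answers 101. A sketch with placeholder origin and protocol:

'use strict'

// Hedged sketch: origin and protocol token are placeholders; the server must
// answer 101 or the handler throws SocketError('bad upgrade').
const Client = require('./lib/client')
const upgrade = require('./lib/api/api-upgrade')

async function main () {
  const client = new Client('http://upstream.example')

  const { headers, socket } = await upgrade.call(client, {
    path: '/',
    protocol: 'chat' // sent as the Upgrade token instead of the default 'Websocket'
  })

  console.log('upgraded to', headers.upgrade)
  socket.end()
  await client.destroy()
}

main().catch(console.error)
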
diff --git a/lib/api/index.js b/lib/api/index.js
new file mode 100644
index 0000000..8983a5e
--- /dev/null
+++ b/lib/api/index.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports.request = require('./api-request')
+module.exports.stream = require('./api-stream')
+module.exports.pipeline = require('./api-pipeline')
+module.exports.upgrade = require('./api-upgrade')
+module.exports.connect = require('./api-connect')
diff --git a/lib/api/readable.js b/lib/api/readable.js
new file mode 100644
index 0000000..5269dfa
--- /dev/null
+++ b/lib/api/readable.js
@@ -0,0 +1,322 @@
+// Ported from https://github.com/nodejs/undici/pull/907
+
+'use strict'
+
+const assert = require('assert')
+const { Readable } = require('stream')
+const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors')
+const util = require('../core/util')
+const { ReadableStreamFrom, toUSVString } = require('../core/util')
+
+let Blob
+
+const kConsume = Symbol('kConsume')
+const kReading = Symbol('kReading')
+const kBody = Symbol('kBody')
+const kAbort = Symbol('abort')
+const kContentType = Symbol('kContentType')
+
+const noop = () => {}
+
+module.exports = class BodyReadable extends Readable {
+ constructor ({
+ resume,
+ abort,
+ contentType = '',
+ highWaterMark = 64 * 1024 // Same as nodejs fs streams.
+ }) {
+ super({
+ autoDestroy: true,
+ read: resume,
+ highWaterMark
+ })
+
+ this._readableState.dataEmitted = false
+
+ this[kAbort] = abort
+ this[kConsume] = null
+ this[kBody] = null
+ this[kContentType] = contentType
+
+ // Is stream being consumed through Readable API?
+ // This is an optimization so that we avoid checking
+ // for 'data' and 'readable' listeners in the hot path
+ // inside push().
+ this[kReading] = false
+ }
+
+ destroy (err) {
+ if (this.destroyed) {
+ // Node < 16
+ return this
+ }
+
+ if (!err && !this._readableState.endEmitted) {
+ err = new RequestAbortedError()
+ }
+
+ if (err) {
+ this[kAbort]()
+ }
+
+ return super.destroy(err)
+ }
+
+ emit (ev, ...args) {
+ if (ev === 'data') {
+ // Node < 16.7
+ this._readableState.dataEmitted = true
+ } else if (ev === 'error') {
+ // Node < 16
+ this._readableState.errorEmitted = true
+ }
+ return super.emit(ev, ...args)
+ }
+
+ on (ev, ...args) {
+ if (ev === 'data' || ev === 'readable') {
+ this[kReading] = true
+ }
+ return super.on(ev, ...args)
+ }
+
+ addListener (ev, ...args) {
+ return this.on(ev, ...args)
+ }
+
+ off (ev, ...args) {
+ const ret = super.off(ev, ...args)
+ if (ev === 'data' || ev === 'readable') {
+ this[kReading] = (
+ this.listenerCount('data') > 0 ||
+ this.listenerCount('readable') > 0
+ )
+ }
+ return ret
+ }
+
+ removeListener (ev, ...args) {
+ return this.off(ev, ...args)
+ }
+
+ push (chunk) {
+ if (this[kConsume] && chunk !== null && this.readableLength === 0) {
+ consumePush(this[kConsume], chunk)
+ return this[kReading] ? super.push(chunk) : true
+ }
+ return super.push(chunk)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-text
+ async text () {
+ return consume(this, 'text')
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-json
+ async json () {
+ return consume(this, 'json')
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-blob
+ async blob () {
+ return consume(this, 'blob')
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-arraybuffer
+ async arrayBuffer () {
+ return consume(this, 'arrayBuffer')
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-formdata
+ async formData () {
+ // TODO: Implement.
+ throw new NotSupportedError()
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-bodyused
+ get bodyUsed () {
+ return util.isDisturbed(this)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-body
+ get body () {
+ if (!this[kBody]) {
+ this[kBody] = ReadableStreamFrom(this)
+ if (this[kConsume]) {
+ // TODO: Is this the best way to force a lock?
+ this[kBody].getReader() // Ensure stream is locked.
+ assert(this[kBody].locked)
+ }
+ }
+ return this[kBody]
+ }
+
+ dump (opts) {
+ let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
+ const signal = opts && opts.signal
+
+ if (signal) {
+ try {
+ if (typeof signal !== 'object' || !('aborted' in signal)) {
+ throw new InvalidArgumentError('signal must be an AbortSignal')
+ }
+ util.throwIfAborted(signal)
+ } catch (err) {
+ return Promise.reject(err)
+ }
+ }
+
+ if (this.closed) {
+ return Promise.resolve(null)
+ }
+
+ return new Promise((resolve, reject) => {
+ const signalListenerCleanup = signal
+ ? util.addAbortListener(signal, () => {
+ this.destroy()
+ })
+ : noop
+
+ this
+ .on('close', function () {
+ signalListenerCleanup()
+ if (signal && signal.aborted) {
+ reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))
+ } else {
+ resolve(null)
+ }
+ })
+ .on('error', noop)
+ .on('data', function (chunk) {
+ limit -= chunk.length
+ if (limit <= 0) {
+ this.destroy()
+ }
+ })
+ .resume()
+ })
+ }
+}
+
+// https://streams.spec.whatwg.org/#readablestream-locked
+function isLocked (self) {
+ // Consume is an implicit lock.
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume]
+}
+
+// https://fetch.spec.whatwg.org/#body-unusable
+function isUnusable (self) {
+ return util.isDisturbed(self) || isLocked(self)
+}
+
+async function consume (stream, type) {
+ if (isUnusable(stream)) {
+ throw new TypeError('unusable')
+ }
+
+ assert(!stream[kConsume])
+
+ return new Promise((resolve, reject) => {
+ stream[kConsume] = {
+ type,
+ stream,
+ resolve,
+ reject,
+ length: 0,
+ body: []
+ }
+
+ stream
+ .on('error', function (err) {
+ consumeFinish(this[kConsume], err)
+ })
+ .on('close', function () {
+ if (this[kConsume].body !== null) {
+ consumeFinish(this[kConsume], new RequestAbortedError())
+ }
+ })
+
+ process.nextTick(consumeStart, stream[kConsume])
+ })
+}
+
+function consumeStart (consume) {
+ if (consume.body === null) {
+ return
+ }
+
+ const { _readableState: state } = consume.stream
+
+ for (const chunk of state.buffer) {
+ consumePush(consume, chunk)
+ }
+
+ if (state.endEmitted) {
+    consumeEnd(consume) // 'this' is undefined in this strict-mode helper; use the local reference
+ } else {
+ consume.stream.on('end', function () {
+ consumeEnd(this[kConsume])
+ })
+ }
+
+ consume.stream.resume()
+
+ while (consume.stream.read() != null) {
+ // Loop
+ }
+}
+
+function consumeEnd (consume) {
+ const { type, body, resolve, stream, length } = consume
+
+ try {
+ if (type === 'text') {
+ resolve(toUSVString(Buffer.concat(body)))
+ } else if (type === 'json') {
+ resolve(JSON.parse(Buffer.concat(body)))
+ } else if (type === 'arrayBuffer') {
+ const dst = new Uint8Array(length)
+
+ let pos = 0
+ for (const buf of body) {
+ dst.set(buf, pos)
+ pos += buf.byteLength
+ }
+
+ resolve(dst.buffer)
+ } else if (type === 'blob') {
+ if (!Blob) {
+ Blob = require('buffer').Blob
+ }
+ resolve(new Blob(body, { type: stream[kContentType] }))
+ }
+
+ consumeFinish(consume)
+ } catch (err) {
+ stream.destroy(err)
+ }
+}
+
+function consumePush (consume, chunk) {
+ consume.length += chunk.length
+ consume.body.push(chunk)
+}
+
+function consumeFinish (consume, err) {
+ if (consume.body === null) {
+ return
+ }
+
+ if (err) {
+ consume.reject(err)
+ } else {
+ consume.resolve()
+ }
+
+ consume.type = null
+ consume.stream = null
+ consume.resolve = null
+ consume.reject = null
+ consume.length = 0
+ consume.body = null
+}
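
BodyReadable is what request() hands back as `body`: a normal Readable plus fetch-style consumers and dump(), which drains an unwanted body (up to a limit) so the keep-alive connection can be reused. A sketch with placeholder paths:

'use strict'

// Hedged sketch: origin and paths are placeholders.
const Client = require('./lib/client')
const request = require('./lib/api/api-request')

async function main () {
  const client = new Client('http://upstream.example')

  const ok = await request.call(client, { path: '/config.json', method: 'GET' })
  const config = await ok.body.json() // consume() buffers chunks, then JSON.parses them
  console.log(config)

  const ignored = await request.call(client, { path: '/not-needed', method: 'GET' })
  await ignored.body.dump({ limit: 128 * 1024 }) // discard the body, keep the socket usable

  await client.close()
}

main().catch(console.error)
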
diff --git a/lib/api/util.js b/lib/api/util.js
new file mode 100644
index 0000000..bffd702
--- /dev/null
+++ b/lib/api/util.js
@@ -0,0 +1,46 @@
+const assert = require('assert')
+const {
+ ResponseStatusCodeError
+} = require('../core/errors')
+const { toUSVString } = require('../core/util')
+
+async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
+ assert(body)
+
+ let chunks = []
+ let limit = 0
+
+ for await (const chunk of body) {
+ chunks.push(chunk)
+ limit += chunk.length
+ if (limit > 128 * 1024) {
+ chunks = null
+ break
+ }
+ }
+
+ if (statusCode === 204 || !contentType || !chunks) {
+ process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
+ return
+ }
+
+ try {
+ if (contentType.startsWith('application/json')) {
+ const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
+ process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
+ return
+ }
+
+ if (contentType.startsWith('text/')) {
+ const payload = toUSVString(Buffer.concat(chunks))
+ process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
+ return
+ }
+ } catch (err) {
+    // If parsing fails, fall through to the generic error below.
+ }
+
+ process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
+}
+
+module.exports = { getResolveErrorBodyCallback }
diff --git a/lib/balanced-pool.js b/lib/balanced-pool.js
new file mode 100644
index 0000000..10bc6a4
--- /dev/null
+++ b/lib/balanced-pool.js
@@ -0,0 +1,190 @@
+'use strict'
+
+const {
+ BalancedPoolMissingUpstreamError,
+ InvalidArgumentError
+} = require('./core/errors')
+const {
+ PoolBase,
+ kClients,
+ kNeedDrain,
+ kAddClient,
+ kRemoveClient,
+ kGetDispatcher
+} = require('./pool-base')
+const Pool = require('./pool')
+const { kUrl, kInterceptors } = require('./core/symbols')
+const { parseOrigin } = require('./core/util')
+const kFactory = Symbol('factory')
+
+const kOptions = Symbol('options')
+const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
+const kCurrentWeight = Symbol('kCurrentWeight')
+const kIndex = Symbol('kIndex')
+const kWeight = Symbol('kWeight')
+const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
+const kErrorPenalty = Symbol('kErrorPenalty')
+
+function getGreatestCommonDivisor (a, b) {
+ if (b === 0) return a
+ return getGreatestCommonDivisor(b, a % b)
+}
+
+function defaultFactory (origin, opts) {
+ return new Pool(origin, opts)
+}
+
+class BalancedPool extends PoolBase {
+ constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
+ super()
+
+ this[kOptions] = opts
+ this[kIndex] = -1
+ this[kCurrentWeight] = 0
+
+ this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
+ this[kErrorPenalty] = this[kOptions].errorPenalty || 15
+
+ if (!Array.isArray(upstreams)) {
+ upstreams = [upstreams]
+ }
+
+ if (typeof factory !== 'function') {
+ throw new InvalidArgumentError('factory must be a function.')
+ }
+
+ this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
+ ? opts.interceptors.BalancedPool
+ : []
+ this[kFactory] = factory
+
+ for (const upstream of upstreams) {
+ this.addUpstream(upstream)
+ }
+ this._updateBalancedPoolStats()
+ }
+
+ addUpstream (upstream) {
+ const upstreamOrigin = parseOrigin(upstream).origin
+
+ if (this[kClients].find((pool) => (
+ pool[kUrl].origin === upstreamOrigin &&
+ pool.closed !== true &&
+ pool.destroyed !== true
+ ))) {
+ return this
+ }
+ const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))
+
+ this[kAddClient](pool)
+ pool.on('connect', () => {
+ pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
+ })
+
+ pool.on('connectionError', () => {
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
+ this._updateBalancedPoolStats()
+ })
+
+ pool.on('disconnect', (...args) => {
+ const err = args[2]
+ if (err && err.code === 'UND_ERR_SOCKET') {
+ // decrease the weight of the pool.
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
+ this._updateBalancedPoolStats()
+ }
+ })
+
+ for (const client of this[kClients]) {
+ client[kWeight] = this[kMaxWeightPerServer]
+ }
+
+ this._updateBalancedPoolStats()
+
+ return this
+ }
+
+ _updateBalancedPoolStats () {
+ this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
+ }
+
+ removeUpstream (upstream) {
+ const upstreamOrigin = parseOrigin(upstream).origin
+
+ const pool = this[kClients].find((pool) => (
+ pool[kUrl].origin === upstreamOrigin &&
+ pool.closed !== true &&
+ pool.destroyed !== true
+ ))
+
+ if (pool) {
+ this[kRemoveClient](pool)
+ }
+
+ return this
+ }
+
+ get upstreams () {
+ return this[kClients]
+ .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
+ .map((p) => p[kUrl].origin)
+ }
+
+ [kGetDispatcher] () {
+    // We validate that there is at least one pool,
+ // otherwise we would have to wait until an upstream
+ // is added, which might never happen.
+ if (this[kClients].length === 0) {
+ throw new BalancedPoolMissingUpstreamError()
+ }
+
+ const dispatcher = this[kClients].find(dispatcher => (
+ !dispatcher[kNeedDrain] &&
+ dispatcher.closed !== true &&
+ dispatcher.destroyed !== true
+ ))
+
+ if (!dispatcher) {
+ return
+ }
+
+ const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)
+
+ if (allClientsBusy) {
+ return
+ }
+
+ let counter = 0
+
+ let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])
+
+ while (counter++ < this[kClients].length) {
+ this[kIndex] = (this[kIndex] + 1) % this[kClients].length
+ const pool = this[kClients][this[kIndex]]
+
+ // find pool index with the largest weight
+ if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
+ maxWeightIndex = this[kIndex]
+ }
+
+      // Decrease the current weight once per full pass over this[kClients].
+ if (this[kIndex] === 0) {
+ // Set the current weight to the next lower weight.
+ this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]
+
+ if (this[kCurrentWeight] <= 0) {
+ this[kCurrentWeight] = this[kMaxWeightPerServer]
+ }
+ }
+ if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
+ return pool
+ }
+ }
+
+ this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
+ this[kIndex] = maxWeightIndex
+ return this[kClients][maxWeightIndex]
+ }
+}
+
+module.exports = BalancedPool
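
BalancedPool spreads dispatches over several upstream origins with interleaved weighted round-robin, lowering a pool's weight on connection errors and restoring it on successful connects. A sketch with placeholder upstreams:

'use strict'

// Hedged sketch: upstream origins are placeholders; the option values match the
// defaults shown above.
const BalancedPool = require('./lib/balanced-pool')
const request = require('./lib/api/api-request')

async function main () {
  const pool = new BalancedPool([
    'http://backend-1.example',
    'http://backend-2.example'
  ], { maxWeightPerServer: 100, errorPenalty: 15 })

  const { statusCode, body } = await request.call(pool, { path: '/health', method: 'GET' })
  console.log(statusCode, await body.text())

  pool.removeUpstream('http://backend-2.example')
  await pool.close()
}

main().catch(console.error)
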
diff --git a/lib/cache/cache.js b/lib/cache/cache.js
new file mode 100644
index 0000000..9b31108
--- /dev/null
+++ b/lib/cache/cache.js
@@ -0,0 +1,838 @@
+'use strict'
+
+const { kConstruct } = require('./symbols')
+const { urlEquals, fieldValues: getFieldValues } = require('./util')
+const { kEnumerableProperty, isDisturbed } = require('../core/util')
+const { kHeadersList } = require('../core/symbols')
+const { webidl } = require('../fetch/webidl')
+const { Response, cloneResponse } = require('../fetch/response')
+const { Request } = require('../fetch/request')
+const { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols')
+const { fetching } = require('../fetch/index')
+const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')
+const assert = require('assert')
+const { getGlobalDispatcher } = require('../global')
+
+/**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
+ * @typedef {Object} CacheBatchOperation
+ * @property {'delete' | 'put'} type
+ * @property {any} request
+ * @property {any} response
+ * @property {import('../../types/cache').CacheQueryOptions} options
+ */
+
+/**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list
+ * @typedef {[any, any][]} requestResponseList
+ */
+
+class Cache {
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
+ * @type {requestResponseList}
+ */
+ #relevantRequestResponseList
+
+ constructor () {
+ if (arguments[0] !== kConstruct) {
+ webidl.illegalConstructor()
+ }
+
+ this.#relevantRequestResponseList = arguments[1]
+ }
+
+ async match (request, options = {}) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })
+
+ request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.CacheQueryOptions(options)
+
+ const p = await this.matchAll(request, options)
+
+ if (p.length === 0) {
+ return
+ }
+
+ return p[0]
+ }
+
+ async matchAll (request = undefined, options = {}) {
+ webidl.brandCheck(this, Cache)
+
+ if (request !== undefined) request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.CacheQueryOptions(options)
+
+ // 1.
+ let r = null
+
+ // 2.
+ if (request !== undefined) {
+ if (request instanceof Request) {
+ // 2.1.1
+ r = request[kState]
+
+ // 2.1.2
+ if (r.method !== 'GET' && !options.ignoreMethod) {
+ return []
+ }
+ } else if (typeof request === 'string') {
+ // 2.2.1
+ r = new Request(request)[kState]
+ }
+ }
+
+ // 5.
+ // 5.1
+ const responses = []
+
+ // 5.2
+ if (request === undefined) {
+ // 5.2.1
+ for (const requestResponse of this.#relevantRequestResponseList) {
+ responses.push(requestResponse[1])
+ }
+ } else { // 5.3
+ // 5.3.1
+ const requestResponses = this.#queryCache(r, options)
+
+ // 5.3.2
+ for (const requestResponse of requestResponses) {
+ responses.push(requestResponse[1])
+ }
+ }
+
+ // 5.4
+    // We don't implement CORS so we don't need to loop over the responses, yay!
+
+ // 5.5.1
+ const responseList = []
+
+ // 5.5.2
+ for (const response of responses) {
+ // 5.5.2.1
+ const responseObject = new Response(response.body?.source ?? null)
+ const body = responseObject[kState].body
+ responseObject[kState] = response
+ responseObject[kState].body = body
+ responseObject[kHeaders][kHeadersList] = response.headersList
+ responseObject[kHeaders][kGuard] = 'immutable'
+
+ responseList.push(responseObject)
+ }
+
+ // 6.
+ return Object.freeze(responseList)
+ }
+
+ async add (request) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })
+
+ request = webidl.converters.RequestInfo(request)
+
+ // 1.
+ const requests = [request]
+
+ // 2.
+ const responseArrayPromise = this.addAll(requests)
+
+ // 3.
+ return await responseArrayPromise
+ }
+
+ async addAll (requests) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })
+
+ requests = webidl.converters['sequence<RequestInfo>'](requests)
+
+ // 1.
+ const responsePromises = []
+
+ // 2.
+ const requestList = []
+
+ // 3.
+ for (const request of requests) {
+ if (typeof request === 'string') {
+ continue
+ }
+
+ // 3.1
+ const r = request[kState]
+
+ // 3.2
+ if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
+ throw webidl.errors.exception({
+ header: 'Cache.addAll',
+ message: 'Expected http/s scheme when method is not GET.'
+ })
+ }
+ }
+
+ // 4.
+ /** @type {ReturnType<typeof fetching>[]} */
+ const fetchControllers = []
+
+ // 5.
+ for (const request of requests) {
+ // 5.1
+ const r = new Request(request)[kState]
+
+ // 5.2
+ if (!urlIsHttpHttpsScheme(r.url)) {
+ throw webidl.errors.exception({
+ header: 'Cache.addAll',
+ message: 'Expected http/s scheme.'
+ })
+ }
+
+ // 5.4
+ r.initiator = 'fetch'
+ r.destination = 'subresource'
+
+ // 5.5
+ requestList.push(r)
+
+ // 5.6
+ const responsePromise = createDeferredPromise()
+
+ // 5.7
+ fetchControllers.push(fetching({
+ request: r,
+ dispatcher: getGlobalDispatcher(),
+ processResponse (response) {
+ // 1.
+ if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
+ responsePromise.reject(webidl.errors.exception({
+ header: 'Cache.addAll',
+ message: 'Received an invalid status code or the request failed.'
+ }))
+ } else if (response.headersList.contains('vary')) { // 2.
+ // 2.1
+ const fieldValues = getFieldValues(response.headersList.get('vary'))
+
+ // 2.2
+ for (const fieldValue of fieldValues) {
+ // 2.2.1
+ if (fieldValue === '*') {
+ responsePromise.reject(webidl.errors.exception({
+ header: 'Cache.addAll',
+ message: 'invalid vary field value'
+ }))
+
+ for (const controller of fetchControllers) {
+ controller.abort()
+ }
+
+ return
+ }
+ }
+ }
+ },
+ processResponseEndOfBody (response) {
+ // 1.
+ if (response.aborted) {
+ responsePromise.reject(new DOMException('aborted', 'AbortError'))
+ return
+ }
+
+ // 2.
+ responsePromise.resolve(response)
+ }
+ }))
+
+ // 5.8
+ responsePromises.push(responsePromise.promise)
+ }
+
+ // 6.
+ const p = Promise.all(responsePromises)
+
+ // 7.
+ const responses = await p
+
+ // 7.1
+ const operations = []
+
+ // 7.2
+ let index = 0
+
+ // 7.3
+ for (const response of responses) {
+ // 7.3.1
+ /** @type {CacheBatchOperation} */
+ const operation = {
+ type: 'put', // 7.3.2
+ request: requestList[index], // 7.3.3
+ response // 7.3.4
+ }
+
+ operations.push(operation) // 7.3.5
+
+ index++ // 7.3.6
+ }
+
+ // 7.5
+ const cacheJobPromise = createDeferredPromise()
+
+ // 7.6.1
+ let errorData = null
+
+ // 7.6.2
+ try {
+ this.#batchCacheOperations(operations)
+ } catch (e) {
+ errorData = e
+ }
+
+ // 7.6.3
+ queueMicrotask(() => {
+ // 7.6.3.1
+ if (errorData === null) {
+ cacheJobPromise.resolve(undefined)
+ } else {
+ // 7.6.3.2
+ cacheJobPromise.reject(errorData)
+ }
+ })
+
+ // 7.7
+ return cacheJobPromise.promise
+ }
+
+ async put (request, response) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })
+
+ request = webidl.converters.RequestInfo(request)
+ response = webidl.converters.Response(response)
+
+ // 1.
+ let innerRequest = null
+
+ // 2.
+ if (request instanceof Request) {
+ innerRequest = request[kState]
+ } else { // 3.
+ innerRequest = new Request(request)[kState]
+ }
+
+ // 4.
+ if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
+ throw webidl.errors.exception({
+ header: 'Cache.put',
+ message: 'Expected an http/s scheme when method is not GET'
+ })
+ }
+
+ // 5.
+ const innerResponse = response[kState]
+
+ // 6.
+ if (innerResponse.status === 206) {
+ throw webidl.errors.exception({
+ header: 'Cache.put',
+ message: 'Got 206 status'
+ })
+ }
+
+ // 7.
+ if (innerResponse.headersList.contains('vary')) {
+ // 7.1.
+ const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))
+
+ // 7.2.
+ for (const fieldValue of fieldValues) {
+ // 7.2.1
+ if (fieldValue === '*') {
+ throw webidl.errors.exception({
+ header: 'Cache.put',
+ message: 'Got * vary field value'
+ })
+ }
+ }
+ }
+
+ // 8.
+ if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
+ throw webidl.errors.exception({
+ header: 'Cache.put',
+ message: 'Response body is locked or disturbed'
+ })
+ }
+
+ // 9.
+ const clonedResponse = cloneResponse(innerResponse)
+
+ // 10.
+ const bodyReadPromise = createDeferredPromise()
+
+ // 11.
+ if (innerResponse.body != null) {
+ // 11.1
+ const stream = innerResponse.body.stream
+
+ // 11.2
+ const reader = stream.getReader()
+
+ // 11.3
+ readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
+ } else {
+ bodyReadPromise.resolve(undefined)
+ }
+
+ // 12.
+ /** @type {CacheBatchOperation[]} */
+ const operations = []
+
+ // 13.
+ /** @type {CacheBatchOperation} */
+ const operation = {
+ type: 'put', // 14.
+ request: innerRequest, // 15.
+ response: clonedResponse // 16.
+ }
+
+ // 17.
+ operations.push(operation)
+
+ // 19.
+ const bytes = await bodyReadPromise.promise
+
+ if (clonedResponse.body != null) {
+ clonedResponse.body.source = bytes
+ }
+
+ // 19.1
+ const cacheJobPromise = createDeferredPromise()
+
+ // 19.2.1
+ let errorData = null
+
+ // 19.2.2
+ try {
+ this.#batchCacheOperations(operations)
+ } catch (e) {
+ errorData = e
+ }
+
+ // 19.2.3
+ queueMicrotask(() => {
+ // 19.2.3.1
+ if (errorData === null) {
+ cacheJobPromise.resolve()
+ } else { // 19.2.3.2
+ cacheJobPromise.reject(errorData)
+ }
+ })
+
+ return cacheJobPromise.promise
+ }
+
+ async delete (request, options = {}) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })
+
+ request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.CacheQueryOptions(options)
+
+ /**
+ * @type {Request}
+ */
+ let r = null
+
+ if (request instanceof Request) {
+ r = request[kState]
+
+ if (r.method !== 'GET' && !options.ignoreMethod) {
+ return false
+ }
+ } else {
+ assert(typeof request === 'string')
+
+ r = new Request(request)[kState]
+ }
+
+ /** @type {CacheBatchOperation[]} */
+ const operations = []
+
+ /** @type {CacheBatchOperation} */
+ const operation = {
+ type: 'delete',
+ request: r,
+ options
+ }
+
+ operations.push(operation)
+
+ const cacheJobPromise = createDeferredPromise()
+
+ let errorData = null
+ let requestResponses
+
+ try {
+ requestResponses = this.#batchCacheOperations(operations)
+ } catch (e) {
+ errorData = e
+ }
+
+ queueMicrotask(() => {
+ if (errorData === null) {
+ cacheJobPromise.resolve(!!requestResponses?.length)
+ } else {
+ cacheJobPromise.reject(errorData)
+ }
+ })
+
+ return cacheJobPromise.promise
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
+ * @param {any} request
+ * @param {import('../../types/cache').CacheQueryOptions} options
+ * @returns {readonly Request[]}
+ */
+ async keys (request = undefined, options = {}) {
+ webidl.brandCheck(this, Cache)
+
+ if (request !== undefined) request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.CacheQueryOptions(options)
+
+ // 1.
+ let r = null
+
+ // 2.
+ if (request !== undefined) {
+ // 2.1
+ if (request instanceof Request) {
+ // 2.1.1
+ r = request[kState]
+
+ // 2.1.2
+ if (r.method !== 'GET' && !options.ignoreMethod) {
+ return []
+ }
+ } else if (typeof request === 'string') { // 2.2
+ r = new Request(request)[kState]
+ }
+ }
+
+ // 4.
+ const promise = createDeferredPromise()
+
+ // 5.
+ // 5.1
+ const requests = []
+
+ // 5.2
+ if (request === undefined) {
+ // 5.2.1
+ for (const requestResponse of this.#relevantRequestResponseList) {
+ // 5.2.1.1
+ requests.push(requestResponse[0])
+ }
+ } else { // 5.3
+ // 5.3.1
+ const requestResponses = this.#queryCache(r, options)
+
+ // 5.3.2
+ for (const requestResponse of requestResponses) {
+ // 5.3.2.1
+ requests.push(requestResponse[0])
+ }
+ }
+
+ // 5.4
+ queueMicrotask(() => {
+ // 5.4.1
+ const requestList = []
+
+ // 5.4.2
+ for (const request of requests) {
+ const requestObject = new Request('https://a')
+ requestObject[kState] = request
+ requestObject[kHeaders][kHeadersList] = request.headersList
+ requestObject[kHeaders][kGuard] = 'immutable'
+ requestObject[kRealm] = request.client
+
+ // 5.4.2.1
+ requestList.push(requestObject)
+ }
+
+ // 5.4.3
+ promise.resolve(Object.freeze(requestList))
+ })
+
+ return promise.promise
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
+ * @param {CacheBatchOperation[]} operations
+ * @returns {requestResponseList}
+ */
+ #batchCacheOperations (operations) {
+ // 1.
+ const cache = this.#relevantRequestResponseList
+
+ // 2.
+ const backupCache = [...cache]
+
+ // 3.
+ const addedItems = []
+
+ // 4.1
+ const resultList = []
+
+ try {
+ // 4.2
+ for (const operation of operations) {
+ // 4.2.1
+ if (operation.type !== 'delete' && operation.type !== 'put') {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'operation type does not match "delete" or "put"'
+ })
+ }
+
+ // 4.2.2
+ if (operation.type === 'delete' && operation.response != null) {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'delete operation should not have an associated response'
+ })
+ }
+
+ // 4.2.3
+ if (this.#queryCache(operation.request, operation.options, addedItems).length) {
+ throw new DOMException('???', 'InvalidStateError')
+ }
+
+ // 4.2.4
+ let requestResponses
+
+ // 4.2.5
+ if (operation.type === 'delete') {
+ // 4.2.5.1
+ requestResponses = this.#queryCache(operation.request, operation.options)
+
+ // TODO: the spec is wrong, this is needed to pass WPTs
+ if (requestResponses.length === 0) {
+ return []
+ }
+
+ // 4.2.5.2
+ for (const requestResponse of requestResponses) {
+ const idx = cache.indexOf(requestResponse)
+ assert(idx !== -1)
+
+ // 4.2.5.2.1
+ cache.splice(idx, 1)
+ }
+ } else if (operation.type === 'put') { // 4.2.6
+ // 4.2.6.1
+ if (operation.response == null) {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'put operation should have an associated response'
+ })
+ }
+
+ // 4.2.6.2
+ const r = operation.request
+
+ // 4.2.6.3
+ if (!urlIsHttpHttpsScheme(r.url)) {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'expected http or https scheme'
+ })
+ }
+
+ // 4.2.6.4
+ if (r.method !== 'GET') {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'not get method'
+ })
+ }
+
+ // 4.2.6.5
+ if (operation.options != null) {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'options must not be defined'
+ })
+ }
+
+ // 4.2.6.6
+ requestResponses = this.#queryCache(operation.request)
+
+ // 4.2.6.7
+ for (const requestResponse of requestResponses) {
+ const idx = cache.indexOf(requestResponse)
+ assert(idx !== -1)
+
+ // 4.2.6.7.1
+ cache.splice(idx, 1)
+ }
+
+ // 4.2.6.8
+ cache.push([operation.request, operation.response])
+
+ // 4.2.6.10
+ addedItems.push([operation.request, operation.response])
+ }
+
+ // 4.2.7
+ resultList.push([operation.request, operation.response])
+ }
+
+ // 4.3
+ return resultList
+ } catch (e) { // 5.
+ // 5.1
+ this.#relevantRequestResponseList.length = 0
+
+ // 5.2
+ this.#relevantRequestResponseList = backupCache
+
+ // 5.3
+ throw e
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#query-cache
+ * @param {any} requestQuery
+ * @param {import('../../types/cache').CacheQueryOptions} options
+ * @param {requestResponseList} targetStorage
+ * @returns {requestResponseList}
+ */
+ #queryCache (requestQuery, options, targetStorage) {
+ /** @type {requestResponseList} */
+ const resultList = []
+
+ const storage = targetStorage ?? this.#relevantRequestResponseList
+
+ for (const requestResponse of storage) {
+ const [cachedRequest, cachedResponse] = requestResponse
+ if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
+ resultList.push(requestResponse)
+ }
+ }
+
+ return resultList
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
+ * @param {any} requestQuery
+ * @param {any} request
+ * @param {any | null} response
+ * @param {import('../../types/cache').CacheQueryOptions | undefined} options
+ * @returns {boolean}
+ */
+ #requestMatchesCachedItem (requestQuery, request, response = null, options) {
+ // if (options?.ignoreMethod === false && request.method === 'GET') {
+ // return false
+ // }
+
+ const queryURL = new URL(requestQuery.url)
+
+ const cachedURL = new URL(request.url)
+
+ if (options?.ignoreSearch) {
+ cachedURL.search = ''
+
+ queryURL.search = ''
+ }
+
+ if (!urlEquals(queryURL, cachedURL, true)) {
+ return false
+ }
+
+ if (
+ response == null ||
+ options?.ignoreVary ||
+ !response.headersList.contains('vary')
+ ) {
+ return true
+ }
+
+ const fieldValues = getFieldValues(response.headersList.get('vary'))
+
+ for (const fieldValue of fieldValues) {
+ if (fieldValue === '*') {
+ return false
+ }
+
+ const requestValue = request.headersList.get(fieldValue)
+ const queryValue = requestQuery.headersList.get(fieldValue)
+
+ // If one has the header and the other doesn't, or one has
+ // a different value than the other, return false
+ if (requestValue !== queryValue) {
+ return false
+ }
+ }
+
+ return true
+ }
+}
+
+Object.defineProperties(Cache.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'Cache',
+ configurable: true
+ },
+ match: kEnumerableProperty,
+ matchAll: kEnumerableProperty,
+ add: kEnumerableProperty,
+ addAll: kEnumerableProperty,
+ put: kEnumerableProperty,
+ delete: kEnumerableProperty,
+ keys: kEnumerableProperty
+})
+
+const cacheQueryOptionConverters = [
+ {
+ key: 'ignoreSearch',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'ignoreMethod',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'ignoreVary',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ }
+]
+
+webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)
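+// Illustrative example (assuming the dictionary converter applies the declared
+// defaults for missing members):
+//   webidl.converters.CacheQueryOptions({})
+//   // => { ignoreSearch: false, ignoreMethod: false, ignoreVary: false }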
+
+webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
+ ...cacheQueryOptionConverters,
+ {
+ key: 'cacheName',
+ converter: webidl.converters.DOMString
+ }
+])
+
+webidl.converters.Response = webidl.interfaceConverter(Response)
+
+webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
+ webidl.converters.RequestInfo
+)
+
+module.exports = {
+ Cache
+}
diff --git a/lib/cache/cachestorage.js b/lib/cache/cachestorage.js
new file mode 100644
index 0000000..7e7f0cf
--- /dev/null
+++ b/lib/cache/cachestorage.js
@@ -0,0 +1,144 @@
+'use strict'
+
+const { kConstruct } = require('./symbols')
+const { Cache } = require('./cache')
+const { webidl } = require('../fetch/webidl')
+const { kEnumerableProperty } = require('../core/util')
+
+class CacheStorage {
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
+ * @type {Map<string, import('./cache').requestResponseList>}
+ */
+ #caches = new Map()
+
+ constructor () {
+ if (arguments[0] !== kConstruct) {
+ webidl.illegalConstructor()
+ }
+ }
+
+ async match (request, options = {}) {
+ webidl.brandCheck(this, CacheStorage)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })
+
+ request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.MultiCacheQueryOptions(options)
+
+ // 1.
+ if (options.cacheName != null) {
+ // 1.1.1.1
+ if (this.#caches.has(options.cacheName)) {
+ // 1.1.1.1.1
+ const cacheList = this.#caches.get(options.cacheName)
+ const cache = new Cache(kConstruct, cacheList)
+
+ return await cache.match(request, options)
+ }
+ } else { // 2.
+ // 2.2
+ for (const cacheList of this.#caches.values()) {
+ const cache = new Cache(kConstruct, cacheList)
+
+ // 2.2.1.2
+ const response = await cache.match(request, options)
+
+ if (response !== undefined) {
+ return response
+ }
+ }
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#cache-storage-has
+ * @param {string} cacheName
+ * @returns {Promise<boolean>}
+ */
+ async has (cacheName) {
+ webidl.brandCheck(this, CacheStorage)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })
+
+ cacheName = webidl.converters.DOMString(cacheName)
+
+ // 2.1.1
+ // 2.2
+ return this.#caches.has(cacheName)
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
+ * @param {string} cacheName
+ * @returns {Promise<Cache>}
+ */
+ async open (cacheName) {
+ webidl.brandCheck(this, CacheStorage)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })
+
+ cacheName = webidl.converters.DOMString(cacheName)
+
+ // 2.1
+ if (this.#caches.has(cacheName)) {
+ // await caches.open('v1') !== await caches.open('v1')
+
+ // 2.1.1
+ const cache = this.#caches.get(cacheName)
+
+ // 2.1.1.1
+ return new Cache(kConstruct, cache)
+ }
+
+ // 2.2
+ const cache = []
+
+ // 2.3
+ this.#caches.set(cacheName, cache)
+
+ // 2.4
+ return new Cache(kConstruct, cache)
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
+ * @param {string} cacheName
+ * @returns {Promise<boolean>}
+ */
+ async delete (cacheName) {
+ webidl.brandCheck(this, CacheStorage)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })
+
+ cacheName = webidl.converters.DOMString(cacheName)
+
+ return this.#caches.delete(cacheName)
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
+ * @returns {Promise<string[]>}
+ */
+ async keys () {
+ webidl.brandCheck(this, CacheStorage)
+
+ // 2.1
+ const keys = this.#caches.keys()
+
+ // 2.2
+ return [...keys]
+ }
+}
+
+Object.defineProperties(CacheStorage.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'CacheStorage',
+ configurable: true
+ },
+ match: kEnumerableProperty,
+ has: kEnumerableProperty,
+ open: kEnumerableProperty,
+ delete: kEnumerableProperty,
+ keys: kEnumerableProperty
+})
+
+module.exports = {
+ CacheStorage
+}
diff --git a/lib/cache/symbols.js b/lib/cache/symbols.js
new file mode 100644
index 0000000..40448d6
--- /dev/null
+++ b/lib/cache/symbols.js
@@ -0,0 +1,5 @@
+'use strict'
+
+module.exports = {
+ kConstruct: require('../core/symbols').kConstruct
+}
diff --git a/lib/cache/util.js b/lib/cache/util.js
new file mode 100644
index 0000000..44d52b7
--- /dev/null
+++ b/lib/cache/util.js
@@ -0,0 +1,49 @@
+'use strict'
+
+const assert = require('assert')
+const { URLSerializer } = require('../fetch/dataURL')
+const { isValidHeaderName } = require('../fetch/util')
+
+/**
+ * @see https://url.spec.whatwg.org/#concept-url-equals
+ * @param {URL} A
+ * @param {URL} B
+ * @param {boolean | undefined} excludeFragment
+ * @returns {boolean}
+ */
+function urlEquals (A, B, excludeFragment = false) {
+ const serializedA = URLSerializer(A, excludeFragment)
+
+ const serializedB = URLSerializer(B, excludeFragment)
+
+ return serializedA === serializedB
+}
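+// For example (illustration only): URLs that differ only in their fragment compare
+// equal when excludeFragment is true, since the fragment is excluded from serialization:
+//   urlEquals(new URL('https://example.com/#a'), new URL('https://example.com/#b'), true) // => true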
+
+/**
+ * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
+ * @param {string} header
+ */
+function fieldValues (header) {
+ assert(header !== null)
+
+ const values = []
+
+ for (let value of header.split(',')) {
+ value = value.trim()
+
+ if (!value.length) {
+ continue
+ } else if (!isValidHeaderName(value)) {
+ continue
+ }
+
+ values.push(value)
+ }
+
+ return values
+}
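+// For example (illustration only): empty entries and invalid header names are dropped, e.g.
+//   fieldValues('Accept, , Accept-Language') // => ['Accept', 'Accept-Language']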
+
+module.exports = {
+ urlEquals,
+ fieldValues
+}
diff --git a/lib/client.js b/lib/client.js
new file mode 100644
index 0000000..22cb390
--- /dev/null
+++ b/lib/client.js
@@ -0,0 +1,2283 @@
+// @ts-check
+
+'use strict'
+
+/* global WebAssembly */
+
+const assert = require('assert')
+const net = require('net')
+const http = require('http')
+const { pipeline } = require('stream')
+const util = require('./core/util')
+const timers = require('./timers')
+const Request = require('./core/request')
+const DispatcherBase = require('./dispatcher-base')
+const {
+ RequestContentLengthMismatchError,
+ ResponseContentLengthMismatchError,
+ InvalidArgumentError,
+ RequestAbortedError,
+ HeadersTimeoutError,
+ HeadersOverflowError,
+ SocketError,
+ InformationalError,
+ BodyTimeoutError,
+ HTTPParserError,
+ ResponseExceededMaxSizeError,
+ ClientDestroyedError
+} = require('./core/errors')
+const buildConnector = require('./core/connect')
+const {
+ kUrl,
+ kReset,
+ kServerName,
+ kClient,
+ kBusy,
+ kParser,
+ kConnect,
+ kBlocking,
+ kResuming,
+ kRunning,
+ kPending,
+ kSize,
+ kWriting,
+ kQueue,
+ kConnected,
+ kConnecting,
+ kNeedDrain,
+ kNoRef,
+ kKeepAliveDefaultTimeout,
+ kHostHeader,
+ kPendingIdx,
+ kRunningIdx,
+ kError,
+ kPipelining,
+ kSocket,
+ kKeepAliveTimeoutValue,
+ kMaxHeadersSize,
+ kKeepAliveMaxTimeout,
+ kKeepAliveTimeoutThreshold,
+ kHeadersTimeout,
+ kBodyTimeout,
+ kStrictContentLength,
+ kConnector,
+ kMaxRedirections,
+ kMaxRequests,
+ kCounter,
+ kClose,
+ kDestroy,
+ kDispatch,
+ kInterceptors,
+ kLocalAddress,
+ kMaxResponseSize,
+ kHTTPConnVersion,
+ // HTTP2
+ kHost,
+ kHTTP2Session,
+ kHTTP2SessionState,
+ kHTTP2BuildRequest,
+ kHTTP2CopyHeaders,
+ kHTTP1BuildRequest
+} = require('./core/symbols')
+
+/** @type {import('http2')} */
+let http2
+try {
+ http2 = require('http2')
+} catch {
+ // @ts-ignore
+ http2 = { constants: {} }
+}
+
+const {
+ constants: {
+ HTTP2_HEADER_AUTHORITY,
+ HTTP2_HEADER_METHOD,
+ HTTP2_HEADER_PATH,
+ HTTP2_HEADER_SCHEME,
+ HTTP2_HEADER_CONTENT_LENGTH,
+ HTTP2_HEADER_EXPECT,
+ HTTP2_HEADER_STATUS
+ }
+} = http2
+
+// Experimental
+let h2ExperimentalWarned = false
+
+const FastBuffer = Buffer[Symbol.species]
+
+const kClosedResolve = Symbol('kClosedResolve')
+
+const channels = {}
+
+try {
+ const diagnosticsChannel = require('diagnostics_channel')
+ channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders')
+ channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect')
+ channels.connectError = diagnosticsChannel.channel('undici:client:connectError')
+ channels.connected = diagnosticsChannel.channel('undici:client:connected')
+} catch {
+ channels.sendHeaders = { hasSubscribers: false }
+ channels.beforeConnect = { hasSubscribers: false }
+ channels.connectError = { hasSubscribers: false }
+ channels.connected = { hasSubscribers: false }
+}
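+// Consumers can observe these events via diagnostics_channel, e.g. (illustration only):
+//   require('diagnostics_channel')
+//     .channel('undici:client:sendHeaders')
+//     .subscribe(({ request, headers, socket }) => { /* inspect outgoing headers */ })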
+
+/**
+ * @type {import('../types/client').default}
+ */
+class Client extends DispatcherBase {
+ /**
+ *
+ * @param {string|URL} url
+ * @param {import('../types/client').Client.Options} options
+ */
+ constructor (url, {
+ interceptors,
+ maxHeaderSize,
+ headersTimeout,
+ socketTimeout,
+ requestTimeout,
+ connectTimeout,
+ bodyTimeout,
+ idleTimeout,
+ keepAlive,
+ keepAliveTimeout,
+ maxKeepAliveTimeout,
+ keepAliveMaxTimeout,
+ keepAliveTimeoutThreshold,
+ socketPath,
+ pipelining,
+ tls,
+ strictContentLength,
+ maxCachedSessions,
+ maxRedirections,
+ connect,
+ maxRequestsPerClient,
+ localAddress,
+ maxResponseSize,
+ autoSelectFamily,
+ autoSelectFamilyAttemptTimeout,
+ // h2
+ allowH2,
+ maxConcurrentStreams
+ } = {}) {
+ super()
+
+ if (keepAlive !== undefined) {
+ throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
+ }
+
+ if (socketTimeout !== undefined) {
+ throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
+ }
+
+ if (requestTimeout !== undefined) {
+ throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
+ }
+
+ if (idleTimeout !== undefined) {
+ throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
+ }
+
+ if (maxKeepAliveTimeout !== undefined) {
+ throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
+ }
+
+ if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {
+ throw new InvalidArgumentError('invalid maxHeaderSize')
+ }
+
+ if (socketPath != null && typeof socketPath !== 'string') {
+ throw new InvalidArgumentError('invalid socketPath')
+ }
+
+ if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
+ throw new InvalidArgumentError('invalid connectTimeout')
+ }
+
+ if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
+ throw new InvalidArgumentError('invalid keepAliveTimeout')
+ }
+
+ if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
+ throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
+ }
+
+ if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
+ throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
+ }
+
+ if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
+ throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
+ }
+
+ if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
+ throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
+ }
+
+ if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+ throw new InvalidArgumentError('connect must be a function or an object')
+ }
+
+ if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
+ throw new InvalidArgumentError('maxRedirections must be a positive number')
+ }
+
+ if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
+ throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
+ }
+
+ if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
+ throw new InvalidArgumentError('localAddress must be a valid string IP address')
+ }
+
+ if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
+ throw new InvalidArgumentError('maxResponseSize must be a positive number')
+ }
+
+ if (
+ autoSelectFamilyAttemptTimeout != null &&
+ (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
+ ) {
+ throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
+ }
+
+ // h2
+ if (allowH2 != null && typeof allowH2 !== 'boolean') {
+ throw new InvalidArgumentError('allowH2 must be a valid boolean value')
+ }
+
+ if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
+ throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
+ }
+
+ if (typeof connect !== 'function') {
+ connect = buildConnector({
+ ...tls,
+ maxCachedSessions,
+ allowH2,
+ socketPath,
+ timeout: connectTimeout,
+ ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
+ ...connect
+ })
+ }
+
+ this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client)
+ ? interceptors.Client
+ : [createRedirectInterceptor({ maxRedirections })]
+ this[kUrl] = util.parseOrigin(url)
+ this[kConnector] = connect
+ this[kSocket] = null
+ this[kPipelining] = pipelining != null ? pipelining : 1
+ this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize
+ this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
+ this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
+ this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold
+ this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
+ this[kServerName] = null
+ this[kLocalAddress] = localAddress != null ? localAddress : null
+ this[kResuming] = 0 // 0: idle, 1: scheduled, 2: resuming
+ this[kNeedDrain] = 0 // 0: idle, 1: scheduled, 2: resuming
+ this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
+ this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
+ this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
+ this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
+ this[kMaxRedirections] = maxRedirections
+ this[kMaxRequests] = maxRequestsPerClient
+ this[kClosedResolve] = null
+ this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
+ this[kHTTPConnVersion] = 'h1'
+
+ // HTTP/2
+ this[kHTTP2Session] = null
+ this[kHTTP2SessionState] = !allowH2
+ ? null
+ : {
+ // streams: null, // Fixed queue of streams - For future support of `push`
+ openStreams: 0, // Keep track of them to decide whether or not to unref the session
+ maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
+ }
+ this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}`
+
+ // kQueue is built up of 3 sections separated by
+ // the kRunningIdx and kPendingIdx indices.
+ // | complete | running | pending |
+ // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
+ // kRunningIdx points to the first running element.
+ // kPendingIdx points to the first pending element.
+ // This implements a fast queue with an amortized
+ // time of O(1).
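+ // Illustrative snapshot (example values, not actual state): with
+ //   kQueue      = [reqA, reqB, reqC, reqD]
+ //   kRunningIdx = 1 // reqA has completed
+ //   kPendingIdx = 3 // reqB and reqC are running
+ // the getters below yield [kRunning] === 2 and [kPending] === 1.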
+
+ this[kQueue] = []
+ this[kRunningIdx] = 0
+ this[kPendingIdx] = 0
+ }
+
+ get pipelining () {
+ return this[kPipelining]
+ }
+
+ set pipelining (value) {
+ this[kPipelining] = value
+ resume(this, true)
+ }
+
+ get [kPending] () {
+ return this[kQueue].length - this[kPendingIdx]
+ }
+
+ get [kRunning] () {
+ return this[kPendingIdx] - this[kRunningIdx]
+ }
+
+ get [kSize] () {
+ return this[kQueue].length - this[kRunningIdx]
+ }
+
+ get [kConnected] () {
+ return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed
+ }
+
+ get [kBusy] () {
+ const socket = this[kSocket]
+ return (
+ (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) ||
+ (this[kSize] >= (this[kPipelining] || 1)) ||
+ this[kPending] > 0
+ )
+ }
+
+ /* istanbul ignore: only used for test */
+ [kConnect] (cb) {
+ connect(this)
+ this.once('connect', cb)
+ }
+
+ [kDispatch] (opts, handler) {
+ const origin = opts.origin || this[kUrl].origin
+
+ const request = this[kHTTPConnVersion] === 'h2'
+ ? Request[kHTTP2BuildRequest](origin, opts, handler)
+ : Request[kHTTP1BuildRequest](origin, opts, handler)
+
+ this[kQueue].push(request)
+ if (this[kResuming]) {
+ // Do nothing.
+ } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
+ // Wait a tick in case stream/iterator is ended in the same tick.
+ this[kResuming] = 1
+ process.nextTick(resume, this)
+ } else {
+ resume(this, true)
+ }
+
+ if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
+ this[kNeedDrain] = 2
+ }
+
+ return this[kNeedDrain] < 2
+ }
+
+ async [kClose] () {
+ // TODO: for H2 we need to gracefully flush the remaining enqueued
+ // request and close each stream.
+ return new Promise((resolve) => {
+ if (!this[kSize]) {
+ resolve(null)
+ } else {
+ this[kClosedResolve] = resolve
+ }
+ })
+ }
+
+ async [kDestroy] (err) {
+ return new Promise((resolve) => {
+ const requests = this[kQueue].splice(this[kPendingIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(this, request, err)
+ }
+
+ const callback = () => {
+ if (this[kClosedResolve]) {
+ // TODO (fix): Should we error here with ClientDestroyedError?
+ this[kClosedResolve]()
+ this[kClosedResolve] = null
+ }
+ resolve()
+ }
+
+ if (this[kHTTP2Session] != null) {
+ util.destroy(this[kHTTP2Session], err)
+ this[kHTTP2Session] = null
+ this[kHTTP2SessionState] = null
+ }
+
+ if (!this[kSocket]) {
+ queueMicrotask(callback)
+ } else {
+ util.destroy(this[kSocket].on('close', callback), err)
+ }
+
+ resume(this)
+ })
+ }
+}
+
+function onHttp2SessionError (err) {
+ assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
+
+ this[kSocket][kError] = err
+
+ onError(this[kClient], err)
+}
+
+function onHttp2FrameError (type, code, id) {
+ const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
+
+ if (id === 0) {
+ this[kSocket][kError] = err
+ onError(this[kClient], err)
+ }
+}
+
+function onHttp2SessionEnd () {
+ util.destroy(this, new SocketError('other side closed'))
+ util.destroy(this[kSocket], new SocketError('other side closed'))
+}
+
+function onHTTP2GoAway (code) {
+ const client = this[kClient]
+ const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`)
+ client[kSocket] = null
+ client[kHTTP2Session] = null
+
+ if (client.destroyed) {
+ assert(this[kPending] === 0)
+
+ // Fail entire queue.
+ const requests = client[kQueue].splice(client[kRunningIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(this, request, err)
+ }
+ } else if (client[kRunning] > 0) {
+ // Fail head of pipeline.
+ const request = client[kQueue][client[kRunningIdx]]
+ client[kQueue][client[kRunningIdx]++] = null
+
+ errorRequest(client, request, err)
+ }
+
+ client[kPendingIdx] = client[kRunningIdx]
+
+ assert(client[kRunning] === 0)
+
+ client.emit('disconnect',
+ client[kUrl],
+ [client],
+ err
+ )
+
+ resume(client)
+}
+
+const constants = require('./llhttp/constants')
+const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
+const EMPTY_BUF = Buffer.alloc(0)
+
+async function lazyllhttp () {
+ const llhttpWasmData = process.env.JEST_WORKER_ID ? require('./llhttp/llhttp-wasm.js') : undefined
+
+ let mod
+ try {
+ mod = await WebAssembly.compile(Buffer.from(require('./llhttp/llhttp_simd-wasm.js'), 'base64'))
+ } catch (e) {
+ /* istanbul ignore next */
+
+ // We could check whether the error was caused by the simd option not
+ // being enabled, but the occurrence of this other error
+ // * https://github.com/emscripten-core/emscripten/issues/11495
+ // led us to remove that check to avoid breaking Node 12.
+ mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || require('./llhttp/llhttp-wasm.js'), 'base64'))
+ }
+
+ return await WebAssembly.instantiate(mod, {
+ env: {
+ /* eslint-disable camelcase */
+
+ wasm_on_url: (p, at, len) => {
+ /* istanbul ignore next */
+ return 0
+ },
+ wasm_on_status: (p, at, len) => {
+ assert.strictEqual(currentParser.ptr, p)
+ const start = at - currentBufferPtr + currentBufferRef.byteOffset
+ return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+ },
+ wasm_on_message_begin: (p) => {
+ assert.strictEqual(currentParser.ptr, p)
+ return currentParser.onMessageBegin() || 0
+ },
+ wasm_on_header_field: (p, at, len) => {
+ assert.strictEqual(currentParser.ptr, p)
+ const start = at - currentBufferPtr + currentBufferRef.byteOffset
+ return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+ },
+ wasm_on_header_value: (p, at, len) => {
+ assert.strictEqual(currentParser.ptr, p)
+ const start = at - currentBufferPtr + currentBufferRef.byteOffset
+ return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+ },
+ wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => {
+ assert.strictEqual(currentParser.ptr, p)
+ return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0
+ },
+ wasm_on_body: (p, at, len) => {
+ assert.strictEqual(currentParser.ptr, p)
+ const start = at - currentBufferPtr + currentBufferRef.byteOffset
+ return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+ },
+ wasm_on_message_complete: (p) => {
+ assert.strictEqual(currentParser.ptr, p)
+ return currentParser.onMessageComplete() || 0
+ }
+
+ /* eslint-enable camelcase */
+ }
+ })
+}
+
+let llhttpInstance = null
+let llhttpPromise = lazyllhttp()
+llhttpPromise.catch()
+
+let currentParser = null
+let currentBufferRef = null
+let currentBufferSize = 0
+let currentBufferPtr = null
+
+const TIMEOUT_HEADERS = 1
+const TIMEOUT_BODY = 2
+const TIMEOUT_IDLE = 3
+
+class Parser {
+ constructor (client, socket, { exports }) {
+ assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0)
+
+ this.llhttp = exports
+ this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE)
+ this.client = client
+ this.socket = socket
+ this.timeout = null
+ this.timeoutValue = null
+ this.timeoutType = null
+ this.statusCode = null
+ this.statusText = ''
+ this.upgrade = false
+ this.headers = []
+ this.headersSize = 0
+ this.headersMaxSize = client[kMaxHeadersSize]
+ this.shouldKeepAlive = false
+ this.paused = false
+ this.resume = this.resume.bind(this)
+
+ this.bytesRead = 0
+
+ this.keepAlive = ''
+ this.contentLength = ''
+ this.connection = ''
+ this.maxResponseSize = client[kMaxResponseSize]
+ }
+
+ setTimeout (value, type) {
+ this.timeoutType = type
+ if (value !== this.timeoutValue) {
+ timers.clearTimeout(this.timeout)
+ if (value) {
+ this.timeout = timers.setTimeout(onParserTimeout, value, this)
+ // istanbul ignore else: only for jest
+ if (this.timeout.unref) {
+ this.timeout.unref()
+ }
+ } else {
+ this.timeout = null
+ }
+ this.timeoutValue = value
+ } else if (this.timeout) {
+ // istanbul ignore else: only for jest
+ if (this.timeout.refresh) {
+ this.timeout.refresh()
+ }
+ }
+ }
+
+ resume () {
+ if (this.socket.destroyed || !this.paused) {
+ return
+ }
+
+ assert(this.ptr != null)
+ assert(currentParser == null)
+
+ this.llhttp.llhttp_resume(this.ptr)
+
+ assert(this.timeoutType === TIMEOUT_BODY)
+ if (this.timeout) {
+ // istanbul ignore else: only for jest
+ if (this.timeout.refresh) {
+ this.timeout.refresh()
+ }
+ }
+
+ this.paused = false
+ this.execute(this.socket.read() || EMPTY_BUF) // Flush parser.
+ this.readMore()
+ }
+
+ readMore () {
+ while (!this.paused && this.ptr) {
+ const chunk = this.socket.read()
+ if (chunk === null) {
+ break
+ }
+ this.execute(chunk)
+ }
+ }
+
+ execute (data) {
+ assert(this.ptr != null)
+ assert(currentParser == null)
+ assert(!this.paused)
+
+ const { socket, llhttp } = this
+
+ if (data.length > currentBufferSize) {
+ if (currentBufferPtr) {
+ llhttp.free(currentBufferPtr)
+ }
+ currentBufferSize = Math.ceil(data.length / 4096) * 4096
+ currentBufferPtr = llhttp.malloc(currentBufferSize)
+ }
+
+ new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data)
+
+ // Call `execute` on the wasm parser.
+ // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data,
+ // and finally the length of bytes to parse.
+ // The return value is an error code or `constants.ERROR.OK`.
+ try {
+ let ret
+
+ try {
+ currentBufferRef = data
+ currentParser = this
+ ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length)
+ /* eslint-disable-next-line no-useless-catch */
+ } catch (err) {
+ /* istanbul ignore next: difficult to make a test case for */
+ throw err
+ } finally {
+ currentParser = null
+ currentBufferRef = null
+ }
+
+ const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr
+
+ if (ret === constants.ERROR.PAUSED_UPGRADE) {
+ this.onUpgrade(data.slice(offset))
+ } else if (ret === constants.ERROR.PAUSED) {
+ this.paused = true
+ socket.unshift(data.slice(offset))
+ } else if (ret !== constants.ERROR.OK) {
+ const ptr = llhttp.llhttp_get_error_reason(this.ptr)
+ let message = ''
+ /* istanbul ignore else: difficult to make a test case for */
+ if (ptr) {
+ const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0)
+ message =
+ 'Response does not match the HTTP/1.1 protocol (' +
+ Buffer.from(llhttp.memory.buffer, ptr, len).toString() +
+ ')'
+ }
+ throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset))
+ }
+ } catch (err) {
+ util.destroy(socket, err)
+ }
+ }
+
+ destroy () {
+ assert(this.ptr != null)
+ assert(currentParser == null)
+
+ this.llhttp.llhttp_free(this.ptr)
+ this.ptr = null
+
+ timers.clearTimeout(this.timeout)
+ this.timeout = null
+ this.timeoutValue = null
+ this.timeoutType = null
+
+ this.paused = false
+ }
+
+ onStatus (buf) {
+ this.statusText = buf.toString()
+ }
+
+ onMessageBegin () {
+ const { socket, client } = this
+
+ /* istanbul ignore next: difficult to make a test case for */
+ if (socket.destroyed) {
+ return -1
+ }
+
+ const request = client[kQueue][client[kRunningIdx]]
+ if (!request) {
+ return -1
+ }
+ }
+
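+ // Note (illustration): this.headers is a flat [name, value, name, value, ...] list
+ // of Buffers, e.g. a 'content-length' name buffer followed by a '42' value buffer,
+ // which is why the parity checks below decide between pushing and concatenating.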
+ onHeaderField (buf) {
+ const len = this.headers.length
+
+ if ((len & 1) === 0) {
+ this.headers.push(buf)
+ } else {
+ this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
+ }
+
+ this.trackHeader(buf.length)
+ }
+
+ onHeaderValue (buf) {
+ let len = this.headers.length
+
+ if ((len & 1) === 1) {
+ this.headers.push(buf)
+ len += 1
+ } else {
+ this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
+ }
+
+ const key = this.headers[len - 2]
+ if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') {
+ this.keepAlive += buf.toString()
+ } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') {
+ this.connection += buf.toString()
+ } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') {
+ this.contentLength += buf.toString()
+ }
+
+ this.trackHeader(buf.length)
+ }
+
+ trackHeader (len) {
+ this.headersSize += len
+ if (this.headersSize >= this.headersMaxSize) {
+ util.destroy(this.socket, new HeadersOverflowError())
+ }
+ }
+
+ onUpgrade (head) {
+ const { upgrade, client, socket, headers, statusCode } = this
+
+ assert(upgrade)
+
+ const request = client[kQueue][client[kRunningIdx]]
+ assert(request)
+
+ assert(!socket.destroyed)
+ assert(socket === client[kSocket])
+ assert(!this.paused)
+ assert(request.upgrade || request.method === 'CONNECT')
+
+ this.statusCode = null
+ this.statusText = ''
+ this.shouldKeepAlive = null
+
+ assert(this.headers.length % 2 === 0)
+ this.headers = []
+ this.headersSize = 0
+
+ socket.unshift(head)
+
+ socket[kParser].destroy()
+ socket[kParser] = null
+
+ socket[kClient] = null
+ socket[kError] = null
+ socket
+ .removeListener('error', onSocketError)
+ .removeListener('readable', onSocketReadable)
+ .removeListener('end', onSocketEnd)
+ .removeListener('close', onSocketClose)
+
+ client[kSocket] = null
+ client[kQueue][client[kRunningIdx]++] = null
+ client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade'))
+
+ try {
+ request.onUpgrade(statusCode, headers, socket)
+ } catch (err) {
+ util.destroy(socket, err)
+ }
+
+ resume(client)
+ }
+
+ onHeadersComplete (statusCode, upgrade, shouldKeepAlive) {
+ const { client, socket, headers, statusText } = this
+
+ /* istanbul ignore next: difficult to make a test case for */
+ if (socket.destroyed) {
+ return -1
+ }
+
+ const request = client[kQueue][client[kRunningIdx]]
+
+ /* istanbul ignore next: difficult to make a test case for */
+ if (!request) {
+ return -1
+ }
+
+ assert(!this.upgrade)
+ assert(this.statusCode < 200)
+
+ if (statusCode === 100) {
+ util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket)))
+ return -1
+ }
+
+ /* this can only happen if server is misbehaving */
+ if (upgrade && !request.upgrade) {
+ util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket)))
+ return -1
+ }
+
+ assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS)
+
+ this.statusCode = statusCode
+ this.shouldKeepAlive = (
+ shouldKeepAlive ||
+ // Override llhttp value which does not allow keepAlive for HEAD.
+ (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive')
+ )
+
+ if (this.statusCode >= 200) {
+ const bodyTimeout = request.bodyTimeout != null
+ ? request.bodyTimeout
+ : client[kBodyTimeout]
+ this.setTimeout(bodyTimeout, TIMEOUT_BODY)
+ } else if (this.timeout) {
+ // istanbul ignore else: only for jest
+ if (this.timeout.refresh) {
+ this.timeout.refresh()
+ }
+ }
+
+ if (request.method === 'CONNECT') {
+ assert(client[kRunning] === 1)
+ this.upgrade = true
+ return 2
+ }
+
+ if (upgrade) {
+ assert(client[kRunning] === 1)
+ this.upgrade = true
+ return 2
+ }
+
+ assert(this.headers.length % 2 === 0)
+ this.headers = []
+ this.headersSize = 0
+
+ if (this.shouldKeepAlive && client[kPipelining]) {
+ const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null
+
+ if (keepAliveTimeout != null) {
+ const timeout = Math.min(
+ keepAliveTimeout - client[kKeepAliveTimeoutThreshold],
+ client[kKeepAliveMaxTimeout]
+ )
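+ // Worked example (illustration; assumes the parsed keep-alive hint is expressed
+ // in milliseconds): a `keep-alive: timeout=5` hint (5000) minus the default
+ // 1000 threshold yields a 4000ms reuse window, capped by kKeepAliveMaxTimeout.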
+ if (timeout <= 0) {
+ socket[kReset] = true
+ } else {
+ client[kKeepAliveTimeoutValue] = timeout
+ }
+ } else {
+ client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout]
+ }
+ } else {
+ // Stop more requests from being dispatched.
+ socket[kReset] = true
+ }
+
+ const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false
+
+ if (request.aborted) {
+ return -1
+ }
+
+ if (request.method === 'HEAD') {
+ return 1
+ }
+
+ if (statusCode < 200) {
+ return 1
+ }
+
+ if (socket[kBlocking]) {
+ socket[kBlocking] = false
+ resume(client)
+ }
+
+ return pause ? constants.ERROR.PAUSED : 0
+ }
+
+ onBody (buf) {
+ const { client, socket, statusCode, maxResponseSize } = this
+
+ if (socket.destroyed) {
+ return -1
+ }
+
+ const request = client[kQueue][client[kRunningIdx]]
+ assert(request)
+
+ assert.strictEqual(this.timeoutType, TIMEOUT_BODY)
+ if (this.timeout) {
+ // istanbul ignore else: only for jest
+ if (this.timeout.refresh) {
+ this.timeout.refresh()
+ }
+ }
+
+ assert(statusCode >= 200)
+
+ if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) {
+ util.destroy(socket, new ResponseExceededMaxSizeError())
+ return -1
+ }
+
+ this.bytesRead += buf.length
+
+ if (request.onData(buf) === false) {
+ return constants.ERROR.PAUSED
+ }
+ }
+
+ onMessageComplete () {
+ const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this
+
+ if (socket.destroyed && (!statusCode || shouldKeepAlive)) {
+ return -1
+ }
+
+ if (upgrade) {
+ return
+ }
+
+ const request = client[kQueue][client[kRunningIdx]]
+ assert(request)
+
+ assert(statusCode >= 100)
+
+ this.statusCode = null
+ this.statusText = ''
+ this.bytesRead = 0
+ this.contentLength = ''
+ this.keepAlive = ''
+ this.connection = ''
+
+ assert(this.headers.length % 2 === 0)
+ this.headers = []
+ this.headersSize = 0
+
+ if (statusCode < 200) {
+ return
+ }
+
+ /* istanbul ignore next: should be handled by llhttp? */
+ if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) {
+ util.destroy(socket, new ResponseContentLengthMismatchError())
+ return -1
+ }
+
+ request.onComplete(headers)
+
+ client[kQueue][client[kRunningIdx]++] = null
+
+ if (socket[kWriting]) {
+ assert.strictEqual(client[kRunning], 0)
+ // Response completed before request.
+ util.destroy(socket, new InformationalError('reset'))
+ return constants.ERROR.PAUSED
+ } else if (!shouldKeepAlive) {
+ util.destroy(socket, new InformationalError('reset'))
+ return constants.ERROR.PAUSED
+ } else if (socket[kReset] && client[kRunning] === 0) {
+ // Destroy socket once all requests have completed.
+ // The request at the tail of the pipeline is the one
+ // that requested reset and no further requests should
+ // have been queued since then.
+ util.destroy(socket, new InformationalError('reset'))
+ return constants.ERROR.PAUSED
+ } else if (client[kPipelining] === 1) {
+ // We must wait a full event loop cycle to reuse this socket to make sure
+ // that non-spec-compliant servers have not closed the connection even
+ // though they said they wouldn't.
+ setImmediate(resume, client)
+ } else {
+ resume(client)
+ }
+ }
+}
+
+function onParserTimeout (parser) {
+ const { socket, timeoutType, client } = parser
+
+ /* istanbul ignore else */
+ if (timeoutType === TIMEOUT_HEADERS) {
+ if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) {
+ assert(!parser.paused, 'cannot be paused while waiting for headers')
+ util.destroy(socket, new HeadersTimeoutError())
+ }
+ } else if (timeoutType === TIMEOUT_BODY) {
+ if (!parser.paused) {
+ util.destroy(socket, new BodyTimeoutError())
+ }
+ } else if (timeoutType === TIMEOUT_IDLE) {
+ assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue])
+ util.destroy(socket, new InformationalError('socket idle timeout'))
+ }
+}
+
+function onSocketReadable () {
+ const { [kParser]: parser } = this
+ if (parser) {
+ parser.readMore()
+ }
+}
+
+function onSocketError (err) {
+ const { [kClient]: client, [kParser]: parser } = this
+
+ assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
+
+ if (client[kHTTPConnVersion] !== 'h2') {
+ // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded
+ // to the user.
+ if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {
+ // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ return
+ }
+ }
+
+ this[kError] = err
+
+ onError(this[kClient], err)
+}
+
+function onError (client, err) {
+ if (
+ client[kRunning] === 0 &&
+ err.code !== 'UND_ERR_INFO' &&
+ err.code !== 'UND_ERR_SOCKET'
+ ) {
+ // Error is not caused by running request and not a recoverable
+ // socket error.
+
+ assert(client[kPendingIdx] === client[kRunningIdx])
+
+ const requests = client[kQueue].splice(client[kRunningIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(client, request, err)
+ }
+ assert(client[kSize] === 0)
+ }
+}
+
+function onSocketEnd () {
+ const { [kParser]: parser, [kClient]: client } = this
+
+ if (client[kHTTPConnVersion] !== 'h2') {
+ if (parser.statusCode && !parser.shouldKeepAlive) {
+ // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ return
+ }
+ }
+
+ util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
+}
+
+function onSocketClose () {
+ const { [kClient]: client, [kParser]: parser } = this
+
+ if (client[kHTTPConnVersion] === 'h1' && parser) {
+ if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) {
+ // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ }
+
+ this[kParser].destroy()
+ this[kParser] = null
+ }
+
+ const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))
+
+ client[kSocket] = null
+
+ if (client.destroyed) {
+ assert(client[kPending] === 0)
+
+ // Fail entire queue.
+ const requests = client[kQueue].splice(client[kRunningIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(client, request, err)
+ }
+ } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') {
+ // Fail head of pipeline.
+ const request = client[kQueue][client[kRunningIdx]]
+ client[kQueue][client[kRunningIdx]++] = null
+
+ errorRequest(client, request, err)
+ }
+
+ client[kPendingIdx] = client[kRunningIdx]
+
+ assert(client[kRunning] === 0)
+
+ client.emit('disconnect', client[kUrl], [client], err)
+
+ resume(client)
+}
+
+async function connect (client) {
+ assert(!client[kConnecting])
+ assert(!client[kSocket])
+
+ let { host, hostname, protocol, port } = client[kUrl]
+
+ // Resolve ipv6
+ if (hostname[0] === '[') {
+ const idx = hostname.indexOf(']')
+
+ assert(idx !== -1)
+ const ip = hostname.substring(1, idx)
+
+ assert(net.isIP(ip))
+ hostname = ip
+ }
+
+ client[kConnecting] = true
+
+ if (channels.beforeConnect.hasSubscribers) {
+ channels.beforeConnect.publish({
+ connectParams: {
+ host,
+ hostname,
+ protocol,
+ port,
+ servername: client[kServerName],
+ localAddress: client[kLocalAddress]
+ },
+ connector: client[kConnector]
+ })
+ }
+
+ try {
+ const socket = await new Promise((resolve, reject) => {
+ client[kConnector]({
+ host,
+ hostname,
+ protocol,
+ port,
+ servername: client[kServerName],
+ localAddress: client[kLocalAddress]
+ }, (err, socket) => {
+ if (err) {
+ reject(err)
+ } else {
+ resolve(socket)
+ }
+ })
+ })
+
+ if (client.destroyed) {
+ util.destroy(socket.on('error', () => {}), new ClientDestroyedError())
+ return
+ }
+
+ client[kConnecting] = false
+
+ assert(socket)
+
+ const isH2 = socket.alpnProtocol === 'h2'
+ if (isH2) {
+ if (!h2ExperimentalWarned) {
+ h2ExperimentalWarned = true
+ process.emitWarning('H2 support is experimental, expect it to change at any time.', {
+ code: 'UNDICI-H2'
+ })
+ }
+
+ const session = http2.connect(client[kUrl], {
+ createConnection: () => socket,
+ peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams
+ })
+
+ client[kHTTPConnVersion] = 'h2'
+ session[kClient] = client
+ session[kSocket] = socket
+ session.on('error', onHttp2SessionError)
+ session.on('frameError', onHttp2FrameError)
+ session.on('end', onHttp2SessionEnd)
+ session.on('goaway', onHTTP2GoAway)
+ session.on('close', onSocketClose)
+ session.unref()
+
+ client[kHTTP2Session] = session
+ socket[kHTTP2Session] = session
+ } else {
+ if (!llhttpInstance) {
+ llhttpInstance = await llhttpPromise
+ llhttpPromise = null
+ }
+
+ socket[kNoRef] = false
+ socket[kWriting] = false
+ socket[kReset] = false
+ socket[kBlocking] = false
+ socket[kParser] = new Parser(client, socket, llhttpInstance)
+ }
+
+ socket[kCounter] = 0
+ socket[kMaxRequests] = client[kMaxRequests]
+ socket[kClient] = client
+ socket[kError] = null
+
+ socket
+ .on('error', onSocketError)
+ .on('readable', onSocketReadable)
+ .on('end', onSocketEnd)
+ .on('close', onSocketClose)
+
+ client[kSocket] = socket
+
+ if (channels.connected.hasSubscribers) {
+ channels.connected.publish({
+ connectParams: {
+ host,
+ hostname,
+ protocol,
+ port,
+ servername: client[kServerName],
+ localAddress: client[kLocalAddress]
+ },
+ connector: client[kConnector],
+ socket
+ })
+ }
+ client.emit('connect', client[kUrl], [client])
+ } catch (err) {
+ if (client.destroyed) {
+ return
+ }
+
+ client[kConnecting] = false
+
+ if (channels.connectError.hasSubscribers) {
+ channels.connectError.publish({
+ connectParams: {
+ host,
+ hostname,
+ protocol,
+ port,
+ servername: client[kServerName],
+ localAddress: client[kLocalAddress]
+ },
+ connector: client[kConnector],
+ error: err
+ })
+ }
+
+ if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
+ assert(client[kRunning] === 0)
+ while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
+ const request = client[kQueue][client[kPendingIdx]++]
+ errorRequest(client, request, err)
+ }
+ } else {
+ onError(client, err)
+ }
+
+ client.emit('connectionError', client[kUrl], [client], err)
+ }
+
+ resume(client)
+}
+
+function emitDrain (client) {
+ client[kNeedDrain] = 0
+ client.emit('drain', client[kUrl], [client])
+}
+
+function resume (client, sync) {
+ if (client[kResuming] === 2) {
+ return
+ }
+
+ client[kResuming] = 2
+
+ _resume(client, sync)
+ client[kResuming] = 0
+
+ if (client[kRunningIdx] > 256) {
+ client[kQueue].splice(0, client[kRunningIdx])
+ client[kPendingIdx] -= client[kRunningIdx]
+ client[kRunningIdx] = 0
+ }
+}
+
+function _resume (client, sync) {
+ while (true) {
+ if (client.destroyed) {
+ assert(client[kPending] === 0)
+ return
+ }
+
+ if (client[kClosedResolve] && !client[kSize]) {
+ client[kClosedResolve]()
+ client[kClosedResolve] = null
+ return
+ }
+
+ const socket = client[kSocket]
+
+ if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') {
+ if (client[kSize] === 0) {
+ if (!socket[kNoRef] && socket.unref) {
+ socket.unref()
+ socket[kNoRef] = true
+ }
+ } else if (socket[kNoRef] && socket.ref) {
+ socket.ref()
+ socket[kNoRef] = false
+ }
+
+ if (client[kSize] === 0) {
+ if (socket[kParser].timeoutType !== TIMEOUT_IDLE) {
+ socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE)
+ }
+ } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) {
+ if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) {
+ const request = client[kQueue][client[kRunningIdx]]
+ const headersTimeout = request.headersTimeout != null
+ ? request.headersTimeout
+ : client[kHeadersTimeout]
+ socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS)
+ }
+ }
+ }
+
+ if (client[kBusy]) {
+ client[kNeedDrain] = 2
+ } else if (client[kNeedDrain] === 2) {
+ if (sync) {
+ client[kNeedDrain] = 1
+ process.nextTick(emitDrain, client)
+ } else {
+ emitDrain(client)
+ }
+ continue
+ }
+
+ if (client[kPending] === 0) {
+ return
+ }
+
+ if (client[kRunning] >= (client[kPipelining] || 1)) {
+ return
+ }
+
+ const request = client[kQueue][client[kPendingIdx]]
+
+ if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
+ if (client[kRunning] > 0) {
+ return
+ }
+
+ client[kServerName] = request.servername
+
+ if (socket && socket.servername !== request.servername) {
+ util.destroy(socket, new InformationalError('servername changed'))
+ return
+ }
+ }
+
+ if (client[kConnecting]) {
+ return
+ }
+
+ if (!socket && !client[kHTTP2Session]) {
+ connect(client)
+ return
+ }
+
+ if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) {
+ return
+ }
+
+ if (client[kRunning] > 0 && !request.idempotent) {
+ // Non-idempotent request cannot be retried.
+ // Ensure that no other requests are inflight and
+ // could cause failure.
+ return
+ }
+
+ if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) {
+ // Don't dispatch an upgrade until all preceding requests have completed.
+ // A misbehaving server might upgrade the connection before all pipelined
+ // requests have completed.
+ return
+ }
+
+ if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 &&
+ (util.isStream(request.body) || util.isAsyncIterable(request.body))) {
+ // Request with stream or iterator body can error while other requests
+ // are inflight and indirectly error those as well.
+ // Ensure this doesn't happen by waiting for inflight
+ // to complete before dispatching.
+
+ // Request with stream or iterator body cannot be retried.
+ // Ensure that no other requests are inflight and
+ // could cause failure.
+ return
+ }
+
+ if (!request.aborted && write(client, request)) {
+ client[kPendingIdx]++
+ } else {
+ client[kQueue].splice(client[kPendingIdx], 1)
+ }
+ }
+}
+
+// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
+function shouldSendContentLength (method) {
+ return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'
+}
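+// For example: shouldSendContentLength('POST') === true, while
+// shouldSendContentLength('GET') === false.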
+
+function write (client, request) {
+ if (client[kHTTPConnVersion] === 'h2') {
+ writeH2(client, client[kHTTP2Session], request)
+ return
+ }
+
+ const { body, method, path, host, upgrade, headers, blocking, reset } = request
+
+ // https://tools.ietf.org/html/rfc7231#section-4.3.1
+ // https://tools.ietf.org/html/rfc7231#section-4.3.2
+ // https://tools.ietf.org/html/rfc7231#section-4.3.5
+
+ // Sending a payload body on a request that does not
+ // expect it can cause undefined behavior on some
+ // servers and corrupt connection state. Do not
+ // re-use the connection for further requests.
+
+ const expectsPayload = (
+ method === 'PUT' ||
+ method === 'POST' ||
+ method === 'PATCH'
+ )
+
+ if (body && typeof body.read === 'function') {
+ // Try to read EOF in order to get length.
+ body.read(0)
+ }
+
+ const bodyLength = util.bodyLength(body)
+
+ let contentLength = bodyLength
+
+ if (contentLength === null) {
+ contentLength = request.contentLength
+ }
+
+ if (contentLength === 0 && !expectsPayload) {
+ // https://tools.ietf.org/html/rfc7230#section-3.3.2
+ // A user agent SHOULD NOT send a Content-Length header field when
+ // the request message does not contain a payload body and the method
+ // semantics do not anticipate such a body.
+
+ contentLength = null
+ }
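+ // Illustration: a GET with no payload typically reaches this point with
+ // contentLength === null, so no content-length header is written below.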
+
+ // https://github.com/nodejs/undici/issues/2046
+ // A user agent may send a Content-Length header with 0 value; this should be allowed.
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) {
+ if (client[kStrictContentLength]) {
+ errorRequest(client, request, new RequestContentLengthMismatchError())
+ return false
+ }
+
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+
+ const socket = client[kSocket]
+
+ try {
+ request.onConnect((err) => {
+ if (request.aborted || request.completed) {
+ return
+ }
+
+ errorRequest(client, request, err || new RequestAbortedError())
+
+ util.destroy(socket, new InformationalError('aborted'))
+ })
+ } catch (err) {
+ errorRequest(client, request, err)
+ }
+
+ if (request.aborted) {
+ return false
+ }
+
+ if (method === 'HEAD') {
+ // https://github.com/mcollina/undici/issues/258
+ // Close after a HEAD request to interop with misbehaving servers
+ // that may send a body in the response.
+
+ socket[kReset] = true
+ }
+
+ if (upgrade || method === 'CONNECT') {
+ // On CONNECT or upgrade, block pipeline from dispatching further
+ // requests on this connection.
+
+ socket[kReset] = true
+ }
+
+ if (reset != null) {
+ socket[kReset] = reset
+ }
+
+ if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) {
+ socket[kReset] = true
+ }
+
+ if (blocking) {
+ socket[kBlocking] = true
+ }
+
+ let header = `${method} ${path} HTTP/1.1\r\n`
+
+ if (typeof host === 'string') {
+ header += `host: ${host}\r\n`
+ } else {
+ header += client[kHostHeader]
+ }
+
+ if (upgrade) {
+ header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n`
+ } else if (client[kPipelining] && !socket[kReset]) {
+ header += 'connection: keep-alive\r\n'
+ } else {
+ header += 'connection: close\r\n'
+ }
+
+ if (headers) {
+ header += headers
+ }
+
+ if (channels.sendHeaders.hasSubscribers) {
+ channels.sendHeaders.publish({ request, headers: header, socket })
+ }
+
+ /* istanbul ignore else: assertion */
+ if (!body || bodyLength === 0) {
+ if (contentLength === 0) {
+ socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
+ } else {
+ assert(contentLength === null, 'no body must not have content length')
+ socket.write(`${header}\r\n`, 'latin1')
+ }
+ request.onRequestSent()
+ } else if (util.isBuffer(body)) {
+ assert(contentLength === body.byteLength, 'buffer body must have content length')
+
+ socket.cork()
+ socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+ socket.write(body)
+ socket.uncork()
+ request.onBodySent(body)
+ request.onRequestSent()
+ if (!expectsPayload) {
+ socket[kReset] = true
+ }
+ } else if (util.isBlobLike(body)) {
+ if (typeof body.stream === 'function') {
+ writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload })
+ } else {
+ writeBlob({ body, client, request, socket, contentLength, header, expectsPayload })
+ }
+ } else if (util.isStream(body)) {
+ writeStream({ body, client, request, socket, contentLength, header, expectsPayload })
+ } else if (util.isIterable(body)) {
+ writeIterable({ body, client, request, socket, contentLength, header, expectsPayload })
+ } else {
+ assert(false)
+ }
+
+ return true
+}
+
+function writeH2 (client, session, request) {
+ const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
+
+ let headers
+ if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim())
+ else headers = reqHeaders
+
+ if (upgrade) {
+ errorRequest(client, request, new Error('Upgrade not supported for H2'))
+ return false
+ }
+
+ try {
+ // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event?
+ request.onConnect((err) => {
+ if (request.aborted || request.completed) {
+ return
+ }
+
+ errorRequest(client, request, err || new RequestAbortedError())
+ })
+ } catch (err) {
+ errorRequest(client, request, err)
+ }
+
+ if (request.aborted) {
+ return false
+ }
+
+ /** @type {import('node:http2').ClientHttp2Stream} */
+ let stream
+ const h2State = client[kHTTP2SessionState]
+
+ headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]
+ headers[HTTP2_HEADER_METHOD] = method
+
+ if (method === 'CONNECT') {
+ session.ref()
+ // We are already connected and streams are pending; the first request
+ // will create a new stream. We trigger a request to create the stream and
+ // wait until the `ready` event is triggered.
+ // We disable endStream to allow the user to write to the stream.
+ stream = session.request(headers, { endStream: false, signal })
+
+ if (stream.id && !stream.pending) {
+ request.onUpgrade(null, null, stream)
+ ++h2State.openStreams
+ } else {
+ stream.once('ready', () => {
+ request.onUpgrade(null, null, stream)
+ ++h2State.openStreams
+ })
+ }
+
+ stream.once('close', () => {
+ h2State.openStreams -= 1
+ // TODO(HTTP/2): unref only if current streams count is 0
+ if (h2State.openStreams === 0) session.unref()
+ })
+
+ return true
+ }
+
+ // https://tools.ietf.org/html/rfc7540#section-8.3
+ // :path and :scheme headers must be omitted when sending CONNECT
+
+ headers[HTTP2_HEADER_PATH] = path
+ headers[HTTP2_HEADER_SCHEME] = 'https'
+
+ // https://tools.ietf.org/html/rfc7231#section-4.3.1
+ // https://tools.ietf.org/html/rfc7231#section-4.3.2
+ // https://tools.ietf.org/html/rfc7231#section-4.3.5
+
+ // Sending a payload body on a request that does not
+ // expect it can cause undefined behavior on some
+ // servers and corrupt connection state. Do not
+ // re-use the connection for further requests.
+
+ const expectsPayload = (
+ method === 'PUT' ||
+ method === 'POST' ||
+ method === 'PATCH'
+ )
+
+ if (body && typeof body.read === 'function') {
+ // Try to read EOF in order to get length.
+ body.read(0)
+ }
+
+ let contentLength = util.bodyLength(body)
+
+ if (contentLength == null) {
+ contentLength = request.contentLength
+ }
+
+ if (contentLength === 0 || !expectsPayload) {
+ // https://tools.ietf.org/html/rfc7230#section-3.3.2
+ // A user agent SHOULD NOT send a Content-Length header field when
+ // the request message does not contain a payload body and the method
+ // semantics do not anticipate such a body.
+
+ contentLength = null
+ }
+
+ // https://github.com/nodejs/undici/issues/2046
+ // A user agent may send a Content-Length header with 0 value; this should be allowed.
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
+ if (client[kStrictContentLength]) {
+ errorRequest(client, request, new RequestContentLengthMismatchError())
+ return false
+ }
+
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+
+ if (contentLength != null) {
+ assert(body, 'no body must not have content length')
+ headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
+ }
+
+ session.ref()
+
+ const shouldEndStream = method === 'GET' || method === 'HEAD'
+ if (expectContinue) {
+ headers[HTTP2_HEADER_EXPECT] = '100-continue'
+ stream = session.request(headers, { endStream: shouldEndStream, signal })
+
+ stream.once('continue', writeBodyH2)
+ } else {
+ stream = session.request(headers, {
+ endStream: shouldEndStream,
+ signal
+ })
+ writeBodyH2()
+ }
+
+ // Increment counter as we have a new stream open
+ ++h2State.openStreams
+
+ stream.once('response', headers => {
+ const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
+
+ if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {
+ stream.pause()
+ }
+ })
+
+ stream.once('end', () => {
+ request.onComplete([])
+ })
+
+ stream.on('data', (chunk) => {
+ if (request.onData(chunk) === false) {
+ stream.pause()
+ }
+ })
+
+ stream.once('close', () => {
+ h2State.openStreams -= 1
+ // TODO(HTTP/2): unref only if current streams count is 0
+ if (h2State.openStreams === 0) {
+ session.unref()
+ }
+ })
+
+ stream.once('error', function (err) {
+ if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+ h2State.streams -= 1
+ util.destroy(stream, err)
+ }
+ })
+
+ stream.once('frameError', (type, code) => {
+ const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
+ errorRequest(client, request, err)
+
+    if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !stream.closed && !stream.destroyed) {
+ h2State.streams -= 1
+ util.destroy(stream, err)
+ }
+ })
+
+ // stream.on('aborted', () => {
+ // // TODO(HTTP/2): Support aborted
+ // })
+
+ // stream.on('timeout', () => {
+ // // TODO(HTTP/2): Support timeout
+ // })
+
+ // stream.on('push', headers => {
+  //   // TODO(HTTP/2): Support push
+ // })
+
+ // stream.on('trailers', headers => {
+ // // TODO(HTTP/2): Support trailers
+ // })
+
+ return true
+
+ function writeBodyH2 () {
+ /* istanbul ignore else: assertion */
+ if (!body) {
+ request.onRequestSent()
+ } else if (util.isBuffer(body)) {
+ assert(contentLength === body.byteLength, 'buffer body must have content length')
+ stream.cork()
+ stream.write(body)
+ stream.uncork()
+ stream.end()
+ request.onBodySent(body)
+ request.onRequestSent()
+ } else if (util.isBlobLike(body)) {
+ if (typeof body.stream === 'function') {
+ writeIterable({
+ client,
+ request,
+ contentLength,
+ h2stream: stream,
+ expectsPayload,
+ body: body.stream(),
+ socket: client[kSocket],
+ header: ''
+ })
+ } else {
+ writeBlob({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ h2stream: stream,
+ header: '',
+ socket: client[kSocket]
+ })
+ }
+ } else if (util.isStream(body)) {
+ writeStream({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ socket: client[kSocket],
+ h2stream: stream,
+ header: ''
+ })
+ } else if (util.isIterable(body)) {
+ writeIterable({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ header: '',
+ h2stream: stream,
+ socket: client[kSocket]
+ })
+ } else {
+ assert(false)
+ }
+ }
+}
+
+function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+ assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')
+
+ if (client[kHTTPConnVersion] === 'h2') {
+    // For HTTP/2, it is enough to pipe the stream
+ const pipe = pipeline(
+ body,
+ h2stream,
+ (err) => {
+ if (err) {
+ util.destroy(body, err)
+ util.destroy(h2stream, err)
+ } else {
+ request.onRequestSent()
+ }
+ }
+ )
+
+ pipe.on('data', onPipeData)
+ pipe.once('end', () => {
+ pipe.removeListener('data', onPipeData)
+ util.destroy(pipe)
+ })
+
+ function onPipeData (chunk) {
+ request.onBodySent(chunk)
+ }
+
+ return
+ }
+
+ let finished = false
+
+ const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })
+
+ const onData = function (chunk) {
+ if (finished) {
+ return
+ }
+
+ try {
+ if (!writer.write(chunk) && this.pause) {
+ this.pause()
+ }
+ } catch (err) {
+ util.destroy(this, err)
+ }
+ }
+ const onDrain = function () {
+ if (finished) {
+ return
+ }
+
+ if (body.resume) {
+ body.resume()
+ }
+ }
+ const onAbort = function () {
+ if (finished) {
+ return
+ }
+ const err = new RequestAbortedError()
+ queueMicrotask(() => onFinished(err))
+ }
+ const onFinished = function (err) {
+ if (finished) {
+ return
+ }
+
+ finished = true
+
+ assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1))
+
+ socket
+ .off('drain', onDrain)
+ .off('error', onFinished)
+
+ body
+ .removeListener('data', onData)
+ .removeListener('end', onFinished)
+ .removeListener('error', onFinished)
+ .removeListener('close', onAbort)
+
+ if (!err) {
+ try {
+ writer.end()
+ } catch (er) {
+ err = er
+ }
+ }
+
+ writer.destroy(err)
+
+ if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) {
+ util.destroy(body, err)
+ } else {
+ util.destroy(body)
+ }
+ }
+
+ body
+ .on('data', onData)
+ .on('end', onFinished)
+ .on('error', onFinished)
+ .on('close', onAbort)
+
+ if (body.resume) {
+ body.resume()
+ }
+
+ socket
+ .on('drain', onDrain)
+ .on('error', onFinished)
+}
+
+async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+ assert(contentLength === body.size, 'blob body must have content length')
+
+ const isH2 = client[kHTTPConnVersion] === 'h2'
+ try {
+ if (contentLength != null && contentLength !== body.size) {
+ throw new RequestContentLengthMismatchError()
+ }
+
+ const buffer = Buffer.from(await body.arrayBuffer())
+
+ if (isH2) {
+ h2stream.cork()
+ h2stream.write(buffer)
+ h2stream.uncork()
+ } else {
+ socket.cork()
+ socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+ socket.write(buffer)
+ socket.uncork()
+ }
+
+ request.onBodySent(buffer)
+ request.onRequestSent()
+
+ if (!expectsPayload) {
+ socket[kReset] = true
+ }
+
+ resume(client)
+ } catch (err) {
+ util.destroy(isH2 ? h2stream : socket, err)
+ }
+}
+
+async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+ assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
+
+ let callback = null
+ function onDrain () {
+ if (callback) {
+ const cb = callback
+ callback = null
+ cb()
+ }
+ }
+
+ const waitForDrain = () => new Promise((resolve, reject) => {
+ assert(callback === null)
+
+ if (socket[kError]) {
+ reject(socket[kError])
+ } else {
+ callback = resolve
+ }
+ })
+
+ if (client[kHTTPConnVersion] === 'h2') {
+ h2stream
+ .on('close', onDrain)
+ .on('drain', onDrain)
+
+ try {
+ // It's up to the user to somehow abort the async iterable.
+ for await (const chunk of body) {
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ const res = h2stream.write(chunk)
+ request.onBodySent(chunk)
+ if (!res) {
+ await waitForDrain()
+ }
+ }
+ } catch (err) {
+ h2stream.destroy(err)
+ } finally {
+ request.onRequestSent()
+ h2stream.end()
+ h2stream
+ .off('close', onDrain)
+ .off('drain', onDrain)
+ }
+
+ return
+ }
+
+ socket
+ .on('close', onDrain)
+ .on('drain', onDrain)
+
+ const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })
+ try {
+ // It's up to the user to somehow abort the async iterable.
+ for await (const chunk of body) {
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ if (!writer.write(chunk)) {
+ await waitForDrain()
+ }
+ }
+
+ writer.end()
+ } catch (err) {
+ writer.destroy(err)
+ } finally {
+ socket
+ .off('close', onDrain)
+ .off('drain', onDrain)
+ }
+}
+
+class AsyncWriter {
+ constructor ({ socket, request, contentLength, client, expectsPayload, header }) {
+ this.socket = socket
+ this.request = request
+ this.contentLength = contentLength
+ this.client = client
+ this.bytesWritten = 0
+ this.expectsPayload = expectsPayload
+ this.header = header
+
+ socket[kWriting] = true
+ }
+
+ write (chunk) {
+ const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this
+
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ if (socket.destroyed) {
+ return false
+ }
+
+ const len = Buffer.byteLength(chunk)
+ if (!len) {
+ return true
+ }
+
+    // Guard against writing more bytes than the declared content-length:
+    // throw in strict mode, otherwise emit a warning.
+ if (contentLength !== null && bytesWritten + len > contentLength) {
+ if (client[kStrictContentLength]) {
+ throw new RequestContentLengthMismatchError()
+ }
+
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+
+ socket.cork()
+
+ if (bytesWritten === 0) {
+ if (!expectsPayload) {
+ socket[kReset] = true
+ }
+
+ if (contentLength === null) {
+ socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1')
+ } else {
+ socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+ }
+ }
+
+ if (contentLength === null) {
+ socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1')
+ }
+
+ this.bytesWritten += len
+
+ const ret = socket.write(chunk)
+
+ socket.uncork()
+
+ request.onBodySent(chunk)
+
+ if (!ret) {
+ if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
+ // istanbul ignore else: only for jest
+ if (socket[kParser].timeout.refresh) {
+ socket[kParser].timeout.refresh()
+ }
+ }
+ }
+
+ return ret
+ }
+
+ end () {
+ const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this
+ request.onRequestSent()
+
+ socket[kWriting] = false
+
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ if (socket.destroyed) {
+ return
+ }
+
+ if (bytesWritten === 0) {
+ if (expectsPayload) {
+ // https://tools.ietf.org/html/rfc7230#section-3.3.2
+ // A user agent SHOULD send a Content-Length in a request message when
+ // no Transfer-Encoding is sent and the request method defines a meaning
+ // for an enclosed payload body.
+
+ socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
+ } else {
+ socket.write(`${header}\r\n`, 'latin1')
+ }
+ } else if (contentLength === null) {
+ socket.write('\r\n0\r\n\r\n', 'latin1')
+ }
+
+ if (contentLength !== null && bytesWritten !== contentLength) {
+ if (client[kStrictContentLength]) {
+ throw new RequestContentLengthMismatchError()
+ } else {
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+ }
+
+ if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
+ // istanbul ignore else: only for jest
+ if (socket[kParser].timeout.refresh) {
+ socket[kParser].timeout.refresh()
+ }
+ }
+
+ resume(client)
+ }
+
+ destroy (err) {
+ const { socket, client } = this
+
+ socket[kWriting] = false
+
+ if (err) {
+ assert(client[kRunning] <= 1, 'pipeline should only contain this request')
+ util.destroy(socket, err)
+ }
+ }
+}
+
+function errorRequest (client, request, err) {
+ try {
+ request.onError(err)
+ assert(request.aborted)
+ } catch (err) {
+ client.emit('error', err)
+ }
+}
+
+module.exports = Client
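The HTTP/1.1 writer above picks between content-length and chunked framing based on what it can learn about the body. The following is an illustrative sketch only (not part of this diff) of how a stream body flows through that path via the Client API; the origin URL and payload are hypothetical.

'use strict'

const { Readable } = require('stream')
const Client = require('./lib/client') // path assumed relative to the repository root

async function main () {
  const client = new Client('http://localhost:3000') // hypothetical origin

  // The length of this Readable is not known up front, so AsyncWriter falls back to
  // "transfer-encoding: chunked" framing (see AsyncWriter#write and #end above).
  const body = Readable.from(['hello ', 'world'])

  const { statusCode, body: resBody } = await client.request({
    path: '/upload',
    method: 'POST',
    body
  })

  console.log(statusCode)
  resBody.resume() // drain the response body

  await client.close()
}

main().catch(console.error)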
diff --git a/lib/compat/dispatcher-weakref.js b/lib/compat/dispatcher-weakref.js
new file mode 100644
index 0000000..8cb99e2
--- /dev/null
+++ b/lib/compat/dispatcher-weakref.js
@@ -0,0 +1,48 @@
+'use strict'
+
+/* istanbul ignore file: only for Node 12 */
+
+const { kConnected, kSize } = require('../core/symbols')
+
+class CompatWeakRef {
+ constructor (value) {
+ this.value = value
+ }
+
+ deref () {
+ return this.value[kConnected] === 0 && this.value[kSize] === 0
+ ? undefined
+ : this.value
+ }
+}
+
+class CompatFinalizer {
+ constructor (finalizer) {
+ this.finalizer = finalizer
+ }
+
+ register (dispatcher, key) {
+ if (dispatcher.on) {
+ dispatcher.on('disconnect', () => {
+ if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) {
+ this.finalizer(key)
+ }
+ })
+ }
+ }
+}
+
+module.exports = function () {
+ // FIXME: remove workaround when the Node bug is fixed
+ // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
+ if (process.env.NODE_V8_COVERAGE) {
+ return {
+ WeakRef: CompatWeakRef,
+ FinalizationRegistry: CompatFinalizer
+ }
+ }
+ return {
+ WeakRef: global.WeakRef || CompatWeakRef,
+ FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer
+ }
+}
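A sketch (not part of this diff) of how the compat classes approximate the native semantics: deref() and the finalizer key off the dispatcher's connection and queue counters and its 'disconnect' event rather than garbage collection. Setting NODE_V8_COVERAGE here is only done to force the compat path for the illustration; the stand-in dispatcher is hypothetical.

'use strict'

process.env.NODE_V8_COVERAGE = '1' // force the compat classes for this illustration

const EventEmitter = require('events')
const { kConnected, kSize } = require('./lib/core/symbols')
const { WeakRef, FinalizationRegistry } = require('./lib/compat/dispatcher-weakref')()

// A stand-in dispatcher exposing the two symbol-keyed fields the shim inspects.
const dispatcher = new EventEmitter()
dispatcher[kConnected] = 0
dispatcher[kSize] = 0

const ref = new WeakRef(dispatcher)
const registry = new FinalizationRegistry((key) => console.log(`drop ${key}`))
registry.register(dispatcher, 'https://example.com')

dispatcher.emit('disconnect') // compat finalizer fires: "drop https://example.com"
console.log(ref.deref())      // undefined, because both counters are zero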
diff --git a/lib/cookies/constants.js b/lib/cookies/constants.js
new file mode 100644
index 0000000..85f1fec
--- /dev/null
+++ b/lib/cookies/constants.js
@@ -0,0 +1,12 @@
+'use strict'
+
+// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
+const maxAttributeValueSize = 1024
+
+// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
+const maxNameValuePairSize = 4096
+
+module.exports = {
+ maxAttributeValueSize,
+ maxNameValuePairSize
+}
diff --git a/lib/cookies/index.js b/lib/cookies/index.js
new file mode 100644
index 0000000..c9c1f28
--- /dev/null
+++ b/lib/cookies/index.js
@@ -0,0 +1,184 @@
+'use strict'
+
+const { parseSetCookie } = require('./parse')
+const { stringify, getHeadersList } = require('./util')
+const { webidl } = require('../fetch/webidl')
+const { Headers } = require('../fetch/headers')
+
+/**
+ * @typedef {Object} Cookie
+ * @property {string} name
+ * @property {string} value
+ * @property {Date|number|undefined} expires
+ * @property {number|undefined} maxAge
+ * @property {string|undefined} domain
+ * @property {string|undefined} path
+ * @property {boolean|undefined} secure
+ * @property {boolean|undefined} httpOnly
+ * @property {'Strict'|'Lax'|'None'} sameSite
+ * @property {string[]} unparsed
+ */
+
+/**
+ * @param {Headers} headers
+ * @returns {Record<string, string>}
+ */
+function getCookies (headers) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })
+
+ webidl.brandCheck(headers, Headers, { strict: false })
+
+ const cookie = headers.get('cookie')
+ const out = {}
+
+ if (!cookie) {
+ return out
+ }
+
+ for (const piece of cookie.split(';')) {
+ const [name, ...value] = piece.split('=')
+
+ out[name.trim()] = value.join('=')
+ }
+
+ return out
+}
+
+/**
+ * @param {Headers} headers
+ * @param {string} name
+ * @param {{ path?: string, domain?: string }|undefined} attributes
+ * @returns {void}
+ */
+function deleteCookie (headers, name, attributes) {
+ webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })
+
+ webidl.brandCheck(headers, Headers, { strict: false })
+
+ name = webidl.converters.DOMString(name)
+ attributes = webidl.converters.DeleteCookieAttributes(attributes)
+
+ // Matches behavior of
+ // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
+ setCookie(headers, {
+ name,
+ value: '',
+ expires: new Date(0),
+ ...attributes
+ })
+}
+
+/**
+ * @param {Headers} headers
+ * @returns {Cookie[]}
+ */
+function getSetCookies (headers) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })
+
+ webidl.brandCheck(headers, Headers, { strict: false })
+
+ const cookies = getHeadersList(headers).cookies
+
+ if (!cookies) {
+ return []
+ }
+
+ // In older versions of undici, cookies is a list of name:value.
+ return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
+}
+
+/**
+ * @param {Headers} headers
+ * @param {Cookie} cookie
+ * @returns {void}
+ */
+function setCookie (headers, cookie) {
+ webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })
+
+ webidl.brandCheck(headers, Headers, { strict: false })
+
+ cookie = webidl.converters.Cookie(cookie)
+
+ const str = stringify(cookie)
+
+ if (str) {
+ headers.append('Set-Cookie', stringify(cookie))
+ }
+}
+
+webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([
+ {
+ converter: webidl.nullableConverter(webidl.converters.DOMString),
+ key: 'path',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.DOMString),
+ key: 'domain',
+ defaultValue: null
+ }
+])
+
+webidl.converters.Cookie = webidl.dictionaryConverter([
+ {
+ converter: webidl.converters.DOMString,
+ key: 'name'
+ },
+ {
+ converter: webidl.converters.DOMString,
+ key: 'value'
+ },
+ {
+ converter: webidl.nullableConverter((value) => {
+ if (typeof value === 'number') {
+ return webidl.converters['unsigned long long'](value)
+ }
+
+ return new Date(value)
+ }),
+ key: 'expires',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters['long long']),
+ key: 'maxAge',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.DOMString),
+ key: 'domain',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.DOMString),
+ key: 'path',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.boolean),
+ key: 'secure',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.boolean),
+ key: 'httpOnly',
+ defaultValue: null
+ },
+ {
+ converter: webidl.converters.USVString,
+ key: 'sameSite',
+ allowedValues: ['Strict', 'Lax', 'None']
+ },
+ {
+ converter: webidl.sequenceConverter(webidl.converters.DOMString),
+ key: 'unparsed',
+ defaultValue: []
+ }
+])
+
+module.exports = {
+ getCookies,
+ deleteCookie,
+ getSetCookies,
+ setCookie
+}
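An illustrative round trip (not part of this diff) through the helpers above, using the fetch Headers implementation from this tree; the cookie names and values are hypothetical.

'use strict'

const { Headers } = require('./lib/fetch/headers')
const { setCookie, getSetCookies, getCookies, deleteCookie } = require('./lib/cookies')

const headers = new Headers()

setCookie(headers, { name: 'sid', value: 'abc123', path: '/', secure: true, sameSite: 'Lax' })
console.log(getSetCookies(headers))
// [ { name: 'sid', value: 'abc123', secure: true, path: '/', sameSite: 'Lax' } ]

headers.set('cookie', 'a=1; b=2')
console.log(getCookies(headers)) // { a: '1', b: '2' }

deleteCookie(headers, 'sid') // appends a Set-Cookie for 'sid' with an Expires date in the past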
diff --git a/lib/cookies/parse.js b/lib/cookies/parse.js
new file mode 100644
index 0000000..aae2750
--- /dev/null
+++ b/lib/cookies/parse.js
@@ -0,0 +1,317 @@
+'use strict'
+
+const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
+const { isCTLExcludingHtab } = require('./util')
+const { collectASequenceOfCodePointsFast } = require('../fetch/dataURL')
+const assert = require('assert')
+
+/**
+ * @description Parses the field-value attributes of a set-cookie header string.
+ * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
+ * @param {string} header
+ * @returns the parsed cookie, or null if the header is invalid
+ */
+function parseSetCookie (header) {
+ // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F
+ // character (CTL characters excluding HTAB): Abort these steps and
+ // ignore the set-cookie-string entirely.
+ if (isCTLExcludingHtab(header)) {
+ return null
+ }
+
+ let nameValuePair = ''
+ let unparsedAttributes = ''
+ let name = ''
+ let value = ''
+
+ // 2. If the set-cookie-string contains a %x3B (";") character:
+ if (header.includes(';')) {
+ // 1. The name-value-pair string consists of the characters up to,
+ // but not including, the first %x3B (";"), and the unparsed-
+ // attributes consist of the remainder of the set-cookie-string
+ // (including the %x3B (";") in question).
+ const position = { position: 0 }
+
+ nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
+ unparsedAttributes = header.slice(position.position)
+ } else {
+ // Otherwise:
+
+ // 1. The name-value-pair string consists of all the characters
+ // contained in the set-cookie-string, and the unparsed-
+ // attributes is the empty string.
+ nameValuePair = header
+ }
+
+ // 3. If the name-value-pair string lacks a %x3D ("=") character, then
+ // the name string is empty, and the value string is the value of
+ // name-value-pair.
+ if (!nameValuePair.includes('=')) {
+ value = nameValuePair
+ } else {
+ // Otherwise, the name string consists of the characters up to, but
+ // not including, the first %x3D ("=") character, and the (possibly
+ // empty) value string consists of the characters after the first
+ // %x3D ("=") character.
+ const position = { position: 0 }
+ name = collectASequenceOfCodePointsFast(
+ '=',
+ nameValuePair,
+ position
+ )
+ value = nameValuePair.slice(position.position + 1)
+ }
+
+ // 4. Remove any leading or trailing WSP characters from the name
+ // string and the value string.
+ name = name.trim()
+ value = value.trim()
+
+ // 5. If the sum of the lengths of the name string and the value string
+ // is more than 4096 octets, abort these steps and ignore the set-
+ // cookie-string entirely.
+ if (name.length + value.length > maxNameValuePairSize) {
+ return null
+ }
+
+ // 6. The cookie-name is the name string, and the cookie-value is the
+ // value string.
+ return {
+ name, value, ...parseUnparsedAttributes(unparsedAttributes)
+ }
+}
+
+/**
+ * Parses the remaining attributes of a set-cookie header
+ * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
+ * @param {string} unparsedAttributes
+ * @param {[Object.<string, unknown>]={}} cookieAttributeList
+ */
+function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
+ // 1. If the unparsed-attributes string is empty, skip the rest of
+ // these steps.
+ if (unparsedAttributes.length === 0) {
+ return cookieAttributeList
+ }
+
+ // 2. Discard the first character of the unparsed-attributes (which
+ // will be a %x3B (";") character).
+ assert(unparsedAttributes[0] === ';')
+ unparsedAttributes = unparsedAttributes.slice(1)
+
+ let cookieAv = ''
+
+ // 3. If the remaining unparsed-attributes contains a %x3B (";")
+ // character:
+ if (unparsedAttributes.includes(';')) {
+ // 1. Consume the characters of the unparsed-attributes up to, but
+ // not including, the first %x3B (";") character.
+ cookieAv = collectASequenceOfCodePointsFast(
+ ';',
+ unparsedAttributes,
+ { position: 0 }
+ )
+ unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
+ } else {
+ // Otherwise:
+
+ // 1. Consume the remainder of the unparsed-attributes.
+ cookieAv = unparsedAttributes
+ unparsedAttributes = ''
+ }
+
+ // Let the cookie-av string be the characters consumed in this step.
+
+ let attributeName = ''
+ let attributeValue = ''
+
+ // 4. If the cookie-av string contains a %x3D ("=") character:
+ if (cookieAv.includes('=')) {
+ // 1. The (possibly empty) attribute-name string consists of the
+ // characters up to, but not including, the first %x3D ("=")
+ // character, and the (possibly empty) attribute-value string
+ // consists of the characters after the first %x3D ("=")
+ // character.
+ const position = { position: 0 }
+
+ attributeName = collectASequenceOfCodePointsFast(
+ '=',
+ cookieAv,
+ position
+ )
+ attributeValue = cookieAv.slice(position.position + 1)
+ } else {
+ // Otherwise:
+
+ // 1. The attribute-name string consists of the entire cookie-av
+ // string, and the attribute-value string is empty.
+ attributeName = cookieAv
+ }
+
+ // 5. Remove any leading or trailing WSP characters from the attribute-
+ // name string and the attribute-value string.
+ attributeName = attributeName.trim()
+ attributeValue = attributeValue.trim()
+
+ // 6. If the attribute-value is longer than 1024 octets, ignore the
+ // cookie-av string and return to Step 1 of this algorithm.
+ if (attributeValue.length > maxAttributeValueSize) {
+ return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+ }
+
+ // 7. Process the attribute-name and attribute-value according to the
+ // requirements in the following subsections. (Notice that
+ // attributes with unrecognized attribute-names are ignored.)
+ const attributeNameLowercase = attributeName.toLowerCase()
+
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
+ // If the attribute-name case-insensitively matches the string
+ // "Expires", the user agent MUST process the cookie-av as follows.
+ if (attributeNameLowercase === 'expires') {
+ // 1. Let the expiry-time be the result of parsing the attribute-value
+ // as cookie-date (see Section 5.1.1).
+ const expiryTime = new Date(attributeValue)
+
+ // 2. If the attribute-value failed to parse as a cookie date, ignore
+ // the cookie-av.
+
+ cookieAttributeList.expires = expiryTime
+ } else if (attributeNameLowercase === 'max-age') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
+ // If the attribute-name case-insensitively matches the string "Max-
+ // Age", the user agent MUST process the cookie-av as follows.
+
+ // 1. If the first character of the attribute-value is not a DIGIT or a
+ // "-" character, ignore the cookie-av.
+ const charCode = attributeValue.charCodeAt(0)
+
+ if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
+ return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+ }
+
+ // 2. If the remainder of attribute-value contains a non-DIGIT
+ // character, ignore the cookie-av.
+ if (!/^\d+$/.test(attributeValue)) {
+ return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+ }
+
+ // 3. Let delta-seconds be the attribute-value converted to an integer.
+ const deltaSeconds = Number(attributeValue)
+
+ // 4. Let cookie-age-limit be the maximum age of the cookie (which
+ // SHOULD be 400 days or less, see Section 4.1.2.2).
+
+ // 5. Set delta-seconds to the smaller of its present value and cookie-
+ // age-limit.
+ // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)
+
+ // 6. If delta-seconds is less than or equal to zero (0), let expiry-
+ // time be the earliest representable date and time. Otherwise, let
+ // the expiry-time be the current date and time plus delta-seconds
+ // seconds.
+ // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds
+
+ // 7. Append an attribute to the cookie-attribute-list with an
+ // attribute-name of Max-Age and an attribute-value of expiry-time.
+ cookieAttributeList.maxAge = deltaSeconds
+ } else if (attributeNameLowercase === 'domain') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
+ // If the attribute-name case-insensitively matches the string "Domain",
+ // the user agent MUST process the cookie-av as follows.
+
+ // 1. Let cookie-domain be the attribute-value.
+ let cookieDomain = attributeValue
+
+ // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
+ // cookie-domain without its leading %x2E (".").
+ if (cookieDomain[0] === '.') {
+ cookieDomain = cookieDomain.slice(1)
+ }
+
+ // 3. Convert the cookie-domain to lower case.
+ cookieDomain = cookieDomain.toLowerCase()
+
+ // 4. Append an attribute to the cookie-attribute-list with an
+ // attribute-name of Domain and an attribute-value of cookie-domain.
+ cookieAttributeList.domain = cookieDomain
+ } else if (attributeNameLowercase === 'path') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
+ // If the attribute-name case-insensitively matches the string "Path",
+ // the user agent MUST process the cookie-av as follows.
+
+ // 1. If the attribute-value is empty or if the first character of the
+ // attribute-value is not %x2F ("/"):
+ let cookiePath = ''
+ if (attributeValue.length === 0 || attributeValue[0] !== '/') {
+ // 1. Let cookie-path be the default-path.
+ cookiePath = '/'
+ } else {
+ // Otherwise:
+
+ // 1. Let cookie-path be the attribute-value.
+ cookiePath = attributeValue
+ }
+
+ // 2. Append an attribute to the cookie-attribute-list with an
+ // attribute-name of Path and an attribute-value of cookie-path.
+ cookieAttributeList.path = cookiePath
+ } else if (attributeNameLowercase === 'secure') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
+ // If the attribute-name case-insensitively matches the string "Secure",
+ // the user agent MUST append an attribute to the cookie-attribute-list
+ // with an attribute-name of Secure and an empty attribute-value.
+
+ cookieAttributeList.secure = true
+ } else if (attributeNameLowercase === 'httponly') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
+ // If the attribute-name case-insensitively matches the string
+ // "HttpOnly", the user agent MUST append an attribute to the cookie-
+ // attribute-list with an attribute-name of HttpOnly and an empty
+ // attribute-value.
+
+ cookieAttributeList.httpOnly = true
+ } else if (attributeNameLowercase === 'samesite') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
+ // If the attribute-name case-insensitively matches the string
+ // "SameSite", the user agent MUST process the cookie-av as follows:
+
+ // 1. Let enforcement be "Default".
+ let enforcement = 'Default'
+
+ const attributeValueLowercase = attributeValue.toLowerCase()
+ // 2. If cookie-av's attribute-value is a case-insensitive match for
+ // "None", set enforcement to "None".
+ if (attributeValueLowercase.includes('none')) {
+ enforcement = 'None'
+ }
+
+ // 3. If cookie-av's attribute-value is a case-insensitive match for
+ // "Strict", set enforcement to "Strict".
+ if (attributeValueLowercase.includes('strict')) {
+ enforcement = 'Strict'
+ }
+
+ // 4. If cookie-av's attribute-value is a case-insensitive match for
+ // "Lax", set enforcement to "Lax".
+ if (attributeValueLowercase.includes('lax')) {
+ enforcement = 'Lax'
+ }
+
+ // 5. Append an attribute to the cookie-attribute-list with an
+ // attribute-name of "SameSite" and an attribute-value of
+ // enforcement.
+ cookieAttributeList.sameSite = enforcement
+ } else {
+ cookieAttributeList.unparsed ??= []
+
+ cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
+ }
+
+ // 8. Return to Step 1 of this algorithm.
+ return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+}
+
+module.exports = {
+ parseSetCookie,
+ parseUnparsedAttributes
+}
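An illustrative call (not part of this diff) showing what the parser above produces for a typical Set-Cookie value; the values are hypothetical.

'use strict'

const { parseSetCookie } = require('./lib/cookies/parse')

console.log(parseSetCookie('sid=abc123; Max-Age=3600; Path=/; Secure; HttpOnly; SameSite=Lax; X-Extra=1'))
// {
//   name: 'sid',
//   value: 'abc123',
//   maxAge: 3600,
//   path: '/',
//   secure: true,
//   httpOnly: true,
//   sameSite: 'Lax',
//   unparsed: [ 'X-Extra=1' ]
// }

// A value containing a CTL character other than HTAB is rejected outright.
console.log(parseSetCookie('sid=abc\x00123')) // null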
diff --git a/lib/cookies/util.js b/lib/cookies/util.js
new file mode 100644
index 0000000..2290329
--- /dev/null
+++ b/lib/cookies/util.js
@@ -0,0 +1,291 @@
+'use strict'
+
+const assert = require('assert')
+const { kHeadersList } = require('../core/symbols')
+
+function isCTLExcludingHtab (value) {
+ if (value.length === 0) {
+ return false
+ }
+
+ for (const char of value) {
+ const code = char.charCodeAt(0)
+
+    if (
+      (code >= 0x00 && code <= 0x08) ||
+      (code >= 0x0A && code <= 0x1F) ||
+      code === 0x7F
+    ) {
+      return true
+    }
+  }
+
+  return false
+}
+
+/**
+ CHAR = <any US-ASCII character (octets 0 - 127)>
+ token = 1*<any CHAR except CTLs or separators>
+ separators = "(" | ")" | "<" | ">" | "@"
+ | "," | ";" | ":" | "\" | <">
+ | "/" | "[" | "]" | "?" | "="
+ | "{" | "}" | SP | HT
+ * @param {string} name
+ */
+function validateCookieName (name) {
+ for (const char of name) {
+ const code = char.charCodeAt(0)
+
+ if (
+ (code <= 0x20 || code > 0x7F) ||
+ char === '(' ||
+ char === ')' ||
+ char === '>' ||
+ char === '<' ||
+ char === '@' ||
+ char === ',' ||
+ char === ';' ||
+ char === ':' ||
+ char === '\\' ||
+ char === '"' ||
+ char === '/' ||
+ char === '[' ||
+ char === ']' ||
+ char === '?' ||
+ char === '=' ||
+ char === '{' ||
+ char === '}'
+ ) {
+ throw new Error('Invalid cookie name')
+ }
+ }
+}
+
+/**
+ cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
+ cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
+ ; US-ASCII characters excluding CTLs,
+ ; whitespace DQUOTE, comma, semicolon,
+ ; and backslash
+ * @param {string} value
+ */
+function validateCookieValue (value) {
+ for (const char of value) {
+ const code = char.charCodeAt(0)
+
+ if (
+ code < 0x21 || // exclude CTLs (0-31)
+ code === 0x22 ||
+ code === 0x2C ||
+ code === 0x3B ||
+ code === 0x5C ||
+ code > 0x7E // non-ascii
+ ) {
+ throw new Error('Invalid header value')
+ }
+ }
+}
+
+/**
+ * path-value = <any CHAR except CTLs or ";">
+ * @param {string} path
+ */
+function validateCookiePath (path) {
+ for (const char of path) {
+ const code = char.charCodeAt(0)
+
+ if (code < 0x21 || char === ';') {
+ throw new Error('Invalid cookie path')
+ }
+ }
+}
+
+/**
+ * I have no idea why these values aren't allowed to be honest,
+ * but Deno tests these. - Khafra
+ * @param {string} domain
+ */
+function validateCookieDomain (domain) {
+ if (
+ domain.startsWith('-') ||
+ domain.endsWith('.') ||
+ domain.endsWith('-')
+ ) {
+ throw new Error('Invalid cookie domain')
+ }
+}
+
+/**
+ * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
+ * @param {number|Date} date
+ IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT
+ ; fixed length/zone/capitalization subset of the format
+ ; see Section 3.3 of [RFC5322]
+
+ day-name = %x4D.6F.6E ; "Mon", case-sensitive
+ / %x54.75.65 ; "Tue", case-sensitive
+ / %x57.65.64 ; "Wed", case-sensitive
+ / %x54.68.75 ; "Thu", case-sensitive
+ / %x46.72.69 ; "Fri", case-sensitive
+ / %x53.61.74 ; "Sat", case-sensitive
+ / %x53.75.6E ; "Sun", case-sensitive
+ date1 = day SP month SP year
+ ; e.g., 02 Jun 1982
+
+ day = 2DIGIT
+ month = %x4A.61.6E ; "Jan", case-sensitive
+ / %x46.65.62 ; "Feb", case-sensitive
+ / %x4D.61.72 ; "Mar", case-sensitive
+ / %x41.70.72 ; "Apr", case-sensitive
+ / %x4D.61.79 ; "May", case-sensitive
+ / %x4A.75.6E ; "Jun", case-sensitive
+ / %x4A.75.6C ; "Jul", case-sensitive
+ / %x41.75.67 ; "Aug", case-sensitive
+ / %x53.65.70 ; "Sep", case-sensitive
+ / %x4F.63.74 ; "Oct", case-sensitive
+ / %x4E.6F.76 ; "Nov", case-sensitive
+ / %x44.65.63 ; "Dec", case-sensitive
+ year = 4DIGIT
+
+ GMT = %x47.4D.54 ; "GMT", case-sensitive
+
+ time-of-day = hour ":" minute ":" second
+ ; 00:00:00 - 23:59:60 (leap second)
+
+ hour = 2DIGIT
+ minute = 2DIGIT
+ second = 2DIGIT
+ */
+function toIMFDate (date) {
+ if (typeof date === 'number') {
+ date = new Date(date)
+ }
+
+ const days = [
+ 'Sun', 'Mon', 'Tue', 'Wed',
+ 'Thu', 'Fri', 'Sat'
+ ]
+
+ const months = [
+ 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+ 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
+ ]
+
+ const dayName = days[date.getUTCDay()]
+ const day = date.getUTCDate().toString().padStart(2, '0')
+ const month = months[date.getUTCMonth()]
+ const year = date.getUTCFullYear()
+ const hour = date.getUTCHours().toString().padStart(2, '0')
+ const minute = date.getUTCMinutes().toString().padStart(2, '0')
+ const second = date.getUTCSeconds().toString().padStart(2, '0')
+
+ return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`
+}
+
+/**
+ max-age-av = "Max-Age=" non-zero-digit *DIGIT
+ ; In practice, both expires-av and max-age-av
+ ; are limited to dates representable by the
+ ; user agent.
+ * @param {number} maxAge
+ */
+function validateCookieMaxAge (maxAge) {
+ if (maxAge < 0) {
+ throw new Error('Invalid cookie max-age')
+ }
+}
+
+/**
+ * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
+ * @param {import('./index').Cookie} cookie
+ */
+function stringify (cookie) {
+ if (cookie.name.length === 0) {
+ return null
+ }
+
+ validateCookieName(cookie.name)
+ validateCookieValue(cookie.value)
+
+ const out = [`${cookie.name}=${cookie.value}`]
+
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
+ if (cookie.name.startsWith('__Secure-')) {
+ cookie.secure = true
+ }
+
+ if (cookie.name.startsWith('__Host-')) {
+ cookie.secure = true
+ cookie.domain = null
+ cookie.path = '/'
+ }
+
+ if (cookie.secure) {
+ out.push('Secure')
+ }
+
+ if (cookie.httpOnly) {
+ out.push('HttpOnly')
+ }
+
+ if (typeof cookie.maxAge === 'number') {
+ validateCookieMaxAge(cookie.maxAge)
+ out.push(`Max-Age=${cookie.maxAge}`)
+ }
+
+ if (cookie.domain) {
+ validateCookieDomain(cookie.domain)
+ out.push(`Domain=${cookie.domain}`)
+ }
+
+ if (cookie.path) {
+ validateCookiePath(cookie.path)
+ out.push(`Path=${cookie.path}`)
+ }
+
+ if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
+ out.push(`Expires=${toIMFDate(cookie.expires)}`)
+ }
+
+ if (cookie.sameSite) {
+ out.push(`SameSite=${cookie.sameSite}`)
+ }
+
+ for (const part of cookie.unparsed) {
+ if (!part.includes('=')) {
+ throw new Error('Invalid unparsed')
+ }
+
+ const [key, ...value] = part.split('=')
+
+ out.push(`${key.trim()}=${value.join('=')}`)
+ }
+
+ return out.join('; ')
+}
+
+let kHeadersListNode
+
+function getHeadersList (headers) {
+ if (headers[kHeadersList]) {
+ return headers[kHeadersList]
+ }
+
+ if (!kHeadersListNode) {
+ kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
+ (symbol) => symbol.description === 'headers list'
+ )
+
+ assert(kHeadersListNode, 'Headers cannot be parsed')
+ }
+
+ const headersList = headers[kHeadersListNode]
+ assert(headersList)
+
+ return headersList
+}
+
+module.exports = {
+ isCTLExcludingHtab,
+ stringify,
+ getHeadersList
+}
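An illustrative sketch (not part of this diff) of stringify, including the __Host- prefix handling above; the values are hypothetical.

'use strict'

const { stringify } = require('./lib/cookies/util')

console.log(stringify({
  name: 'token',
  value: 'abc123',
  maxAge: 3600,
  domain: 'example.com',
  sameSite: 'Strict',
  unparsed: []
}))
// token=abc123; Max-Age=3600; Domain=example.com; SameSite=Strict

console.log(stringify({ name: '__Host-id', value: '1', unparsed: [] }))
// __Host-id=1; Secure; Path=/   (the prefix forces Secure, drops Domain and pins Path to /)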
diff --git a/lib/core/connect.js b/lib/core/connect.js
new file mode 100644
index 0000000..3309117
--- /dev/null
+++ b/lib/core/connect.js
@@ -0,0 +1,189 @@
+'use strict'
+
+const net = require('net')
+const assert = require('assert')
+const util = require('./util')
+const { InvalidArgumentError, ConnectTimeoutError } = require('./errors')
+
+let tls // include tls conditionally since it is not always available
+
+// TODO: session re-use does not wait for the first
+// connection to resolve the session and might therefore
+// resolve the same servername multiple times even when
+// re-use is enabled.
+
+let SessionCache
+// FIXME: remove workaround when the Node bug is fixed
+// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
+if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
+ SessionCache = class WeakSessionCache {
+ constructor (maxCachedSessions) {
+ this._maxCachedSessions = maxCachedSessions
+ this._sessionCache = new Map()
+ this._sessionRegistry = new global.FinalizationRegistry((key) => {
+ if (this._sessionCache.size < this._maxCachedSessions) {
+ return
+ }
+
+ const ref = this._sessionCache.get(key)
+ if (ref !== undefined && ref.deref() === undefined) {
+ this._sessionCache.delete(key)
+ }
+ })
+ }
+
+ get (sessionKey) {
+ const ref = this._sessionCache.get(sessionKey)
+ return ref ? ref.deref() : null
+ }
+
+ set (sessionKey, session) {
+ if (this._maxCachedSessions === 0) {
+ return
+ }
+
+ this._sessionCache.set(sessionKey, new WeakRef(session))
+ this._sessionRegistry.register(session, sessionKey)
+ }
+ }
+} else {
+ SessionCache = class SimpleSessionCache {
+ constructor (maxCachedSessions) {
+ this._maxCachedSessions = maxCachedSessions
+ this._sessionCache = new Map()
+ }
+
+ get (sessionKey) {
+ return this._sessionCache.get(sessionKey)
+ }
+
+ set (sessionKey, session) {
+ if (this._maxCachedSessions === 0) {
+ return
+ }
+
+ if (this._sessionCache.size >= this._maxCachedSessions) {
+ // remove the oldest session
+ const { value: oldestKey } = this._sessionCache.keys().next()
+ this._sessionCache.delete(oldestKey)
+ }
+
+ this._sessionCache.set(sessionKey, session)
+ }
+ }
+}
+
+function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
+ if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
+ throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
+ }
+
+ const options = { path: socketPath, ...opts }
+ const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
+ timeout = timeout == null ? 10e3 : timeout
+ allowH2 = allowH2 != null ? allowH2 : false
+ return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
+ let socket
+ if (protocol === 'https:') {
+ if (!tls) {
+ tls = require('tls')
+ }
+ servername = servername || options.servername || util.getServerName(host) || null
+
+ const sessionKey = servername || hostname
+ const session = sessionCache.get(sessionKey) || null
+
+ assert(sessionKey)
+
+ socket = tls.connect({
+ highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
+ ...options,
+ servername,
+ session,
+ localAddress,
+ // TODO(HTTP/2): Add support for h2c
+ ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
+ socket: httpSocket, // upgrade socket connection
+ port: port || 443,
+ host: hostname
+ })
+
+ socket
+ .on('session', function (session) {
+ // TODO (fix): Can a session become invalid once established? Don't think so?
+ sessionCache.set(sessionKey, session)
+ })
+ } else {
+      assert(!httpSocket, 'httpSocket can only be sent on TLS upgrade')
+ socket = net.connect({
+ highWaterMark: 64 * 1024, // Same as nodejs fs streams.
+ ...options,
+ localAddress,
+ port: port || 80,
+ host: hostname
+ })
+ }
+
+    // Set the TCP keep-alive options on the socket here, rather than in the connect
+    // options, so they also apply when an existing socket is assigned (e.g. on upgrade)
+ if (options.keepAlive == null || options.keepAlive) {
+ const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
+ socket.setKeepAlive(true, keepAliveInitialDelay)
+ }
+
+ const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
+
+ socket
+ .setNoDelay(true)
+ .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
+ cancelTimeout()
+
+ if (callback) {
+ const cb = callback
+ callback = null
+ cb(null, this)
+ }
+ })
+ .on('error', function (err) {
+ cancelTimeout()
+
+ if (callback) {
+ const cb = callback
+ callback = null
+ cb(err)
+ }
+ })
+
+ return socket
+ }
+}
+
+function setupTimeout (onConnectTimeout, timeout) {
+ if (!timeout) {
+ return () => {}
+ }
+
+ let s1 = null
+ let s2 = null
+ const timeoutId = setTimeout(() => {
+    // setImmediate is added to make sure that we prioritise socket error events over timeouts
+ s1 = setImmediate(() => {
+ if (process.platform === 'win32') {
+ // Windows needs an extra setImmediate probably due to implementation differences in the socket logic
+ s2 = setImmediate(() => onConnectTimeout())
+ } else {
+ onConnectTimeout()
+ }
+ })
+ }, timeout)
+ return () => {
+ clearTimeout(timeoutId)
+ clearImmediate(s1)
+ clearImmediate(s2)
+ }
+}
+
+function onConnectTimeout (socket) {
+ util.destroy(socket, new ConnectTimeoutError())
+}
+
+module.exports = buildConnector
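A sketch (not part of this diff) of using the connector factory above directly; the host and options are hypothetical, and in practice the factory is normally handed to a Client or Agent via its connect option.

'use strict'

const buildConnector = require('./lib/core/connect')

const connect = buildConnector({ timeout: 5e3, maxCachedSessions: 50 })

connect({ protocol: 'https:', hostname: 'example.com', host: 'example.com', port: 443 }, (err, socket) => {
  if (err) {
    console.error('connect failed', err)
    return
  }

  // The TLS session is now cached under its servername/hostname key for re-use.
  console.log(socket.getProtocol())
  socket.end()
})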
diff --git a/lib/core/errors.js b/lib/core/errors.js
new file mode 100644
index 0000000..7af704b
--- /dev/null
+++ b/lib/core/errors.js
@@ -0,0 +1,230 @@
+'use strict'
+
+class UndiciError extends Error {
+ constructor (message) {
+ super(message)
+ this.name = 'UndiciError'
+ this.code = 'UND_ERR'
+ }
+}
+
+class ConnectTimeoutError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ConnectTimeoutError)
+ this.name = 'ConnectTimeoutError'
+ this.message = message || 'Connect Timeout Error'
+ this.code = 'UND_ERR_CONNECT_TIMEOUT'
+ }
+}
+
+class HeadersTimeoutError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, HeadersTimeoutError)
+ this.name = 'HeadersTimeoutError'
+ this.message = message || 'Headers Timeout Error'
+ this.code = 'UND_ERR_HEADERS_TIMEOUT'
+ }
+}
+
+class HeadersOverflowError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, HeadersOverflowError)
+ this.name = 'HeadersOverflowError'
+ this.message = message || 'Headers Overflow Error'
+ this.code = 'UND_ERR_HEADERS_OVERFLOW'
+ }
+}
+
+class BodyTimeoutError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, BodyTimeoutError)
+ this.name = 'BodyTimeoutError'
+ this.message = message || 'Body Timeout Error'
+ this.code = 'UND_ERR_BODY_TIMEOUT'
+ }
+}
+
+class ResponseStatusCodeError extends UndiciError {
+ constructor (message, statusCode, headers, body) {
+ super(message)
+ Error.captureStackTrace(this, ResponseStatusCodeError)
+ this.name = 'ResponseStatusCodeError'
+ this.message = message || 'Response Status Code Error'
+ this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
+ this.body = body
+ this.status = statusCode
+ this.statusCode = statusCode
+ this.headers = headers
+ }
+}
+
+class InvalidArgumentError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, InvalidArgumentError)
+ this.name = 'InvalidArgumentError'
+ this.message = message || 'Invalid Argument Error'
+ this.code = 'UND_ERR_INVALID_ARG'
+ }
+}
+
+class InvalidReturnValueError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, InvalidReturnValueError)
+ this.name = 'InvalidReturnValueError'
+ this.message = message || 'Invalid Return Value Error'
+ this.code = 'UND_ERR_INVALID_RETURN_VALUE'
+ }
+}
+
+class RequestAbortedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, RequestAbortedError)
+ this.name = 'AbortError'
+ this.message = message || 'Request aborted'
+ this.code = 'UND_ERR_ABORTED'
+ }
+}
+
+class InformationalError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, InformationalError)
+ this.name = 'InformationalError'
+ this.message = message || 'Request information'
+ this.code = 'UND_ERR_INFO'
+ }
+}
+
+class RequestContentLengthMismatchError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, RequestContentLengthMismatchError)
+ this.name = 'RequestContentLengthMismatchError'
+ this.message = message || 'Request body length does not match content-length header'
+ this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
+ }
+}
+
+class ResponseContentLengthMismatchError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ResponseContentLengthMismatchError)
+ this.name = 'ResponseContentLengthMismatchError'
+ this.message = message || 'Response body length does not match content-length header'
+ this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
+ }
+}
+
+class ClientDestroyedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ClientDestroyedError)
+ this.name = 'ClientDestroyedError'
+ this.message = message || 'The client is destroyed'
+ this.code = 'UND_ERR_DESTROYED'
+ }
+}
+
+class ClientClosedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ClientClosedError)
+ this.name = 'ClientClosedError'
+ this.message = message || 'The client is closed'
+ this.code = 'UND_ERR_CLOSED'
+ }
+}
+
+class SocketError extends UndiciError {
+ constructor (message, socket) {
+ super(message)
+ Error.captureStackTrace(this, SocketError)
+ this.name = 'SocketError'
+ this.message = message || 'Socket error'
+ this.code = 'UND_ERR_SOCKET'
+ this.socket = socket
+ }
+}
+
+class NotSupportedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, NotSupportedError)
+ this.name = 'NotSupportedError'
+ this.message = message || 'Not supported error'
+ this.code = 'UND_ERR_NOT_SUPPORTED'
+ }
+}
+
+class BalancedPoolMissingUpstreamError extends UndiciError {
+ constructor (message) {
+ super(message)
+    Error.captureStackTrace(this, BalancedPoolMissingUpstreamError)
+ this.name = 'MissingUpstreamError'
+ this.message = message || 'No upstream has been added to the BalancedPool'
+ this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
+ }
+}
+
+class HTTPParserError extends Error {
+ constructor (message, code, data) {
+ super(message)
+ Error.captureStackTrace(this, HTTPParserError)
+ this.name = 'HTTPParserError'
+ this.code = code ? `HPE_${code}` : undefined
+ this.data = data ? data.toString() : undefined
+ }
+}
+
+class ResponseExceededMaxSizeError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ResponseExceededMaxSizeError)
+ this.name = 'ResponseExceededMaxSizeError'
+ this.message = message || 'Response content exceeded max size'
+ this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
+ }
+}
+
+class RequestRetryError extends UndiciError {
+ constructor (message, code, { headers, data }) {
+ super(message)
+ Error.captureStackTrace(this, RequestRetryError)
+ this.name = 'RequestRetryError'
+ this.message = message || 'Request retry error'
+ this.code = 'UND_ERR_REQ_RETRY'
+ this.statusCode = code
+ this.data = data
+ this.headers = headers
+ }
+}
+
+module.exports = {
+ HTTPParserError,
+ UndiciError,
+ HeadersTimeoutError,
+ HeadersOverflowError,
+ BodyTimeoutError,
+ RequestContentLengthMismatchError,
+ ConnectTimeoutError,
+ ResponseStatusCodeError,
+ InvalidArgumentError,
+ InvalidReturnValueError,
+ RequestAbortedError,
+ ClientDestroyedError,
+ ClientClosedError,
+ InformationalError,
+ SocketError,
+ NotSupportedError,
+ ResponseContentLengthMismatchError,
+ BalancedPoolMissingUpstreamError,
+ ResponseExceededMaxSizeError,
+ RequestRetryError
+}
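Every class above carries a stable code property, so callers can branch on either the code or the class. The following is a hedged sketch (not part of this diff); the client and request options are hypothetical.

'use strict'

const { HeadersTimeoutError, RequestAbortedError } = require('./lib/core/errors')

async function requestWithDiagnostics (client, opts) {
  try {
    return await client.request(opts)
  } catch (err) {
    if (err.code === 'UND_ERR_CONNECT_TIMEOUT') {
      console.error('could not connect in time:', err.message)
    } else if (err instanceof HeadersTimeoutError) {
      console.error('server took too long to send headers:', err.message)
    } else if (err instanceof RequestAbortedError) {
      console.error('request was aborted')
    }
    throw err
  }
}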
diff --git a/lib/core/request.js b/lib/core/request.js
new file mode 100644
index 0000000..3697e6a
--- /dev/null
+++ b/lib/core/request.js
@@ -0,0 +1,499 @@
+'use strict'
+
+const {
+ InvalidArgumentError,
+ NotSupportedError
+} = require('./errors')
+const assert = require('assert')
+const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')
+const util = require('./util')
+
+// tokenRegExp and headerCharRegex have been lifted from
+// https://github.com/nodejs/node/blob/main/lib/_http_common.js
+
+/**
+ * Verifies that the given val is a valid HTTP token
+ * per the rules defined in RFC 7230
+ * See https://tools.ietf.org/html/rfc7230#section-3.2.6
+ */
+const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/
+
+/**
+ * Matches if val contains an invalid field-vchar
+ * field-value = *( field-content / obs-fold )
+ * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
+ * field-vchar = VCHAR / obs-text
+ */
+const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+
+// Verifies that a given path is valid and does not contain control chars \x00 to \x20
+const invalidPathRegex = /[^\u0021-\u00ff]/
+
+const kHandler = Symbol('handler')
+
+const channels = {}
+
+let extractBody
+
+try {
+ const diagnosticsChannel = require('diagnostics_channel')
+ channels.create = diagnosticsChannel.channel('undici:request:create')
+ channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
+ channels.headers = diagnosticsChannel.channel('undici:request:headers')
+ channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
+ channels.error = diagnosticsChannel.channel('undici:request:error')
+} catch {
+ channels.create = { hasSubscribers: false }
+ channels.bodySent = { hasSubscribers: false }
+ channels.headers = { hasSubscribers: false }
+ channels.trailers = { hasSubscribers: false }
+ channels.error = { hasSubscribers: false }
+}
+
+class Request {
+ constructor (origin, {
+ path,
+ method,
+ body,
+ headers,
+ query,
+ idempotent,
+ blocking,
+ upgrade,
+ headersTimeout,
+ bodyTimeout,
+ reset,
+ throwOnError,
+ expectContinue
+ }, handler) {
+ if (typeof path !== 'string') {
+ throw new InvalidArgumentError('path must be a string')
+ } else if (
+ path[0] !== '/' &&
+ !(path.startsWith('http://') || path.startsWith('https://')) &&
+ method !== 'CONNECT'
+ ) {
+ throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
+ } else if (invalidPathRegex.exec(path) !== null) {
+ throw new InvalidArgumentError('invalid request path')
+ }
+
+ if (typeof method !== 'string') {
+ throw new InvalidArgumentError('method must be a string')
+ } else if (tokenRegExp.exec(method) === null) {
+ throw new InvalidArgumentError('invalid request method')
+ }
+
+ if (upgrade && typeof upgrade !== 'string') {
+ throw new InvalidArgumentError('upgrade must be a string')
+ }
+
+ if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
+ throw new InvalidArgumentError('invalid headersTimeout')
+ }
+
+ if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
+ throw new InvalidArgumentError('invalid bodyTimeout')
+ }
+
+ if (reset != null && typeof reset !== 'boolean') {
+ throw new InvalidArgumentError('invalid reset')
+ }
+
+ if (expectContinue != null && typeof expectContinue !== 'boolean') {
+ throw new InvalidArgumentError('invalid expectContinue')
+ }
+
+ this.headersTimeout = headersTimeout
+
+ this.bodyTimeout = bodyTimeout
+
+ this.throwOnError = throwOnError === true
+
+ this.method = method
+
+ this.abort = null
+
+ if (body == null) {
+ this.body = null
+ } else if (util.isStream(body)) {
+ this.body = body
+
+ const rState = this.body._readableState
+ if (!rState || !rState.autoDestroy) {
+ this.endHandler = function autoDestroy () {
+ util.destroy(this)
+ }
+ this.body.on('end', this.endHandler)
+ }
+
+ this.errorHandler = err => {
+ if (this.abort) {
+ this.abort(err)
+ } else {
+ this.error = err
+ }
+ }
+ this.body.on('error', this.errorHandler)
+ } else if (util.isBuffer(body)) {
+ this.body = body.byteLength ? body : null
+ } else if (ArrayBuffer.isView(body)) {
+ this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
+ } else if (body instanceof ArrayBuffer) {
+ this.body = body.byteLength ? Buffer.from(body) : null
+ } else if (typeof body === 'string') {
+ this.body = body.length ? Buffer.from(body) : null
+ } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
+ this.body = body
+ } else {
+ throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
+ }
+
+ this.completed = false
+
+ this.aborted = false
+
+ this.upgrade = upgrade || null
+
+ this.path = query ? util.buildURL(path, query) : path
+
+ this.origin = origin
+
+ this.idempotent = idempotent == null
+ ? method === 'HEAD' || method === 'GET'
+ : idempotent
+
+ this.blocking = blocking == null ? false : blocking
+
+ this.reset = reset == null ? null : reset
+
+ this.host = null
+
+ this.contentLength = null
+
+ this.contentType = null
+
+ this.headers = ''
+
+ // Only for H2
+ this.expectContinue = expectContinue != null ? expectContinue : false
+
+ if (Array.isArray(headers)) {
+ if (headers.length % 2 !== 0) {
+ throw new InvalidArgumentError('headers array must be even')
+ }
+ for (let i = 0; i < headers.length; i += 2) {
+ processHeader(this, headers[i], headers[i + 1])
+ }
+ } else if (headers && typeof headers === 'object') {
+ const keys = Object.keys(headers)
+ for (let i = 0; i < keys.length; i++) {
+ const key = keys[i]
+ processHeader(this, key, headers[key])
+ }
+ } else if (headers != null) {
+ throw new InvalidArgumentError('headers must be an object or an array')
+ }
+
+ if (util.isFormDataLike(this.body)) {
+ if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
+ throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
+ }
+
+ if (!extractBody) {
+ extractBody = require('../fetch/body.js').extractBody
+ }
+
+ const [bodyStream, contentType] = extractBody(body)
+ if (this.contentType == null) {
+ this.contentType = contentType
+ this.headers += `content-type: ${contentType}\r\n`
+ }
+ this.body = bodyStream.stream
+ this.contentLength = bodyStream.length
+ } else if (util.isBlobLike(body) && this.contentType == null && body.type) {
+ this.contentType = body.type
+ this.headers += `content-type: ${body.type}\r\n`
+ }
+
+ util.validateHandler(handler, method, upgrade)
+
+ this.servername = util.getServerName(this.host)
+
+ this[kHandler] = handler
+
+ if (channels.create.hasSubscribers) {
+ channels.create.publish({ request: this })
+ }
+ }
+
+ onBodySent (chunk) {
+ if (this[kHandler].onBodySent) {
+ try {
+ return this[kHandler].onBodySent(chunk)
+ } catch (err) {
+ this.abort(err)
+ }
+ }
+ }
+
+ onRequestSent () {
+ if (channels.bodySent.hasSubscribers) {
+ channels.bodySent.publish({ request: this })
+ }
+
+ if (this[kHandler].onRequestSent) {
+ try {
+ return this[kHandler].onRequestSent()
+ } catch (err) {
+ this.abort(err)
+ }
+ }
+ }
+
+ onConnect (abort) {
+ assert(!this.aborted)
+ assert(!this.completed)
+
+ if (this.error) {
+ abort(this.error)
+ } else {
+ this.abort = abort
+ return this[kHandler].onConnect(abort)
+ }
+ }
+
+ onHeaders (statusCode, headers, resume, statusText) {
+ assert(!this.aborted)
+ assert(!this.completed)
+
+ if (channels.headers.hasSubscribers) {
+ channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
+ }
+
+ try {
+ return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
+ } catch (err) {
+ this.abort(err)
+ }
+ }
+
+ onData (chunk) {
+ assert(!this.aborted)
+ assert(!this.completed)
+
+ try {
+ return this[kHandler].onData(chunk)
+ } catch (err) {
+ this.abort(err)
+ return false
+ }
+ }
+
+ onUpgrade (statusCode, headers, socket) {
+ assert(!this.aborted)
+ assert(!this.completed)
+
+ return this[kHandler].onUpgrade(statusCode, headers, socket)
+ }
+
+ onComplete (trailers) {
+ this.onFinally()
+
+ assert(!this.aborted)
+
+ this.completed = true
+ if (channels.trailers.hasSubscribers) {
+ channels.trailers.publish({ request: this, trailers })
+ }
+
+ try {
+ return this[kHandler].onComplete(trailers)
+ } catch (err) {
+ // TODO (fix): This might be a bad idea?
+ this.onError(err)
+ }
+ }
+
+ onError (error) {
+ this.onFinally()
+
+ if (channels.error.hasSubscribers) {
+ channels.error.publish({ request: this, error })
+ }
+
+ if (this.aborted) {
+ return
+ }
+ this.aborted = true
+
+ return this[kHandler].onError(error)
+ }
+
+ onFinally () {
+ if (this.errorHandler) {
+ this.body.off('error', this.errorHandler)
+ this.errorHandler = null
+ }
+
+ if (this.endHandler) {
+ this.body.off('end', this.endHandler)
+ this.endHandler = null
+ }
+ }
+
+ // TODO: adjust to support H2
+ addHeader (key, value) {
+ processHeader(this, key, value)
+ return this
+ }
+
+ static [kHTTP1BuildRequest] (origin, opts, handler) {
+ // TODO: Migrate header parsing here, to make Requests
+ // HTTP agnostic
+ return new Request(origin, opts, handler)
+ }
+
+ static [kHTTP2BuildRequest] (origin, opts, handler) {
+ const headers = opts.headers
+ opts = { ...opts, headers: null }
+
+ const request = new Request(origin, opts, handler)
+
+ request.headers = {}
+
+ if (Array.isArray(headers)) {
+ if (headers.length % 2 !== 0) {
+ throw new InvalidArgumentError('headers array must be even')
+ }
+ for (let i = 0; i < headers.length; i += 2) {
+ processHeader(request, headers[i], headers[i + 1], true)
+ }
+ } else if (headers && typeof headers === 'object') {
+ const keys = Object.keys(headers)
+ for (let i = 0; i < keys.length; i++) {
+ const key = keys[i]
+ processHeader(request, key, headers[key], true)
+ }
+ } else if (headers != null) {
+ throw new InvalidArgumentError('headers must be an object or an array')
+ }
+
+ return request
+ }
+
+ static [kHTTP2CopyHeaders] (raw) {
+ const rawHeaders = raw.split('\r\n')
+ const headers = {}
+
+ for (const header of rawHeaders) {
+ const [key, value] = header.split(': ')
+
+ if (value == null || value.length === 0) continue
+
+ if (headers[key]) headers[key] += `,${value}`
+ else headers[key] = value
+ }
+
+ return headers
+ }
+}
+
+function processHeaderValue (key, val, skipAppend) {
+ if (val && typeof val === 'object') {
+ throw new InvalidArgumentError(`invalid ${key} header`)
+ }
+
+ val = val != null ? `${val}` : ''
+
+ if (headerCharRegex.exec(val) !== null) {
+ throw new InvalidArgumentError(`invalid ${key} header`)
+ }
+
+ return skipAppend ? val : `${key}: ${val}\r\n`
+}
+
+function processHeader (request, key, val, skipAppend = false) {
+ if (val && (typeof val === 'object' && !Array.isArray(val))) {
+ throw new InvalidArgumentError(`invalid ${key} header`)
+ } else if (val === undefined) {
+ return
+ }
+
+ if (
+ request.host === null &&
+ key.length === 4 &&
+ key.toLowerCase() === 'host'
+ ) {
+ if (headerCharRegex.exec(val) !== null) {
+ throw new InvalidArgumentError(`invalid ${key} header`)
+ }
+ // Consumed by Client
+ request.host = val
+ } else if (
+ request.contentLength === null &&
+ key.length === 14 &&
+ key.toLowerCase() === 'content-length'
+ ) {
+ request.contentLength = parseInt(val, 10)
+ if (!Number.isFinite(request.contentLength)) {
+ throw new InvalidArgumentError('invalid content-length header')
+ }
+ } else if (
+ request.contentType === null &&
+ key.length === 12 &&
+ key.toLowerCase() === 'content-type'
+ ) {
+ request.contentType = val
+ if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
+ else request.headers += processHeaderValue(key, val)
+ } else if (
+ key.length === 17 &&
+ key.toLowerCase() === 'transfer-encoding'
+ ) {
+ throw new InvalidArgumentError('invalid transfer-encoding header')
+ } else if (
+ key.length === 10 &&
+ key.toLowerCase() === 'connection'
+ ) {
+ const value = typeof val === 'string' ? val.toLowerCase() : null
+ if (value !== 'close' && value !== 'keep-alive') {
+ throw new InvalidArgumentError('invalid connection header')
+ } else if (value === 'close') {
+ request.reset = true
+ }
+ } else if (
+ key.length === 10 &&
+ key.toLowerCase() === 'keep-alive'
+ ) {
+ throw new InvalidArgumentError('invalid keep-alive header')
+ } else if (
+ key.length === 7 &&
+ key.toLowerCase() === 'upgrade'
+ ) {
+ throw new InvalidArgumentError('invalid upgrade header')
+ } else if (
+ key.length === 6 &&
+ key.toLowerCase() === 'expect'
+ ) {
+ throw new NotSupportedError('expect header not supported')
+ } else if (tokenRegExp.exec(key) === null) {
+ throw new InvalidArgumentError('invalid header key')
+ } else {
+ if (Array.isArray(val)) {
+ for (let i = 0; i < val.length; i++) {
+ if (skipAppend) {
+ if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
+ else request.headers[key] = processHeaderValue(key, val[i], skipAppend)
+ } else {
+ request.headers += processHeaderValue(key, val[i])
+ }
+ }
+ } else {
+ if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
+ else request.headers += processHeaderValue(key, val)
+ }
+ }
+}
+
+module.exports = Request
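
Worked illustration (not part of the patch) of the two header layouts Request builds: the HTTP/1 path appends each header to a flat CRLF-delimited string, while the HTTP/2 path (skipAppend) collects them into a plain object, and the kHTTP2CopyHeaders static folds a raw HTTP/1 string back into that object form, comma-joining duplicate keys. A standalone restatement of that fold, runnable in isolation:

    function copyHeaders (raw) {
      // Same logic as the kHTTP2CopyHeaders static above, repeated here so it
      // can be run without access to the un-exported symbol key.
      const headers = {}
      for (const header of raw.split('\r\n')) {
        const [key, value] = header.split(': ')
        if (value == null || value.length === 0) continue
        headers[key] = headers[key] ? `${headers[key]},${value}` : value
      }
      return headers
    }

    console.log(copyHeaders('accept: text/plain\r\ncookie: a=1\r\ncookie: b=2\r\n'))
    // → { accept: 'text/plain', cookie: 'a=1,b=2' }
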
diff --git a/lib/core/symbols.js b/lib/core/symbols.js
new file mode 100644
index 0000000..68d8566
--- /dev/null
+++ b/lib/core/symbols.js
@@ -0,0 +1,63 @@
+module.exports = {
+ kClose: Symbol('close'),
+ kDestroy: Symbol('destroy'),
+ kDispatch: Symbol('dispatch'),
+ kUrl: Symbol('url'),
+ kWriting: Symbol('writing'),
+ kResuming: Symbol('resuming'),
+ kQueue: Symbol('queue'),
+ kConnect: Symbol('connect'),
+ kConnecting: Symbol('connecting'),
+ kHeadersList: Symbol('headers list'),
+ kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
+ kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
+ kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
+ kKeepAliveTimeoutValue: Symbol('keep alive timeout'),
+ kKeepAlive: Symbol('keep alive'),
+ kHeadersTimeout: Symbol('headers timeout'),
+ kBodyTimeout: Symbol('body timeout'),
+ kServerName: Symbol('server name'),
+ kLocalAddress: Symbol('local address'),
+ kHost: Symbol('host'),
+ kNoRef: Symbol('no ref'),
+ kBodyUsed: Symbol('used'),
+ kRunning: Symbol('running'),
+ kBlocking: Symbol('blocking'),
+ kPending: Symbol('pending'),
+ kSize: Symbol('size'),
+ kBusy: Symbol('busy'),
+ kQueued: Symbol('queued'),
+ kFree: Symbol('free'),
+ kConnected: Symbol('connected'),
+ kClosed: Symbol('closed'),
+ kNeedDrain: Symbol('need drain'),
+ kReset: Symbol('reset'),
+ kDestroyed: Symbol.for('nodejs.stream.destroyed'),
+ kMaxHeadersSize: Symbol('max headers size'),
+ kRunningIdx: Symbol('running index'),
+ kPendingIdx: Symbol('pending index'),
+ kError: Symbol('error'),
+ kClients: Symbol('clients'),
+ kClient: Symbol('client'),
+ kParser: Symbol('parser'),
+ kOnDestroyed: Symbol('destroy callbacks'),
+ kPipelining: Symbol('pipelining'),
+ kSocket: Symbol('socket'),
+ kHostHeader: Symbol('host header'),
+ kConnector: Symbol('connector'),
+ kStrictContentLength: Symbol('strict content length'),
+ kMaxRedirections: Symbol('maxRedirections'),
+ kMaxRequests: Symbol('maxRequestsPerClient'),
+ kProxy: Symbol('proxy agent options'),
+ kCounter: Symbol('socket request counter'),
+ kInterceptors: Symbol('dispatch interceptors'),
+ kMaxResponseSize: Symbol('max response size'),
+ kHTTP2Session: Symbol('http2Session'),
+ kHTTP2SessionState: Symbol('http2Session state'),
+ kHTTP2BuildRequest: Symbol('http2 build request'),
+ kHTTP1BuildRequest: Symbol('http1 build request'),
+ kHTTP2CopyHeaders: Symbol('http2 copy headers'),
+ kHTTPConnVersion: Symbol('http connection version'),
+ kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
+ kConstruct: Symbol('constructable')
+}
diff --git a/lib/core/util.js b/lib/core/util.js
new file mode 100644
index 0000000..8d5450b
--- /dev/null
+++ b/lib/core/util.js
@@ -0,0 +1,511 @@
+'use strict'
+
+const assert = require('assert')
+const { kDestroyed, kBodyUsed } = require('./symbols')
+const { IncomingMessage } = require('http')
+const stream = require('stream')
+const net = require('net')
+const { InvalidArgumentError } = require('./errors')
+const { Blob } = require('buffer')
+const nodeUtil = require('util')
+const { stringify } = require('querystring')
+
+const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
+
+function nop () {}
+
+function isStream (obj) {
+ return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
+}
+
+// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
+function isBlobLike (object) {
+ return (Blob && object instanceof Blob) || (
+ object &&
+ typeof object === 'object' &&
+ (typeof object.stream === 'function' ||
+ typeof object.arrayBuffer === 'function') &&
+ /^(Blob|File)$/.test(object[Symbol.toStringTag])
+ )
+}
+
+function buildURL (url, queryParams) {
+ if (url.includes('?') || url.includes('#')) {
+ throw new Error('Query params cannot be passed when url already contains "?" or "#".')
+ }
+
+ const stringified = stringify(queryParams)
+
+ if (stringified) {
+ url += '?' + stringified
+ }
+
+ return url
+}
+
+function parseURL (url) {
+ if (typeof url === 'string') {
+ url = new URL(url)
+
+ if (!/^https?:/.test(url.origin || url.protocol)) {
+ throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
+ }
+
+ return url
+ }
+
+ if (!url || typeof url !== 'object') {
+ throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
+ }
+
+ if (!/^https?:/.test(url.origin || url.protocol)) {
+ throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
+ }
+
+ if (!(url instanceof URL)) {
+ if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
+ throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
+ }
+
+ if (url.path != null && typeof url.path !== 'string') {
+ throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
+ }
+
+ if (url.pathname != null && typeof url.pathname !== 'string') {
+ throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
+ }
+
+ if (url.hostname != null && typeof url.hostname !== 'string') {
+ throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
+ }
+
+ if (url.origin != null && typeof url.origin !== 'string') {
+ throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
+ }
+
+ const port = url.port != null
+ ? url.port
+ : (url.protocol === 'https:' ? 443 : 80)
+ let origin = url.origin != null
+ ? url.origin
+ : `${url.protocol}//${url.hostname}:${port}`
+ let path = url.path != null
+ ? url.path
+ : `${url.pathname || ''}${url.search || ''}`
+
+ if (origin.endsWith('/')) {
+ origin = origin.substring(0, origin.length - 1)
+ }
+
+ if (path && !path.startsWith('/')) {
+ path = `/${path}`
+ }
+ // new URL(path, origin) is unsafe when `path` contains an absolute URL
+ // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
+ // If first parameter is a relative URL, second param is required, and will be used as the base URL.
+ // If first parameter is an absolute URL, a given second param will be ignored.
+ url = new URL(origin + path)
+ }
+
+ return url
+}
+
+function parseOrigin (url) {
+ url = parseURL(url)
+
+ if (url.pathname !== '/' || url.search || url.hash) {
+ throw new InvalidArgumentError('invalid url')
+ }
+
+ return url
+}
+
+function getHostname (host) {
+ if (host[0] === '[') {
+ const idx = host.indexOf(']')
+
+ assert(idx !== -1)
+ return host.substring(1, idx)
+ }
+
+ const idx = host.indexOf(':')
+ if (idx === -1) return host
+
+ return host.substring(0, idx)
+}
+
+// IP addresses are not valid server names per RFC6066
+// > Currently, the only server names supported are DNS hostnames
+function getServerName (host) {
+ if (!host) {
+ return null
+ }
+
+ assert.strictEqual(typeof host, 'string')
+
+ const servername = getHostname(host)
+ if (net.isIP(servername)) {
+ return ''
+ }
+
+ return servername
+}
+
+function deepClone (obj) {
+ return JSON.parse(JSON.stringify(obj))
+}
+
+function isAsyncIterable (obj) {
+ return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
+}
+
+function isIterable (obj) {
+ return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))
+}
+
+function bodyLength (body) {
+ if (body == null) {
+ return 0
+ } else if (isStream(body)) {
+ const state = body._readableState
+ return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
+ ? state.length
+ : null
+ } else if (isBlobLike(body)) {
+ return body.size != null ? body.size : null
+ } else if (isBuffer(body)) {
+ return body.byteLength
+ }
+
+ return null
+}
+
+function isDestroyed (stream) {
+ return !stream || !!(stream.destroyed || stream[kDestroyed])
+}
+
+function isReadableAborted (stream) {
+ const state = stream && stream._readableState
+ return isDestroyed(stream) && state && !state.endEmitted
+}
+
+function destroy (stream, err) {
+ if (stream == null || !isStream(stream) || isDestroyed(stream)) {
+ return
+ }
+
+ if (typeof stream.destroy === 'function') {
+ if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {
+ // See: https://github.com/nodejs/node/pull/38505/files
+ stream.socket = null
+ }
+
+ stream.destroy(err)
+ } else if (err) {
+ process.nextTick((stream, err) => {
+ stream.emit('error', err)
+ }, stream, err)
+ }
+
+ if (stream.destroyed !== true) {
+ stream[kDestroyed] = true
+ }
+}
+
+const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
+function parseKeepAliveTimeout (val) {
+ const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)
+ return m ? parseInt(m[1], 10) * 1000 : null
+}
+
+function parseHeaders (headers, obj = {}) {
+ // For H2 support
+ if (!Array.isArray(headers)) return headers
+
+ for (let i = 0; i < headers.length; i += 2) {
+ const key = headers[i].toString().toLowerCase()
+ let val = obj[key]
+
+ if (!val) {
+ if (Array.isArray(headers[i + 1])) {
+ obj[key] = headers[i + 1].map(x => x.toString('utf8'))
+ } else {
+ obj[key] = headers[i + 1].toString('utf8')
+ }
+ } else {
+ if (!Array.isArray(val)) {
+ val = [val]
+ obj[key] = val
+ }
+ val.push(headers[i + 1].toString('utf8'))
+ }
+ }
+
+ // See https://github.com/nodejs/node/pull/46528
+ if ('content-length' in obj && 'content-disposition' in obj) {
+ obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
+ }
+
+ return obj
+}
+
+function parseRawHeaders (headers) {
+ const ret = []
+ let hasContentLength = false
+ let contentDispositionIdx = -1
+
+ for (let n = 0; n < headers.length; n += 2) {
+ const key = headers[n + 0].toString()
+ const val = headers[n + 1].toString('utf8')
+
+ if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
+ ret.push(key, val)
+ hasContentLength = true
+ } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
+ contentDispositionIdx = ret.push(key, val) - 1
+ } else {
+ ret.push(key, val)
+ }
+ }
+
+ // See https://github.com/nodejs/node/pull/46528
+ if (hasContentLength && contentDispositionIdx !== -1) {
+ ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
+ }
+
+ return ret
+}
+
+function isBuffer (buffer) {
+ // See, https://github.com/mcollina/undici/pull/319
+ return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
+}
+
+function validateHandler (handler, method, upgrade) {
+ if (!handler || typeof handler !== 'object') {
+ throw new InvalidArgumentError('handler must be an object')
+ }
+
+ if (typeof handler.onConnect !== 'function') {
+ throw new InvalidArgumentError('invalid onConnect method')
+ }
+
+ if (typeof handler.onError !== 'function') {
+ throw new InvalidArgumentError('invalid onError method')
+ }
+
+ if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
+ throw new InvalidArgumentError('invalid onBodySent method')
+ }
+
+ if (upgrade || method === 'CONNECT') {
+ if (typeof handler.onUpgrade !== 'function') {
+ throw new InvalidArgumentError('invalid onUpgrade method')
+ }
+ } else {
+ if (typeof handler.onHeaders !== 'function') {
+ throw new InvalidArgumentError('invalid onHeaders method')
+ }
+
+ if (typeof handler.onData !== 'function') {
+ throw new InvalidArgumentError('invalid onData method')
+ }
+
+ if (typeof handler.onComplete !== 'function') {
+ throw new InvalidArgumentError('invalid onComplete method')
+ }
+ }
+}
+
+// A body is disturbed if it has been read from and it cannot
+// be re-used without losing state or data.
+function isDisturbed (body) {
+ return !!(body && (
+ stream.isDisturbed
+ ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?
+ : body[kBodyUsed] ||
+ body.readableDidRead ||
+ (body._readableState && body._readableState.dataEmitted) ||
+ isReadableAborted(body)
+ ))
+}
+
+function isErrored (body) {
+ return !!(body && (
+ stream.isErrored
+ ? stream.isErrored(body)
+ : /state: 'errored'/.test(nodeUtil.inspect(body)
+ )))
+}
+
+function isReadable (body) {
+ return !!(body && (
+ stream.isReadable
+ ? stream.isReadable(body)
+ : /state: 'readable'/.test(nodeUtil.inspect(body)
+ )))
+}
+
+function getSocketInfo (socket) {
+ return {
+ localAddress: socket.localAddress,
+ localPort: socket.localPort,
+ remoteAddress: socket.remoteAddress,
+ remotePort: socket.remotePort,
+ remoteFamily: socket.remoteFamily,
+ timeout: socket.timeout,
+ bytesWritten: socket.bytesWritten,
+ bytesRead: socket.bytesRead
+ }
+}
+
+async function * convertIterableToBuffer (iterable) {
+ for await (const chunk of iterable) {
+ yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)
+ }
+}
+
+let ReadableStream
+function ReadableStreamFrom (iterable) {
+ if (!ReadableStream) {
+ ReadableStream = require('stream/web').ReadableStream
+ }
+
+ if (ReadableStream.from) {
+ return ReadableStream.from(convertIterableToBuffer(iterable))
+ }
+
+ let iterator
+ return new ReadableStream(
+ {
+ async start () {
+ iterator = iterable[Symbol.asyncIterator]()
+ },
+ async pull (controller) {
+ const { done, value } = await iterator.next()
+ if (done) {
+ queueMicrotask(() => {
+ controller.close()
+ })
+ } else {
+ const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
+ controller.enqueue(new Uint8Array(buf))
+ }
+ return controller.desiredSize > 0
+ },
+ async cancel (reason) {
+ await iterator.return()
+ }
+ },
+ 0
+ )
+}
+
+// The object should be a FormData instance and contain
+// all the required methods.
+function isFormDataLike (object) {
+ return (
+ object &&
+ typeof object === 'object' &&
+ typeof object.append === 'function' &&
+ typeof object.delete === 'function' &&
+ typeof object.get === 'function' &&
+ typeof object.getAll === 'function' &&
+ typeof object.has === 'function' &&
+ typeof object.set === 'function' &&
+ object[Symbol.toStringTag] === 'FormData'
+ )
+}
+
+function throwIfAborted (signal) {
+ if (!signal) { return }
+ if (typeof signal.throwIfAborted === 'function') {
+ signal.throwIfAborted()
+ } else {
+ if (signal.aborted) {
+ // DOMException not available < v17.0.0
+ const err = new Error('The operation was aborted')
+ err.name = 'AbortError'
+ throw err
+ }
+ }
+}
+
+function addAbortListener (signal, listener) {
+ if ('addEventListener' in signal) {
+ signal.addEventListener('abort', listener, { once: true })
+ return () => signal.removeEventListener('abort', listener)
+ }
+ signal.addListener('abort', listener)
+ return () => signal.removeListener('abort', listener)
+}
+
+const hasToWellFormed = !!String.prototype.toWellFormed
+
+/**
+ * @param {string} val
+ */
+function toUSVString (val) {
+ if (hasToWellFormed) {
+ return `${val}`.toWellFormed()
+ } else if (nodeUtil.toUSVString) {
+ return nodeUtil.toUSVString(val)
+ }
+
+ return `${val}`
+}
+
+// Parsed according to RFC 9110
+// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
+function parseRangeHeader (range) {
+ if (range == null || range === '') return { start: 0, end: null, size: null }
+
+ const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null
+ return m
+ ? {
+ start: parseInt(m[1]),
+ end: m[2] ? parseInt(m[2]) : null,
+ size: m[3] ? parseInt(m[3]) : null
+ }
+ : null
+}
+
+const kEnumerableProperty = Object.create(null)
+kEnumerableProperty.enumerable = true
+
+module.exports = {
+ kEnumerableProperty,
+ nop,
+ isDisturbed,
+ isErrored,
+ isReadable,
+ toUSVString,
+ isReadableAborted,
+ isBlobLike,
+ parseOrigin,
+ parseURL,
+ getServerName,
+ isStream,
+ isIterable,
+ isAsyncIterable,
+ isDestroyed,
+ parseRawHeaders,
+ parseHeaders,
+ parseKeepAliveTimeout,
+ destroy,
+ bodyLength,
+ deepClone,
+ ReadableStreamFrom,
+ isBuffer,
+ validateHandler,
+ getSocketInfo,
+ isFormDataLike,
+ buildURL,
+ throwIfAborted,
+ addAbortListener,
+ parseRangeHeader,
+ nodeMajor,
+ nodeMinor,
+ nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),
+ safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']
+}
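
A short consumer-side sketch of the URL helpers above. The require path is an assumption (adjust to wherever lib/core/util.js sits in your checkout); the commented results follow from the code as written:

    const util = require('./lib/core/util') // assumed path

    // parseURL accepts a string, a URL, or a plain object and always returns a URL.
    const u = util.parseURL({ protocol: 'http:', hostname: 'localhost', port: 8080, path: '/v1/items?x=1' })
    console.log(u.href) // http://localhost:8080/v1/items?x=1

    // buildURL appends a serialized query, but throws if one is already present.
    console.log(util.buildURL('/v1/items', { page: 2 })) // /v1/items?page=2

    // parseRangeHeader parses RFC 9110 Content-Range values.
    console.log(util.parseRangeHeader('bytes 0-99/1024')) // { start: 0, end: 99, size: 1024 }
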
diff --git a/lib/dispatcher-base.js b/lib/dispatcher-base.js
new file mode 100644
index 0000000..5c0220b
--- /dev/null
+++ b/lib/dispatcher-base.js
@@ -0,0 +1,192 @@
+'use strict'
+
+const Dispatcher = require('./dispatcher')
+const {
+ ClientDestroyedError,
+ ClientClosedError,
+ InvalidArgumentError
+} = require('./core/errors')
+const { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols')
+
+const kDestroyed = Symbol('destroyed')
+const kClosed = Symbol('closed')
+const kOnDestroyed = Symbol('onDestroyed')
+const kOnClosed = Symbol('onClosed')
+const kInterceptedDispatch = Symbol('Intercepted Dispatch')
+
+class DispatcherBase extends Dispatcher {
+ constructor () {
+ super()
+
+ this[kDestroyed] = false
+ this[kOnDestroyed] = null
+ this[kClosed] = false
+ this[kOnClosed] = []
+ }
+
+ get destroyed () {
+ return this[kDestroyed]
+ }
+
+ get closed () {
+ return this[kClosed]
+ }
+
+ get interceptors () {
+ return this[kInterceptors]
+ }
+
+ set interceptors (newInterceptors) {
+ if (newInterceptors) {
+ for (let i = newInterceptors.length - 1; i >= 0; i--) {
+ const interceptor = newInterceptors[i]
+ if (typeof interceptor !== 'function') {
+ throw new InvalidArgumentError('interceptor must be a function')
+ }
+ }
+ }
+
+ this[kInterceptors] = newInterceptors
+ }
+
+ close (callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ this.close((err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ if (this[kDestroyed]) {
+ queueMicrotask(() => callback(new ClientDestroyedError(), null))
+ return
+ }
+
+ if (this[kClosed]) {
+ if (this[kOnClosed]) {
+ this[kOnClosed].push(callback)
+ } else {
+ queueMicrotask(() => callback(null, null))
+ }
+ return
+ }
+
+ this[kClosed] = true
+ this[kOnClosed].push(callback)
+
+ const onClosed = () => {
+ const callbacks = this[kOnClosed]
+ this[kOnClosed] = null
+ for (let i = 0; i < callbacks.length; i++) {
+ callbacks[i](null, null)
+ }
+ }
+
+ // Should not error.
+ this[kClose]()
+ .then(() => this.destroy())
+ .then(() => {
+ queueMicrotask(onClosed)
+ })
+ }
+
+ destroy (err, callback) {
+ if (typeof err === 'function') {
+ callback = err
+ err = null
+ }
+
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ this.destroy(err, (err, data) => {
+ return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
+ })
+ })
+ }
+
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ if (this[kDestroyed]) {
+ if (this[kOnDestroyed]) {
+ this[kOnDestroyed].push(callback)
+ } else {
+ queueMicrotask(() => callback(null, null))
+ }
+ return
+ }
+
+ if (!err) {
+ err = new ClientDestroyedError()
+ }
+
+ this[kDestroyed] = true
+ this[kOnDestroyed] = this[kOnDestroyed] || []
+ this[kOnDestroyed].push(callback)
+
+ const onDestroyed = () => {
+ const callbacks = this[kOnDestroyed]
+ this[kOnDestroyed] = null
+ for (let i = 0; i < callbacks.length; i++) {
+ callbacks[i](null, null)
+ }
+ }
+
+ // Should not error.
+ this[kDestroy](err).then(() => {
+ queueMicrotask(onDestroyed)
+ })
+ }
+
+ [kInterceptedDispatch] (opts, handler) {
+ if (!this[kInterceptors] || this[kInterceptors].length === 0) {
+ this[kInterceptedDispatch] = this[kDispatch]
+ return this[kDispatch](opts, handler)
+ }
+
+ let dispatch = this[kDispatch].bind(this)
+ for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
+ dispatch = this[kInterceptors][i](dispatch)
+ }
+ this[kInterceptedDispatch] = dispatch
+ return dispatch(opts, handler)
+ }
+
+ dispatch (opts, handler) {
+ if (!handler || typeof handler !== 'object') {
+ throw new InvalidArgumentError('handler must be an object')
+ }
+
+ try {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('opts must be an object.')
+ }
+
+ if (this[kDestroyed] || this[kOnDestroyed]) {
+ throw new ClientDestroyedError()
+ }
+
+ if (this[kClosed]) {
+ throw new ClientClosedError()
+ }
+
+ return this[kInterceptedDispatch](opts, handler)
+ } catch (err) {
+ if (typeof handler.onError !== 'function') {
+ throw new InvalidArgumentError('invalid onError method')
+ }
+
+ handler.onError(err)
+
+ return false
+ }
+ }
+}
+
+module.exports = DispatcherBase
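
The interceptors composed in [kInterceptedDispatch] above are plain wrapper functions: each one receives the next dispatch function and returns a new (opts, handler) dispatcher, applied right to left over this[kInterceptors]. A minimal sketch of that shape (the logging is illustrative only, not part of undici):

    // dispatch => (opts, handler) => boolean, as composed in [kInterceptedDispatch]
    function loggingInterceptor (dispatch) {
      return function interceptedDispatch (opts, handler) {
        console.log(`dispatching ${opts.method} ${opts.path}`)
        return dispatch(opts, handler)
      }
    }
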
diff --git a/lib/dispatcher.js b/lib/dispatcher.js
new file mode 100644
index 0000000..9b809d8
--- /dev/null
+++ b/lib/dispatcher.js
@@ -0,0 +1,19 @@
+'use strict'
+
+const EventEmitter = require('events')
+
+class Dispatcher extends EventEmitter {
+ dispatch () {
+ throw new Error('not implemented')
+ }
+
+ close () {
+ throw new Error('not implemented')
+ }
+
+ destroy () {
+ throw new Error('not implemented')
+ }
+}
+
+module.exports = Dispatcher
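
DispatcherBase above gives every concrete Dispatcher (Agent, Pool, Client elsewhere in this diff) the same close/destroy contract: with a callback the result is delivered to it, without one a Promise is returned. A minimal consumer-side sketch:

    'use strict'
    const { Agent } = require('undici')

    async function main () {
      const agent = new Agent()
      await agent.close() // no callback → Promise form

      const other = new Agent()
      other.destroy(new Error('shutting down'), () => {
        console.log('destroyed') // callback form
      })
    }

    main()
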
diff --git a/lib/fetch/LICENSE b/lib/fetch/LICENSE
new file mode 100644
index 0000000..2943500
--- /dev/null
+++ b/lib/fetch/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Ethan Arrowood
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/lib/fetch/body.js b/lib/fetch/body.js
new file mode 100644
index 0000000..fd8481b
--- /dev/null
+++ b/lib/fetch/body.js
@@ -0,0 +1,605 @@
+'use strict'
+
+const Busboy = require('@fastify/busboy')
+const util = require('../core/util')
+const {
+ ReadableStreamFrom,
+ isBlobLike,
+ isReadableStreamLike,
+ readableStreamClose,
+ createDeferredPromise,
+ fullyReadBody
+} = require('./util')
+const { FormData } = require('./formdata')
+const { kState } = require('./symbols')
+const { webidl } = require('./webidl')
+const { DOMException, structuredClone } = require('./constants')
+const { Blob, File: NativeFile } = require('buffer')
+const { kBodyUsed } = require('../core/symbols')
+const assert = require('assert')
+const { isErrored } = require('../core/util')
+const { isUint8Array, isArrayBuffer } = require('util/types')
+const { File: UndiciFile } = require('./file')
+const { parseMIMEType, serializeAMimeType } = require('./dataURL')
+
+let ReadableStream = globalThis.ReadableStream
+
+/** @type {globalThis['File']} */
+const File = NativeFile ?? UndiciFile
+const textEncoder = new TextEncoder()
+const textDecoder = new TextDecoder()
+
+// https://fetch.spec.whatwg.org/#concept-bodyinit-extract
+function extractBody (object, keepalive = false) {
+ if (!ReadableStream) {
+ ReadableStream = require('stream/web').ReadableStream
+ }
+
+ // 1. Let stream be null.
+ let stream = null
+
+ // 2. If object is a ReadableStream object, then set stream to object.
+ if (object instanceof ReadableStream) {
+ stream = object
+ } else if (isBlobLike(object)) {
+ // 3. Otherwise, if object is a Blob object, set stream to the
+ // result of running object’s get stream.
+ stream = object.stream()
+ } else {
+ // 4. Otherwise, set stream to a new ReadableStream object, and set
+ // up stream.
+ stream = new ReadableStream({
+ async pull (controller) {
+ controller.enqueue(
+ typeof source === 'string' ? textEncoder.encode(source) : source
+ )
+ queueMicrotask(() => readableStreamClose(controller))
+ },
+ start () {},
+ type: undefined
+ })
+ }
+
+ // 5. Assert: stream is a ReadableStream object.
+ assert(isReadableStreamLike(stream))
+
+ // 6. Let action be null.
+ let action = null
+
+ // 7. Let source be null.
+ let source = null
+
+ // 8. Let length be null.
+ let length = null
+
+ // 9. Let type be null.
+ let type = null
+
+ // 10. Switch on object:
+ if (typeof object === 'string') {
+ // Set source to the UTF-8 encoding of object.
+ // Note: setting source to a Uint8Array here breaks some mocking assumptions.
+ source = object
+
+ // Set type to `text/plain;charset=UTF-8`.
+ type = 'text/plain;charset=UTF-8'
+ } else if (object instanceof URLSearchParams) {
+ // URLSearchParams
+
+ // spec says to run application/x-www-form-urlencoded on body.list
+ // this is implemented in Node.js as part of the URLSearchParams instance's toString method
+ // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490
+ // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100
+
+ // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list.
+ source = object.toString()
+
+ // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
+ type = 'application/x-www-form-urlencoded;charset=UTF-8'
+ } else if (isArrayBuffer(object)) {
+ // BufferSource/ArrayBuffer
+
+ // Set source to a copy of the bytes held by object.
+ source = new Uint8Array(object.slice())
+ } else if (ArrayBuffer.isView(object)) {
+ // BufferSource/ArrayBufferView
+
+ // Set source to a copy of the bytes held by object.
+ source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
+ } else if (util.isFormDataLike(object)) {
+ const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
+ const prefix = `--${boundary}\r\nContent-Disposition: form-data`
+
+ /*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
+ const escape = (str) =>
+ str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
+ const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
+
+ // Set action to this step: run the multipart/form-data
+ // encoding algorithm, with object’s entry list and UTF-8.
+ // - This ensures that the body is immutable and can't be changed afterwards
+ // - That the content-length is calculated in advance.
+ // - And that all parts are pre-encoded and ready to be sent.
+
+ const blobParts = []
+ const rn = new Uint8Array([13, 10]) // '\r\n'
+ length = 0
+ let hasUnknownSizeValue = false
+
+ for (const [name, value] of object) {
+ if (typeof value === 'string') {
+ const chunk = textEncoder.encode(prefix +
+ `; name="${escape(normalizeLinefeeds(name))}"` +
+ `\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
+ blobParts.push(chunk)
+ length += chunk.byteLength
+ } else {
+ const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` +
+ (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' +
+ `Content-Type: ${
+ value.type || 'application/octet-stream'
+ }\r\n\r\n`)
+ blobParts.push(chunk, value, rn)
+ if (typeof value.size === 'number') {
+ length += chunk.byteLength + value.size + rn.byteLength
+ } else {
+ hasUnknownSizeValue = true
+ }
+ }
+ }
+
+ const chunk = textEncoder.encode(`--${boundary}--`)
+ blobParts.push(chunk)
+ length += chunk.byteLength
+ if (hasUnknownSizeValue) {
+ length = null
+ }
+
+ // Set source to object.
+ source = object
+
+ action = async function * () {
+ for (const part of blobParts) {
+ if (part.stream) {
+ yield * part.stream()
+ } else {
+ yield part
+ }
+ }
+ }
+
+ // Set type to `multipart/form-data; boundary=`,
+ // followed by the multipart/form-data boundary string generated
+ // by the multipart/form-data encoding algorithm.
+ type = 'multipart/form-data; boundary=' + boundary
+ } else if (isBlobLike(object)) {
+ // Blob
+
+ // Set source to object.
+ source = object
+
+ // Set length to object’s size.
+ length = object.size
+
+ // If object’s type attribute is not the empty byte sequence, set
+ // type to its value.
+ if (object.type) {
+ type = object.type
+ }
+ } else if (typeof object[Symbol.asyncIterator] === 'function') {
+ // If keepalive is true, then throw a TypeError.
+ if (keepalive) {
+ throw new TypeError('keepalive')
+ }
+
+ // If object is disturbed or locked, then throw a TypeError.
+ if (util.isDisturbed(object) || object.locked) {
+ throw new TypeError(
+ 'Response body object should not be disturbed or locked'
+ )
+ }
+
+ stream =
+ object instanceof ReadableStream ? object : ReadableStreamFrom(object)
+ }
+
+ // 11. If source is a byte sequence, then set action to a
+ // step that returns source and length to source’s length.
+ if (typeof source === 'string' || util.isBuffer(source)) {
+ length = Buffer.byteLength(source)
+ }
+
+ // 12. If action is non-null, then run these steps in parallel:
+ if (action != null) {
+ // Run action.
+ let iterator
+ stream = new ReadableStream({
+ async start () {
+ iterator = action(object)[Symbol.asyncIterator]()
+ },
+ async pull (controller) {
+ const { value, done } = await iterator.next()
+ if (done) {
+ // When running action is done, close stream.
+ queueMicrotask(() => {
+ controller.close()
+ })
+ } else {
+ // Whenever one or more bytes are available and stream is not errored,
+ // enqueue a Uint8Array wrapping an ArrayBuffer containing the available
+ // bytes into stream.
+ if (!isErrored(stream)) {
+ controller.enqueue(new Uint8Array(value))
+ }
+ }
+ return controller.desiredSize > 0
+ },
+ async cancel (reason) {
+ await iterator.return()
+ },
+ type: undefined
+ })
+ }
+
+ // 13. Let body be a body whose stream is stream, source is source,
+ // and length is length.
+ const body = { stream, source, length }
+
+ // 14. Return (body, type).
+ return [body, type]
+}
+
+// https://fetch.spec.whatwg.org/#bodyinit-safely-extract
+function safelyExtractBody (object, keepalive = false) {
+ if (!ReadableStream) {
+ // istanbul ignore next
+ ReadableStream = require('stream/web').ReadableStream
+ }
+
+ // To safely extract a body and a `Content-Type` value from
+ // a byte sequence or BodyInit object object, run these steps:
+
+ // 1. If object is a ReadableStream object, then:
+ if (object instanceof ReadableStream) {
+ // Assert: object is neither disturbed nor locked.
+ // istanbul ignore next
+ assert(!util.isDisturbed(object), 'The body has already been consumed.')
+ // istanbul ignore next
+ assert(!object.locked, 'The stream is locked.')
+ }
+
+ // 2. Return the results of extracting object.
+ return extractBody(object, keepalive)
+}
+
+function cloneBody (body) {
+ // To clone a body body, run these steps:
+
+ // https://fetch.spec.whatwg.org/#concept-body-clone
+
+ // 1. Let « out1, out2 » be the result of teeing body’s stream.
+ const [out1, out2] = body.stream.tee()
+ const out2Clone = structuredClone(out2, { transfer: [out2] })
+ // This, for whatever reason, unrefs out2Clone, which allows
+ // the process to exit by itself.
+ const [, finalClone] = out2Clone.tee()
+
+ // 2. Set body’s stream to out1.
+ body.stream = out1
+
+ // 3. Return a body whose stream is out2 and other members are copied from body.
+ return {
+ stream: finalClone,
+ length: body.length,
+ source: body.source
+ }
+}
+
+async function * consumeBody (body) {
+ if (body) {
+ if (isUint8Array(body)) {
+ yield body
+ } else {
+ const stream = body.stream
+
+ if (util.isDisturbed(stream)) {
+ throw new TypeError('The body has already been consumed.')
+ }
+
+ if (stream.locked) {
+ throw new TypeError('The stream is locked.')
+ }
+
+ // Compat.
+ stream[kBodyUsed] = true
+
+ yield * stream
+ }
+ }
+}
+
+function throwIfAborted (state) {
+ if (state.aborted) {
+ throw new DOMException('The operation was aborted.', 'AbortError')
+ }
+}
+
+function bodyMixinMethods (instance) {
+ const methods = {
+ blob () {
+ // The blob() method steps are to return the result of
+ // running consume body with this and the following step
+ // given a byte sequence bytes: return a Blob whose
+ // contents are bytes and whose type attribute is this’s
+ // MIME type.
+ return specConsumeBody(this, (bytes) => {
+ let mimeType = bodyMimeType(this)
+
+ if (mimeType === 'failure') {
+ mimeType = ''
+ } else if (mimeType) {
+ mimeType = serializeAMimeType(mimeType)
+ }
+
+ // Return a Blob whose contents are bytes and type attribute
+ // is mimeType.
+ return new Blob([bytes], { type: mimeType })
+ }, instance)
+ },
+
+ arrayBuffer () {
+ // The arrayBuffer() method steps are to return the result
+ // of running consume body with this and the following step
+ // given a byte sequence bytes: return a new ArrayBuffer
+ // whose contents are bytes.
+ return specConsumeBody(this, (bytes) => {
+ return new Uint8Array(bytes).buffer
+ }, instance)
+ },
+
+ text () {
+ // The text() method steps are to return the result of running
+ // consume body with this and UTF-8 decode.
+ return specConsumeBody(this, utf8DecodeBytes, instance)
+ },
+
+ json () {
+ // The json() method steps are to return the result of running
+ // consume body with this and parse JSON from bytes.
+ return specConsumeBody(this, parseJSONFromBytes, instance)
+ },
+
+ async formData () {
+ webidl.brandCheck(this, instance)
+
+ throwIfAborted(this[kState])
+
+ const contentType = this.headers.get('Content-Type')
+
+ // If mimeType’s essence is "multipart/form-data", then:
+ if (/multipart\/form-data/.test(contentType)) {
+ const headers = {}
+ for (const [key, value] of this.headers) headers[key.toLowerCase()] = value
+
+ const responseFormData = new FormData()
+
+ let busboy
+
+ try {
+ busboy = new Busboy({
+ headers,
+ preservePath: true
+ })
+ } catch (err) {
+ throw new DOMException(`${err}`, 'AbortError')
+ }
+
+ busboy.on('field', (name, value) => {
+ responseFormData.append(name, value)
+ })
+ busboy.on('file', (name, value, filename, encoding, mimeType) => {
+ const chunks = []
+
+ if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {
+ let base64chunk = ''
+
+ value.on('data', (chunk) => {
+ base64chunk += chunk.toString().replace(/[\r\n]/gm, '')
+
+ const end = base64chunk.length - base64chunk.length % 4
+ chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64'))
+
+ base64chunk = base64chunk.slice(end)
+ })
+ value.on('end', () => {
+ chunks.push(Buffer.from(base64chunk, 'base64'))
+ responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
+ })
+ } else {
+ value.on('data', (chunk) => {
+ chunks.push(chunk)
+ })
+ value.on('end', () => {
+ responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
+ })
+ }
+ })
+
+ const busboyResolve = new Promise((resolve, reject) => {
+ busboy.on('finish', resolve)
+ busboy.on('error', (err) => reject(new TypeError(err)))
+ })
+
+ if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk)
+ busboy.end()
+ await busboyResolve
+
+ return responseFormData
+ } else if (/application\/x-www-form-urlencoded/.test(contentType)) {
+ // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then:
+
+ // 1. Let entries be the result of parsing bytes.
+ let entries
+ try {
+ let text = ''
+ // application/x-www-form-urlencoded parser will keep the BOM.
+ // https://url.spec.whatwg.org/#concept-urlencoded-parser
+ // Note that streaming decoder is stateful and cannot be reused
+ const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true })
+
+ for await (const chunk of consumeBody(this[kState].body)) {
+ if (!isUint8Array(chunk)) {
+ throw new TypeError('Expected Uint8Array chunk')
+ }
+ text += streamingDecoder.decode(chunk, { stream: true })
+ }
+ text += streamingDecoder.decode()
+ entries = new URLSearchParams(text)
+ } catch (err) {
+ // istanbul ignore next: Unclear when new URLSearchParams can fail on a string.
+ // 2. If entries is failure, then throw a TypeError.
+ throw Object.assign(new TypeError(), { cause: err })
+ }
+
+ // 3. Return a new FormData object whose entries are entries.
+ const formData = new FormData()
+ for (const [name, value] of entries) {
+ formData.append(name, value)
+ }
+ return formData
+ } else {
+ // Wait a tick before checking if the request has been aborted.
+ // Otherwise, a TypeError can be thrown when an AbortError should be thrown instead.
+ await Promise.resolve()
+
+ throwIfAborted(this[kState])
+
+ // Otherwise, throw a TypeError.
+ throw webidl.errors.exception({
+ header: `${instance.name}.formData`,
+ message: 'Could not parse content as FormData.'
+ })
+ }
+ }
+ }
+
+ return methods
+}
+
+function mixinBody (prototype) {
+ Object.assign(prototype.prototype, bodyMixinMethods(prototype))
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-body-consume-body
+ * @param {Response|Request} object
+ * @param {(value: unknown) => unknown} convertBytesToJSValue
+ * @param {Response|Request} instance
+ */
+async function specConsumeBody (object, convertBytesToJSValue, instance) {
+ webidl.brandCheck(object, instance)
+
+ throwIfAborted(object[kState])
+
+ // 1. If object is unusable, then return a promise rejected
+ // with a TypeError.
+ if (bodyUnusable(object[kState].body)) {
+ throw new TypeError('Body is unusable')
+ }
+
+ // 2. Let promise be a new promise.
+ const promise = createDeferredPromise()
+
+ // 3. Let errorSteps given error be to reject promise with error.
+ const errorSteps = (error) => promise.reject(error)
+
+ // 4. Let successSteps given a byte sequence data be to resolve
+ // promise with the result of running convertBytesToJSValue
+ // with data. If that threw an exception, then run errorSteps
+ // with that exception.
+ const successSteps = (data) => {
+ try {
+ promise.resolve(convertBytesToJSValue(data))
+ } catch (e) {
+ errorSteps(e)
+ }
+ }
+
+ // 5. If object’s body is null, then run successSteps with an
+ // empty byte sequence.
+ if (object[kState].body == null) {
+ successSteps(new Uint8Array())
+ return promise.promise
+ }
+
+ // 6. Otherwise, fully read object’s body given successSteps,
+ // errorSteps, and object’s relevant global object.
+ await fullyReadBody(object[kState].body, successSteps, errorSteps)
+
+ // 7. Return promise.
+ return promise.promise
+}
+
+// https://fetch.spec.whatwg.org/#body-unusable
+function bodyUnusable (body) {
+ // An object including the Body interface mixin is
+ // said to be unusable if its body is non-null and
+ // its body’s stream is disturbed or locked.
+ return body != null && (body.stream.locked || util.isDisturbed(body.stream))
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#utf-8-decode
+ * @param {Buffer} buffer
+ */
+function utf8DecodeBytes (buffer) {
+ if (buffer.length === 0) {
+ return ''
+ }
+
+ // 1. Let buffer be the result of peeking three bytes from
+ // ioQueue, converted to a byte sequence.
+
+ // 2. If buffer is 0xEF 0xBB 0xBF, then read three
+ // bytes from ioQueue. (Do nothing with those bytes.)
+ if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) {
+ buffer = buffer.subarray(3)
+ }
+
+ // 3. Process a queue with an instance of UTF-8’s
+ // decoder, ioQueue, output, and "replacement".
+ const output = textDecoder.decode(buffer)
+
+ // 4. Return output.
+ return output
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
+ * @param {Uint8Array} bytes
+ */
+function parseJSONFromBytes (bytes) {
+ return JSON.parse(utf8DecodeBytes(bytes))
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-body-mime-type
+ * @param {import('./response').Response|import('./request').Request} object
+ */
+function bodyMimeType (object) {
+ const { headersList } = object[kState]
+ const contentType = headersList.get('content-type')
+
+ if (contentType === null) {
+ return 'failure'
+ }
+
+ return parseMIMEType(contentType)
+}
+
+module.exports = {
+ extractBody,
+ safelyExtractBody,
+ cloneBody,
+ mixinBody
+}
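
For reference, a sketch of what extractBody above produces for a plain string body; the require path is assumed, and the commented values follow from the algorithm as written:

    const { extractBody } = require('./lib/fetch/body') // assumed path

    const [body, contentType] = extractBody('hello')
    // contentType === 'text/plain;charset=UTF-8'
    // body.source === 'hello', body.length === 5,
    // body.stream is a ReadableStream that emits the UTF-8 bytes of 'hello'
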
diff --git a/lib/fetch/constants.js b/lib/fetch/constants.js
new file mode 100644
index 0000000..218fcbe
--- /dev/null
+++ b/lib/fetch/constants.js
@@ -0,0 +1,151 @@
+'use strict'
+
+const { MessageChannel, receiveMessageOnPort } = require('worker_threads')
+
+const corsSafeListedMethods = ['GET', 'HEAD', 'POST']
+const corsSafeListedMethodsSet = new Set(corsSafeListedMethods)
+
+const nullBodyStatus = [101, 204, 205, 304]
+
+const redirectStatus = [301, 302, 303, 307, 308]
+const redirectStatusSet = new Set(redirectStatus)
+
+// https://fetch.spec.whatwg.org/#block-bad-port
+const badPorts = [
+ '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',
+ '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',
+ '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',
+ '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',
+ '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697',
+ '10080'
+]
+
+const badPortsSet = new Set(badPorts)
+
+// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
+const referrerPolicy = [
+ '',
+ 'no-referrer',
+ 'no-referrer-when-downgrade',
+ 'same-origin',
+ 'origin',
+ 'strict-origin',
+ 'origin-when-cross-origin',
+ 'strict-origin-when-cross-origin',
+ 'unsafe-url'
+]
+const referrerPolicySet = new Set(referrerPolicy)
+
+const requestRedirect = ['follow', 'manual', 'error']
+
+const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE']
+const safeMethodsSet = new Set(safeMethods)
+
+const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors']
+
+const requestCredentials = ['omit', 'same-origin', 'include']
+
+const requestCache = [
+ 'default',
+ 'no-store',
+ 'reload',
+ 'no-cache',
+ 'force-cache',
+ 'only-if-cached'
+]
+
+// https://fetch.spec.whatwg.org/#request-body-header-name
+const requestBodyHeader = [
+ 'content-encoding',
+ 'content-language',
+ 'content-location',
+ 'content-type',
+ // See https://github.com/nodejs/undici/issues/2021
+ // 'Content-Length' is a forbidden header name, which is typically
+ // removed in the Headers implementation. However, undici doesn't
+ // filter out headers, so we add it here.
+ 'content-length'
+]
+
+// https://fetch.spec.whatwg.org/#enumdef-requestduplex
+const requestDuplex = [
+ 'half'
+]
+
+// http://fetch.spec.whatwg.org/#forbidden-method
+const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK']
+const forbiddenMethodsSet = new Set(forbiddenMethods)
+
+const subresource = [
+ 'audio',
+ 'audioworklet',
+ 'font',
+ 'image',
+ 'manifest',
+ 'paintworklet',
+ 'script',
+ 'style',
+ 'track',
+ 'video',
+ 'xslt',
+ ''
+]
+const subresourceSet = new Set(subresource)
+
+/** @type {globalThis['DOMException']} */
+const DOMException = globalThis.DOMException ?? (() => {
+ // DOMException was only made a global in Node v17.0.0,
+ // but fetch supports >= v16.8.
+ try {
+ atob('~')
+ } catch (err) {
+ return Object.getPrototypeOf(err).constructor
+ }
+})()
+
+let channel
+
+/** @type {globalThis['structuredClone']} */
+const structuredClone =
+ globalThis.structuredClone ??
+ // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js
+ // structuredClone was added in v17.0.0, but fetch supports v16.8
+ function structuredClone (value, options = undefined) {
+ if (arguments.length === 0) {
+ throw new TypeError('missing argument')
+ }
+
+ if (!channel) {
+ channel = new MessageChannel()
+ }
+ channel.port1.unref()
+ channel.port2.unref()
+ channel.port1.postMessage(value, options?.transfer)
+ return receiveMessageOnPort(channel.port2).message
+ }
+
+module.exports = {
+ DOMException,
+ structuredClone,
+ subresource,
+ forbiddenMethods,
+ requestBodyHeader,
+ referrerPolicy,
+ requestRedirect,
+ requestMode,
+ requestCredentials,
+ requestCache,
+ redirectStatus,
+ corsSafeListedMethods,
+ nullBodyStatus,
+ safeMethods,
+ badPorts,
+ requestDuplex,
+ subresourceSet,
+ badPortsSet,
+ redirectStatusSet,
+ corsSafeListedMethodsSet,
+ safeMethodsSet,
+ forbiddenMethodsSet,
+ referrerPolicySet
+}
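
The *Set exports mirror the arrays above and exist for fast membership checks. A consumer-side illustration (require path assumed):

    const { badPortsSet, redirectStatusSet } = require('./lib/fetch/constants') // assumed path

    badPortsSet.has('6666')    // true  – bad ports are stored as strings
    redirectStatusSet.has(307) // true  – redirect statuses are stored as numbers
    redirectStatusSet.has(200) // false
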
diff --git a/lib/fetch/dataURL.js b/lib/fetch/dataURL.js
new file mode 100644
index 0000000..7b6a606
--- /dev/null
+++ b/lib/fetch/dataURL.js
@@ -0,0 +1,627 @@
+const assert = require('assert')
+const { atob } = require('buffer')
+const { isomorphicDecode } = require('./util')
+
+const encoder = new TextEncoder()
+
+/**
+ * @see https://mimesniff.spec.whatwg.org/#http-token-code-point
+ */
+const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/
+const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line
+/**
+ * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
+ */
+const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line
+
+// https://fetch.spec.whatwg.org/#data-url-processor
+/** @param {URL} dataURL */
+function dataURLProcessor (dataURL) {
+ // 1. Assert: dataURL’s scheme is "data".
+ assert(dataURL.protocol === 'data:')
+
+ // 2. Let input be the result of running the URL
+ // serializer on dataURL with exclude fragment
+ // set to true.
+ let input = URLSerializer(dataURL, true)
+
+ // 3. Remove the leading "data:" string from input.
+ input = input.slice(5)
+
+ // 4. Let position point at the start of input.
+ const position = { position: 0 }
+
+ // 5. Let mimeType be the result of collecting a
+ // sequence of code points that are not equal
+ // to U+002C (,), given position.
+ let mimeType = collectASequenceOfCodePointsFast(
+ ',',
+ input,
+ position
+ )
+
+ // 6. Strip leading and trailing ASCII whitespace
+ // from mimeType.
+ // Undici implementation note: we need to store the
+ // length because if the mimetype has spaces removed,
+ // the wrong amount will be sliced from the input in
+ // step #9
+ const mimeTypeLength = mimeType.length
+ mimeType = removeASCIIWhitespace(mimeType, true, true)
+
+ // 7. If position is past the end of input, then
+ // return failure
+ if (position.position >= input.length) {
+ return 'failure'
+ }
+
+ // 8. Advance position by 1.
+ position.position++
+
+ // 9. Let encodedBody be the remainder of input.
+ const encodedBody = input.slice(mimeTypeLength + 1)
+
+ // 10. Let body be the percent-decoding of encodedBody.
+ let body = stringPercentDecode(encodedBody)
+
+ // 11. If mimeType ends with U+003B (;), followed by
+ // zero or more U+0020 SPACE, followed by an ASCII
+ // case-insensitive match for "base64", then:
+ if (/;(\u0020){0,}base64$/i.test(mimeType)) {
+ // 1. Let stringBody be the isomorphic decode of body.
+ const stringBody = isomorphicDecode(body)
+
+ // 2. Set body to the forgiving-base64 decode of
+ // stringBody.
+ body = forgivingBase64(stringBody)
+
+ // 3. If body is failure, then return failure.
+ if (body === 'failure') {
+ return 'failure'
+ }
+
+ // 4. Remove the last 6 code points from mimeType.
+ mimeType = mimeType.slice(0, -6)
+
+ // 5. Remove trailing U+0020 SPACE code points from mimeType,
+ // if any.
+ mimeType = mimeType.replace(/(\u0020)+$/, '')
+
+ // 6. Remove the last U+003B (;) code point from mimeType.
+ mimeType = mimeType.slice(0, -1)
+ }
+
+ // 12. If mimeType starts with U+003B (;), then prepend
+ // "text/plain" to mimeType.
+ if (mimeType.startsWith(';')) {
+ mimeType = 'text/plain' + mimeType
+ }
+
+ // 13. Let mimeTypeRecord be the result of parsing
+ // mimeType.
+ let mimeTypeRecord = parseMIMEType(mimeType)
+
+ // 14. If mimeTypeRecord is failure, then set
+ // mimeTypeRecord to text/plain;charset=US-ASCII.
+ if (mimeTypeRecord === 'failure') {
+ mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII')
+ }
+
+ // 15. Return a new data: URL struct whose MIME
+ // type is mimeTypeRecord and body is body.
+ // https://fetch.spec.whatwg.org/#data-url-struct
+ return { mimeType: mimeTypeRecord, body }
+}
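+
+// Worked example of the algorithm above (illustrative, derived by tracing the steps):
+//   dataURLProcessor(new URL('data:text/plain;base64,aGVsbG8='))
+// yields
+//   { mimeType: { type: 'text', subtype: 'plain', parameters: Map {}, essence: 'text/plain' },
+//     body: <Uint8Array containing the bytes of 'hello'> }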
+
+// https://url.spec.whatwg.org/#concept-url-serializer
+/**
+ * @param {URL} url
+ * @param {boolean} excludeFragment
+ */
+function URLSerializer (url, excludeFragment = false) {
+ if (!excludeFragment) {
+ return url.href
+ }
+
+ const href = url.href
+ const hashLength = url.hash.length
+
+ return hashLength === 0 ? href : href.substring(0, href.length - hashLength)
+}
+
+// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
+/**
+ * @param {(char: string) => boolean} condition
+ * @param {string} input
+ * @param {{ position: number }} position
+ */
+function collectASequenceOfCodePoints (condition, input, position) {
+ // 1. Let result be the empty string.
+ let result = ''
+
+ // 2. While position doesn’t point past the end of input and the
+ // code point at position within input meets the condition condition:
+ while (position.position < input.length && condition(input[position.position])) {
+ // 1. Append that code point to the end of result.
+ result += input[position.position]
+
+ // 2. Advance position by 1.
+ position.position++
+ }
+
+ // 3. Return result.
+ return result
+}
+
+/**
+ * A faster collectASequenceOfCodePoints that only works when comparing a single character.
+ * @param {string} char
+ * @param {string} input
+ * @param {{ position: number }} position
+ */
+function collectASequenceOfCodePointsFast (char, input, position) {
+ const idx = input.indexOf(char, position.position)
+ const start = position.position
+
+ if (idx === -1) {
+ position.position = input.length
+ return input.slice(start)
+ }
+
+ position.position = idx
+ return input.slice(start, position.position)
+}
+
+// https://url.spec.whatwg.org/#string-percent-decode
+/** @param {string} input */
+function stringPercentDecode (input) {
+ // 1. Let bytes be the UTF-8 encoding of input.
+ const bytes = encoder.encode(input)
+
+ // 2. Return the percent-decoding of bytes.
+ return percentDecode(bytes)
+}
+
+// https://url.spec.whatwg.org/#percent-decode
+/** @param {Uint8Array} input */
+function percentDecode (input) {
+ // 1. Let output be an empty byte sequence.
+ /** @type {number[]} */
+ const output = []
+
+ // 2. For each byte byte in input:
+ for (let i = 0; i < input.length; i++) {
+ const byte = input[i]
+
+ // 1. If byte is not 0x25 (%), then append byte to output.
+ if (byte !== 0x25) {
+ output.push(byte)
+
+ // 2. Otherwise, if byte is 0x25 (%) and the next two bytes
+ // after byte in input are not in the ranges
+ // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F),
+ // and 0x61 (a) to 0x66 (f), all inclusive, append byte
+ // to output.
+ } else if (
+ byte === 0x25 &&
+ !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2]))
+ ) {
+ output.push(0x25)
+
+ // 3. Otherwise:
+ } else {
+ // 1. Let bytePoint be the two bytes after byte in input,
+ // decoded, and then interpreted as hexadecimal number.
+ const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2])
+ const bytePoint = Number.parseInt(nextTwoBytes, 16)
+
+ // 2. Append a byte whose value is bytePoint to output.
+ output.push(bytePoint)
+
+ // 3. Skip the next two bytes in input.
+ i += 2
+ }
+ }
+
+ // 3. Return output.
+ return Uint8Array.from(output)
+}
+
+// https://mimesniff.spec.whatwg.org/#parse-a-mime-type
+/** @param {string} input */
+function parseMIMEType (input) {
+ // 1. Remove any leading and trailing HTTP whitespace
+ // from input.
+ input = removeHTTPWhitespace(input, true, true)
+
+ // 2. Let position be a position variable for input,
+ // initially pointing at the start of input.
+ const position = { position: 0 }
+
+ // 3. Let type be the result of collecting a sequence
+ // of code points that are not U+002F (/) from
+ // input, given position.
+ const type = collectASequenceOfCodePointsFast(
+ '/',
+ input,
+ position
+ )
+
+ // 4. If type is the empty string or does not solely
+ // contain HTTP token code points, then return failure.
+ // https://mimesniff.spec.whatwg.org/#http-token-code-point
+ if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) {
+ return 'failure'
+ }
+
+ // 5. If position is past the end of input, then return
+ // failure
+ if (position.position > input.length) {
+ return 'failure'
+ }
+
+ // 6. Advance position by 1. (This skips past U+002F (/).)
+ position.position++
+
+ // 7. Let subtype be the result of collecting a sequence of
+ // code points that are not U+003B (;) from input, given
+ // position.
+ let subtype = collectASequenceOfCodePointsFast(
+ ';',
+ input,
+ position
+ )
+
+ // 8. Remove any trailing HTTP whitespace from subtype.
+ subtype = removeHTTPWhitespace(subtype, false, true)
+
+ // 9. If subtype is the empty string or does not solely
+ // contain HTTP token code points, then return failure.
+ if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) {
+ return 'failure'
+ }
+
+ const typeLowercase = type.toLowerCase()
+ const subtypeLowercase = subtype.toLowerCase()
+
+ // 10. Let mimeType be a new MIME type record whose type
+ // is type, in ASCII lowercase, and subtype is subtype,
+ // in ASCII lowercase.
+ // https://mimesniff.spec.whatwg.org/#mime-type
+ const mimeType = {
+ type: typeLowercase,
+ subtype: subtypeLowercase,
+ /** @type {Map<string, string>} */
+ parameters: new Map(),
+ // https://mimesniff.spec.whatwg.org/#mime-type-essence
+ essence: `${typeLowercase}/${subtypeLowercase}`
+ }
+
+ // 11. While position is not past the end of input:
+ while (position.position < input.length) {
+ // 1. Advance position by 1. (This skips past U+003B (;).)
+ position.position++
+
+ // 2. Collect a sequence of code points that are HTTP
+ // whitespace from input given position.
+ collectASequenceOfCodePoints(
+ // https://fetch.spec.whatwg.org/#http-whitespace
+ char => HTTP_WHITESPACE_REGEX.test(char),
+ input,
+ position
+ )
+
+ // 3. Let parameterName be the result of collecting a
+ // sequence of code points that are not U+003B (;)
+ // or U+003D (=) from input, given position.
+ let parameterName = collectASequenceOfCodePoints(
+ (char) => char !== ';' && char !== '=',
+ input,
+ position
+ )
+
+ // 4. Set parameterName to parameterName, in ASCII
+ // lowercase.
+ parameterName = parameterName.toLowerCase()
+
+ // 5. If position is not past the end of input, then:
+ if (position.position < input.length) {
+ // 1. If the code point at position within input is
+ // U+003B (;), then continue.
+ if (input[position.position] === ';') {
+ continue
+ }
+
+ // 2. Advance position by 1. (This skips past U+003D (=).)
+ position.position++
+ }
+
+ // 6. If position is past the end of input, then break.
+ if (position.position > input.length) {
+ break
+ }
+
+ // 7. Let parameterValue be null.
+ let parameterValue = null
+
+ // 8. If the code point at position within input is
+ // U+0022 ("), then:
+ if (input[position.position] === '"') {
+ // 1. Set parameterValue to the result of collecting
+ // an HTTP quoted string from input, given position
+ // and the extract-value flag.
+ parameterValue = collectAnHTTPQuotedString(input, position, true)
+
+ // 2. Collect a sequence of code points that are not
+ // U+003B (;) from input, given position.
+ collectASequenceOfCodePointsFast(
+ ';',
+ input,
+ position
+ )
+
+ // 9. Otherwise:
+ } else {
+ // 1. Set parameterValue to the result of collecting
+ // a sequence of code points that are not U+003B (;)
+ // from input, given position.
+ parameterValue = collectASequenceOfCodePointsFast(
+ ';',
+ input,
+ position
+ )
+
+ // 2. Remove any trailing HTTP whitespace from parameterValue.
+ parameterValue = removeHTTPWhitespace(parameterValue, false, true)
+
+ // 3. If parameterValue is the empty string, then continue.
+ if (parameterValue.length === 0) {
+ continue
+ }
+ }
+
+ // 10. If all of the following are true
+ // - parameterName is not the empty string
+ // - parameterName solely contains HTTP token code points
+ // - parameterValue solely contains HTTP quoted-string token code points
+ // - mimeType’s parameters[parameterName] does not exist
+ // then set mimeType’s parameters[parameterName] to parameterValue.
+ if (
+ parameterName.length !== 0 &&
+ HTTP_TOKEN_CODEPOINTS.test(parameterName) &&
+ (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&
+ !mimeType.parameters.has(parameterName)
+ ) {
+ mimeType.parameters.set(parameterName, parameterValue)
+ }
+ }
+
+ // 12. Return mimeType.
+ return mimeType
+}
+
+// https://infra.spec.whatwg.org/#forgiving-base64-decode
+/** @param {string} data */
+function forgivingBase64 (data) {
+ // 1. Remove all ASCII whitespace from data.
+ data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line
+
+ // 2. If data’s code point length divides by 4 leaving
+ // no remainder, then:
+ if (data.length % 4 === 0) {
+ // 1. If data ends with one or two U+003D (=) code points,
+ // then remove them from data.
+ data = data.replace(/=?=$/, '')
+ }
+
+ // 3. If data’s code point length divides by 4 leaving
+ // a remainder of 1, then return failure.
+ if (data.length % 4 === 1) {
+ return 'failure'
+ }
+
+ // 4. If data contains a code point that is not one of
+ // U+002B (+)
+ // U+002F (/)
+ // ASCII alphanumeric
+ // then return failure.
+ if (/[^+/0-9A-Za-z]/.test(data)) {
+ return 'failure'
+ }
+
+ const binary = atob(data)
+ const bytes = new Uint8Array(binary.length)
+
+ for (let byte = 0; byte < binary.length; byte++) {
+ bytes[byte] = binary.charCodeAt(byte)
+ }
+
+ return bytes
+}
+
+// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string
+// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string
+/**
+ * @param {string} input
+ * @param {{ position: number }} position
+ * @param {boolean?} extractValue
+ */
+function collectAnHTTPQuotedString (input, position, extractValue) {
+ // 1. Let positionStart be position.
+ const positionStart = position.position
+
+ // 2. Let value be the empty string.
+ let value = ''
+
+ // 3. Assert: the code point at position within input
+ // is U+0022 (").
+ assert(input[position.position] === '"')
+
+ // 4. Advance position by 1.
+ position.position++
+
+ // 5. While true:
+ while (true) {
+ // 1. Append the result of collecting a sequence of code points
+ // that are not U+0022 (") or U+005C (\) from input, given
+ // position, to value.
+ value += collectASequenceOfCodePoints(
+ (char) => char !== '"' && char !== '\\',
+ input,
+ position
+ )
+
+ // 2. If position is past the end of input, then break.
+ if (position.position >= input.length) {
+ break
+ }
+
+ // 3. Let quoteOrBackslash be the code point at position within
+ // input.
+ const quoteOrBackslash = input[position.position]
+
+ // 4. Advance position by 1.
+ position.position++
+
+ // 5. If quoteOrBackslash is U+005C (\), then:
+ if (quoteOrBackslash === '\\') {
+ // 1. If position is past the end of input, then append
+ // U+005C (\) to value and break.
+ if (position.position >= input.length) {
+ value += '\\'
+ break
+ }
+
+ // 2. Append the code point at position within input to value.
+ value += input[position.position]
+
+ // 3. Advance position by 1.
+ position.position++
+
+ // 6. Otherwise:
+ } else {
+ // 1. Assert: quoteOrBackslash is U+0022 (").
+ assert(quoteOrBackslash === '"')
+
+ // 2. Break.
+ break
+ }
+ }
+
+ // 6. If the extract-value flag is set, then return value.
+ if (extractValue) {
+ return value
+ }
+
+ // 7. Return the code points from positionStart to position,
+ // inclusive, within input.
+ return input.slice(positionStart, position.position)
+}
+
+/**
+ * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type
+ */
+function serializeAMimeType (mimeType) {
+ assert(mimeType !== 'failure')
+ const { parameters, essence } = mimeType
+
+ // 1. Let serialization be the concatenation of mimeType’s
+ // type, U+002F (/), and mimeType’s subtype.
+ let serialization = essence
+
+ // 2. For each name → value of mimeType’s parameters:
+ for (let [name, value] of parameters.entries()) {
+ // 1. Append U+003B (;) to serialization.
+ serialization += ';'
+
+ // 2. Append name to serialization.
+ serialization += name
+
+ // 3. Append U+003D (=) to serialization.
+ serialization += '='
+
+ // 4. If value does not solely contain HTTP token code
+ // points or value is the empty string, then:
+ if (!HTTP_TOKEN_CODEPOINTS.test(value)) {
+      // 1. Precede each occurrence of U+0022 (") or
+ // U+005C (\) in value with U+005C (\).
+ value = value.replace(/(\\|")/g, '\\$1')
+
+ // 2. Prepend U+0022 (") to value.
+ value = '"' + value
+
+ // 3. Append U+0022 (") to value.
+ value += '"'
+ }
+
+ // 5. Append value to serialization.
+ serialization += value
+ }
+
+ // 3. Return serialization.
+ return serialization
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-whitespace
+ * @param {string} char
+ */
+function isHTTPWhiteSpace (char) {
+ return char === '\r' || char === '\n' || char === '\t' || char === ' '
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-whitespace
+ * @param {string} str
+ */
+function removeHTTPWhitespace (str, leading = true, trailing = true) {
+ let lead = 0
+ let trail = str.length - 1
+
+ if (leading) {
+ for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++);
+ }
+
+ if (trailing) {
+ for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--);
+ }
+
+ return str.slice(lead, trail + 1)
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#ascii-whitespace
+ * @param {string} char
+ */
+function isASCIIWhitespace (char) {
+ return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' '
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace
+ */
+function removeASCIIWhitespace (str, leading = true, trailing = true) {
+ let lead = 0
+ let trail = str.length - 1
+
+ if (leading) {
+ for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++);
+ }
+
+ if (trailing) {
+ for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--);
+ }
+
+ return str.slice(lead, trail + 1)
+}
+
+module.exports = {
+ dataURLProcessor,
+ URLSerializer,
+ collectASequenceOfCodePoints,
+ collectASequenceOfCodePointsFast,
+ stringPercentDecode,
+ parseMIMEType,
+ collectAnHTTPQuotedString,
+ serializeAMimeType
+}
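
A minimal usage sketch for the MIME helpers exported above (not part of the diff; the require path assumes the file lands at lib/fetch/dataURL.js as shown). parseMIMEType returns the string 'failure' for malformed input, otherwise a record that serializeAMimeType can round-trip:

const { parseMIMEType, serializeAMimeType } = require('./lib/fetch/dataURL')

const mime = parseMIMEType('Text/HTML;Charset="utf-8"')
console.log(mime.essence)                   // 'text/html'
console.log(mime.parameters.get('charset')) // 'utf-8'
console.log(serializeAMimeType(mime))       // 'text/html;charset=utf-8'
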
diff --git a/lib/fetch/file.js b/lib/fetch/file.js
new file mode 100644
index 0000000..3133d25
--- /dev/null
+++ b/lib/fetch/file.js
@@ -0,0 +1,344 @@
+'use strict'
+
+const { Blob, File: NativeFile } = require('buffer')
+const { types } = require('util')
+const { kState } = require('./symbols')
+const { isBlobLike } = require('./util')
+const { webidl } = require('./webidl')
+const { parseMIMEType, serializeAMimeType } = require('./dataURL')
+const { kEnumerableProperty } = require('../core/util')
+const encoder = new TextEncoder()
+
+class File extends Blob {
+ constructor (fileBits, fileName, options = {}) {
+ // The File constructor is invoked with two or three parameters, depending
+ // on whether the optional dictionary parameter is used. When the File()
+ // constructor is invoked, user agents must run the following steps:
+ webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' })
+
+ fileBits = webidl.converters['sequence<BlobPart>'](fileBits)
+ fileName = webidl.converters.USVString(fileName)
+ options = webidl.converters.FilePropertyBag(options)
+
+ // 1. Let bytes be the result of processing blob parts given fileBits and
+ // options.
+ // Note: Blob handles this for us
+
+ // 2. Let n be the fileName argument to the constructor.
+ const n = fileName
+
+ // 3. Process FilePropertyBag dictionary argument by running the following
+ // substeps:
+
+ // 1. If the type member is provided and is not the empty string, let t
+ // be set to the type dictionary member. If t contains any characters
+ // outside the range U+0020 to U+007E, then set t to the empty string
+ // and return from these substeps.
+ // 2. Convert every character in t to ASCII lowercase.
+ let t = options.type
+ let d
+
+ // eslint-disable-next-line no-labels
+ substep: {
+ if (t) {
+ t = parseMIMEType(t)
+
+ if (t === 'failure') {
+ t = ''
+ // eslint-disable-next-line no-labels
+ break substep
+ }
+
+ t = serializeAMimeType(t).toLowerCase()
+ }
+
+ // 3. If the lastModified member is provided, let d be set to the
+ // lastModified dictionary member. If it is not provided, set d to the
+ // current date and time represented as the number of milliseconds since
+ // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
+ d = options.lastModified
+ }
+
+ // 4. Return a new File object F such that:
+ // F refers to the bytes byte sequence.
+ // F.size is set to the number of total bytes in bytes.
+ // F.name is set to n.
+ // F.type is set to t.
+ // F.lastModified is set to d.
+
+ super(processBlobParts(fileBits, options), { type: t })
+ this[kState] = {
+ name: n,
+ lastModified: d,
+ type: t
+ }
+ }
+
+ get name () {
+ webidl.brandCheck(this, File)
+
+ return this[kState].name
+ }
+
+ get lastModified () {
+ webidl.brandCheck(this, File)
+
+ return this[kState].lastModified
+ }
+
+ get type () {
+ webidl.brandCheck(this, File)
+
+ return this[kState].type
+ }
+}
+
+class FileLike {
+ constructor (blobLike, fileName, options = {}) {
+ // TODO: argument idl type check
+
+ // The File constructor is invoked with two or three parameters, depending
+ // on whether the optional dictionary parameter is used. When the File()
+ // constructor is invoked, user agents must run the following steps:
+
+ // 1. Let bytes be the result of processing blob parts given fileBits and
+ // options.
+
+ // 2. Let n be the fileName argument to the constructor.
+ const n = fileName
+
+ // 3. Process FilePropertyBag dictionary argument by running the following
+ // substeps:
+
+ // 1. If the type member is provided and is not the empty string, let t
+ // be set to the type dictionary member. If t contains any characters
+ // outside the range U+0020 to U+007E, then set t to the empty string
+ // and return from these substeps.
+ // TODO
+ const t = options.type
+
+ // 2. Convert every character in t to ASCII lowercase.
+ // TODO
+
+ // 3. If the lastModified member is provided, let d be set to the
+ // lastModified dictionary member. If it is not provided, set d to the
+ // current date and time represented as the number of milliseconds since
+ // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
+ const d = options.lastModified ?? Date.now()
+
+ // 4. Return a new File object F such that:
+ // F refers to the bytes byte sequence.
+ // F.size is set to the number of total bytes in bytes.
+ // F.name is set to n.
+ // F.type is set to t.
+ // F.lastModified is set to d.
+
+ this[kState] = {
+ blobLike,
+ name: n,
+ type: t,
+ lastModified: d
+ }
+ }
+
+ stream (...args) {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.stream(...args)
+ }
+
+ arrayBuffer (...args) {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.arrayBuffer(...args)
+ }
+
+ slice (...args) {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.slice(...args)
+ }
+
+ text (...args) {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.text(...args)
+ }
+
+ get size () {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.size
+ }
+
+ get type () {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.type
+ }
+
+ get name () {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].name
+ }
+
+ get lastModified () {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].lastModified
+ }
+
+ get [Symbol.toStringTag] () {
+ return 'File'
+ }
+}
+
+Object.defineProperties(File.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'File',
+ configurable: true
+ },
+ name: kEnumerableProperty,
+ lastModified: kEnumerableProperty
+})
+
+webidl.converters.Blob = webidl.interfaceConverter(Blob)
+
+webidl.converters.BlobPart = function (V, opts) {
+ if (webidl.util.Type(V) === 'Object') {
+ if (isBlobLike(V)) {
+ return webidl.converters.Blob(V, { strict: false })
+ }
+
+ if (
+ ArrayBuffer.isView(V) ||
+ types.isAnyArrayBuffer(V)
+ ) {
+ return webidl.converters.BufferSource(V, opts)
+ }
+ }
+
+ return webidl.converters.USVString(V, opts)
+}
+
+webidl.converters['sequence<BlobPart>'] = webidl.sequenceConverter(
+ webidl.converters.BlobPart
+)
+
+// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag
+webidl.converters.FilePropertyBag = webidl.dictionaryConverter([
+ {
+ key: 'lastModified',
+ converter: webidl.converters['long long'],
+ get defaultValue () {
+ return Date.now()
+ }
+ },
+ {
+ key: 'type',
+ converter: webidl.converters.DOMString,
+ defaultValue: ''
+ },
+ {
+ key: 'endings',
+ converter: (value) => {
+ value = webidl.converters.DOMString(value)
+ value = value.toLowerCase()
+
+ if (value !== 'native') {
+ value = 'transparent'
+ }
+
+ return value
+ },
+ defaultValue: 'transparent'
+ }
+])
+
+/**
+ * @see https://www.w3.org/TR/FileAPI/#process-blob-parts
+ * @param {(NodeJS.TypedArray|Blob|string)[]} parts
+ * @param {{ type: string, endings: string }} options
+ */
+function processBlobParts (parts, options) {
+ // 1. Let bytes be an empty sequence of bytes.
+ /** @type {NodeJS.TypedArray[]} */
+ const bytes = []
+
+ // 2. For each element in parts:
+ for (const element of parts) {
+ // 1. If element is a USVString, run the following substeps:
+ if (typeof element === 'string') {
+ // 1. Let s be element.
+ let s = element
+
+ // 2. If the endings member of options is "native", set s
+ // to the result of converting line endings to native
+ // of element.
+ if (options.endings === 'native') {
+ s = convertLineEndingsNative(s)
+ }
+
+ // 3. Append the result of UTF-8 encoding s to bytes.
+ bytes.push(encoder.encode(s))
+ } else if (
+ types.isAnyArrayBuffer(element) ||
+ types.isTypedArray(element)
+ ) {
+ // 2. If element is a BufferSource, get a copy of the
+ // bytes held by the buffer source, and append those
+ // bytes to bytes.
+ if (!element.buffer) { // ArrayBuffer
+ bytes.push(new Uint8Array(element))
+ } else {
+ bytes.push(
+ new Uint8Array(element.buffer, element.byteOffset, element.byteLength)
+ )
+ }
+ } else if (isBlobLike(element)) {
+ // 3. If element is a Blob, append the bytes it represents
+ // to bytes.
+ bytes.push(element)
+ }
+ }
+
+ // 3. Return bytes.
+ return bytes
+}
+
+/**
+ * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native
+ * @param {string} s
+ */
+function convertLineEndingsNative (s) {
+  // 1. Let native line ending be the code point U+000A LF.
+ let nativeLineEnding = '\n'
+
+ // 2. If the underlying platform’s conventions are to
+ // represent newlines as a carriage return and line feed
+ // sequence, set native line ending to the code point
+ // U+000D CR followed by the code point U+000A LF.
+ if (process.platform === 'win32') {
+ nativeLineEnding = '\r\n'
+ }
+
+ return s.replace(/\r?\n/g, nativeLineEnding)
+}
+
+// If this function is moved to ./util.js, some tools (such as
+// rollup) will warn about circular dependencies. See:
+// https://github.com/nodejs/undici/issues/1629
+function isFileLike (object) {
+ return (
+ (NativeFile && object instanceof NativeFile) ||
+ object instanceof File || (
+ object &&
+ (typeof object.stream === 'function' ||
+ typeof object.arrayBuffer === 'function') &&
+ object[Symbol.toStringTag] === 'File'
+ )
+ )
+}
+
+module.exports = { File, FileLike, isFileLike }
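
A minimal sketch of constructing the File class added above (not part of the diff; it assumes the require path from this diff and Node's buffer Blob backing it, as in the imports at the top of the file):

const { File } = require('./lib/fetch/file')

const file = new File(['hello'], 'hello.txt', { type: 'text/plain' })
console.log(file.name, file.type, file.size) // hello.txt text/plain 5
file.text().then(console.log)                // 'hello' (inherited from Blob)
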
diff --git a/lib/fetch/formdata.js b/lib/fetch/formdata.js
new file mode 100644
index 0000000..5975e26
--- /dev/null
+++ b/lib/fetch/formdata.js
@@ -0,0 +1,265 @@
+'use strict'
+
+const { isBlobLike, toUSVString, makeIterator } = require('./util')
+const { kState } = require('./symbols')
+const { File: UndiciFile, FileLike, isFileLike } = require('./file')
+const { webidl } = require('./webidl')
+const { Blob, File: NativeFile } = require('buffer')
+
+/** @type {globalThis['File']} */
+const File = NativeFile ?? UndiciFile
+
+// https://xhr.spec.whatwg.org/#formdata
+class FormData {
+ constructor (form) {
+ if (form !== undefined) {
+ throw webidl.errors.conversionFailed({
+ prefix: 'FormData constructor',
+ argument: 'Argument 1',
+ types: ['undefined']
+ })
+ }
+
+ this[kState] = []
+ }
+
+ append (name, value, filename = undefined) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' })
+
+ if (arguments.length === 3 && !isBlobLike(value)) {
+ throw new TypeError(
+ "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'"
+ )
+ }
+
+ // 1. Let value be value if given; otherwise blobValue.
+
+ name = webidl.converters.USVString(name)
+ value = isBlobLike(value)
+ ? webidl.converters.Blob(value, { strict: false })
+ : webidl.converters.USVString(value)
+ filename = arguments.length === 3
+ ? webidl.converters.USVString(filename)
+ : undefined
+
+ // 2. Let entry be the result of creating an entry with
+ // name, value, and filename if given.
+ const entry = makeEntry(name, value, filename)
+
+ // 3. Append entry to this’s entry list.
+ this[kState].push(entry)
+ }
+
+ delete (name) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' })
+
+ name = webidl.converters.USVString(name)
+
+ // The delete(name) method steps are to remove all entries whose name
+ // is name from this’s entry list.
+ this[kState] = this[kState].filter(entry => entry.name !== name)
+ }
+
+ get (name) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' })
+
+ name = webidl.converters.USVString(name)
+
+ // 1. If there is no entry whose name is name in this’s entry list,
+ // then return null.
+ const idx = this[kState].findIndex((entry) => entry.name === name)
+ if (idx === -1) {
+ return null
+ }
+
+ // 2. Return the value of the first entry whose name is name from
+ // this’s entry list.
+ return this[kState][idx].value
+ }
+
+ getAll (name) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' })
+
+ name = webidl.converters.USVString(name)
+
+ // 1. If there is no entry whose name is name in this’s entry list,
+ // then return the empty list.
+ // 2. Return the values of all entries whose name is name, in order,
+ // from this’s entry list.
+ return this[kState]
+ .filter((entry) => entry.name === name)
+ .map((entry) => entry.value)
+ }
+
+ has (name) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' })
+
+ name = webidl.converters.USVString(name)
+
+ // The has(name) method steps are to return true if there is an entry
+ // whose name is name in this’s entry list; otherwise false.
+ return this[kState].findIndex((entry) => entry.name === name) !== -1
+ }
+
+ set (name, value, filename = undefined) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' })
+
+ if (arguments.length === 3 && !isBlobLike(value)) {
+ throw new TypeError(
+ "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'"
+ )
+ }
+
+ // The set(name, value) and set(name, blobValue, filename) method steps
+ // are:
+
+ // 1. Let value be value if given; otherwise blobValue.
+
+ name = webidl.converters.USVString(name)
+ value = isBlobLike(value)
+ ? webidl.converters.Blob(value, { strict: false })
+ : webidl.converters.USVString(value)
+ filename = arguments.length === 3
+ ? toUSVString(filename)
+ : undefined
+
+ // 2. Let entry be the result of creating an entry with name, value, and
+ // filename if given.
+ const entry = makeEntry(name, value, filename)
+
+ // 3. If there are entries in this’s entry list whose name is name, then
+ // replace the first such entry with entry and remove the others.
+ const idx = this[kState].findIndex((entry) => entry.name === name)
+ if (idx !== -1) {
+ this[kState] = [
+ ...this[kState].slice(0, idx),
+ entry,
+ ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name)
+ ]
+ } else {
+ // 4. Otherwise, append entry to this’s entry list.
+ this[kState].push(entry)
+ }
+ }
+
+ entries () {
+ webidl.brandCheck(this, FormData)
+
+ return makeIterator(
+ () => this[kState].map(pair => [pair.name, pair.value]),
+ 'FormData',
+ 'key+value'
+ )
+ }
+
+ keys () {
+ webidl.brandCheck(this, FormData)
+
+ return makeIterator(
+ () => this[kState].map(pair => [pair.name, pair.value]),
+ 'FormData',
+ 'key'
+ )
+ }
+
+ values () {
+ webidl.brandCheck(this, FormData)
+
+ return makeIterator(
+ () => this[kState].map(pair => [pair.name, pair.value]),
+ 'FormData',
+ 'value'
+ )
+ }
+
+ /**
+ * @param {(value: string, key: string, self: FormData) => void} callbackFn
+ * @param {unknown} thisArg
+ */
+ forEach (callbackFn, thisArg = globalThis) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' })
+
+ if (typeof callbackFn !== 'function') {
+ throw new TypeError(
+ "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'."
+ )
+ }
+
+ for (const [key, value] of this) {
+ callbackFn.apply(thisArg, [value, key, this])
+ }
+ }
+}
+
+FormData.prototype[Symbol.iterator] = FormData.prototype.entries
+
+Object.defineProperties(FormData.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'FormData',
+ configurable: true
+ }
+})
+
+/**
+ * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry
+ * @param {string} name
+ * @param {string|Blob} value
+ * @param {?string} filename
+ * @returns
+ */
+function makeEntry (name, value, filename) {
+ // 1. Set name to the result of converting name into a scalar value string.
+ // "To convert a string into a scalar value string, replace any surrogates
+ // with U+FFFD."
+ // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end
+ name = Buffer.from(name).toString('utf8')
+
+ // 2. If value is a string, then set value to the result of converting
+ // value into a scalar value string.
+ if (typeof value === 'string') {
+ value = Buffer.from(value).toString('utf8')
+ } else {
+ // 3. Otherwise:
+
+ // 1. If value is not a File object, then set value to a new File object,
+ // representing the same bytes, whose name attribute value is "blob"
+ if (!isFileLike(value)) {
+ value = value instanceof Blob
+ ? new File([value], 'blob', { type: value.type })
+ : new FileLike(value, 'blob', { type: value.type })
+ }
+
+ // 2. If filename is given, then set value to a new File object,
+ // representing the same bytes, whose name attribute is filename.
+ if (filename !== undefined) {
+ /** @type {FilePropertyBag} */
+ const options = {
+ type: value.type,
+ lastModified: value.lastModified
+ }
+
+ value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile
+ ? new File([value], filename, options)
+ : new FileLike(value, filename, options)
+ }
+ }
+
+ // 4. Return an entry whose name is name and whose value is value.
+ return { name, value }
+}
+
+module.exports = { FormData }
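
A minimal sketch of the entry handling above (not part of the diff; require paths assume the layout in this diff). String values stay strings, while Blob-like values are normalized into File/FileLike entries by makeEntry:

const { FormData } = require('./lib/fetch/formdata')
const { File } = require('./lib/fetch/file')

const fd = new FormData()
fd.append('name', 'undici')
fd.append('upload', new File(['abc'], 'a.txt', { type: 'text/plain' }))

console.log(fd.get('name'))              // 'undici'
console.log(fd.getAll('upload')[0].name) // 'a.txt'
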
diff --git a/lib/fetch/global.js b/lib/fetch/global.js
new file mode 100644
index 0000000..1df6f12
--- /dev/null
+++ b/lib/fetch/global.js
@@ -0,0 +1,40 @@
+'use strict'
+
+// In case of breaking changes, increase the version
+// number to avoid conflicts.
+const globalOrigin = Symbol.for('undici.globalOrigin.1')
+
+function getGlobalOrigin () {
+ return globalThis[globalOrigin]
+}
+
+function setGlobalOrigin (newOrigin) {
+ if (newOrigin === undefined) {
+ Object.defineProperty(globalThis, globalOrigin, {
+ value: undefined,
+ writable: true,
+ enumerable: false,
+ configurable: false
+ })
+
+ return
+ }
+
+ const parsedURL = new URL(newOrigin)
+
+ if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') {
+ throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`)
+ }
+
+ Object.defineProperty(globalThis, globalOrigin, {
+ value: parsedURL,
+ writable: true,
+ enumerable: false,
+ configurable: false
+ })
+}
+
+module.exports = {
+ getGlobalOrigin,
+ setGlobalOrigin
+}
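
A minimal sketch of the global-origin helpers above (not part of the diff). Only http: and https: origins are accepted, and passing undefined clears the stored value:

const { setGlobalOrigin, getGlobalOrigin } = require('./lib/fetch/global')

setGlobalOrigin('https://example.com')
console.log(getGlobalOrigin().origin) // 'https://example.com'

setGlobalOrigin(undefined)            // resets the per-process origin
console.log(getGlobalOrigin())        // undefined
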
diff --git a/lib/fetch/headers.js b/lib/fetch/headers.js
new file mode 100644
index 0000000..2f1c0be
--- /dev/null
+++ b/lib/fetch/headers.js
@@ -0,0 +1,589 @@
+// https://github.com/Ethan-Arrowood/undici-fetch
+
+'use strict'
+
+const { kHeadersList, kConstruct } = require('../core/symbols')
+const { kGuard } = require('./symbols')
+const { kEnumerableProperty } = require('../core/util')
+const {
+ makeIterator,
+ isValidHeaderName,
+ isValidHeaderValue
+} = require('./util')
+const { webidl } = require('./webidl')
+const assert = require('assert')
+
+const kHeadersMap = Symbol('headers map')
+const kHeadersSortedMap = Symbol('headers map sorted')
+
+/**
+ * @param {number} code
+ */
+function isHTTPWhiteSpaceCharCode (code) {
+ return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize
+ * @param {string} potentialValue
+ */
+function headerValueNormalize (potentialValue) {
+ // To normalize a byte sequence potentialValue, remove
+ // any leading and trailing HTTP whitespace bytes from
+ // potentialValue.
+ let i = 0; let j = potentialValue.length
+
+ while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j
+ while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i
+
+ return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j)
+}
+
+function fill (headers, object) {
+ // To fill a Headers object headers with a given object object, run these steps:
+
+ // 1. If object is a sequence, then for each header in object:
+ // Note: webidl conversion to array has already been done.
+ if (Array.isArray(object)) {
+ for (let i = 0; i < object.length; ++i) {
+ const header = object[i]
+ // 1. If header does not contain exactly two items, then throw a TypeError.
+ if (header.length !== 2) {
+ throw webidl.errors.exception({
+ header: 'Headers constructor',
+ message: `expected name/value pair to be length 2, found ${header.length}.`
+ })
+ }
+
+ // 2. Append (header’s first item, header’s second item) to headers.
+ appendHeader(headers, header[0], header[1])
+ }
+ } else if (typeof object === 'object' && object !== null) {
+ // Note: null should throw
+
+ // 2. Otherwise, object is a record, then for each key → value in object,
+ // append (key, value) to headers
+ const keys = Object.keys(object)
+ for (let i = 0; i < keys.length; ++i) {
+ appendHeader(headers, keys[i], object[keys[i]])
+ }
+ } else {
+ throw webidl.errors.conversionFailed({
+ prefix: 'Headers constructor',
+ argument: 'Argument 1',
+ types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
+ })
+ }
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-headers-append
+ */
+function appendHeader (headers, name, value) {
+ // 1. Normalize value.
+ value = headerValueNormalize(value)
+
+ // 2. If name is not a header name or value is not a
+ // header value, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.append',
+ value: name,
+ type: 'header name'
+ })
+ } else if (!isValidHeaderValue(value)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.append',
+ value,
+ type: 'header value'
+ })
+ }
+
+ // 3. If headers’s guard is "immutable", then throw a TypeError.
+ // 4. Otherwise, if headers’s guard is "request" and name is a
+ // forbidden header name, return.
+ // Note: undici does not implement forbidden header names
+ if (headers[kGuard] === 'immutable') {
+ throw new TypeError('immutable')
+ } else if (headers[kGuard] === 'request-no-cors') {
+ // 5. Otherwise, if headers’s guard is "request-no-cors":
+ // TODO
+ }
+
+ // 6. Otherwise, if headers’s guard is "response" and name is a
+ // forbidden response-header name, return.
+
+ // 7. Append (name, value) to headers’s header list.
+ return headers[kHeadersList].append(name, value)
+
+ // 8. If headers’s guard is "request-no-cors", then remove
+ // privileged no-CORS request headers from headers
+}
+
+class HeadersList {
+  /** @type {string[]|null} */
+ cookies = null
+
+ constructor (init) {
+ if (init instanceof HeadersList) {
+ this[kHeadersMap] = new Map(init[kHeadersMap])
+ this[kHeadersSortedMap] = init[kHeadersSortedMap]
+ this.cookies = init.cookies === null ? null : [...init.cookies]
+ } else {
+ this[kHeadersMap] = new Map(init)
+ this[kHeadersSortedMap] = null
+ }
+ }
+
+ // https://fetch.spec.whatwg.org/#header-list-contains
+ contains (name) {
+ // A header list list contains a header name name if list
+ // contains a header whose name is a byte-case-insensitive
+ // match for name.
+ name = name.toLowerCase()
+
+ return this[kHeadersMap].has(name)
+ }
+
+ clear () {
+ this[kHeadersMap].clear()
+ this[kHeadersSortedMap] = null
+ this.cookies = null
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-append
+ append (name, value) {
+ this[kHeadersSortedMap] = null
+
+ // 1. If list contains name, then set name to the first such
+ // header’s name.
+ const lowercaseName = name.toLowerCase()
+ const exists = this[kHeadersMap].get(lowercaseName)
+
+ // 2. Append (name, value) to list.
+ if (exists) {
+ const delimiter = lowercaseName === 'cookie' ? '; ' : ', '
+ this[kHeadersMap].set(lowercaseName, {
+ name: exists.name,
+ value: `${exists.value}${delimiter}${value}`
+ })
+ } else {
+ this[kHeadersMap].set(lowercaseName, { name, value })
+ }
+
+ if (lowercaseName === 'set-cookie') {
+ this.cookies ??= []
+ this.cookies.push(value)
+ }
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-set
+ set (name, value) {
+ this[kHeadersSortedMap] = null
+ const lowercaseName = name.toLowerCase()
+
+ if (lowercaseName === 'set-cookie') {
+ this.cookies = [value]
+ }
+
+ // 1. If list contains name, then set the value of
+ // the first such header to value and remove the
+ // others.
+ // 2. Otherwise, append header (name, value) to list.
+ this[kHeadersMap].set(lowercaseName, { name, value })
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-delete
+ delete (name) {
+ this[kHeadersSortedMap] = null
+
+ name = name.toLowerCase()
+
+ if (name === 'set-cookie') {
+ this.cookies = null
+ }
+
+ this[kHeadersMap].delete(name)
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-get
+ get (name) {
+ const value = this[kHeadersMap].get(name.toLowerCase())
+
+ // 1. If list does not contain name, then return null.
+ // 2. Return the values of all headers in list whose name
+ // is a byte-case-insensitive match for name,
+ // separated from each other by 0x2C 0x20, in order.
+ return value === undefined ? null : value.value
+ }
+
+ * [Symbol.iterator] () {
+ // use the lowercased name
+ for (const [name, { value }] of this[kHeadersMap]) {
+ yield [name, value]
+ }
+ }
+
+ get entries () {
+ const headers = {}
+
+ if (this[kHeadersMap].size) {
+ for (const { name, value } of this[kHeadersMap].values()) {
+ headers[name] = value
+ }
+ }
+
+ return headers
+ }
+}
+
+// https://fetch.spec.whatwg.org/#headers-class
+class Headers {
+ constructor (init = undefined) {
+ if (init === kConstruct) {
+ return
+ }
+ this[kHeadersList] = new HeadersList()
+
+ // The new Headers(init) constructor steps are:
+
+ // 1. Set this’s guard to "none".
+ this[kGuard] = 'none'
+
+ // 2. If init is given, then fill this with init.
+ if (init !== undefined) {
+ init = webidl.converters.HeadersInit(init)
+ fill(this, init)
+ }
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-append
+ append (name, value) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' })
+
+ name = webidl.converters.ByteString(name)
+ value = webidl.converters.ByteString(value)
+
+ return appendHeader(this, name, value)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-delete
+ delete (name) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' })
+
+ name = webidl.converters.ByteString(name)
+
+ // 1. If name is not a header name, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.delete',
+ value: name,
+ type: 'header name'
+ })
+ }
+
+ // 2. If this’s guard is "immutable", then throw a TypeError.
+ // 3. Otherwise, if this’s guard is "request" and name is a
+ // forbidden header name, return.
+ // 4. Otherwise, if this’s guard is "request-no-cors", name
+ // is not a no-CORS-safelisted request-header name, and
+ // name is not a privileged no-CORS request-header name,
+ // return.
+ // 5. Otherwise, if this’s guard is "response" and name is
+ // a forbidden response-header name, return.
+ // Note: undici does not implement forbidden header names
+ if (this[kGuard] === 'immutable') {
+ throw new TypeError('immutable')
+ } else if (this[kGuard] === 'request-no-cors') {
+ // TODO
+ }
+
+ // 6. If this’s header list does not contain name, then
+ // return.
+ if (!this[kHeadersList].contains(name)) {
+ return
+ }
+
+ // 7. Delete name from this’s header list.
+ // 8. If this’s guard is "request-no-cors", then remove
+ // privileged no-CORS request headers from this.
+ this[kHeadersList].delete(name)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-get
+ get (name) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' })
+
+ name = webidl.converters.ByteString(name)
+
+ // 1. If name is not a header name, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.get',
+ value: name,
+ type: 'header name'
+ })
+ }
+
+ // 2. Return the result of getting name from this’s header
+ // list.
+ return this[kHeadersList].get(name)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-has
+ has (name) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' })
+
+ name = webidl.converters.ByteString(name)
+
+ // 1. If name is not a header name, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.has',
+ value: name,
+ type: 'header name'
+ })
+ }
+
+ // 2. Return true if this’s header list contains name;
+ // otherwise false.
+ return this[kHeadersList].contains(name)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-set
+ set (name, value) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' })
+
+ name = webidl.converters.ByteString(name)
+ value = webidl.converters.ByteString(value)
+
+ // 1. Normalize value.
+ value = headerValueNormalize(value)
+
+ // 2. If name is not a header name or value is not a
+ // header value, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.set',
+ value: name,
+ type: 'header name'
+ })
+ } else if (!isValidHeaderValue(value)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.set',
+ value,
+ type: 'header value'
+ })
+ }
+
+ // 3. If this’s guard is "immutable", then throw a TypeError.
+ // 4. Otherwise, if this’s guard is "request" and name is a
+ // forbidden header name, return.
+ // 5. Otherwise, if this’s guard is "request-no-cors" and
+ // name/value is not a no-CORS-safelisted request-header,
+ // return.
+ // 6. Otherwise, if this’s guard is "response" and name is a
+ // forbidden response-header name, return.
+ // Note: undici does not implement forbidden header names
+ if (this[kGuard] === 'immutable') {
+ throw new TypeError('immutable')
+ } else if (this[kGuard] === 'request-no-cors') {
+ // TODO
+ }
+
+ // 7. Set (name, value) in this’s header list.
+ // 8. If this’s guard is "request-no-cors", then remove
+ // privileged no-CORS request headers from this
+ this[kHeadersList].set(name, value)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
+ getSetCookie () {
+ webidl.brandCheck(this, Headers)
+
+ // 1. If this’s header list does not contain `Set-Cookie`, then return « ».
+ // 2. Return the values of all headers in this’s header list whose name is
+ // a byte-case-insensitive match for `Set-Cookie`, in order.
+
+ const list = this[kHeadersList].cookies
+
+ if (list) {
+ return [...list]
+ }
+
+ return []
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
+ get [kHeadersSortedMap] () {
+ if (this[kHeadersList][kHeadersSortedMap]) {
+ return this[kHeadersList][kHeadersSortedMap]
+ }
+
+ // 1. Let headers be an empty list of headers with the key being the name
+ // and value the value.
+ const headers = []
+
+ // 2. Let names be the result of convert header names to a sorted-lowercase
+ // set with all the names of the headers in list.
+ const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)
+ const cookies = this[kHeadersList].cookies
+
+ // 3. For each name of names:
+ for (let i = 0; i < names.length; ++i) {
+ const [name, value] = names[i]
+ // 1. If name is `set-cookie`, then:
+ if (name === 'set-cookie') {
+ // 1. Let values be a list of all values of headers in list whose name
+ // is a byte-case-insensitive match for name, in order.
+
+ // 2. For each value of values:
+ // 1. Append (name, value) to headers.
+ for (let j = 0; j < cookies.length; ++j) {
+ headers.push([name, cookies[j]])
+ }
+ } else {
+ // 2. Otherwise:
+
+ // 1. Let value be the result of getting name from list.
+
+ // 2. Assert: value is non-null.
+ assert(value !== null)
+
+ // 3. Append (name, value) to headers.
+ headers.push([name, value])
+ }
+ }
+
+ this[kHeadersList][kHeadersSortedMap] = headers
+
+ // 4. Return headers.
+ return headers
+ }
+
+ keys () {
+ webidl.brandCheck(this, Headers)
+
+ if (this[kGuard] === 'immutable') {
+ const value = this[kHeadersSortedMap]
+ return makeIterator(() => value, 'Headers',
+ 'key')
+ }
+
+ return makeIterator(
+ () => [...this[kHeadersSortedMap].values()],
+ 'Headers',
+ 'key'
+ )
+ }
+
+ values () {
+ webidl.brandCheck(this, Headers)
+
+ if (this[kGuard] === 'immutable') {
+ const value = this[kHeadersSortedMap]
+ return makeIterator(() => value, 'Headers',
+ 'value')
+ }
+
+ return makeIterator(
+ () => [...this[kHeadersSortedMap].values()],
+ 'Headers',
+ 'value'
+ )
+ }
+
+ entries () {
+ webidl.brandCheck(this, Headers)
+
+ if (this[kGuard] === 'immutable') {
+ const value = this[kHeadersSortedMap]
+ return makeIterator(() => value, 'Headers',
+ 'key+value')
+ }
+
+ return makeIterator(
+ () => [...this[kHeadersSortedMap].values()],
+ 'Headers',
+ 'key+value'
+ )
+ }
+
+ /**
+ * @param {(value: string, key: string, self: Headers) => void} callbackFn
+ * @param {unknown} thisArg
+ */
+ forEach (callbackFn, thisArg = globalThis) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })
+
+ if (typeof callbackFn !== 'function') {
+ throw new TypeError(
+ "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'."
+ )
+ }
+
+ for (const [key, value] of this) {
+ callbackFn.apply(thisArg, [value, key, this])
+ }
+ }
+
+ [Symbol.for('nodejs.util.inspect.custom')] () {
+ webidl.brandCheck(this, Headers)
+
+ return this[kHeadersList]
+ }
+}
+
+Headers.prototype[Symbol.iterator] = Headers.prototype.entries
+
+Object.defineProperties(Headers.prototype, {
+ append: kEnumerableProperty,
+ delete: kEnumerableProperty,
+ get: kEnumerableProperty,
+ has: kEnumerableProperty,
+ set: kEnumerableProperty,
+ getSetCookie: kEnumerableProperty,
+ keys: kEnumerableProperty,
+ values: kEnumerableProperty,
+ entries: kEnumerableProperty,
+ forEach: kEnumerableProperty,
+ [Symbol.iterator]: { enumerable: false },
+ [Symbol.toStringTag]: {
+ value: 'Headers',
+ configurable: true
+ }
+})
+
+webidl.converters.HeadersInit = function (V) {
+ if (webidl.util.Type(V) === 'Object') {
+ if (V[Symbol.iterator]) {
+ return webidl.converters['sequence<sequence<ByteString>>'](V)
+ }
+
+ return webidl.converters['record<ByteString, ByteString>'](V)
+ }
+
+ throw webidl.errors.conversionFailed({
+ prefix: 'Headers constructor',
+ argument: 'Argument 1',
+ types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
+ })
+}
+
+module.exports = {
+ fill,
+ Headers,
+ HeadersList
+}
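
A minimal sketch of the Headers behaviour above (not part of the diff; the require path assumes this layout). Names are matched byte-case-insensitively, repeated Set-Cookie values are kept separately for getSetCookie(), and iteration yields the sorted, lowercased name/value list:

const { Headers } = require('./lib/fetch/headers')

const h = new Headers([['Accept', 'text/html']])
h.append('Set-Cookie', 'a=1')
h.append('Set-Cookie', 'b=2')

console.log(h.get('accept'))  // 'text/html'
console.log(h.getSetCookie()) // [ 'a=1', 'b=2' ]
console.log([...h])           // [ ['accept','text/html'], ['set-cookie','a=1'], ['set-cookie','b=2'] ]
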
diff --git a/lib/fetch/index.js b/lib/fetch/index.js
new file mode 100644
index 0000000..17c3d87
--- /dev/null
+++ b/lib/fetch/index.js
@@ -0,0 +1,2145 @@
+// https://github.com/Ethan-Arrowood/undici-fetch
+
+'use strict'
+
+const {
+ Response,
+ makeNetworkError,
+ makeAppropriateNetworkError,
+ filterResponse,
+ makeResponse
+} = require('./response')
+const { Headers } = require('./headers')
+const { Request, makeRequest } = require('./request')
+const zlib = require('zlib')
+const {
+ bytesMatch,
+ makePolicyContainer,
+ clonePolicyContainer,
+ requestBadPort,
+ TAOCheck,
+ appendRequestOriginHeader,
+ responseLocationURL,
+ requestCurrentURL,
+ setRequestReferrerPolicyOnRedirect,
+ tryUpgradeRequestToAPotentiallyTrustworthyURL,
+ createOpaqueTimingInfo,
+ appendFetchMetadata,
+ corsCheck,
+ crossOriginResourcePolicyCheck,
+ determineRequestsReferrer,
+ coarsenedSharedCurrentTime,
+ createDeferredPromise,
+ isBlobLike,
+ sameOrigin,
+ isCancelled,
+ isAborted,
+ isErrorLike,
+ fullyReadBody,
+ readableStreamClose,
+ isomorphicEncode,
+ urlIsLocal,
+ urlIsHttpHttpsScheme,
+ urlHasHttpsScheme
+} = require('./util')
+const { kState, kHeaders, kGuard, kRealm } = require('./symbols')
+const assert = require('assert')
+const { safelyExtractBody } = require('./body')
+const {
+ redirectStatusSet,
+ nullBodyStatus,
+ safeMethodsSet,
+ requestBodyHeader,
+ subresourceSet,
+ DOMException
+} = require('./constants')
+const { kHeadersList } = require('../core/symbols')
+const EE = require('events')
+const { Readable, pipeline } = require('stream')
+const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = require('../core/util')
+const { dataURLProcessor, serializeAMimeType } = require('./dataURL')
+const { TransformStream } = require('stream/web')
+const { getGlobalDispatcher } = require('../global')
+const { webidl } = require('./webidl')
+const { STATUS_CODES } = require('http')
+const GET_OR_HEAD = ['GET', 'HEAD']
+
+/** @type {import('buffer').resolveObjectURL} */
+let resolveObjectURL
+let ReadableStream = globalThis.ReadableStream
+
+class Fetch extends EE {
+ constructor (dispatcher) {
+ super()
+
+ this.dispatcher = dispatcher
+ this.connection = null
+ this.dump = false
+ this.state = 'ongoing'
+ // 2 terminated listeners get added per request,
+ // but only 1 gets removed. If there are 20 redirects,
+ // 21 listeners will be added.
+ // See https://github.com/nodejs/undici/issues/1711
+ // TODO (fix): Find and fix root cause for leaked listener.
+ this.setMaxListeners(21)
+ }
+
+ terminate (reason) {
+ if (this.state !== 'ongoing') {
+ return
+ }
+
+ this.state = 'terminated'
+ this.connection?.destroy(reason)
+ this.emit('terminated', reason)
+ }
+
+ // https://fetch.spec.whatwg.org/#fetch-controller-abort
+ abort (error) {
+ if (this.state !== 'ongoing') {
+ return
+ }
+
+ // 1. Set controller’s state to "aborted".
+ this.state = 'aborted'
+
+ // 2. Let fallbackError be an "AbortError" DOMException.
+ // 3. Set error to fallbackError if it is not given.
+ if (!error) {
+ error = new DOMException('The operation was aborted.', 'AbortError')
+ }
+
+ // 4. Let serializedError be StructuredSerialize(error).
+ // If that threw an exception, catch it, and let
+ // serializedError be StructuredSerialize(fallbackError).
+
+ // 5. Set controller’s serialized abort reason to serializedError.
+ this.serializedAbortReason = error
+
+ this.connection?.destroy(error)
+ this.emit('terminated', error)
+ }
+}
+
+// https://fetch.spec.whatwg.org/#fetch-method
+function fetch (input, init = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' })
+
+ // 1. Let p be a new promise.
+ const p = createDeferredPromise()
+
+ // 2. Let requestObject be the result of invoking the initial value of
+ // Request as constructor with input and init as arguments. If this throws
+ // an exception, reject p with it and return p.
+ let requestObject
+
+ try {
+ requestObject = new Request(input, init)
+ } catch (e) {
+ p.reject(e)
+ return p.promise
+ }
+
+ // 3. Let request be requestObject’s request.
+ const request = requestObject[kState]
+
+ // 4. If requestObject’s signal’s aborted flag is set, then:
+ if (requestObject.signal.aborted) {
+ // 1. Abort the fetch() call with p, request, null, and
+ // requestObject’s signal’s abort reason.
+ abortFetch(p, request, null, requestObject.signal.reason)
+
+ // 2. Return p.
+ return p.promise
+ }
+
+ // 5. Let globalObject be request’s client’s global object.
+ const globalObject = request.client.globalObject
+
+ // 6. If globalObject is a ServiceWorkerGlobalScope object, then set
+ // request’s service-workers mode to "none".
+ if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') {
+ request.serviceWorkers = 'none'
+ }
+
+ // 7. Let responseObject be null.
+ let responseObject = null
+
+ // 8. Let relevantRealm be this’s relevant Realm.
+ const relevantRealm = null
+
+ // 9. Let locallyAborted be false.
+ let locallyAborted = false
+
+ // 10. Let controller be null.
+ let controller = null
+
+ // 11. Add the following abort steps to requestObject’s signal:
+ addAbortListener(
+ requestObject.signal,
+ () => {
+ // 1. Set locallyAborted to true.
+ locallyAborted = true
+
+ // 2. Assert: controller is non-null.
+ assert(controller != null)
+
+ // 3. Abort controller with requestObject’s signal’s abort reason.
+ controller.abort(requestObject.signal.reason)
+
+ // 4. Abort the fetch() call with p, request, responseObject,
+ // and requestObject’s signal’s abort reason.
+ abortFetch(p, request, responseObject, requestObject.signal.reason)
+ }
+ )
+
+ // 12. Let handleFetchDone given response response be to finalize and
+ // report timing with response, globalObject, and "fetch".
+ const handleFetchDone = (response) =>
+ finalizeAndReportTiming(response, 'fetch')
+
+ // 13. Set controller to the result of calling fetch given request,
+ // with processResponseEndOfBody set to handleFetchDone, and processResponse
+ // given response being these substeps:
+
+ const processResponse = (response) => {
+ // 1. If locallyAborted is true, terminate these substeps.
+ if (locallyAborted) {
+ return Promise.resolve()
+ }
+
+ // 2. If response’s aborted flag is set, then:
+ if (response.aborted) {
+ // 1. Let deserializedError be the result of deserialize a serialized
+ // abort reason given controller’s serialized abort reason and
+ // relevantRealm.
+
+ // 2. Abort the fetch() call with p, request, responseObject, and
+ // deserializedError.
+
+ abortFetch(p, request, responseObject, controller.serializedAbortReason)
+ return Promise.resolve()
+ }
+
+ // 3. If response is a network error, then reject p with a TypeError
+ // and terminate these substeps.
+ if (response.type === 'error') {
+ p.reject(
+ Object.assign(new TypeError('fetch failed'), { cause: response.error })
+ )
+ return Promise.resolve()
+ }
+
+ // 4. Set responseObject to the result of creating a Response object,
+ // given response, "immutable", and relevantRealm.
+ responseObject = new Response()
+ responseObject[kState] = response
+ responseObject[kRealm] = relevantRealm
+ responseObject[kHeaders][kHeadersList] = response.headersList
+ responseObject[kHeaders][kGuard] = 'immutable'
+ responseObject[kHeaders][kRealm] = relevantRealm
+
+ // 5. Resolve p with responseObject.
+ p.resolve(responseObject)
+ }
+
+ controller = fetching({
+ request,
+ processResponseEndOfBody: handleFetchDone,
+ processResponse,
+ dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici
+ })
+
+ // 14. Return p.
+ return p.promise
+}
+
+// https://fetch.spec.whatwg.org/#finalize-and-report-timing
+function finalizeAndReportTiming (response, initiatorType = 'other') {
+ // 1. If response is an aborted network error, then return.
+ if (response.type === 'error' && response.aborted) {
+ return
+ }
+
+ // 2. If response’s URL list is null or empty, then return.
+ if (!response.urlList?.length) {
+ return
+ }
+
+ // 3. Let originalURL be response’s URL list[0].
+ const originalURL = response.urlList[0]
+
+ // 4. Let timingInfo be response’s timing info.
+ let timingInfo = response.timingInfo
+
+ // 5. Let cacheState be response’s cache state.
+ let cacheState = response.cacheState
+
+ // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return.
+ if (!urlIsHttpHttpsScheme(originalURL)) {
+ return
+ }
+
+ // 7. If timingInfo is null, then return.
+ if (timingInfo === null) {
+ return
+ }
+
+ // 8. If response’s timing allow passed flag is not set, then:
+ if (!response.timingAllowPassed) {
+    // 1. Set timingInfo to the result of creating an opaque timing info for timingInfo.
+ timingInfo = createOpaqueTimingInfo({
+ startTime: timingInfo.startTime
+ })
+
+ // 2. Set cacheState to the empty string.
+ cacheState = ''
+ }
+
+ // 9. Set timingInfo’s end time to the coarsened shared current time
+ // given global’s relevant settings object’s cross-origin isolated
+ // capability.
+ // TODO: given global’s relevant settings object’s cross-origin isolated
+ // capability?
+ timingInfo.endTime = coarsenedSharedCurrentTime()
+
+ // 10. Set response’s timing info to timingInfo.
+ response.timingInfo = timingInfo
+
+ // 11. Mark resource timing for timingInfo, originalURL, initiatorType,
+ // global, and cacheState.
+ markResourceTiming(
+ timingInfo,
+ originalURL,
+ initiatorType,
+ globalThis,
+ cacheState
+ )
+}
+
+// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing
+function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {
+ if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) {
+ performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState)
+ }
+}
+
+// https://fetch.spec.whatwg.org/#abort-fetch
+function abortFetch (p, request, responseObject, error) {
+ // Note: AbortSignal.reason was added in node v17.2.0
+ // which would give us an undefined error to reject with.
+ // Remove this once node v16 is no longer supported.
+ if (!error) {
+ error = new DOMException('The operation was aborted.', 'AbortError')
+ }
+
+ // 1. Reject promise with error.
+ p.reject(error)
+
+ // 2. If request’s body is not null and is readable, then cancel request’s
+ // body with error.
+ if (request.body != null && isReadable(request.body?.stream)) {
+ request.body.stream.cancel(error).catch((err) => {
+ if (err.code === 'ERR_INVALID_STATE') {
+ // Node bug?
+ return
+ }
+ throw err
+ })
+ }
+
+ // 3. If responseObject is null, then return.
+ if (responseObject == null) {
+ return
+ }
+
+ // 4. Let response be responseObject’s response.
+ const response = responseObject[kState]
+
+ // 5. If response’s body is not null and is readable, then error response’s
+ // body with error.
+ if (response.body != null && isReadable(response.body?.stream)) {
+ response.body.stream.cancel(error).catch((err) => {
+ if (err.code === 'ERR_INVALID_STATE') {
+ // Node bug?
+ return
+ }
+ throw err
+ })
+ }
+}
+
+// https://fetch.spec.whatwg.org/#fetching
+function fetching ({
+ request,
+ processRequestBodyChunkLength,
+ processRequestEndOfBody,
+ processResponse,
+ processResponseEndOfBody,
+ processResponseConsumeBody,
+ useParallelQueue = false,
+ dispatcher // undici
+}) {
+ // 1. Let taskDestination be null.
+ let taskDestination = null
+
+ // 2. Let crossOriginIsolatedCapability be false.
+ let crossOriginIsolatedCapability = false
+
+ // 3. If request’s client is non-null, then:
+ if (request.client != null) {
+ // 1. Set taskDestination to request’s client’s global object.
+ taskDestination = request.client.globalObject
+
+ // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin
+ // isolated capability.
+ crossOriginIsolatedCapability =
+ request.client.crossOriginIsolatedCapability
+ }
+
+ // 4. If useParallelQueue is true, then set taskDestination to the result of
+ // starting a new parallel queue.
+ // TODO
+
+ // 5. Let timingInfo be a new fetch timing info whose start time and
+ // post-redirect start time are the coarsened shared current time given
+ // crossOriginIsolatedCapability.
+  const currentTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability)
+  const timingInfo = createOpaqueTimingInfo({
+    startTime: currentTime
+ })
+
+ // 6. Let fetchParams be a new fetch params whose
+ // request is request,
+ // timing info is timingInfo,
+ // process request body chunk length is processRequestBodyChunkLength,
+ // process request end-of-body is processRequestEndOfBody,
+ // process response is processResponse,
+ // process response consume body is processResponseConsumeBody,
+ // process response end-of-body is processResponseEndOfBody,
+ // task destination is taskDestination,
+ // and cross-origin isolated capability is crossOriginIsolatedCapability.
+ const fetchParams = {
+ controller: new Fetch(dispatcher),
+ request,
+ timingInfo,
+ processRequestBodyChunkLength,
+ processRequestEndOfBody,
+ processResponse,
+ processResponseConsumeBody,
+ processResponseEndOfBody,
+ taskDestination,
+ crossOriginIsolatedCapability
+ }
+
+ // 7. If request’s body is a byte sequence, then set request’s body to
+ // request’s body as a body.
+ // NOTE: Since fetching is only called from fetch, body should already be
+ // extracted.
+ assert(!request.body || request.body.stream)
+
+ // 8. If request’s window is "client", then set request’s window to request’s
+ // client, if request’s client’s global object is a Window object; otherwise
+ // "no-window".
+ if (request.window === 'client') {
+ // TODO: What if request.client is null?
+ request.window =
+ request.client?.globalObject?.constructor?.name === 'Window'
+ ? request.client
+ : 'no-window'
+ }
+
+ // 9. If request’s origin is "client", then set request’s origin to request’s
+ // client’s origin.
+ if (request.origin === 'client') {
+ // TODO: What if request.client is null?
+ request.origin = request.client?.origin
+ }
+
+ // 10. If all of the following conditions are true:
+ // TODO
+
+ // 11. If request’s policy container is "client", then:
+ if (request.policyContainer === 'client') {
+ // 1. If request’s client is non-null, then set request’s policy
+ // container to a clone of request’s client’s policy container. [HTML]
+ if (request.client != null) {
+ request.policyContainer = clonePolicyContainer(
+ request.client.policyContainer
+ )
+ } else {
+ // 2. Otherwise, set request’s policy container to a new policy
+ // container.
+ request.policyContainer = makePolicyContainer()
+ }
+ }
+
+ // 12. If request’s header list does not contain `Accept`, then:
+ if (!request.headersList.contains('accept')) {
+ // 1. Let value be `*/*`.
+ const value = '*/*'
+
+ // 2. A user agent should set value to the first matching statement, if
+ // any, switching on request’s destination:
+ // "document"
+ // "frame"
+ // "iframe"
+ // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8`
+ // "image"
+ // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5`
+ // "style"
+ // `text/css,*/*;q=0.1`
+ // TODO
+
+ // 3. Append `Accept`/value to request’s header list.
+ request.headersList.append('accept', value)
+ }
+
+ // 13. If request’s header list does not contain `Accept-Language`, then
+ // user agents should append `Accept-Language`/an appropriate value to
+ // request’s header list.
+ if (!request.headersList.contains('accept-language')) {
+ request.headersList.append('accept-language', '*')
+ }
+
+ // 14. If request’s priority is null, then use request’s initiator and
+ // destination appropriately in setting request’s priority to a
+ // user-agent-defined object.
+ if (request.priority === null) {
+ // TODO
+ }
+
+ // 15. If request is a subresource request, then:
+ if (subresourceSet.has(request.destination)) {
+ // TODO
+ }
+
+ // 16. Run main fetch given fetchParams.
+ mainFetch(fetchParams)
+ .catch(err => {
+ fetchParams.controller.terminate(err)
+ })
+
+ // 17. Return fetchParams’s controller.
+ return fetchParams.controller
+}
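+
+// Illustrative sketch of this internal entry point (assumed call shape, not a
+// documented public API): a caller such as fetch() in this file drives fetching()
+// with an internal request object and an undici dispatcher, observes progress through
+// the process* callbacks, and tears the operation down via the returned controller.
+//
+//   const controller = fetching({
+//     request, // an internal request, e.g. produced via makeRequest()
+//     dispatcher, // any undici dispatcher instance
+//     processResponse (response) { /* status and headers are available here */ },
+//     processResponseEndOfBody () { /* body fully delivered */ }
+//   })
+//   // later: controller.abort(reason) or controller.terminate(error)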
+
+// https://fetch.spec.whatwg.org/#concept-main-fetch
+async function mainFetch (fetchParams, recursive = false) {
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let response be null.
+ let response = null
+
+ // 3. If request’s local-URLs-only flag is set and request’s current URL is
+ // not local, then set response to a network error.
+ if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) {
+ response = makeNetworkError('local URLs only')
+ }
+
+ // 4. Run report Content Security Policy violations for request.
+ // TODO
+
+ // 5. Upgrade request to a potentially trustworthy URL, if appropriate.
+ tryUpgradeRequestToAPotentiallyTrustworthyURL(request)
+
+ // 6. If should request be blocked due to a bad port, should fetching request
+ // be blocked as mixed content, or should request be blocked by Content
+ // Security Policy returns blocked, then set response to a network error.
+ if (requestBadPort(request) === 'blocked') {
+ response = makeNetworkError('bad port')
+ }
+ // TODO: should fetching request be blocked as mixed content?
+ // TODO: should request be blocked by Content Security Policy?
+
+ // 7. If request’s referrer policy is the empty string, then set request’s
+ // referrer policy to request’s policy container’s referrer policy.
+ if (request.referrerPolicy === '') {
+ request.referrerPolicy = request.policyContainer.referrerPolicy
+ }
+
+ // 8. If request’s referrer is not "no-referrer", then set request’s
+ // referrer to the result of invoking determine request’s referrer.
+ if (request.referrer !== 'no-referrer') {
+ request.referrer = determineRequestsReferrer(request)
+ }
+
+ // 9. Set request’s current URL’s scheme to "https" if all of the following
+ // conditions are true:
+ // - request’s current URL’s scheme is "http"
+ // - request’s current URL’s host is a domain
+ // - Matching request’s current URL’s host per Known HSTS Host Domain Name
+ // Matching results in either a superdomain match with an asserted
+ // includeSubDomains directive or a congruent match (with or without an
+ // asserted includeSubDomains directive). [HSTS]
+ // TODO
+
+ // 10. If recursive is false, then run the remaining steps in parallel.
+ // TODO
+
+ // 11. If response is null, then set response to the result of running
+ // the steps corresponding to the first matching statement:
+ if (response === null) {
+ response = await (async () => {
+ const currentURL = requestCurrentURL(request)
+
+ if (
+ // - request’s current URL’s origin is same origin with request’s origin,
+ // and request’s response tainting is "basic"
+ (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') ||
+ // request’s current URL’s scheme is "data"
+ (currentURL.protocol === 'data:') ||
+ // - request’s mode is "navigate" or "websocket"
+ (request.mode === 'navigate' || request.mode === 'websocket')
+ ) {
+ // 1. Set request’s response tainting to "basic".
+ request.responseTainting = 'basic'
+
+ // 2. Return the result of running scheme fetch given fetchParams.
+ return await schemeFetch(fetchParams)
+ }
+
+ // request’s mode is "same-origin"
+ if (request.mode === 'same-origin') {
+ // 1. Return a network error.
+ return makeNetworkError('request mode cannot be "same-origin"')
+ }
+
+ // request’s mode is "no-cors"
+ if (request.mode === 'no-cors') {
+ // 1. If request’s redirect mode is not "follow", then return a network
+ // error.
+ if (request.redirect !== 'follow') {
+ return makeNetworkError(
+ 'redirect mode cannot be "follow" for "no-cors" request'
+ )
+ }
+
+ // 2. Set request’s response tainting to "opaque".
+ request.responseTainting = 'opaque'
+
+ // 3. Return the result of running scheme fetch given fetchParams.
+ return await schemeFetch(fetchParams)
+ }
+
+ // request’s current URL’s scheme is not an HTTP(S) scheme
+ if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) {
+ // Return a network error.
+ return makeNetworkError('URL scheme must be a HTTP(S) scheme')
+ }
+
+ // - request’s use-CORS-preflight flag is set
+ // - request’s unsafe-request flag is set and either request’s method is
+ // not a CORS-safelisted method or CORS-unsafe request-header names with
+ // request’s header list is not empty
+ // 1. Set request’s response tainting to "cors".
+ // 2. Let corsWithPreflightResponse be the result of running HTTP fetch
+ // given fetchParams and true.
+ // 3. If corsWithPreflightResponse is a network error, then clear cache
+ // entries using request.
+ // 4. Return corsWithPreflightResponse.
+ // TODO
+
+ // Otherwise
+ // 1. Set request’s response tainting to "cors".
+ request.responseTainting = 'cors'
+
+ // 2. Return the result of running HTTP fetch given fetchParams.
+ return await httpFetch(fetchParams)
+ })()
+ }
+
+ // 12. If recursive is true, then return response.
+ if (recursive) {
+ return response
+ }
+
+ // 13. If response is not a network error and response is not a filtered
+ // response, then:
+ if (response.status !== 0 && !response.internalResponse) {
+ // If request’s response tainting is "cors", then:
+ if (request.responseTainting === 'cors') {
+ // 1. Let headerNames be the result of extracting header list values
+ // given `Access-Control-Expose-Headers` and response’s header list.
+ // TODO
+ // 2. If request’s credentials mode is not "include" and headerNames
+ // contains `*`, then set response’s CORS-exposed header-name list to
+ // all unique header names in response’s header list.
+ // TODO
+ // 3. Otherwise, if headerNames is not null or failure, then set
+ // response’s CORS-exposed header-name list to headerNames.
+ // TODO
+ }
+
+ // Set response to the following filtered response with response as its
+ // internal response, depending on request’s response tainting:
+ if (request.responseTainting === 'basic') {
+ response = filterResponse(response, 'basic')
+ } else if (request.responseTainting === 'cors') {
+ response = filterResponse(response, 'cors')
+ } else if (request.responseTainting === 'opaque') {
+ response = filterResponse(response, 'opaque')
+ } else {
+ assert(false)
+ }
+ }
+
+ // 14. Let internalResponse be response, if response is a network error,
+ // and response’s internal response otherwise.
+ let internalResponse =
+ response.status === 0 ? response : response.internalResponse
+
+ // 15. If internalResponse’s URL list is empty, then set it to a clone of
+ // request’s URL list.
+ if (internalResponse.urlList.length === 0) {
+ internalResponse.urlList.push(...request.urlList)
+ }
+
+ // 16. If request’s timing allow failed flag is unset, then set
+ // internalResponse’s timing allow passed flag.
+ if (!request.timingAllowFailed) {
+ response.timingAllowPassed = true
+ }
+
+ // 17. If response is not a network error and any of the following returns
+ // blocked
+ // - should internalResponse to request be blocked as mixed content
+ // - should internalResponse to request be blocked by Content Security Policy
+ // - should internalResponse to request be blocked due to its MIME type
+ // - should internalResponse to request be blocked due to nosniff
+ // TODO
+
+ // 18. If response’s type is "opaque", internalResponse’s status is 206,
+ // internalResponse’s range-requested flag is set, and request’s header
+ // list does not contain `Range`, then set response and internalResponse
+ // to a network error.
+ if (
+ response.type === 'opaque' &&
+ internalResponse.status === 206 &&
+ internalResponse.rangeRequested &&
+ !request.headersList.contains('range')
+ ) {
+ response = internalResponse = makeNetworkError()
+ }
+
+ // 19. If response is not a network error and either request’s method is
+ // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status,
+ // set internalResponse’s body to null and disregard any enqueuing toward
+ // it (if any).
+ if (
+ response.status !== 0 &&
+ (request.method === 'HEAD' ||
+ request.method === 'CONNECT' ||
+ nullBodyStatus.includes(internalResponse.status))
+ ) {
+ internalResponse.body = null
+ fetchParams.controller.dump = true
+ }
+
+ // 20. If request’s integrity metadata is not the empty string, then:
+ if (request.integrity) {
+ // 1. Let processBodyError be this step: run fetch finale given fetchParams
+ // and a network error.
+ const processBodyError = (reason) =>
+ fetchFinale(fetchParams, makeNetworkError(reason))
+
+ // 2. If request’s response tainting is "opaque", or response’s body is null,
+ // then run processBodyError and abort these steps.
+ if (request.responseTainting === 'opaque' || response.body == null) {
+ processBodyError(response.error)
+ return
+ }
+
+ // 3. Let processBody given bytes be these steps:
+ const processBody = (bytes) => {
+ // 1. If bytes do not match request’s integrity metadata,
+ // then run processBodyError and abort these steps. [SRI]
+ if (!bytesMatch(bytes, request.integrity)) {
+ processBodyError('integrity mismatch')
+ return
+ }
+
+ // 2. Set response’s body to bytes as a body.
+ response.body = safelyExtractBody(bytes)[0]
+
+ // 3. Run fetch finale given fetchParams and response.
+ fetchFinale(fetchParams, response)
+ }
+
+ // 4. Fully read response’s body given processBody and processBodyError.
+ await fullyReadBody(response.body, processBody, processBodyError)
+ } else {
+ // 21. Otherwise, run fetch finale given fetchParams and response.
+ fetchFinale(fetchParams, response)
+ }
+}
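+
+// Subresource-integrity sketch for step 20 above (hypothetical URL and digest): when
+// integrity metadata is supplied, the body is fully read and hashed before the response
+// is released, and a mismatch becomes a network error, i.e. the fetch() promise rejects.
+//
+//   await fetch('https://example.invalid/lib.js', {
+//     integrity: 'sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=' // placeholder digest
+//   })
+//   // rejects with a TypeError unless the body hashes to the given digest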
+
+// https://fetch.spec.whatwg.org/#concept-scheme-fetch
+// given a fetch params fetchParams
+function schemeFetch (fetchParams) {
+ // Note: since the connection is destroyed on redirect, which sets fetchParams to a
+ // cancelled state, we do not want this condition to trigger *unless* there have been
+ // no redirects. See https://github.com/nodejs/undici/issues/1776
+ // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+ if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) {
+ return Promise.resolve(makeAppropriateNetworkError(fetchParams))
+ }
+
+ // 2. Let request be fetchParams’s request.
+ const { request } = fetchParams
+
+ const { protocol: scheme } = requestCurrentURL(request)
+
+ // 3. Switch on request’s current URL’s scheme and run the associated steps:
+ switch (scheme) {
+ case 'about:': {
+ // If request’s current URL’s path is the string "blank", then return a new response
+ // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) »,
+ // and body is the empty byte sequence as a body.
+
+ // Otherwise, return a network error.
+ return Promise.resolve(makeNetworkError('about scheme is not supported'))
+ }
+ case 'blob:': {
+ if (!resolveObjectURL) {
+ resolveObjectURL = require('buffer').resolveObjectURL
+ }
+
+ // 1. Let blobURLEntry be request’s current URL’s blob URL entry.
+ const blobURLEntry = requestCurrentURL(request)
+
+ // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56
+ // Buffer.resolveObjectURL does not ignore URL queries.
+ if (blobURLEntry.search.length !== 0) {
+ return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.'))
+ }
+
+ const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString())
+
+ // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s
+ // object is not a Blob object, then return a network error.
+ if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) {
+ return Promise.resolve(makeNetworkError('invalid method'))
+ }
+
+ // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object.
+ const bodyWithType = safelyExtractBody(blobURLEntryObject)
+
+ // 4. Let body be bodyWithType’s body.
+ const body = bodyWithType[0]
+
+ // 5. Let length be body’s length, serialized and isomorphic encoded.
+ const length = isomorphicEncode(`${body.length}`)
+
+ // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence.
+ const type = bodyWithType[1] ?? ''
+
+ // 7. Return a new response whose status message is `OK`, header list is
+ // « (`Content-Length`, length), (`Content-Type`, type) », and body is body.
+ const response = makeResponse({
+ statusText: 'OK',
+ headersList: [
+ ['content-length', { name: 'Content-Length', value: length }],
+ ['content-type', { name: 'Content-Type', value: type }]
+ ]
+ })
+
+ response.body = body
+
+ return Promise.resolve(response)
+ }
+ case 'data:': {
+ // 1. Let dataURLStruct be the result of running the
+ // data: URL processor on request’s current URL.
+ const currentURL = requestCurrentURL(request)
+ const dataURLStruct = dataURLProcessor(currentURL)
+
+ // 2. If dataURLStruct is failure, then return a
+ // network error.
+ if (dataURLStruct === 'failure') {
+ return Promise.resolve(makeNetworkError('failed to fetch the data URL'))
+ }
+
+ // 3. Let mimeType be dataURLStruct’s MIME type, serialized.
+ const mimeType = serializeAMimeType(dataURLStruct.mimeType)
+
+ // 4. Return a response whose status message is `OK`,
+ // header list is « (`Content-Type`, mimeType) »,
+ // and body is dataURLStruct’s body as a body.
+ return Promise.resolve(makeResponse({
+ statusText: 'OK',
+ headersList: [
+ ['content-type', { name: 'Content-Type', value: mimeType }]
+ ],
+ body: safelyExtractBody(dataURLStruct.body)[0]
+ }))
+ }
+ case 'file:': {
+ // For now, unfortunate as it is, file URLs are left as an exercise for the reader.
+ // When in doubt, return a network error.
+ return Promise.resolve(makeNetworkError('not implemented... yet...'))
+ }
+ case 'http:':
+ case 'https:': {
+ // Return the result of running HTTP fetch given fetchParams.
+
+ return httpFetch(fetchParams)
+ .catch((err) => makeNetworkError(err))
+ }
+ default: {
+ return Promise.resolve(makeNetworkError('unknown scheme'))
+ }
+ }
+}
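+
+// Scheme dispatch sketch via the public fetch() API, matching the branches above:
+//
+//   const res = await fetch('data:text/plain;charset=utf-8,hello')
+//   await res.text() // 'hello', with Content-Type taken from the data URL
+//
+//   // blob: URLs must be fetched with GET and must not carry a query string, otherwise
+//   // a network error is returned; about: and file: URLs are currently unsupported here.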
+
+// https://fetch.spec.whatwg.org/#finalize-response
+function finalizeResponse (fetchParams, response) {
+ // 1. Set fetchParams’s request’s done flag.
+ fetchParams.request.done = true
+
+ // 2. If fetchParams’s process response done is not null, then queue a fetch
+ // task to run fetchParams’s process response done given response, with
+ // fetchParams’s task destination.
+ if (fetchParams.processResponseDone != null) {
+ queueMicrotask(() => fetchParams.processResponseDone(response))
+ }
+}
+
+// https://fetch.spec.whatwg.org/#fetch-finale
+function fetchFinale (fetchParams, response) {
+ // 1. If response is a network error, then:
+ if (response.type === 'error') {
+ // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ».
+ response.urlList = [fetchParams.request.urlList[0]]
+
+ // 2. Set response’s timing info to the result of creating an opaque timing
+ // info for fetchParams’s timing info.
+ response.timingInfo = createOpaqueTimingInfo({
+ startTime: fetchParams.timingInfo.startTime
+ })
+ }
+
+ // 2. Let processResponseEndOfBody be the following steps:
+ const processResponseEndOfBody = () => {
+ // 1. Set fetchParams’s request’s done flag.
+ fetchParams.request.done = true
+
+ // If fetchParams’s process response end-of-body is not null,
+ // then queue a fetch task to run fetchParams’s process response
+ // end-of-body given response with fetchParams’s task destination.
+ if (fetchParams.processResponseEndOfBody != null) {
+ queueMicrotask(() => fetchParams.processResponseEndOfBody(response))
+ }
+ }
+
+ // 3. If fetchParams’s process response is non-null, then queue a fetch task
+ // to run fetchParams’s process response given response, with fetchParams’s
+ // task destination.
+ if (fetchParams.processResponse != null) {
+ queueMicrotask(() => fetchParams.processResponse(response))
+ }
+
+ // 4. If response’s body is null, then run processResponseEndOfBody.
+ if (response.body == null) {
+ processResponseEndOfBody()
+ } else {
+ // 5. Otherwise:
+
+ // 1. Let transformStream be a new TransformStream.
+
+ // 2. Let identityTransformAlgorithm be an algorithm which, given chunk,
+ // enqueues chunk in transformStream.
+ const identityTransformAlgorithm = (chunk, controller) => {
+ controller.enqueue(chunk)
+ }
+
+ // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm
+ // and flushAlgorithm set to processResponseEndOfBody.
+ const transformStream = new TransformStream({
+ start () {},
+ transform: identityTransformAlgorithm,
+ flush: processResponseEndOfBody
+ }, {
+ size () {
+ return 1
+ }
+ }, {
+ size () {
+ return 1
+ }
+ })
+
+ // 4. Set response’s body to the result of piping response’s body through transformStream.
+ response.body = { stream: response.body.stream.pipeThrough(transformStream) }
+ }
+
+ // 6. If fetchParams’s process response consume body is non-null, then:
+ if (fetchParams.processResponseConsumeBody != null) {
+ // 1. Let processBody given nullOrBytes be this step: run fetchParams’s
+ // process response consume body given response and nullOrBytes.
+ const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes)
+
+ // 2. Let processBodyError be this step: run fetchParams’s process
+ // response consume body given response and failure.
+ const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure)
+
+ // 3. If response’s body is null, then queue a fetch task to run processBody
+ // given null, with fetchParams’s task destination.
+ if (response.body == null) {
+ queueMicrotask(() => processBody(null))
+ } else {
+ // 4. Otherwise, fully read response’s body given processBody, processBodyError,
+ // and fetchParams’s task destination.
+ return fullyReadBody(response.body, processBody, processBodyError)
+ }
+ return Promise.resolve()
+ }
+}
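+
+// The identity TransformStream above exists only so its flush callback runs once the
+// body has been fully delivered. A standalone sketch of the same web-streams pattern:
+//
+//   const ts = new TransformStream({
+//     transform (chunk, controller) { controller.enqueue(chunk) }, // pass bytes through
+//     flush () { /* end of body: the role played by processResponseEndOfBody() */ }
+//   })
+//   // source.pipeThrough(ts) yields the same chunks and signals completion via flush().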
+
+// https://fetch.spec.whatwg.org/#http-fetch
+async function httpFetch (fetchParams) {
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let response be null.
+ let response = null
+
+ // 3. Let actualResponse be null.
+ let actualResponse = null
+
+ // 4. Let timingInfo be fetchParams’s timing info.
+ const timingInfo = fetchParams.timingInfo
+
+ // 5. If request’s service-workers mode is "all", then:
+ if (request.serviceWorkers === 'all') {
+ // TODO
+ }
+
+ // 6. If response is null, then:
+ if (response === null) {
+ // 1. If makeCORSPreflight is true and one of these conditions is true:
+ // TODO
+
+ // 2. If request’s redirect mode is "follow", then set request’s
+ // service-workers mode to "none".
+ if (request.redirect === 'follow') {
+ request.serviceWorkers = 'none'
+ }
+
+ // 3. Set response and actualResponse to the result of running
+ // HTTP-network-or-cache fetch given fetchParams.
+ actualResponse = response = await httpNetworkOrCacheFetch(fetchParams)
+
+ // 4. If request’s response tainting is "cors" and a CORS check
+ // for request and response returns failure, then return a network error.
+ if (
+ request.responseTainting === 'cors' &&
+ corsCheck(request, response) === 'failure'
+ ) {
+ return makeNetworkError('cors failure')
+ }
+
+ // 5. If the TAO check for request and response returns failure, then set
+ // request’s timing allow failed flag.
+ if (TAOCheck(request, response) === 'failure') {
+ request.timingAllowFailed = true
+ }
+ }
+
+ // 7. If either request’s response tainting or response’s type
+ // is "opaque", and the cross-origin resource policy check with
+ // request’s origin, request’s client, request’s destination,
+ // and actualResponse returns blocked, then return a network error.
+ if (
+ (request.responseTainting === 'opaque' || response.type === 'opaque') &&
+ crossOriginResourcePolicyCheck(
+ request.origin,
+ request.client,
+ request.destination,
+ actualResponse
+ ) === 'blocked'
+ ) {
+ return makeNetworkError('blocked')
+ }
+
+ // 8. If actualResponse’s status is a redirect status, then:
+ if (redirectStatusSet.has(actualResponse.status)) {
+ // 1. If actualResponse’s status is not 303, request’s body is not null,
+ // and the connection uses HTTP/2, then user agents may, and are even
+ // encouraged to, transmit an RST_STREAM frame.
+ // See, https://github.com/whatwg/fetch/issues/1288
+ if (request.redirect !== 'manual') {
+ fetchParams.controller.connection.destroy()
+ }
+
+ // 2. Switch on request’s redirect mode:
+ if (request.redirect === 'error') {
+ // Set response to a network error.
+ response = makeNetworkError('unexpected redirect')
+ } else if (request.redirect === 'manual') {
+ // Set response to an opaque-redirect filtered response whose internal
+ // response is actualResponse.
+ // NOTE(spec): On the web this would return an `opaqueredirect` response,
+ // but that doesn't make sense server side.
+ // See https://github.com/nodejs/undici/issues/1193.
+ response = actualResponse
+ } else if (request.redirect === 'follow') {
+ // Set response to the result of running HTTP-redirect fetch given
+ // fetchParams and response.
+ response = await httpRedirectFetch(fetchParams, response)
+ } else {
+ assert(false)
+ }
+ }
+
+ // 9. Set response’s timing info to timingInfo.
+ response.timingInfo = timingInfo
+
+ // 10. Return response.
+ return response
+}
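+
+// Note on the "manual" redirect branch above: server side, the 3xx response itself is
+// returned rather than an opaque-redirect filtered response, so callers can inspect its
+// status and, for same-origin responses, its Location header. Sketch against a
+// hypothetical endpoint:
+//
+//   const res = await fetch('https://example.invalid/old-path', { redirect: 'manual' })
+//   res.status // e.g. 301
+//   res.headers.get('location') // the redirect target announced by the server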
+
+// https://fetch.spec.whatwg.org/#http-redirect-fetch
+function httpRedirectFetch (fetchParams, response) {
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let actualResponse be response, if response is not a filtered response,
+ // and response’s internal response otherwise.
+ const actualResponse = response.internalResponse
+ ? response.internalResponse
+ : response
+
+ // 3. Let locationURL be actualResponse’s location URL given request’s current
+ // URL’s fragment.
+ let locationURL
+
+ try {
+ locationURL = responseLocationURL(
+ actualResponse,
+ requestCurrentURL(request).hash
+ )
+
+ // 4. If locationURL is null, then return response.
+ if (locationURL == null) {
+ return response
+ }
+ } catch (err) {
+ // 5. If locationURL is failure, then return a network error.
+ return Promise.resolve(makeNetworkError(err))
+ }
+
+ // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network
+ // error.
+ if (!urlIsHttpHttpsScheme(locationURL)) {
+ return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme'))
+ }
+
+ // 7. If request’s redirect count is 20, then return a network error.
+ if (request.redirectCount === 20) {
+ return Promise.resolve(makeNetworkError('redirect count exceeded'))
+ }
+
+ // 8. Increase request’s redirect count by 1.
+ request.redirectCount += 1
+
+ // 9. If request’s mode is "cors", locationURL includes credentials, and
+ // request’s origin is not same origin with locationURL’s origin, then return
+ // a network error.
+ if (
+ request.mode === 'cors' &&
+ (locationURL.username || locationURL.password) &&
+ !sameOrigin(request, locationURL)
+ ) {
+ return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"'))
+ }
+
+ // 10. If request’s response tainting is "cors" and locationURL includes
+ // credentials, then return a network error.
+ if (
+ request.responseTainting === 'cors' &&
+ (locationURL.username || locationURL.password)
+ ) {
+ return Promise.resolve(makeNetworkError(
+ 'URL cannot contain credentials for request mode "cors"'
+ ))
+ }
+
+ // 11. If actualResponse’s status is not 303, request’s body is non-null,
+ // and request’s body’s source is null, then return a network error.
+ if (
+ actualResponse.status !== 303 &&
+ request.body != null &&
+ request.body.source == null
+ ) {
+ return Promise.resolve(makeNetworkError())
+ }
+
+ // 12. If one of the following is true
+ // - actualResponse’s status is 301 or 302 and request’s method is `POST`
+ // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD`
+ if (
+ ([301, 302].includes(actualResponse.status) && request.method === 'POST') ||
+ (actualResponse.status === 303 &&
+ !GET_OR_HEAD.includes(request.method))
+ ) {
+ // then:
+ // 1. Set request’s method to `GET` and request’s body to null.
+ request.method = 'GET'
+ request.body = null
+
+ // 2. For each headerName of request-body-header name, delete headerName from
+ // request’s header list.
+ for (const headerName of requestBodyHeader) {
+ request.headersList.delete(headerName)
+ }
+ }
+
+ // 13. If request’s current URL’s origin is not same origin with locationURL’s
+ // origin, then for each headerName of CORS non-wildcard request-header name,
+ // delete headerName from request’s header list.
+ if (!sameOrigin(requestCurrentURL(request), locationURL)) {
+ // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
+ request.headersList.delete('authorization')
+
+ // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
+ request.headersList.delete('cookie')
+ request.headersList.delete('host')
+ }
+
+ // 14. If request’s body is non-null, then set request’s body to the first return
+ // value of safely extracting request’s body’s source.
+ if (request.body != null) {
+ assert(request.body.source != null)
+ request.body = safelyExtractBody(request.body.source)[0]
+ }
+
+ // 15. Let timingInfo be fetchParams’s timing info.
+ const timingInfo = fetchParams.timingInfo
+
+ // 16. Set timingInfo’s redirect end time and post-redirect start time to the
+ // coarsened shared current time given fetchParams’s cross-origin isolated
+ // capability.
+ timingInfo.redirectEndTime = timingInfo.postRedirectStartTime =
+ coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability)
+
+ // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s
+ // redirect start time to timingInfo’s start time.
+ if (timingInfo.redirectStartTime === 0) {
+ timingInfo.redirectStartTime = timingInfo.startTime
+ }
+
+ // 18. Append locationURL to request’s URL list.
+ request.urlList.push(locationURL)
+
+ // 19. Invoke set request’s referrer policy on redirect on request and
+ // actualResponse.
+ setRequestReferrerPolicyOnRedirect(request, actualResponse)
+
+ // 20. Return the result of running main fetch given fetchParams and true.
+ return mainFetch(fetchParams, true)
+}
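+
+// Redirect-following sketch (hypothetical endpoint): a 303 response, or a 301/302
+// answering a POST, is refollowed as a GET with no body; request-body headers are
+// removed, credential headers are dropped when the redirect crosses origins, and more
+// than 20 redirects produces a network error.
+//
+//   const res = await fetch('https://example.invalid/submit', {
+//     method: 'POST',
+//     body: 'payload',
+//     redirect: 'follow' // the default
+//   })
+//   // if the server answers 303, the follow-up request is a GET with no body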
+
+// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
+async function httpNetworkOrCacheFetch (
+ fetchParams,
+ isAuthenticationFetch = false,
+ isNewConnectionFetch = false
+) {
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let httpFetchParams be null.
+ let httpFetchParams = null
+
+ // 3. Let httpRequest be null.
+ let httpRequest = null
+
+ // 4. Let response be null.
+ let response = null
+
+ // 5. Let storedResponse be null.
+ // TODO: cache
+
+ // 6. Let httpCache be null.
+ const httpCache = null
+
+ // 7. Let the revalidatingFlag be unset.
+ const revalidatingFlag = false
+
+ // 8. Run these steps, but abort when the ongoing fetch is terminated:
+
+ // 1. If request’s window is "no-window" and request’s redirect mode is
+ // "error", then set httpFetchParams to fetchParams and httpRequest to
+ // request.
+ if (request.window === 'no-window' && request.redirect === 'error') {
+ httpFetchParams = fetchParams
+ httpRequest = request
+ } else {
+ // Otherwise:
+
+ // 1. Set httpRequest to a clone of request.
+ httpRequest = makeRequest(request)
+
+ // 2. Set httpFetchParams to a copy of fetchParams.
+ httpFetchParams = { ...fetchParams }
+
+ // 3. Set httpFetchParams’s request to httpRequest.
+ httpFetchParams.request = httpRequest
+ }
+
+ // 3. Let includeCredentials be true if request’s credentials mode is "include",
+ // or request’s credentials mode is "same-origin" and request’s response
+ // tainting is "basic"; otherwise false.
+ const includeCredentials =
+ request.credentials === 'include' ||
+ (request.credentials === 'same-origin' &&
+ request.responseTainting === 'basic')
+
+ // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s
+ // body is non-null; otherwise null.
+ const contentLength = httpRequest.body ? httpRequest.body.length : null
+
+ // 5. Let contentLengthHeaderValue be null.
+ let contentLengthHeaderValue = null
+
+ // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or
+ // `PUT`, then set contentLengthHeaderValue to `0`.
+ if (
+ httpRequest.body == null &&
+ ['POST', 'PUT'].includes(httpRequest.method)
+ ) {
+ contentLengthHeaderValue = '0'
+ }
+
+ // 7. If contentLength is non-null, then set contentLengthHeaderValue to
+ // contentLength, serialized and isomorphic encoded.
+ if (contentLength != null) {
+ contentLengthHeaderValue = isomorphicEncode(`${contentLength}`)
+ }
+
+ // 8. If contentLengthHeaderValue is non-null, then append
+ // `Content-Length`/contentLengthHeaderValue to httpRequest’s header
+ // list.
+ if (contentLengthHeaderValue != null) {
+ httpRequest.headersList.append('content-length', contentLengthHeaderValue)
+ }
+
+ // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`,
+ // contentLengthHeaderValue) to httpRequest’s header list.
+
+ // 10. If contentLength is non-null and httpRequest’s keepalive is true,
+ // then:
+ if (contentLength != null && httpRequest.keepalive) {
+ // NOTE: keepalive is a noop outside of browser context.
+ }
+
+ // 11. If httpRequest’s referrer is a URL, then append
+ // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded,
+ // to httpRequest’s header list.
+ if (httpRequest.referrer instanceof URL) {
+ httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href))
+ }
+
+ // 12. Append a request `Origin` header for httpRequest.
+ appendRequestOriginHeader(httpRequest)
+
+ // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA]
+ appendFetchMetadata(httpRequest)
+
+ // 14. If httpRequest’s header list does not contain `User-Agent`, then
+ // user agents should append `User-Agent`/default `User-Agent` value to
+ // httpRequest’s header list.
+ if (!httpRequest.headersList.contains('user-agent')) {
+ httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node')
+ }
+
+ // 15. If httpRequest’s cache mode is "default" and httpRequest’s header
+ // list contains `If-Modified-Since`, `If-None-Match`,
+ // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set
+ // httpRequest’s cache mode to "no-store".
+ if (
+ httpRequest.cache === 'default' &&
+ (httpRequest.headersList.contains('if-modified-since') ||
+ httpRequest.headersList.contains('if-none-match') ||
+ httpRequest.headersList.contains('if-unmodified-since') ||
+ httpRequest.headersList.contains('if-match') ||
+ httpRequest.headersList.contains('if-range'))
+ ) {
+ httpRequest.cache = 'no-store'
+ }
+
+ // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent
+ // no-cache cache-control header modification flag is unset, and
+ // httpRequest’s header list does not contain `Cache-Control`, then append
+ // `Cache-Control`/`max-age=0` to httpRequest’s header list.
+ if (
+ httpRequest.cache === 'no-cache' &&
+ !httpRequest.preventNoCacheCacheControlHeaderModification &&
+ !httpRequest.headersList.contains('cache-control')
+ ) {
+ httpRequest.headersList.append('cache-control', 'max-age=0')
+ }
+
+ // 17. If httpRequest’s cache mode is "no-store" or "reload", then:
+ if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') {
+ // 1. If httpRequest’s header list does not contain `Pragma`, then append
+ // `Pragma`/`no-cache` to httpRequest’s header list.
+ if (!httpRequest.headersList.contains('pragma')) {
+ httpRequest.headersList.append('pragma', 'no-cache')
+ }
+
+ // 2. If httpRequest’s header list does not contain `Cache-Control`,
+ // then append `Cache-Control`/`no-cache` to httpRequest’s header list.
+ if (!httpRequest.headersList.contains('cache-control')) {
+ httpRequest.headersList.append('cache-control', 'no-cache')
+ }
+ }
+
+ // 18. If httpRequest’s header list contains `Range`, then append
+ // `Accept-Encoding`/`identity` to httpRequest’s header list.
+ if (httpRequest.headersList.contains('range')) {
+ httpRequest.headersList.append('accept-encoding', 'identity')
+ }
+
+ // 19. Modify httpRequest’s header list per HTTP. Do not append a given
+ // header if httpRequest’s header list contains that header’s name.
+ // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129
+ if (!httpRequest.headersList.contains('accept-encoding')) {
+ if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) {
+ httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate')
+ } else {
+ httpRequest.headersList.append('accept-encoding', 'gzip, deflate')
+ }
+ }
+
+ httpRequest.headersList.delete('host')
+
+ // 20. If includeCredentials is true, then:
+ if (includeCredentials) {
+ // 1. If the user agent is not configured to block cookies for httpRequest
+ // (see section 7 of [COOKIES]), then:
+ // TODO: credentials
+ // 2. If httpRequest’s header list does not contain `Authorization`, then:
+ // TODO: credentials
+ }
+
+ // 21. If there’s a proxy-authentication entry, use it as appropriate.
+ // TODO: proxy-authentication
+
+ // 22. Set httpCache to the result of determining the HTTP cache
+ // partition, given httpRequest.
+ // TODO: cache
+
+ // 23. If httpCache is null, then set httpRequest’s cache mode to
+ // "no-store".
+ if (httpCache == null) {
+ httpRequest.cache = 'no-store'
+ }
+
+ // 24. If httpRequest’s cache mode is neither "no-store" nor "reload",
+ // then:
+ if (httpRequest.cache !== 'no-store' && httpRequest.cache !== 'reload') {
+ // TODO: cache
+ }
+
+ // 9. If aborted, then return the appropriate network error for fetchParams.
+ // TODO
+
+ // 10. If response is null, then:
+ if (response == null) {
+ // 1. If httpRequest’s cache mode is "only-if-cached", then return a
+ // network error.
+ if (httpRequest.cache === 'only-if-cached') {
+ return makeNetworkError('only if cached')
+ }
+
+ // 2. Let forwardResponse be the result of running HTTP-network fetch
+ // given httpFetchParams, includeCredentials, and isNewConnectionFetch.
+ const forwardResponse = await httpNetworkFetch(
+ httpFetchParams,
+ includeCredentials,
+ isNewConnectionFetch
+ )
+
+ // 3. If httpRequest’s method is unsafe and forwardResponse’s status is
+ // in the range 200 to 399, inclusive, invalidate appropriate stored
+ // responses in httpCache, as per the "Invalidation" chapter of HTTP
+ // Caching, and set storedResponse to null. [HTTP-CACHING]
+ if (
+ !safeMethodsSet.has(httpRequest.method) &&
+ forwardResponse.status >= 200 &&
+ forwardResponse.status <= 399
+ ) {
+ // TODO: cache
+ }
+
+ // 4. If the revalidatingFlag is set and forwardResponse’s status is 304,
+ // then:
+ if (revalidatingFlag && forwardResponse.status === 304) {
+ // TODO: cache
+ }
+
+ // 5. If response is null, then:
+ if (response == null) {
+ // 1. Set response to forwardResponse.
+ response = forwardResponse
+
+ // 2. Store httpRequest and forwardResponse in httpCache, as per the
+ // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING]
+ // TODO: cache
+ }
+ }
+
+ // 11. Set response’s URL list to a clone of httpRequest’s URL list.
+ response.urlList = [...httpRequest.urlList]
+
+ // 12. If httpRequest’s header list contains `Range`, then set response’s
+ // range-requested flag.
+ if (httpRequest.headersList.contains('range')) {
+ response.rangeRequested = true
+ }
+
+ // 13. Set response’s request-includes-credentials to includeCredentials.
+ response.requestIncludesCredentials = includeCredentials
+
+ // 14. If response’s status is 401, httpRequest’s response tainting is not
+ // "cors", includeCredentials is true, and request’s window is an environment
+ // settings object, then:
+ // TODO
+
+ // 15. If response’s status is 407, then:
+ if (response.status === 407) {
+ // 1. If request’s window is "no-window", then return a network error.
+ if (request.window === 'no-window') {
+ return makeNetworkError()
+ }
+
+ // 2. ???
+
+ // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+ if (isCancelled(fetchParams)) {
+ return makeAppropriateNetworkError(fetchParams)
+ }
+
+ // 4. Prompt the end user as appropriate in request’s window and store
+ // the result as a proxy-authentication entry. [HTTP-AUTH]
+ // TODO: Invoke some kind of callback?
+
+ // 5. Set response to the result of running HTTP-network-or-cache fetch given
+ // fetchParams.
+ // TODO
+ return makeNetworkError('proxy authentication required')
+ }
+
+ // 16. If all of the following are true
+ if (
+ // response’s status is 421
+ response.status === 421 &&
+ // isNewConnectionFetch is false
+ !isNewConnectionFetch &&
+ // request’s body is null, or request’s body is non-null and request’s body’s source is non-null
+ (request.body == null || request.body.source != null)
+ ) {
+ // then:
+
+ // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+ if (isCancelled(fetchParams)) {
+ return makeAppropriateNetworkError(fetchParams)
+ }
+
+ // 2. Set response to the result of running HTTP-network-or-cache
+ // fetch given fetchParams, isAuthenticationFetch, and true.
+
+ // TODO (spec): The spec doesn't specify this but we need to cancel
+ // the active response before we can start a new one.
+ // https://github.com/whatwg/fetch/issues/1293
+ fetchParams.controller.connection.destroy()
+
+ response = await httpNetworkOrCacheFetch(
+ fetchParams,
+ isAuthenticationFetch,
+ true
+ )
+ }
+
+ // 17. If isAuthenticationFetch is true, then create an authentication entry
+ if (isAuthenticationFetch) {
+ // TODO
+ }
+
+ // 18. Return response.
+ return response
+}
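+
+// Header defaults applied above, observable on the wire for a hypothetical request:
+//
+//   await fetch('https://example.invalid/', { method: 'POST' })
+//   // sends content-length: 0 for the empty POST body, plus a default accept-encoding
+//   // (br, gzip, deflate over https; gzip, deflate otherwise) and user-agent: undici
+//   // when those two headers were not already supplied.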
+
+// https://fetch.spec.whatwg.org/#http-network-fetch
+async function httpNetworkFetch (
+ fetchParams,
+ includeCredentials = false,
+ forceNewConnection = false
+) {
+ assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed)
+
+ fetchParams.controller.connection = {
+ abort: null,
+ destroyed: false,
+ destroy (err) {
+ if (!this.destroyed) {
+ this.destroyed = true
+ this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError'))
+ }
+ }
+ }
+
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let response be null.
+ let response = null
+
+ // 3. Let timingInfo be fetchParams’s timing info.
+ const timingInfo = fetchParams.timingInfo
+
+ // 4. Let httpCache be the result of determining the HTTP cache partition,
+ // given request.
+ // TODO: cache
+ const httpCache = null
+
+ // 5. If httpCache is null, then set request’s cache mode to "no-store".
+ if (httpCache == null) {
+ request.cache = 'no-store'
+ }
+
+ // 6. Let networkPartitionKey be the result of determining the network
+ // partition key given request.
+ // TODO
+
+ // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise
+ // "no".
+ const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars
+
+ // 8. Switch on request’s mode:
+ if (request.mode === 'websocket') {
+ // Let connection be the result of obtaining a WebSocket connection,
+ // given request’s current URL.
+ // TODO
+ } else {
+ // Let connection be the result of obtaining a connection, given
+ // networkPartitionKey, request’s current URL’s origin,
+ // includeCredentials, and forceNewConnection.
+ // TODO
+ }
+
+ // 9. Run these steps, but abort when the ongoing fetch is terminated:
+
+ // 1. If connection is failure, then return a network error.
+
+ // 2. Set timingInfo’s final connection timing info to the result of
+ // calling clamp and coarsen connection timing info with connection’s
+ // timing info, timingInfo’s post-redirect start time, and fetchParams’s
+ // cross-origin isolated capability.
+
+ // 3. If connection is not an HTTP/2 connection, request’s body is non-null,
+ // and request’s body’s source is null, then append (`Transfer-Encoding`,
+ // `chunked`) to request’s header list.
+
+ // 4. Set timingInfo’s final network-request start time to the coarsened
+ // shared current time given fetchParams’s cross-origin isolated
+ // capability.
+
+ // 5. Set response to the result of making an HTTP request over connection
+ // using request with the following caveats:
+
+ // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS]
+ // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH]
+
+ // - If request’s body is non-null, and request’s body’s source is null,
+ // then the user agent may have a buffer of up to 64 kibibytes and store
+ // a part of request’s body in that buffer. If the user agent reads from
+ // request’s body beyond that buffer’s size and the user agent needs to
+ // resend request, then instead return a network error.
+
+ // - Set timingInfo’s final network-response start time to the coarsened
+ // shared current time given fetchParams’s cross-origin isolated capability,
+ // immediately after the user agent’s HTTP parser receives the first byte
+ // of the response (e.g., frame header bytes for HTTP/2 or response status
+ // line for HTTP/1.x).
+
+ // - Wait until all the headers are transmitted.
+
+ // - Any responses whose status is in the range 100 to 199, inclusive,
+ // and is not 101, are to be ignored, except for the purposes of setting
+ // timingInfo’s final network-response start time above.
+
+ // - If request’s header list contains `Transfer-Encoding`/`chunked` and
+ // response is transferred via HTTP/1.0 or older, then return a network
+ // error.
+
+ // - If the HTTP request results in a TLS client certificate dialog, then:
+
+ // 1. If request’s window is an environment settings object, make the
+ // dialog available in request’s window.
+
+ // 2. Otherwise, return a network error.
+
+ // To transmit request’s body body, run these steps:
+ let requestBody = null
+ // 1. If body is null and fetchParams’s process request end-of-body is
+ // non-null, then queue a fetch task given fetchParams’s process request
+ // end-of-body and fetchParams’s task destination.
+ if (request.body == null && fetchParams.processRequestEndOfBody) {
+ queueMicrotask(() => fetchParams.processRequestEndOfBody())
+ } else if (request.body != null) {
+ // 2. Otherwise, if body is non-null:
+
+ // 1. Let processBodyChunk given bytes be these steps:
+ const processBodyChunk = async function * (bytes) {
+ // 1. If the ongoing fetch is terminated, then abort these steps.
+ if (isCancelled(fetchParams)) {
+ return
+ }
+
+ // 2. Run this step in parallel: transmit bytes.
+ yield bytes
+
+ // 3. If fetchParams’s process request body is non-null, then run
+ // fetchParams’s process request body given bytes’s length.
+ fetchParams.processRequestBodyChunkLength?.(bytes.byteLength)
+ }
+
+ // 2. Let processEndOfBody be these steps:
+ const processEndOfBody = () => {
+ // 1. If fetchParams is canceled, then abort these steps.
+ if (isCancelled(fetchParams)) {
+ return
+ }
+
+ // 2. If fetchParams’s process request end-of-body is non-null,
+ // then run fetchParams’s process request end-of-body.
+ if (fetchParams.processRequestEndOfBody) {
+ fetchParams.processRequestEndOfBody()
+ }
+ }
+
+ // 3. Let processBodyError given e be these steps:
+ const processBodyError = (e) => {
+ // 1. If fetchParams is canceled, then abort these steps.
+ if (isCancelled(fetchParams)) {
+ return
+ }
+
+ // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller.
+ if (e.name === 'AbortError') {
+ fetchParams.controller.abort()
+ } else {
+ fetchParams.controller.terminate(e)
+ }
+ }
+
+ // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody,
+ // processBodyError, and fetchParams’s task destination.
+ requestBody = (async function * () {
+ try {
+ for await (const bytes of request.body.stream) {
+ yield * processBodyChunk(bytes)
+ }
+ processEndOfBody()
+ } catch (err) {
+ processBodyError(err)
+ }
+ })()
+ }
+
+ try {
+ // socket is only provided for websockets
+ const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody })
+
+ if (socket) {
+ response = makeResponse({ status, statusText, headersList, socket })
+ } else {
+ const iterator = body[Symbol.asyncIterator]()
+ fetchParams.controller.next = () => iterator.next()
+
+ response = makeResponse({ status, statusText, headersList })
+ }
+ } catch (err) {
+ // 10. If aborted, then:
+ if (err.name === 'AbortError') {
+ // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame.
+ fetchParams.controller.connection.destroy()
+
+ // 2. Return the appropriate network error for fetchParams.
+ return makeAppropriateNetworkError(fetchParams, err)
+ }
+
+ return makeNetworkError(err)
+ }
+
+ // 11. Let pullAlgorithm be an action that resumes the ongoing fetch
+ // if it is suspended.
+ const pullAlgorithm = () => {
+ fetchParams.controller.resume()
+ }
+
+ // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s
+ // controller with reason, given reason.
+ const cancelAlgorithm = (reason) => {
+ fetchParams.controller.abort(reason)
+ }
+
+ // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by
+ // the user agent.
+ // TODO
+
+ // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object
+ // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent.
+ // TODO
+
+ // 15. Let stream be a new ReadableStream.
+ // 16. Set up stream with pullAlgorithm set to pullAlgorithm,
+ // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to
+ // highWaterMark, and sizeAlgorithm set to sizeAlgorithm.
+ if (!ReadableStream) {
+ ReadableStream = require('stream/web').ReadableStream
+ }
+
+ const stream = new ReadableStream(
+ {
+ async start (controller) {
+ fetchParams.controller.controller = controller
+ },
+ async pull (controller) {
+ await pullAlgorithm(controller)
+ },
+ async cancel (reason) {
+ await cancelAlgorithm(reason)
+ }
+ },
+ {
+ highWaterMark: 0,
+ size () {
+ return 1
+ }
+ }
+ )
+
+ // 17. Run these steps, but abort when the ongoing fetch is terminated:
+
+ // 1. Set response’s body to a new body whose stream is stream.
+ response.body = { stream }
+
+ // 2. If response is not a network error and request’s cache mode is
+ // not "no-store", then update response in httpCache for request.
+ // TODO
+
+ // 3. If includeCredentials is true and the user agent is not configured
+ // to block cookies for request (see section 7 of [COOKIES]), then run the
+ // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on
+ // the value of each header whose name is a byte-case-insensitive match for
+ // `Set-Cookie` in response’s header list, if any, and request’s current URL.
+ // TODO
+
+ // 18. If aborted, then:
+ // TODO
+
+ // 19. Run these steps in parallel:
+
+ // 1. Run these steps, but abort when fetchParams is canceled:
+ fetchParams.controller.on('terminated', onAborted)
+ fetchParams.controller.resume = async () => {
+ // 1. While true
+ while (true) {
+ // 1-3. See onData...
+
+ // 4. Set bytes to the result of handling content codings given
+ // codings and bytes.
+ let bytes
+ let isFailure
+ try {
+ const { done, value } = await fetchParams.controller.next()
+
+ if (isAborted(fetchParams)) {
+ break
+ }
+
+ bytes = done ? undefined : value
+ } catch (err) {
+ if (fetchParams.controller.ended && !timingInfo.encodedBodySize) {
+ // zlib doesn't like empty streams.
+ bytes = undefined
+ } else {
+ bytes = err
+
+ // err may be propagated from the result of calling readablestream.cancel,
+ // which might not be an error. https://github.com/nodejs/undici/issues/2009
+ isFailure = true
+ }
+ }
+
+ if (bytes === undefined) {
+ // 2. Otherwise, if the bytes transmission for response’s message
+ // body is done normally and stream is readable, then close
+ // stream, finalize response for fetchParams and response, and
+ // abort these in-parallel steps.
+ readableStreamClose(fetchParams.controller.controller)
+
+ finalizeResponse(fetchParams, response)
+
+ return
+ }
+
+ // 5. Increase timingInfo’s decoded body size by bytes’s length.
+ timingInfo.decodedBodySize += bytes?.byteLength ?? 0
+
+ // 6. If bytes is failure, then terminate fetchParams’s controller.
+ if (isFailure) {
+ fetchParams.controller.terminate(bytes)
+ return
+ }
+
+ // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes
+ // into stream.
+ fetchParams.controller.controller.enqueue(new Uint8Array(bytes))
+
+ // 8. If stream is errored, then terminate the ongoing fetch.
+ if (isErrored(stream)) {
+ fetchParams.controller.terminate()
+ return
+ }
+
+ // 9. If stream doesn’t need more data ask the user agent to suspend
+ // the ongoing fetch.
+ if (!fetchParams.controller.controller.desiredSize) {
+ return
+ }
+ }
+ }
+
+ // 2. If aborted, then:
+ function onAborted (reason) {
+ // 2. If fetchParams is aborted, then:
+ if (isAborted(fetchParams)) {
+ // 1. Set response’s aborted flag.
+ response.aborted = true
+
+ // 2. If stream is readable, then error stream with the result of
+ // deserialize a serialized abort reason given fetchParams’s
+ // controller’s serialized abort reason and an
+ // implementation-defined realm.
+ if (isReadable(stream)) {
+ fetchParams.controller.controller.error(
+ fetchParams.controller.serializedAbortReason
+ )
+ }
+ } else {
+ // 3. Otherwise, if stream is readable, error stream with a TypeError.
+ if (isReadable(stream)) {
+ fetchParams.controller.controller.error(new TypeError('terminated', {
+ cause: isErrorLike(reason) ? reason : undefined
+ }))
+ }
+ }
+
+ // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame.
+ // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so.
+ fetchParams.controller.connection.destroy()
+ }
+
+ // 20. Return response.
+ return response
+
+ async function dispatch ({ body }) {
+ const url = requestCurrentURL(request)
+ /** @type {import('../..').Agent} */
+ const agent = fetchParams.controller.dispatcher
+
+ return new Promise((resolve, reject) => agent.dispatch(
+ {
+ path: url.pathname + url.search,
+ origin: url.origin,
+ method: request.method,
+ body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body,
+ headers: request.headersList.entries,
+ maxRedirections: 0,
+ upgrade: request.mode === 'websocket' ? 'websocket' : undefined
+ },
+ {
+ body: null,
+ abort: null,
+
+ onConnect (abort) {
+ // TODO (fix): Do we need connection here?
+ const { connection } = fetchParams.controller
+
+ if (connection.destroyed) {
+ abort(new DOMException('The operation was aborted.', 'AbortError'))
+ } else {
+ fetchParams.controller.on('terminated', abort)
+ this.abort = connection.abort = abort
+ }
+ },
+
+ onHeaders (status, headersList, resume, statusText) {
+ if (status < 200) {
+ return
+ }
+
+ let codings = []
+ let location = ''
+
+ const headers = new Headers()
+
+ // For HTTP/2, the headers arrive as a plain JS object rather than an array,
+ // so we distinguish between the two shapes and iterate accordingly.
+ if (Array.isArray(headersList)) {
+ for (let n = 0; n < headersList.length; n += 2) {
+ const key = headersList[n + 0].toString('latin1')
+ const val = headersList[n + 1].toString('latin1')
+ if (key.toLowerCase() === 'content-encoding') {
+ // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+ // "All content-coding values are case-insensitive..."
+ codings = val.toLowerCase().split(',').map((x) => x.trim())
+ } else if (key.toLowerCase() === 'location') {
+ location = val
+ }
+
+ headers[kHeadersList].append(key, val)
+ }
+ } else {
+ const keys = Object.keys(headersList)
+ for (const key of keys) {
+ const val = headersList[key]
+ if (key.toLowerCase() === 'content-encoding') {
+ // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+ // "All content-coding values are case-insensitive..."
+ codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()
+ } else if (key.toLowerCase() === 'location') {
+ location = val
+ }
+
+ headers[kHeadersList].append(key, val)
+ }
+ }
+
+ this.body = new Readable({ read: resume })
+
+ const decoders = []
+
+ const willFollow = request.redirect === 'follow' &&
+ location &&
+ redirectStatusSet.has(status)
+
+ // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
+ if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
+ for (const coding of codings) {
+ // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
+ if (coding === 'x-gzip' || coding === 'gzip') {
+ decoders.push(zlib.createGunzip({
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ flush: zlib.constants.Z_SYNC_FLUSH,
+ finishFlush: zlib.constants.Z_SYNC_FLUSH
+ }))
+ } else if (coding === 'deflate') {
+ decoders.push(zlib.createInflate())
+ } else if (coding === 'br') {
+ decoders.push(zlib.createBrotliDecompress())
+ } else {
+ decoders.length = 0
+ break
+ }
+ }
+ }
+
+ resolve({
+ status,
+ statusText,
+ headersList: headers[kHeadersList],
+ body: decoders.length
+ ? pipeline(this.body, ...decoders, () => { })
+ : this.body.on('error', () => {})
+ })
+
+ return true
+ },
+
+ onData (chunk) {
+ if (fetchParams.controller.dump) {
+ return
+ }
+
+ // 1. If one or more bytes have been transmitted from response’s
+ // message body, then:
+
+ // 1. Let bytes be the transmitted bytes.
+ const bytes = chunk
+
+ // 2. Let codings be the result of extracting header list values
+ // given `Content-Encoding` and response’s header list.
+ // See pullAlgorithm.
+
+ // 3. Increase timingInfo’s encoded body size by bytes’s length.
+ timingInfo.encodedBodySize += bytes.byteLength
+
+ // 4. See pullAlgorithm...
+
+ return this.body.push(bytes)
+ },
+
+ onComplete () {
+ if (this.abort) {
+ fetchParams.controller.off('terminated', this.abort)
+ }
+
+ fetchParams.controller.ended = true
+
+ this.body.push(null)
+ },
+
+ onError (error) {
+ if (this.abort) {
+ fetchParams.controller.off('terminated', this.abort)
+ }
+
+ this.body?.destroy(error)
+
+ fetchParams.controller.terminate(error)
+
+ reject(error)
+ },
+
+ onUpgrade (status, headersList, socket) {
+ if (status !== 101) {
+ return
+ }
+
+ const headers = new Headers()
+
+ for (let n = 0; n < headersList.length; n += 2) {
+ const key = headersList[n + 0].toString('latin1')
+ const val = headersList[n + 1].toString('latin1')
+
+ headers[kHeadersList].append(key, val)
+ }
+
+ resolve({
+ status,
+ statusText: STATUS_CODES[status],
+ headersList: headers[kHeadersList],
+ socket
+ })
+
+ return true
+ }
+ }
+ ))
+ }
+}
+
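+// Illustrative usage sketch (an assumption for context, not part of this file: in practice
+// `fetch` is consumed through the top-level undici export, and https://example.com is a
+// placeholder URL). It shows the effect of the content-decoding branch in onHeaders above:
+//
+//   const { fetch } = require('undici')
+//   // inside an async function:
+//   const res = await fetch('https://example.com/data.json')
+//   // If the server replied with `Content-Encoding: gzip`, the handler pipes the body
+//   // through zlib.createGunzip(), so res.json() reads the decoded bytes.
+//   const data = await res.json()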
+module.exports = {
+ fetch,
+ Fetch,
+ fetching,
+ finalizeAndReportTiming
+}
diff --git a/lib/fetch/request.js b/lib/fetch/request.js
new file mode 100644
index 0000000..6fe4dff
--- /dev/null
+++ b/lib/fetch/request.js
@@ -0,0 +1,946 @@
+/* globals AbortController */
+
+'use strict'
+
+const { extractBody, mixinBody, cloneBody } = require('./body')
+const { Headers, fill: fillHeaders, HeadersList } = require('./headers')
+const { FinalizationRegistry } = require('../compat/dispatcher-weakref')()
+const util = require('../core/util')
+const {
+ isValidHTTPToken,
+ sameOrigin,
+ normalizeMethod,
+ makePolicyContainer,
+ normalizeMethodRecord
+} = require('./util')
+const {
+ forbiddenMethodsSet,
+ corsSafeListedMethodsSet,
+ referrerPolicy,
+ requestRedirect,
+ requestMode,
+ requestCredentials,
+ requestCache,
+ requestDuplex
+} = require('./constants')
+const { kEnumerableProperty } = util
+const { kHeaders, kSignal, kState, kGuard, kRealm } = require('./symbols')
+const { webidl } = require('./webidl')
+const { getGlobalOrigin } = require('./global')
+const { URLSerializer } = require('./dataURL')
+const { kHeadersList, kConstruct } = require('../core/symbols')
+const assert = require('assert')
+const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('events')
+
+let TransformStream = globalThis.TransformStream
+
+const kAbortController = Symbol('abortController')
+
+const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {
+ signal.removeEventListener('abort', abort)
+})
+
+// https://fetch.spec.whatwg.org/#request-class
+class Request {
+ // https://fetch.spec.whatwg.org/#dom-request
+ constructor (input, init = {}) {
+ if (input === kConstruct) {
+ return
+ }
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' })
+
+ input = webidl.converters.RequestInfo(input)
+ init = webidl.converters.RequestInit(init)
+
+ // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object
+ this[kRealm] = {
+ settingsObject: {
+ baseUrl: getGlobalOrigin(),
+ get origin () {
+ return this.baseUrl?.origin
+ },
+ policyContainer: makePolicyContainer()
+ }
+ }
+
+ // 1. Let request be null.
+ let request = null
+
+ // 2. Let fallbackMode be null.
+ let fallbackMode = null
+
+ // 3. Let baseURL be this’s relevant settings object’s API base URL.
+ const baseUrl = this[kRealm].settingsObject.baseUrl
+
+ // 4. Let signal be null.
+ let signal = null
+
+ // 5. If input is a string, then:
+ if (typeof input === 'string') {
+ // 1. Let parsedURL be the result of parsing input with baseURL.
+ // 2. If parsedURL is failure, then throw a TypeError.
+ let parsedURL
+ try {
+ parsedURL = new URL(input, baseUrl)
+ } catch (err) {
+ throw new TypeError('Failed to parse URL from ' + input, { cause: err })
+ }
+
+ // 3. If parsedURL includes credentials, then throw a TypeError.
+ if (parsedURL.username || parsedURL.password) {
+ throw new TypeError(
+ 'Request cannot be constructed from a URL that includes credentials: ' +
+ input
+ )
+ }
+
+ // 4. Set request to a new request whose URL is parsedURL.
+ request = makeRequest({ urlList: [parsedURL] })
+
+ // 5. Set fallbackMode to "cors".
+ fallbackMode = 'cors'
+ } else {
+ // 6. Otherwise:
+
+ // 7. Assert: input is a Request object.
+ assert(input instanceof Request)
+
+ // 8. Set request to input’s request.
+ request = input[kState]
+
+ // 9. Set signal to input’s signal.
+ signal = input[kSignal]
+ }
+
+ // 7. Let origin be this’s relevant settings object’s origin.
+ const origin = this[kRealm].settingsObject.origin
+
+ // 8. Let window be "client".
+ let window = 'client'
+
+ // 9. If request’s window is an environment settings object and its origin
+ // is same origin with origin, then set window to request’s window.
+ if (
+ request.window?.constructor?.name === 'EnvironmentSettingsObject' &&
+ sameOrigin(request.window, origin)
+ ) {
+ window = request.window
+ }
+
+ // 10. If init["window"] exists and is non-null, then throw a TypeError.
+ if (init.window != null) {
+ throw new TypeError(`'window' option '${window}' must be null`)
+ }
+
+ // 11. If init["window"] exists, then set window to "no-window".
+ if ('window' in init) {
+ window = 'no-window'
+ }
+
+ // 12. Set request to a new request with the following properties:
+ request = makeRequest({
+ // URL request’s URL.
+ // undici implementation note: this is set as the first item in request's urlList in makeRequest
+ // method request’s method.
+ method: request.method,
+ // header list A copy of request’s header list.
+ // undici implementation note: headersList is cloned in makeRequest
+ headersList: request.headersList,
+ // unsafe-request flag Set.
+ unsafeRequest: request.unsafeRequest,
+ // client This’s relevant settings object.
+ client: this[kRealm].settingsObject,
+ // window window.
+ window,
+ // priority request’s priority.
+ priority: request.priority,
+ // origin request’s origin. The propagation of the origin is only significant for navigation requests
+ // being handled by a service worker. In this scenario a request can have an origin that is different
+ // from the current client.
+ origin: request.origin,
+ // referrer request’s referrer.
+ referrer: request.referrer,
+ // referrer policy request’s referrer policy.
+ referrerPolicy: request.referrerPolicy,
+ // mode request’s mode.
+ mode: request.mode,
+ // credentials mode request’s credentials mode.
+ credentials: request.credentials,
+ // cache mode request’s cache mode.
+ cache: request.cache,
+ // redirect mode request’s redirect mode.
+ redirect: request.redirect,
+ // integrity metadata request’s integrity metadata.
+ integrity: request.integrity,
+ // keepalive request’s keepalive.
+ keepalive: request.keepalive,
+ // reload-navigation flag request’s reload-navigation flag.
+ reloadNavigation: request.reloadNavigation,
+ // history-navigation flag request’s history-navigation flag.
+ historyNavigation: request.historyNavigation,
+ // URL list A clone of request’s URL list.
+ urlList: [...request.urlList]
+ })
+
+ const initHasKey = Object.keys(init).length !== 0
+
+ // 13. If init is not empty, then:
+ if (initHasKey) {
+ // 1. If request’s mode is "navigate", then set it to "same-origin".
+ if (request.mode === 'navigate') {
+ request.mode = 'same-origin'
+ }
+
+ // 2. Unset request’s reload-navigation flag.
+ request.reloadNavigation = false
+
+ // 3. Unset request’s history-navigation flag.
+ request.historyNavigation = false
+
+ // 4. Set request’s origin to "client".
+ request.origin = 'client'
+
+ // 5. Set request’s referrer to "client"
+ request.referrer = 'client'
+
+ // 6. Set request’s referrer policy to the empty string.
+ request.referrerPolicy = ''
+
+ // 7. Set request’s URL to request’s current URL.
+ request.url = request.urlList[request.urlList.length - 1]
+
+ // 8. Set request’s URL list to « request’s URL ».
+ request.urlList = [request.url]
+ }
+
+ // 14. If init["referrer"] exists, then:
+ if (init.referrer !== undefined) {
+ // 1. Let referrer be init["referrer"].
+ const referrer = init.referrer
+
+ // 2. If referrer is the empty string, then set request’s referrer to "no-referrer".
+ if (referrer === '') {
+ request.referrer = 'no-referrer'
+ } else {
+ // 1. Let parsedReferrer be the result of parsing referrer with
+ // baseURL.
+ // 2. If parsedReferrer is failure, then throw a TypeError.
+ let parsedReferrer
+ try {
+ parsedReferrer = new URL(referrer, baseUrl)
+ } catch (err) {
+ throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err })
+ }
+
+ // 3. If one of the following is true
+ // - parsedReferrer’s scheme is "about" and path is the string "client"
+ // - parsedReferrer’s origin is not same origin with origin
+ // then set request’s referrer to "client".
+ if (
+ (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') ||
+ (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl))
+ ) {
+ request.referrer = 'client'
+ } else {
+ // 4. Otherwise, set request’s referrer to parsedReferrer.
+ request.referrer = parsedReferrer
+ }
+ }
+ }
+
+ // 15. If init["referrerPolicy"] exists, then set request’s referrer policy
+ // to it.
+ if (init.referrerPolicy !== undefined) {
+ request.referrerPolicy = init.referrerPolicy
+ }
+
+ // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise.
+ let mode
+ if (init.mode !== undefined) {
+ mode = init.mode
+ } else {
+ mode = fallbackMode
+ }
+
+ // 17. If mode is "navigate", then throw a TypeError.
+ if (mode === 'navigate') {
+ throw webidl.errors.exception({
+ header: 'Request constructor',
+ message: 'invalid request mode navigate.'
+ })
+ }
+
+ // 18. If mode is non-null, set request’s mode to mode.
+ if (mode != null) {
+ request.mode = mode
+ }
+
+ // 19. If init["credentials"] exists, then set request’s credentials mode
+ // to it.
+ if (init.credentials !== undefined) {
+ request.credentials = init.credentials
+ }
+
+    // 20. If init["cache"] exists, then set request’s cache mode to it.
+ if (init.cache !== undefined) {
+ request.cache = init.cache
+ }
+
+ // 21. If request’s cache mode is "only-if-cached" and request’s mode is
+ // not "same-origin", then throw a TypeError.
+ if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {
+ throw new TypeError(
+ "'only-if-cached' can be set only with 'same-origin' mode"
+ )
+ }
+
+ // 22. If init["redirect"] exists, then set request’s redirect mode to it.
+ if (init.redirect !== undefined) {
+ request.redirect = init.redirect
+ }
+
+ // 23. If init["integrity"] exists, then set request’s integrity metadata to it.
+ if (init.integrity != null) {
+ request.integrity = String(init.integrity)
+ }
+
+ // 24. If init["keepalive"] exists, then set request’s keepalive to it.
+ if (init.keepalive !== undefined) {
+ request.keepalive = Boolean(init.keepalive)
+ }
+
+ // 25. If init["method"] exists, then:
+ if (init.method !== undefined) {
+ // 1. Let method be init["method"].
+ let method = init.method
+
+ // 2. If method is not a method or method is a forbidden method, then
+ // throw a TypeError.
+ if (!isValidHTTPToken(method)) {
+ throw new TypeError(`'${method}' is not a valid HTTP method.`)
+ }
+
+ if (forbiddenMethodsSet.has(method.toUpperCase())) {
+ throw new TypeError(`'${method}' HTTP method is unsupported.`)
+ }
+
+ // 3. Normalize method.
+ method = normalizeMethodRecord[method] ?? normalizeMethod(method)
+
+ // 4. Set request’s method to method.
+ request.method = method
+ }
+
+ // 26. If init["signal"] exists, then set signal to it.
+ if (init.signal !== undefined) {
+ signal = init.signal
+ }
+
+ // 27. Set this’s request to request.
+ this[kState] = request
+
+ // 28. Set this’s signal to a new AbortSignal object with this’s relevant
+ // Realm.
+ // TODO: could this be simplified with AbortSignal.any
+ // (https://dom.spec.whatwg.org/#dom-abortsignal-any)
+ const ac = new AbortController()
+ this[kSignal] = ac.signal
+ this[kSignal][kRealm] = this[kRealm]
+
+ // 29. If signal is not null, then make this’s signal follow signal.
+ if (signal != null) {
+ if (
+ !signal ||
+ typeof signal.aborted !== 'boolean' ||
+ typeof signal.addEventListener !== 'function'
+ ) {
+ throw new TypeError(
+ "Failed to construct 'Request': member signal is not of type AbortSignal."
+ )
+ }
+
+ if (signal.aborted) {
+ ac.abort(signal.reason)
+ } else {
+ // Keep a strong ref to ac while request object
+ // is alive. This is needed to prevent AbortController
+ // from being prematurely garbage collected.
+ // See, https://github.com/nodejs/undici/issues/1926.
+ this[kAbortController] = ac
+
+ const acRef = new WeakRef(ac)
+ const abort = function () {
+ const ac = acRef.deref()
+ if (ac !== undefined) {
+ ac.abort(this.reason)
+ }
+ }
+
+ // Third-party AbortControllers may not work with these.
+ // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619.
+ try {
+ // If the max amount of listeners is equal to the default, increase it
+ // This is only available in node >= v19.9.0
+ if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) {
+ setMaxListeners(100, signal)
+ } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) {
+ setMaxListeners(100, signal)
+ }
+ } catch {}
+
+ util.addAbortListener(signal, abort)
+ requestFinalizer.register(ac, { signal, abort })
+ }
+ }
+
+ // 30. Set this’s headers to a new Headers object with this’s relevant
+ // Realm, whose header list is request’s header list and guard is
+ // "request".
+ this[kHeaders] = new Headers(kConstruct)
+ this[kHeaders][kHeadersList] = request.headersList
+ this[kHeaders][kGuard] = 'request'
+ this[kHeaders][kRealm] = this[kRealm]
+
+ // 31. If this’s request’s mode is "no-cors", then:
+ if (mode === 'no-cors') {
+ // 1. If this’s request’s method is not a CORS-safelisted method,
+ // then throw a TypeError.
+ if (!corsSafeListedMethodsSet.has(request.method)) {
+ throw new TypeError(
+        `'${request.method}' is unsupported in no-cors mode.`
+ )
+ }
+
+ // 2. Set this’s headers’s guard to "request-no-cors".
+ this[kHeaders][kGuard] = 'request-no-cors'
+ }
+
+ // 32. If init is not empty, then:
+ if (initHasKey) {
+ /** @type {HeadersList} */
+ const headersList = this[kHeaders][kHeadersList]
+ // 1. Let headers be a copy of this’s headers and its associated header
+ // list.
+ // 2. If init["headers"] exists, then set headers to init["headers"].
+ const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList)
+
+ // 3. Empty this’s headers’s header list.
+ headersList.clear()
+
+ // 4. If headers is a Headers object, then for each header in its header
+ // list, append header’s name/header’s value to this’s headers.
+ if (headers instanceof HeadersList) {
+ for (const [key, val] of headers) {
+ headersList.append(key, val)
+ }
+ // Note: Copy the `set-cookie` meta-data.
+ headersList.cookies = headers.cookies
+ } else {
+ // 5. Otherwise, fill this’s headers with headers.
+ fillHeaders(this[kHeaders], headers)
+ }
+ }
+
+ // 33. Let inputBody be input’s request’s body if input is a Request
+ // object; otherwise null.
+ const inputBody = input instanceof Request ? input[kState].body : null
+
+ // 34. If either init["body"] exists and is non-null or inputBody is
+ // non-null, and request’s method is `GET` or `HEAD`, then throw a
+ // TypeError.
+ if (
+ (init.body != null || inputBody != null) &&
+ (request.method === 'GET' || request.method === 'HEAD')
+ ) {
+ throw new TypeError('Request with GET/HEAD method cannot have body.')
+ }
+
+ // 35. Let initBody be null.
+ let initBody = null
+
+ // 36. If init["body"] exists and is non-null, then:
+ if (init.body != null) {
+ // 1. Let Content-Type be null.
+ // 2. Set initBody and Content-Type to the result of extracting
+ // init["body"], with keepalive set to request’s keepalive.
+ const [extractedBody, contentType] = extractBody(
+ init.body,
+ request.keepalive
+ )
+ initBody = extractedBody
+
+      // 3. If Content-Type is non-null and this’s headers’s header list does
+ // not contain `Content-Type`, then append `Content-Type`/Content-Type to
+ // this’s headers.
+ if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) {
+ this[kHeaders].append('content-type', contentType)
+ }
+ }
+
+ // 37. Let inputOrInitBody be initBody if it is non-null; otherwise
+ // inputBody.
+ const inputOrInitBody = initBody ?? inputBody
+
+ // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is
+ // null, then:
+ if (inputOrInitBody != null && inputOrInitBody.source == null) {
+ // 1. If initBody is non-null and init["duplex"] does not exist,
+ // then throw a TypeError.
+ if (initBody != null && init.duplex == null) {
+ throw new TypeError('RequestInit: duplex option is required when sending a body.')
+ }
+
+ // 2. If this’s request’s mode is neither "same-origin" nor "cors",
+ // then throw a TypeError.
+ if (request.mode !== 'same-origin' && request.mode !== 'cors') {
+ throw new TypeError(
+ 'If request is made from ReadableStream, mode should be "same-origin" or "cors"'
+ )
+ }
+
+ // 3. Set this’s request’s use-CORS-preflight flag.
+ request.useCORSPreflightFlag = true
+ }
+
+ // 39. Let finalBody be inputOrInitBody.
+ let finalBody = inputOrInitBody
+
+ // 40. If initBody is null and inputBody is non-null, then:
+ if (initBody == null && inputBody != null) {
+ // 1. If input is unusable, then throw a TypeError.
+ if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) {
+ throw new TypeError(
+ 'Cannot construct a Request with a Request object that has already been used.'
+ )
+ }
+
+ // 2. Set finalBody to the result of creating a proxy for inputBody.
+ if (!TransformStream) {
+ TransformStream = require('stream/web').TransformStream
+ }
+
+ // https://streams.spec.whatwg.org/#readablestream-create-a-proxy
+ const identityTransform = new TransformStream()
+ inputBody.stream.pipeThrough(identityTransform)
+ finalBody = {
+ source: inputBody.source,
+ length: inputBody.length,
+ stream: identityTransform.readable
+ }
+ }
+
+ // 41. Set this’s request’s body to finalBody.
+ this[kState].body = finalBody
+ }
+
+ // Returns request’s HTTP method, which is "GET" by default.
+ get method () {
+ webidl.brandCheck(this, Request)
+
+ // The method getter steps are to return this’s request’s method.
+ return this[kState].method
+ }
+
+ // Returns the URL of request as a string.
+ get url () {
+ webidl.brandCheck(this, Request)
+
+ // The url getter steps are to return this’s request’s URL, serialized.
+ return URLSerializer(this[kState].url)
+ }
+
+ // Returns a Headers object consisting of the headers associated with request.
+ // Note that headers added in the network layer by the user agent will not
+ // be accounted for in this object, e.g., the "Host" header.
+ get headers () {
+ webidl.brandCheck(this, Request)
+
+ // The headers getter steps are to return this’s headers.
+ return this[kHeaders]
+ }
+
+ // Returns the kind of resource requested by request, e.g., "document"
+ // or "script".
+ get destination () {
+ webidl.brandCheck(this, Request)
+
+    // The destination getter steps are to return this’s request’s destination.
+ return this[kState].destination
+ }
+
+ // Returns the referrer of request. Its value can be a same-origin URL if
+ // explicitly set in init, the empty string to indicate no referrer, and
+ // "about:client" when defaulting to the global’s default. This is used
+ // during fetching to determine the value of the `Referer` header of the
+ // request being made.
+ get referrer () {
+ webidl.brandCheck(this, Request)
+
+ // 1. If this’s request’s referrer is "no-referrer", then return the
+ // empty string.
+ if (this[kState].referrer === 'no-referrer') {
+ return ''
+ }
+
+ // 2. If this’s request’s referrer is "client", then return
+ // "about:client".
+ if (this[kState].referrer === 'client') {
+ return 'about:client'
+ }
+
+ // Return this’s request’s referrer, serialized.
+ return this[kState].referrer.toString()
+ }
+
+ // Returns the referrer policy associated with request.
+ // This is used during fetching to compute the value of the request’s
+ // referrer.
+ get referrerPolicy () {
+ webidl.brandCheck(this, Request)
+
+ // The referrerPolicy getter steps are to return this’s request’s referrer policy.
+ return this[kState].referrerPolicy
+ }
+
+ // Returns the mode associated with request, which is a string indicating
+ // whether the request will use CORS, or will be restricted to same-origin
+ // URLs.
+ get mode () {
+ webidl.brandCheck(this, Request)
+
+ // The mode getter steps are to return this’s request’s mode.
+ return this[kState].mode
+ }
+
+ // Returns the credentials mode associated with request,
+ // which is a string indicating whether credentials will be sent with the
+ // request always, never, or only when sent to a same-origin URL.
+ get credentials () {
+ // The credentials getter steps are to return this’s request’s credentials mode.
+ return this[kState].credentials
+ }
+
+ // Returns the cache mode associated with request,
+ // which is a string indicating how the request will
+ // interact with the browser’s cache when fetching.
+ get cache () {
+ webidl.brandCheck(this, Request)
+
+ // The cache getter steps are to return this’s request’s cache mode.
+ return this[kState].cache
+ }
+
+ // Returns the redirect mode associated with request,
+ // which is a string indicating how redirects for the
+ // request will be handled during fetching. A request
+ // will follow redirects by default.
+ get redirect () {
+ webidl.brandCheck(this, Request)
+
+ // The redirect getter steps are to return this’s request’s redirect mode.
+ return this[kState].redirect
+ }
+
+ // Returns request’s subresource integrity metadata, which is a
+ // cryptographic hash of the resource being fetched. Its value
+ // consists of multiple hashes separated by whitespace. [SRI]
+ get integrity () {
+ webidl.brandCheck(this, Request)
+
+ // The integrity getter steps are to return this’s request’s integrity
+ // metadata.
+ return this[kState].integrity
+ }
+
+ // Returns a boolean indicating whether or not request can outlive the
+ // global in which it was created.
+ get keepalive () {
+ webidl.brandCheck(this, Request)
+
+ // The keepalive getter steps are to return this’s request’s keepalive.
+ return this[kState].keepalive
+ }
+
+ // Returns a boolean indicating whether or not request is for a reload
+ // navigation.
+ get isReloadNavigation () {
+ webidl.brandCheck(this, Request)
+
+ // The isReloadNavigation getter steps are to return true if this’s
+ // request’s reload-navigation flag is set; otherwise false.
+ return this[kState].reloadNavigation
+ }
+
+ // Returns a boolean indicating whether or not request is for a history
+  // navigation (a.k.a. back-forward navigation).
+ get isHistoryNavigation () {
+ webidl.brandCheck(this, Request)
+
+ // The isHistoryNavigation getter steps are to return true if this’s request’s
+ // history-navigation flag is set; otherwise false.
+ return this[kState].historyNavigation
+ }
+
+ // Returns the signal associated with request, which is an AbortSignal
+ // object indicating whether or not request has been aborted, and its
+ // abort event handler.
+ get signal () {
+ webidl.brandCheck(this, Request)
+
+ // The signal getter steps are to return this’s signal.
+ return this[kSignal]
+ }
+
+ get body () {
+ webidl.brandCheck(this, Request)
+
+ return this[kState].body ? this[kState].body.stream : null
+ }
+
+ get bodyUsed () {
+ webidl.brandCheck(this, Request)
+
+ return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
+ }
+
+ get duplex () {
+ webidl.brandCheck(this, Request)
+
+ return 'half'
+ }
+
+ // Returns a clone of request.
+ clone () {
+ webidl.brandCheck(this, Request)
+
+ // 1. If this is unusable, then throw a TypeError.
+ if (this.bodyUsed || this.body?.locked) {
+ throw new TypeError('unusable')
+ }
+
+ // 2. Let clonedRequest be the result of cloning this’s request.
+ const clonedRequest = cloneRequest(this[kState])
+
+ // 3. Let clonedRequestObject be the result of creating a Request object,
+ // given clonedRequest, this’s headers’s guard, and this’s relevant Realm.
+ const clonedRequestObject = new Request(kConstruct)
+ clonedRequestObject[kState] = clonedRequest
+ clonedRequestObject[kRealm] = this[kRealm]
+ clonedRequestObject[kHeaders] = new Headers(kConstruct)
+ clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList
+ clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]
+ clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]
+
+ // 4. Make clonedRequestObject’s signal follow this’s signal.
+ const ac = new AbortController()
+ if (this.signal.aborted) {
+ ac.abort(this.signal.reason)
+ } else {
+ util.addAbortListener(
+ this.signal,
+ () => {
+ ac.abort(this.signal.reason)
+ }
+ )
+ }
+ clonedRequestObject[kSignal] = ac.signal
+
+    // 5. Return clonedRequestObject.
+ return clonedRequestObject
+ }
+}
+
+mixinBody(Request)
+
+function makeRequest (init) {
+ // https://fetch.spec.whatwg.org/#requests
+ const request = {
+ method: 'GET',
+ localURLsOnly: false,
+ unsafeRequest: false,
+ body: null,
+ client: null,
+ reservedClient: null,
+ replacesClientId: '',
+ window: 'client',
+ keepalive: false,
+ serviceWorkers: 'all',
+ initiator: '',
+ destination: '',
+ priority: null,
+ origin: 'client',
+ policyContainer: 'client',
+ referrer: 'client',
+ referrerPolicy: '',
+ mode: 'no-cors',
+ useCORSPreflightFlag: false,
+ credentials: 'same-origin',
+ useCredentials: false,
+ cache: 'default',
+ redirect: 'follow',
+ integrity: '',
+ cryptoGraphicsNonceMetadata: '',
+ parserMetadata: '',
+ reloadNavigation: false,
+ historyNavigation: false,
+ userActivation: false,
+ taintedOrigin: false,
+ redirectCount: 0,
+ responseTainting: 'basic',
+ preventNoCacheCacheControlHeaderModification: false,
+ done: false,
+ timingAllowFailed: false,
+ ...init,
+ headersList: init.headersList
+ ? new HeadersList(init.headersList)
+ : new HeadersList()
+ }
+ request.url = request.urlList[0]
+ return request
+}
+
+// https://fetch.spec.whatwg.org/#concept-request-clone
+function cloneRequest (request) {
+ // To clone a request request, run these steps:
+
+ // 1. Let newRequest be a copy of request, except for its body.
+ const newRequest = makeRequest({ ...request, body: null })
+
+ // 2. If request’s body is non-null, set newRequest’s body to the
+ // result of cloning request’s body.
+ if (request.body != null) {
+ newRequest.body = cloneBody(request.body)
+ }
+
+ // 3. Return newRequest.
+ return newRequest
+}
+
+Object.defineProperties(Request.prototype, {
+ method: kEnumerableProperty,
+ url: kEnumerableProperty,
+ headers: kEnumerableProperty,
+ redirect: kEnumerableProperty,
+ clone: kEnumerableProperty,
+ signal: kEnumerableProperty,
+ duplex: kEnumerableProperty,
+ destination: kEnumerableProperty,
+ body: kEnumerableProperty,
+ bodyUsed: kEnumerableProperty,
+ isHistoryNavigation: kEnumerableProperty,
+ isReloadNavigation: kEnumerableProperty,
+ keepalive: kEnumerableProperty,
+ integrity: kEnumerableProperty,
+ cache: kEnumerableProperty,
+ credentials: kEnumerableProperty,
+ attribute: kEnumerableProperty,
+ referrerPolicy: kEnumerableProperty,
+ referrer: kEnumerableProperty,
+ mode: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: 'Request',
+ configurable: true
+ }
+})
+
+webidl.converters.Request = webidl.interfaceConverter(
+ Request
+)
+
+// https://fetch.spec.whatwg.org/#requestinfo
+webidl.converters.RequestInfo = function (V) {
+ if (typeof V === 'string') {
+ return webidl.converters.USVString(V)
+ }
+
+ if (V instanceof Request) {
+ return webidl.converters.Request(V)
+ }
+
+ return webidl.converters.USVString(V)
+}
+
+webidl.converters.AbortSignal = webidl.interfaceConverter(
+ AbortSignal
+)
+
+// https://fetch.spec.whatwg.org/#requestinit
+webidl.converters.RequestInit = webidl.dictionaryConverter([
+ {
+ key: 'method',
+ converter: webidl.converters.ByteString
+ },
+ {
+ key: 'headers',
+ converter: webidl.converters.HeadersInit
+ },
+ {
+ key: 'body',
+ converter: webidl.nullableConverter(
+ webidl.converters.BodyInit
+ )
+ },
+ {
+ key: 'referrer',
+ converter: webidl.converters.USVString
+ },
+ {
+ key: 'referrerPolicy',
+ converter: webidl.converters.DOMString,
+ // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy
+ allowedValues: referrerPolicy
+ },
+ {
+ key: 'mode',
+ converter: webidl.converters.DOMString,
+ // https://fetch.spec.whatwg.org/#concept-request-mode
+ allowedValues: requestMode
+ },
+ {
+ key: 'credentials',
+ converter: webidl.converters.DOMString,
+ // https://fetch.spec.whatwg.org/#requestcredentials
+ allowedValues: requestCredentials
+ },
+ {
+ key: 'cache',
+ converter: webidl.converters.DOMString,
+ // https://fetch.spec.whatwg.org/#requestcache
+ allowedValues: requestCache
+ },
+ {
+ key: 'redirect',
+ converter: webidl.converters.DOMString,
+ // https://fetch.spec.whatwg.org/#requestredirect
+ allowedValues: requestRedirect
+ },
+ {
+ key: 'integrity',
+ converter: webidl.converters.DOMString
+ },
+ {
+ key: 'keepalive',
+ converter: webidl.converters.boolean
+ },
+ {
+ key: 'signal',
+ converter: webidl.nullableConverter(
+ (signal) => webidl.converters.AbortSignal(
+ signal,
+ { strict: false }
+ )
+ )
+ },
+ {
+ key: 'window',
+ converter: webidl.converters.any
+ },
+ {
+ key: 'duplex',
+ converter: webidl.converters.DOMString,
+ allowedValues: requestDuplex
+ }
+])
+
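+// Illustrative usage sketch (an assumption for context, not part of the module; the URL and
+// field names are placeholders). It exercises the constructor, the signal-following step and
+// clone() as implemented above:
+//
+//   const { Request } = require('./request')
+//   const ac = new AbortController()
+//   const req = new Request('https://example.com/items', {
+//     method: 'POST',
+//     headers: { 'content-type': 'application/json' },
+//     body: JSON.stringify({ name: 'example' }),
+//     signal: ac.signal
+//   })
+//   const copy = req.clone()   // throws a TypeError once req's body has been used or locked
+//   ac.abort()                 // req.signal and copy.signal both become aborted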
+module.exports = { Request, makeRequest }
diff --git a/lib/fetch/response.js b/lib/fetch/response.js
new file mode 100644
index 0000000..7338612
--- /dev/null
+++ b/lib/fetch/response.js
@@ -0,0 +1,571 @@
+'use strict'
+
+const { Headers, HeadersList, fill } = require('./headers')
+const { extractBody, cloneBody, mixinBody } = require('./body')
+const util = require('../core/util')
+const { kEnumerableProperty } = util
+const {
+ isValidReasonPhrase,
+ isCancelled,
+ isAborted,
+ isBlobLike,
+ serializeJavascriptValueToJSONString,
+ isErrorLike,
+ isomorphicEncode
+} = require('./util')
+const {
+ redirectStatusSet,
+ nullBodyStatus,
+ DOMException
+} = require('./constants')
+const { kState, kHeaders, kGuard, kRealm } = require('./symbols')
+const { webidl } = require('./webidl')
+const { FormData } = require('./formdata')
+const { getGlobalOrigin } = require('./global')
+const { URLSerializer } = require('./dataURL')
+const { kHeadersList, kConstruct } = require('../core/symbols')
+const assert = require('assert')
+const { types } = require('util')
+
+const ReadableStream = globalThis.ReadableStream || require('stream/web').ReadableStream
+const textEncoder = new TextEncoder()
+
+// https://fetch.spec.whatwg.org/#response-class
+class Response {
+ // Creates network error Response.
+ static error () {
+ // TODO
+ const relevantRealm = { settingsObject: {} }
+
+ // The static error() method steps are to return the result of creating a
+ // Response object, given a new network error, "immutable", and this’s
+ // relevant Realm.
+ const responseObject = new Response()
+ responseObject[kState] = makeNetworkError()
+ responseObject[kRealm] = relevantRealm
+ responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList
+ responseObject[kHeaders][kGuard] = 'immutable'
+ responseObject[kHeaders][kRealm] = relevantRealm
+ return responseObject
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-response-json
+ static json (data, init = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' })
+
+ if (init !== null) {
+ init = webidl.converters.ResponseInit(init)
+ }
+
+ // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data.
+ const bytes = textEncoder.encode(
+ serializeJavascriptValueToJSONString(data)
+ )
+
+ // 2. Let body be the result of extracting bytes.
+ const body = extractBody(bytes)
+
+ // 3. Let responseObject be the result of creating a Response object, given a new response,
+ // "response", and this’s relevant Realm.
+ const relevantRealm = { settingsObject: {} }
+ const responseObject = new Response()
+ responseObject[kRealm] = relevantRealm
+ responseObject[kHeaders][kGuard] = 'response'
+ responseObject[kHeaders][kRealm] = relevantRealm
+
+ // 4. Perform initialize a response given responseObject, init, and (body, "application/json").
+ initializeResponse(responseObject, init, { body: body[0], type: 'application/json' })
+
+ // 5. Return responseObject.
+ return responseObject
+ }
+
+ // Creates a redirect Response that redirects to url with status status.
+ static redirect (url, status = 302) {
+ const relevantRealm = { settingsObject: {} }
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' })
+
+ url = webidl.converters.USVString(url)
+ status = webidl.converters['unsigned short'](status)
+
+ // 1. Let parsedURL be the result of parsing url with current settings
+ // object’s API base URL.
+ // 2. If parsedURL is failure, then throw a TypeError.
+ // TODO: base-URL?
+ let parsedURL
+ try {
+ parsedURL = new URL(url, getGlobalOrigin())
+ } catch (err) {
+ throw Object.assign(new TypeError('Failed to parse URL from ' + url), {
+ cause: err
+ })
+ }
+
+ // 3. If status is not a redirect status, then throw a RangeError.
+ if (!redirectStatusSet.has(status)) {
+ throw new RangeError('Invalid status code ' + status)
+ }
+
+ // 4. Let responseObject be the result of creating a Response object,
+ // given a new response, "immutable", and this’s relevant Realm.
+ const responseObject = new Response()
+ responseObject[kRealm] = relevantRealm
+ responseObject[kHeaders][kGuard] = 'immutable'
+ responseObject[kHeaders][kRealm] = relevantRealm
+
+ // 5. Set responseObject’s response’s status to status.
+ responseObject[kState].status = status
+
+ // 6. Let value be parsedURL, serialized and isomorphic encoded.
+ const value = isomorphicEncode(URLSerializer(parsedURL))
+
+ // 7. Append `Location`/value to responseObject’s response’s header list.
+ responseObject[kState].headersList.append('location', value)
+
+ // 8. Return responseObject.
+ return responseObject
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-response
+ constructor (body = null, init = {}) {
+ if (body !== null) {
+ body = webidl.converters.BodyInit(body)
+ }
+
+ init = webidl.converters.ResponseInit(init)
+
+ // TODO
+ this[kRealm] = { settingsObject: {} }
+
+ // 1. Set this’s response to a new response.
+ this[kState] = makeResponse({})
+
+ // 2. Set this’s headers to a new Headers object with this’s relevant
+ // Realm, whose header list is this’s response’s header list and guard
+ // is "response".
+ this[kHeaders] = new Headers(kConstruct)
+ this[kHeaders][kGuard] = 'response'
+ this[kHeaders][kHeadersList] = this[kState].headersList
+ this[kHeaders][kRealm] = this[kRealm]
+
+ // 3. Let bodyWithType be null.
+ let bodyWithType = null
+
+ // 4. If body is non-null, then set bodyWithType to the result of extracting body.
+ if (body != null) {
+ const [extractedBody, type] = extractBody(body)
+ bodyWithType = { body: extractedBody, type }
+ }
+
+ // 5. Perform initialize a response given this, init, and bodyWithType.
+ initializeResponse(this, init, bodyWithType)
+ }
+
+ // Returns response’s type, e.g., "cors".
+ get type () {
+ webidl.brandCheck(this, Response)
+
+ // The type getter steps are to return this’s response’s type.
+ return this[kState].type
+ }
+
+ // Returns response’s URL, if it has one; otherwise the empty string.
+ get url () {
+ webidl.brandCheck(this, Response)
+
+ const urlList = this[kState].urlList
+
+ // The url getter steps are to return the empty string if this’s
+ // response’s URL is null; otherwise this’s response’s URL,
+ // serialized with exclude fragment set to true.
+ const url = urlList[urlList.length - 1] ?? null
+
+ if (url === null) {
+ return ''
+ }
+
+ return URLSerializer(url, true)
+ }
+
+ // Returns whether response was obtained through a redirect.
+ get redirected () {
+ webidl.brandCheck(this, Response)
+
+ // The redirected getter steps are to return true if this’s response’s URL
+ // list has more than one item; otherwise false.
+ return this[kState].urlList.length > 1
+ }
+
+ // Returns response’s status.
+ get status () {
+ webidl.brandCheck(this, Response)
+
+ // The status getter steps are to return this’s response’s status.
+ return this[kState].status
+ }
+
+ // Returns whether response’s status is an ok status.
+ get ok () {
+ webidl.brandCheck(this, Response)
+
+ // The ok getter steps are to return true if this’s response’s status is an
+ // ok status; otherwise false.
+ return this[kState].status >= 200 && this[kState].status <= 299
+ }
+
+ // Returns response’s status message.
+ get statusText () {
+ webidl.brandCheck(this, Response)
+
+ // The statusText getter steps are to return this’s response’s status
+ // message.
+ return this[kState].statusText
+ }
+
+ // Returns response’s headers as Headers.
+ get headers () {
+ webidl.brandCheck(this, Response)
+
+ // The headers getter steps are to return this’s headers.
+ return this[kHeaders]
+ }
+
+ get body () {
+ webidl.brandCheck(this, Response)
+
+ return this[kState].body ? this[kState].body.stream : null
+ }
+
+ get bodyUsed () {
+ webidl.brandCheck(this, Response)
+
+ return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
+ }
+
+ // Returns a clone of response.
+ clone () {
+ webidl.brandCheck(this, Response)
+
+ // 1. If this is unusable, then throw a TypeError.
+ if (this.bodyUsed || (this.body && this.body.locked)) {
+ throw webidl.errors.exception({
+ header: 'Response.clone',
+ message: 'Body has already been consumed.'
+ })
+ }
+
+ // 2. Let clonedResponse be the result of cloning this’s response.
+ const clonedResponse = cloneResponse(this[kState])
+
+ // 3. Return the result of creating a Response object, given
+ // clonedResponse, this’s headers’s guard, and this’s relevant Realm.
+ const clonedResponseObject = new Response()
+ clonedResponseObject[kState] = clonedResponse
+ clonedResponseObject[kRealm] = this[kRealm]
+ clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList
+ clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard]
+ clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm]
+
+ return clonedResponseObject
+ }
+}
+
+mixinBody(Response)
+
+Object.defineProperties(Response.prototype, {
+ type: kEnumerableProperty,
+ url: kEnumerableProperty,
+ status: kEnumerableProperty,
+ ok: kEnumerableProperty,
+ redirected: kEnumerableProperty,
+ statusText: kEnumerableProperty,
+ headers: kEnumerableProperty,
+ clone: kEnumerableProperty,
+ body: kEnumerableProperty,
+ bodyUsed: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: 'Response',
+ configurable: true
+ }
+})
+
+Object.defineProperties(Response, {
+ json: kEnumerableProperty,
+ redirect: kEnumerableProperty,
+ error: kEnumerableProperty
+})
+
+// https://fetch.spec.whatwg.org/#concept-response-clone
+function cloneResponse (response) {
+ // To clone a response response, run these steps:
+
+ // 1. If response is a filtered response, then return a new identical
+ // filtered response whose internal response is a clone of response’s
+ // internal response.
+ if (response.internalResponse) {
+ return filterResponse(
+ cloneResponse(response.internalResponse),
+ response.type
+ )
+ }
+
+ // 2. Let newResponse be a copy of response, except for its body.
+ const newResponse = makeResponse({ ...response, body: null })
+
+ // 3. If response’s body is non-null, then set newResponse’s body to the
+ // result of cloning response’s body.
+ if (response.body != null) {
+ newResponse.body = cloneBody(response.body)
+ }
+
+ // 4. Return newResponse.
+ return newResponse
+}
+
+function makeResponse (init) {
+ return {
+ aborted: false,
+ rangeRequested: false,
+ timingAllowPassed: false,
+ requestIncludesCredentials: false,
+ type: 'default',
+ status: 200,
+ timingInfo: null,
+ cacheState: '',
+ statusText: '',
+ ...init,
+ headersList: init.headersList
+ ? new HeadersList(init.headersList)
+ : new HeadersList(),
+ urlList: init.urlList ? [...init.urlList] : []
+ }
+}
+
+function makeNetworkError (reason) {
+ const isError = isErrorLike(reason)
+ return makeResponse({
+ type: 'error',
+ status: 0,
+ error: isError
+ ? reason
+ : new Error(reason ? String(reason) : reason),
+ aborted: reason && reason.name === 'AbortError'
+ })
+}
+
+function makeFilteredResponse (response, state) {
+ state = {
+ internalResponse: response,
+ ...state
+ }
+
+ return new Proxy(response, {
+ get (target, p) {
+ return p in state ? state[p] : target[p]
+ },
+ set (target, p, value) {
+ assert(!(p in state))
+ target[p] = value
+ return true
+ }
+ })
+}
+
+// https://fetch.spec.whatwg.org/#concept-filtered-response
+function filterResponse (response, type) {
+ // Set response to the following filtered response with response as its
+ // internal response, depending on request’s response tainting:
+ if (type === 'basic') {
+ // A basic filtered response is a filtered response whose type is "basic"
+ // and header list excludes any headers in internal response’s header list
+ // whose name is a forbidden response-header name.
+
+ // Note: undici does not implement forbidden response-header names
+ return makeFilteredResponse(response, {
+ type: 'basic',
+ headersList: response.headersList
+ })
+ } else if (type === 'cors') {
+ // A CORS filtered response is a filtered response whose type is "cors"
+ // and header list excludes any headers in internal response’s header
+ // list whose name is not a CORS-safelisted response-header name, given
+ // internal response’s CORS-exposed header-name list.
+
+ // Note: undici does not implement CORS-safelisted response-header names
+ return makeFilteredResponse(response, {
+ type: 'cors',
+ headersList: response.headersList
+ })
+ } else if (type === 'opaque') {
+ // An opaque filtered response is a filtered response whose type is
+ // "opaque", URL list is the empty list, status is 0, status message
+ // is the empty byte sequence, header list is empty, and body is null.
+
+ return makeFilteredResponse(response, {
+ type: 'opaque',
+ urlList: Object.freeze([]),
+ status: 0,
+ statusText: '',
+ body: null
+ })
+ } else if (type === 'opaqueredirect') {
+ // An opaque-redirect filtered response is a filtered response whose type
+ // is "opaqueredirect", status is 0, status message is the empty byte
+ // sequence, header list is empty, and body is null.
+
+ return makeFilteredResponse(response, {
+ type: 'opaqueredirect',
+ status: 0,
+ statusText: '',
+ headersList: [],
+ body: null
+ })
+ } else {
+ assert(false)
+ }
+}
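+
+// Illustrative sketch of the filtering above (internal helpers, shown with placeholder
+// values): the proxy serves filter-specific fields from `state` and everything else from
+// the wrapped response.
+//
+//   const inner = makeResponse({ status: 200, statusText: 'OK' })
+//   const opaque = filterResponse(inner, 'opaque')
+//   opaque.status            // 0      - overridden by the filter state
+//   opaque.statusText        // ''     - likewise
+//   opaque.internalResponse  // inner  - the untouched internal response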
+
+// https://fetch.spec.whatwg.org/#appropriate-network-error
+function makeAppropriateNetworkError (fetchParams, err = null) {
+ // 1. Assert: fetchParams is canceled.
+ assert(isCancelled(fetchParams))
+
+ // 2. Return an aborted network error if fetchParams is aborted;
+ // otherwise return a network error.
+ return isAborted(fetchParams)
+ ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err }))
+ : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err }))
+}
+
+// https://whatpr.org/fetch/1392.html#initialize-a-response
+function initializeResponse (response, init, body) {
+ // 1. If init["status"] is not in the range 200 to 599, inclusive, then
+ // throw a RangeError.
+ if (init.status !== null && (init.status < 200 || init.status > 599)) {
+ throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.')
+ }
+
+ // 2. If init["statusText"] does not match the reason-phrase token production,
+ // then throw a TypeError.
+ if ('statusText' in init && init.statusText != null) {
+ // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2:
+ // reason-phrase = *( HTAB / SP / VCHAR / obs-text )
+ if (!isValidReasonPhrase(String(init.statusText))) {
+ throw new TypeError('Invalid statusText')
+ }
+ }
+
+ // 3. Set response’s response’s status to init["status"].
+ if ('status' in init && init.status != null) {
+ response[kState].status = init.status
+ }
+
+ // 4. Set response’s response’s status message to init["statusText"].
+ if ('statusText' in init && init.statusText != null) {
+ response[kState].statusText = init.statusText
+ }
+
+ // 5. If init["headers"] exists, then fill response’s headers with init["headers"].
+ if ('headers' in init && init.headers != null) {
+ fill(response[kHeaders], init.headers)
+ }
+
+ // 6. If body was given, then:
+ if (body) {
+ // 1. If response's status is a null body status, then throw a TypeError.
+ if (nullBodyStatus.includes(response.status)) {
+ throw webidl.errors.exception({
+ header: 'Response constructor',
+ message: 'Invalid response status code ' + response.status
+ })
+ }
+
+ // 2. Set response's body to body's body.
+ response[kState].body = body.body
+
+ // 3. If body's type is non-null and response's header list does not contain
+ // `Content-Type`, then append (`Content-Type`, body's type) to response's header list.
+ if (body.type != null && !response[kState].headersList.contains('Content-Type')) {
+ response[kState].headersList.append('content-type', body.type)
+ }
+ }
+}
+
+webidl.converters.ReadableStream = webidl.interfaceConverter(
+ ReadableStream
+)
+
+webidl.converters.FormData = webidl.interfaceConverter(
+ FormData
+)
+
+webidl.converters.URLSearchParams = webidl.interfaceConverter(
+ URLSearchParams
+)
+
+// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit
+webidl.converters.XMLHttpRequestBodyInit = function (V) {
+ if (typeof V === 'string') {
+ return webidl.converters.USVString(V)
+ }
+
+ if (isBlobLike(V)) {
+ return webidl.converters.Blob(V, { strict: false })
+ }
+
+ if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) {
+ return webidl.converters.BufferSource(V)
+ }
+
+ if (util.isFormDataLike(V)) {
+ return webidl.converters.FormData(V, { strict: false })
+ }
+
+ if (V instanceof URLSearchParams) {
+ return webidl.converters.URLSearchParams(V)
+ }
+
+ return webidl.converters.DOMString(V)
+}
+
+// https://fetch.spec.whatwg.org/#bodyinit
+webidl.converters.BodyInit = function (V) {
+ if (V instanceof ReadableStream) {
+ return webidl.converters.ReadableStream(V)
+ }
+
+ // Note: the spec doesn't include async iterables,
+ // this is an undici extension.
+ if (V?.[Symbol.asyncIterator]) {
+ return V
+ }
+
+ return webidl.converters.XMLHttpRequestBodyInit(V)
+}
+
+webidl.converters.ResponseInit = webidl.dictionaryConverter([
+ {
+ key: 'status',
+ converter: webidl.converters['unsigned short'],
+ defaultValue: 200
+ },
+ {
+ key: 'statusText',
+ converter: webidl.converters.ByteString,
+ defaultValue: ''
+ },
+ {
+ key: 'headers',
+ converter: webidl.converters.HeadersInit
+ }
+])
+
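+// Illustrative usage sketch (an assumption for context, not part of the module; the URL is a
+// placeholder). It exercises the static helpers and clone() implemented above:
+//
+//   const { Response } = require('./response')
+//   const ok = Response.json({ hello: 'world' })    // status 200, content-type: application/json
+//   const moved = Response.redirect('https://example.com/next', 307)
+//   moved.status                                    // 307
+//   moved.headers.get('location')                   // 'https://example.com/next'
+//   const copy = ok.clone()                         // throws once ok's body has been consumed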
+module.exports = {
+ makeNetworkError,
+ makeResponse,
+ makeAppropriateNetworkError,
+ filterResponse,
+ Response,
+ cloneResponse
+}
diff --git a/lib/fetch/symbols.js b/lib/fetch/symbols.js
new file mode 100644
index 0000000..0b947d5
--- /dev/null
+++ b/lib/fetch/symbols.js
@@ -0,0 +1,10 @@
+'use strict'
+
+module.exports = {
+ kUrl: Symbol('url'),
+ kHeaders: Symbol('headers'),
+ kSignal: Symbol('signal'),
+ kState: Symbol('state'),
+ kGuard: Symbol('guard'),
+ kRealm: Symbol('realm')
+}
diff --git a/lib/fetch/util.js b/lib/fetch/util.js
new file mode 100644
index 0000000..b12142c
--- /dev/null
+++ b/lib/fetch/util.js
@@ -0,0 +1,1071 @@
+'use strict'
+
+const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = require('./constants')
+const { getGlobalOrigin } = require('./global')
+const { performance } = require('perf_hooks')
+const { isBlobLike, toUSVString, ReadableStreamFrom } = require('../core/util')
+const assert = require('assert')
+const { isUint8Array } = require('util/types')
+
+// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
+/** @type {import('crypto')|undefined} */
+let crypto
+
+try {
+ crypto = require('crypto')
+} catch {
+
+}
+
+function responseURL (response) {
+ // https://fetch.spec.whatwg.org/#responses
+ // A response has an associated URL. It is a pointer to the last URL
+ // in response’s URL list and null if response’s URL list is empty.
+ const urlList = response.urlList
+ const length = urlList.length
+ return length === 0 ? null : urlList[length - 1].toString()
+}
+
+// https://fetch.spec.whatwg.org/#concept-response-location-url
+function responseLocationURL (response, requestFragment) {
+ // 1. If response’s status is not a redirect status, then return null.
+ if (!redirectStatusSet.has(response.status)) {
+ return null
+ }
+
+ // 2. Let location be the result of extracting header list values given
+ // `Location` and response’s header list.
+ let location = response.headersList.get('location')
+
+ // 3. If location is a header value, then set location to the result of
+ // parsing location with response’s URL.
+ if (location !== null && isValidHeaderValue(location)) {
+ location = new URL(location, responseURL(response))
+ }
+
+ // 4. If location is a URL whose fragment is null, then set location’s
+ // fragment to requestFragment.
+ if (location && !location.hash) {
+ location.hash = requestFragment
+ }
+
+ // 5. Return location.
+ return location
+}
+
+/** @returns {URL} */
+function requestCurrentURL (request) {
+ return request.urlList[request.urlList.length - 1]
+}
+
+function requestBadPort (request) {
+ // 1. Let url be request’s current URL.
+ const url = requestCurrentURL(request)
+
+ // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port,
+ // then return blocked.
+ if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) {
+ return 'blocked'
+ }
+
+ // 3. Return allowed.
+ return 'allowed'
+}
+
+function isErrorLike (object) {
+ return object instanceof Error || (
+ object?.constructor?.name === 'Error' ||
+ object?.constructor?.name === 'DOMException'
+ )
+}
+
+// Check whether |statusText| is a ByteString and
+// matches the Reason-Phrase token production.
+// RFC 2616: https://tools.ietf.org/html/rfc2616
+// RFC 7230: https://tools.ietf.org/html/rfc7230
+// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )"
+// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116
+function isValidReasonPhrase (statusText) {
+ for (let i = 0; i < statusText.length; ++i) {
+ const c = statusText.charCodeAt(i)
+ if (
+ !(
+ (
+ c === 0x09 || // HTAB
+ (c >= 0x20 && c <= 0x7e) || // SP / VCHAR
+ (c >= 0x80 && c <= 0xff)
+ ) // obs-text
+ )
+ ) {
+ return false
+ }
+ }
+ return true
+}
+
+/**
+ * @see https://tools.ietf.org/html/rfc7230#section-3.2.6
+ * @param {number} c
+ */
+function isTokenCharCode (c) {
+ switch (c) {
+ case 0x22:
+ case 0x28:
+ case 0x29:
+ case 0x2c:
+ case 0x2f:
+ case 0x3a:
+ case 0x3b:
+ case 0x3c:
+ case 0x3d:
+ case 0x3e:
+ case 0x3f:
+ case 0x40:
+ case 0x5b:
+ case 0x5c:
+ case 0x5d:
+ case 0x7b:
+ case 0x7d:
+ // DQUOTE and "(),/:;<=>?@[\]{}"
+ return false
+ default:
+ // VCHAR %x21-7E
+ return c >= 0x21 && c <= 0x7e
+ }
+}
+
+/**
+ * @param {string} characters
+ */
+function isValidHTTPToken (characters) {
+ if (characters.length === 0) {
+ return false
+ }
+ for (let i = 0; i < characters.length; ++i) {
+ if (!isTokenCharCode(characters.charCodeAt(i))) {
+ return false
+ }
+ }
+ return true
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#header-name
+ * @param {string} potentialValue
+ */
+function isValidHeaderName (potentialValue) {
+ return isValidHTTPToken(potentialValue)
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#header-value
+ * @param {string} potentialValue
+ */
+function isValidHeaderValue (potentialValue) {
+ // - Has no leading or trailing HTTP tab or space bytes.
+ // - Contains no 0x00 (NUL) or HTTP newline bytes.
+ if (
+ potentialValue.startsWith('\t') ||
+ potentialValue.startsWith(' ') ||
+ potentialValue.endsWith('\t') ||
+ potentialValue.endsWith(' ')
+ ) {
+ return false
+ }
+
+ if (
+ potentialValue.includes('\0') ||
+ potentialValue.includes('\r') ||
+ potentialValue.includes('\n')
+ ) {
+ return false
+ }
+
+ return true
+}
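+
+// Illustrative examples of the validators above (values are arbitrary):
+//
+//   isValidHTTPToken('Content-Type')   // true  - token characters only
+//   isValidHTTPToken('Content Type')   // false - SP is not a token character
+//   isValidHeaderValue('gzip, br')     // true
+//   isValidHeaderValue(' gzip')        // false - leading space
+//   isValidHeaderValue('a\r\nb')       // false - CR/LF bytes are rejected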
+
+// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect
+function setRequestReferrerPolicyOnRedirect (request, actualResponse) {
+ // Given a request request and a response actualResponse, this algorithm
+ // updates request’s referrer policy according to the Referrer-Policy
+ // header (if any) in actualResponse.
+
+ // 1. Let policy be the result of executing § 8.1 Parse a referrer policy
+ // from a Referrer-Policy header on actualResponse.
+
+ // 8.1 Parse a referrer policy from a Referrer-Policy header
+ // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list.
+ const { headersList } = actualResponse
+ // 2. Let policy be the empty string.
+ // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token.
+ // 4. Return policy.
+ const policyHeader = (headersList.get('referrer-policy') ?? '').split(',')
+
+  // Note: As the Referrer-Policy header can contain multiple policies
+  // separated by commas, we scan them from right to left and use the
+  // right-most valid one; earlier entries act as fallbacks.
+  // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy
+ let policy = ''
+ if (policyHeader.length > 0) {
+ // The right-most policy takes precedence.
+ // The left-most policy is the fallback.
+ for (let i = policyHeader.length; i !== 0; i--) {
+ const token = policyHeader[i - 1].trim()
+ if (referrerPolicyTokens.has(token)) {
+ policy = token
+ break
+ }
+ }
+ }
+
+ // 2. If policy is not the empty string, then set request’s referrer policy to policy.
+ if (policy !== '') {
+ request.referrerPolicy = policy
+ }
+}
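+
+// Illustrative example of the parsing above: given a redirect response carrying
+//
+//   Referrer-Policy: no-referrer, strict-origin-when-cross-origin, bogus-value
+//
+// the loop scans from the right, skips "bogus-value" (not a known policy token) and settles
+// on "strict-origin-when-cross-origin", which becomes request.referrerPolicy.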
+
+// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check
+function crossOriginResourcePolicyCheck () {
+ // TODO
+ return 'allowed'
+}
+
+// https://fetch.spec.whatwg.org/#concept-cors-check
+function corsCheck () {
+ // TODO
+ return 'success'
+}
+
+// https://fetch.spec.whatwg.org/#concept-tao-check
+function TAOCheck () {
+ // TODO
+ return 'success'
+}
+
+function appendFetchMetadata (httpRequest) {
+ // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header
+ // TODO
+
+ // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header
+
+ // 1. Assert: r’s url is a potentially trustworthy URL.
+ // TODO
+
+ // 2. Let header be a Structured Header whose value is a token.
+ let header = null
+
+ // 3. Set header’s value to r’s mode.
+ header = httpRequest.mode
+
+ // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list.
+ httpRequest.headersList.set('sec-fetch-mode', header)
+
+ // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header
+ // TODO
+
+ // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header
+ // TODO
+}
+
+// https://fetch.spec.whatwg.org/#append-a-request-origin-header
+function appendRequestOriginHeader (request) {
+ // 1. Let serializedOrigin be the result of byte-serializing a request origin with request.
+ let serializedOrigin = request.origin
+
+ // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list.
+ if (request.responseTainting === 'cors' || request.mode === 'websocket') {
+ if (serializedOrigin) {
+ request.headersList.append('origin', serializedOrigin)
+ }
+
+ // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then:
+ } else if (request.method !== 'GET' && request.method !== 'HEAD') {
+ // 1. Switch on request’s referrer policy:
+ switch (request.referrerPolicy) {
+ case 'no-referrer':
+ // Set serializedOrigin to `null`.
+ serializedOrigin = null
+ break
+ case 'no-referrer-when-downgrade':
+ case 'strict-origin':
+ case 'strict-origin-when-cross-origin':
+ // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`.
+ if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) {
+ serializedOrigin = null
+ }
+ break
+ case 'same-origin':
+ // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`.
+ if (!sameOrigin(request, requestCurrentURL(request))) {
+ serializedOrigin = null
+ }
+ break
+ default:
+ // Do nothing.
+ }
+
+ if (serializedOrigin) {
+ // 2. Append (`Origin`, serializedOrigin) to request’s header list.
+ request.headersList.append('origin', serializedOrigin)
+ }
+ }
+}
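+
+// Illustrative example of the rules above: for a POST request whose referrer policy is
+// "no-referrer" (and whose response tainting is not "cors"), serializedOrigin is cleared and
+// no `Origin` header is appended; under the default branch (e.g. "unsafe-url") the request's
+// origin is appended as-is.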
+
+function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) {
+ // TODO
+ return performance.now()
+}
+
+// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info
+function createOpaqueTimingInfo (timingInfo) {
+ return {
+ startTime: timingInfo.startTime ?? 0,
+ redirectStartTime: 0,
+ redirectEndTime: 0,
+ postRedirectStartTime: timingInfo.startTime ?? 0,
+ finalServiceWorkerStartTime: 0,
+ finalNetworkResponseStartTime: 0,
+ finalNetworkRequestStartTime: 0,
+ endTime: 0,
+ encodedBodySize: 0,
+ decodedBodySize: 0,
+ finalConnectionTimingInfo: null
+ }
+}
+
+// https://html.spec.whatwg.org/multipage/origin.html#policy-container
+function makePolicyContainer () {
+ // Note: the fetch spec doesn't make use of embedder policy or CSP list
+ return {
+ referrerPolicy: 'strict-origin-when-cross-origin'
+ }
+}
+
+// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container
+function clonePolicyContainer (policyContainer) {
+ return {
+ referrerPolicy: policyContainer.referrerPolicy
+ }
+}
+
+// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
+function determineRequestsReferrer (request) {
+ // 1. Let policy be request's referrer policy.
+ const policy = request.referrerPolicy
+
+  // Note: policy should never be null or an empty string at this point.
+ assert(policy)
+
+ // 2. Let environment be request’s client.
+
+ let referrerSource = null
+
+ // 3. Switch on request’s referrer:
+ if (request.referrer === 'client') {
+ // Note: node isn't a browser and doesn't implement document/iframes,
+ // so we bypass this step and replace it with our own.
+
+ const globalOrigin = getGlobalOrigin()
+
+ if (!globalOrigin || globalOrigin.origin === 'null') {
+ return 'no-referrer'
+ }
+
+ // note: we need to clone it as it's mutated
+ referrerSource = new URL(globalOrigin)
+ } else if (request.referrer instanceof URL) {
+ // Let referrerSource be request’s referrer.
+ referrerSource = request.referrer
+ }
+
+ // 4. Let request’s referrerURL be the result of stripping referrerSource for
+ // use as a referrer.
+ let referrerURL = stripURLForReferrer(referrerSource)
+
+ // 5. Let referrerOrigin be the result of stripping referrerSource for use as
+ // a referrer, with the origin-only flag set to true.
+ const referrerOrigin = stripURLForReferrer(referrerSource, true)
+
+ // 6. If the result of serializing referrerURL is a string whose length is
+ // greater than 4096, set referrerURL to referrerOrigin.
+ if (referrerURL.toString().length > 4096) {
+ referrerURL = referrerOrigin
+ }
+
+ const areSameOrigin = sameOrigin(request, referrerURL)
+ const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) &&
+ !isURLPotentiallyTrustworthy(request.url)
+
+ // 8. Execute the switch statements corresponding to the value of policy:
+ switch (policy) {
+ case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true)
+ case 'unsafe-url': return referrerURL
+ case 'same-origin':
+ return areSameOrigin ? referrerOrigin : 'no-referrer'
+ case 'origin-when-cross-origin':
+ return areSameOrigin ? referrerURL : referrerOrigin
+ case 'strict-origin-when-cross-origin': {
+ const currentURL = requestCurrentURL(request)
+
+ // 1. If the origin of referrerURL and the origin of request’s current
+ // URL are the same, then return referrerURL.
+ if (sameOrigin(referrerURL, currentURL)) {
+ return referrerURL
+ }
+
+ // 2. If referrerURL is a potentially trustworthy URL and request’s
+ // current URL is not a potentially trustworthy URL, then return no
+ // referrer.
+ if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {
+ return 'no-referrer'
+ }
+
+ // 3. Return referrerOrigin.
+ return referrerOrigin
+ }
+ case 'strict-origin': // eslint-disable-line
+ /**
+ * 1. If referrerURL is a potentially trustworthy URL and
+ * request’s current URL is not a potentially trustworthy URL,
+ * then return no referrer.
+ * 2. Return referrerOrigin
+ */
+ case 'no-referrer-when-downgrade': // eslint-disable-line
+ /**
+ * 1. If referrerURL is a potentially trustworthy URL and
+ * request’s current URL is not a potentially trustworthy URL,
+ * then return no referrer.
+ * 2. Return referrerOrigin
+ */
+
+ default: // eslint-disable-line
+ return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin
+ }
+}
+
+/**
+ * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url
+ * @param {URL} url
+ * @param {boolean|undefined} originOnly
+ */
+function stripURLForReferrer (url, originOnly) {
+ // 1. Assert: url is a URL.
+ assert(url instanceof URL)
+
+ // 2. If url’s scheme is a local scheme, then return no referrer.
+  if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blob:') {
+ return 'no-referrer'
+ }
+
+ // 3. Set url’s username to the empty string.
+ url.username = ''
+
+ // 4. Set url’s password to the empty string.
+ url.password = ''
+
+ // 5. Set url’s fragment to null.
+ url.hash = ''
+
+ // 6. If the origin-only flag is true, then:
+ if (originOnly) {
+ // 1. Set url’s path to « the empty string ».
+ url.pathname = ''
+
+ // 2. Set url’s query to null.
+ url.search = ''
+ }
+
+ // 7. Return url.
+ return url
+}
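+// Illustrative example of the stripping above (not exercised by this module):
+//   stripURLForReferrer(new URL('https://user:pass@example.com/a?q=1#frag')).href
+//   // => 'https://example.com/a?q=1'
+//   stripURLForReferrer(new URL('https://example.com/a?q=1'), true).href
+//   // => 'https://example.com/'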
+
+function isURLPotentiallyTrustworthy (url) {
+ if (!(url instanceof URL)) {
+ return false
+ }
+
+  // If url is about:blank or about:srcdoc, return true
+ if (url.href === 'about:blank' || url.href === 'about:srcdoc') {
+ return true
+ }
+
+ // If scheme is data, return true
+ if (url.protocol === 'data:') return true
+
+ // If file, return true
+ if (url.protocol === 'file:') return true
+
+ return isOriginPotentiallyTrustworthy(url.origin)
+
+ function isOriginPotentiallyTrustworthy (origin) {
+ // If origin is explicitly null, return false
+ if (origin == null || origin === 'null') return false
+
+ const originAsURL = new URL(origin)
+
+ // If secure, return true
+ if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') {
+ return true
+ }
+
+ // If localhost or variants, return true
+ if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) ||
+ (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) ||
+ (originAsURL.hostname.endsWith('.localhost'))) {
+ return true
+ }
+
+ // If any other, return false
+ return false
+ }
+}
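+// Illustrative examples: isURLPotentiallyTrustworthy returns true for
+// new URL('https://example.com'), new URL('http://localhost:3000') and
+// new URL('data:,hello'), and false for new URL('http://example.com').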
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
+ * @param {Uint8Array} bytes
+ * @param {string} metadataList
+ */
+function bytesMatch (bytes, metadataList) {
+ // If node is not built with OpenSSL support, we cannot check
+ // a request's integrity, so allow it by default (the spec will
+ // allow requests if an invalid hash is given, as precedence).
+ /* istanbul ignore if: only if node is built with --without-ssl */
+ if (crypto === undefined) {
+ return true
+ }
+
+ // 1. Let parsedMetadata be the result of parsing metadataList.
+ const parsedMetadata = parseMetadata(metadataList)
+
+ // 2. If parsedMetadata is no metadata, return true.
+ if (parsedMetadata === 'no metadata') {
+ return true
+ }
+
+ // 3. If parsedMetadata is the empty set, return true.
+ if (parsedMetadata.length === 0) {
+ return true
+ }
+
+ // 4. Let metadata be the result of getting the strongest
+ // metadata from parsedMetadata.
+ const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
+ // get the strongest algorithm
+ const strongest = list[0].algo
+ // get all entries that use the strongest algorithm; ignore weaker
+ const metadata = list.filter((item) => item.algo === strongest)
+
+ // 5. For each item in metadata:
+ for (const item of metadata) {
+ // 1. Let algorithm be the alg component of item.
+ const algorithm = item.algo
+
+ // 2. Let expectedValue be the val component of item.
+ let expectedValue = item.hash
+
+ // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
+ // "be liberal with padding". This is annoying, and it's not even in the spec.
+
+ if (expectedValue.endsWith('==')) {
+ expectedValue = expectedValue.slice(0, -2)
+ }
+
+ // 3. Let actualValue be the result of applying algorithm to bytes.
+ let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+
+ if (actualValue.endsWith('==')) {
+ actualValue = actualValue.slice(0, -2)
+ }
+
+ // 4. If actualValue is a case-sensitive match for expectedValue,
+ // return true.
+ if (actualValue === expectedValue) {
+ return true
+ }
+
+ let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
+
+ if (actualBase64URL.endsWith('==')) {
+ actualBase64URL = actualBase64URL.slice(0, -2)
+ }
+
+ if (actualBase64URL === expectedValue) {
+ return true
+ }
+ }
+
+ // 6. Return false.
+ return false
+}
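+// Illustrative usage (the digest placeholder below is not a real hash):
+//   bytesMatch(Buffer.from(body), 'sha256-<base64 digest of body>')
+// returns true only when a digest computed with the strongest listed
+// algorithm matches; an unparseable list such as 'md5-xyz' is ignored
+// and the check passes.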
+
+// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
+// https://www.w3.org/TR/CSP2/#source-list-syntax
+// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
+const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-Za-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ * @param {string} metadata
+ */
+function parseMetadata (metadata) {
+ // 1. Let result be the empty set.
+ /** @type {{ algo: string, hash: string }[]} */
+ const result = []
+
+ // 2. Let empty be equal to true.
+ let empty = true
+
+ const supportedHashes = crypto.getHashes()
+
+ // 3. For each token returned by splitting metadata on spaces:
+ for (const token of metadata.split(' ')) {
+ // 1. Set empty to false.
+ empty = false
+
+ // 2. Parse token as a hash-with-options.
+ const parsedToken = parseHashWithOptions.exec(token)
+
+ // 3. If token does not parse, continue to the next token.
+ if (parsedToken === null || parsedToken.groups === undefined) {
+ // Note: Chromium blocks the request at this point, but Firefox
+ // gives a warning that an invalid integrity was given. The
+ // correct behavior is to ignore these, and subsequently not
+ // check the integrity of the resource.
+ continue
+ }
+
+ // 4. Let algorithm be the hash-algo component of token.
+ const algorithm = parsedToken.groups.algo
+
+ // 5. If algorithm is a hash function recognized by the user
+ // agent, add the parsed token to result.
+ if (supportedHashes.includes(algorithm.toLowerCase())) {
+ result.push(parsedToken.groups)
+ }
+ }
+
+ // 4. Return no metadata if empty is true, otherwise return result.
+ if (empty === true) {
+ return 'no metadata'
+ }
+
+ return result
+}
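+// Illustrative example (assuming the OpenSSL build reports both hashes):
+//   parseMetadata('sha256-abc sha512-def')
+//   // => [{ algo: 'sha256', hash: 'abc' }, { algo: 'sha512', hash: 'def' }]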
+
+// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
+function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
+ // TODO
+}
+
+/**
+ * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin}
+ * @param {URL} A
+ * @param {URL} B
+ */
+function sameOrigin (A, B) {
+ // 1. If A and B are the same opaque origin, then return true.
+ if (A.origin === B.origin && A.origin === 'null') {
+ return true
+ }
+
+ // 2. If A and B are both tuple origins and their schemes,
+ // hosts, and port are identical, then return true.
+ if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) {
+ return true
+ }
+
+ // 3. Return false.
+ return false
+}
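+// Illustrative examples:
+//   sameOrigin(new URL('https://example.com/a'), new URL('https://example.com:443/b')) // => true
+//   sameOrigin(new URL('https://example.com'), new URL('http://example.com')) // => false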
+
+function createDeferredPromise () {
+ let res
+ let rej
+ const promise = new Promise((resolve, reject) => {
+ res = resolve
+ rej = reject
+ })
+
+ return { promise, resolve: res, reject: rej }
+}
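+// Illustrative usage:
+//   const { promise, resolve } = createDeferredPromise()
+//   queueMicrotask(() => resolve('done'))
+//   await promise // => 'done'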
+
+function isAborted (fetchParams) {
+ return fetchParams.controller.state === 'aborted'
+}
+
+function isCancelled (fetchParams) {
+ return fetchParams.controller.state === 'aborted' ||
+ fetchParams.controller.state === 'terminated'
+}
+
+const normalizeMethodRecord = {
+ delete: 'DELETE',
+ DELETE: 'DELETE',
+ get: 'GET',
+ GET: 'GET',
+ head: 'HEAD',
+ HEAD: 'HEAD',
+ options: 'OPTIONS',
+ OPTIONS: 'OPTIONS',
+ post: 'POST',
+ POST: 'POST',
+ put: 'PUT',
+ PUT: 'PUT'
+}
+
+// Note: the record is created without a prototype, so Object.prototype members (e.g. `hasOwnProperty`) cannot be reached through it.
+Object.setPrototypeOf(normalizeMethodRecord, null)
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-method-normalize
+ * @param {string} method
+ */
+function normalizeMethod (method) {
+ return normalizeMethodRecord[method.toLowerCase()] ?? method
+}
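+// Illustrative examples: normalizeMethod('post') and normalizeMethod('Post')
+// both return 'POST'; a method outside the record, such as 'patch', is
+// returned unchanged.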
+
+// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string
+function serializeJavascriptValueToJSONString (value) {
+ // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »).
+ const result = JSON.stringify(value)
+
+ // 2. If result is undefined, then throw a TypeError.
+ if (result === undefined) {
+ throw new TypeError('Value is not JSON serializable')
+ }
+
+ // 3. Assert: result is a string.
+ assert(typeof result === 'string')
+
+ // 4. Return result.
+ return result
+}
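+// Illustrative examples:
+//   serializeJavascriptValueToJSONString({ a: 1 }) // => '{"a":1}'
+//   serializeJavascriptValueToJSONString(undefined) // throws TypeError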
+
+// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object
+const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))
+
+/**
+ * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object
+ * @param {() => unknown[]} iterator
+ * @param {string} name name of the instance
+ * @param {'key'|'value'|'key+value'} kind
+ */
+function makeIterator (iterator, name, kind) {
+ const object = {
+ index: 0,
+ kind,
+ target: iterator
+ }
+
+ const i = {
+ next () {
+ // 1. Let interface be the interface for which the iterator prototype object exists.
+
+ // 2. Let thisValue be the this value.
+
+ // 3. Let object be ? ToObject(thisValue).
+
+ // 4. If object is a platform object, then perform a security
+ // check, passing:
+
+ // 5. If object is not a default iterator object for interface,
+ // then throw a TypeError.
+ if (Object.getPrototypeOf(this) !== i) {
+ throw new TypeError(
+ `'next' called on an object that does not implement interface ${name} Iterator.`
+ )
+ }
+
+ // 6. Let index be object’s index.
+ // 7. Let kind be object’s kind.
+ // 8. Let values be object’s target's value pairs to iterate over.
+ const { index, kind, target } = object
+ const values = target()
+
+ // 9. Let len be the length of values.
+ const len = values.length
+
+ // 10. If index is greater than or equal to len, then return
+ // CreateIterResultObject(undefined, true).
+ if (index >= len) {
+ return { value: undefined, done: true }
+ }
+
+ // 11. Let pair be the entry in values at index index.
+ const pair = values[index]
+
+ // 12. Set object’s index to index + 1.
+ object.index = index + 1
+
+ // 13. Return the iterator result for pair and kind.
+ return iteratorResult(pair, kind)
+ },
+ // The class string of an iterator prototype object for a given interface is the
+ // result of concatenating the identifier of the interface and the string " Iterator".
+ [Symbol.toStringTag]: `${name} Iterator`
+ }
+
+ // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%.
+ Object.setPrototypeOf(i, esIteratorPrototype)
+  // The returned iterator is an empty object whose prototype is i,
+  // and i's prototype is %IteratorPrototype%.
+ return Object.setPrototypeOf({}, i)
+}
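+// Illustrative usage:
+//   const it = makeIterator(() => [['accept', 'text/html']], 'Headers', 'key+value')
+//   it.next() // => { value: ['accept', 'text/html'], done: false }
+//   it.next() // => { value: undefined, done: true }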
+
+// https://webidl.spec.whatwg.org/#iterator-result
+function iteratorResult (pair, kind) {
+ let result
+
+ // 1. Let result be a value determined by the value of kind:
+ switch (kind) {
+ case 'key': {
+ // 1. Let idlKey be pair’s key.
+ // 2. Let key be the result of converting idlKey to an
+ // ECMAScript value.
+ // 3. result is key.
+ result = pair[0]
+ break
+ }
+ case 'value': {
+ // 1. Let idlValue be pair’s value.
+ // 2. Let value be the result of converting idlValue to
+ // an ECMAScript value.
+ // 3. result is value.
+ result = pair[1]
+ break
+ }
+ case 'key+value': {
+ // 1. Let idlKey be pair’s key.
+ // 2. Let idlValue be pair’s value.
+ // 3. Let key be the result of converting idlKey to an
+ // ECMAScript value.
+ // 4. Let value be the result of converting idlValue to
+ // an ECMAScript value.
+ // 5. Let array be ! ArrayCreate(2).
+ // 6. Call ! CreateDataProperty(array, "0", key).
+ // 7. Call ! CreateDataProperty(array, "1", value).
+ // 8. result is array.
+ result = pair
+ break
+ }
+ }
+
+ // 2. Return CreateIterResultObject(result, false).
+ return { value: result, done: false }
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#body-fully-read
+ */
+async function fullyReadBody (body, processBody, processBodyError) {
+ // 1. If taskDestination is null, then set taskDestination to
+ // the result of starting a new parallel queue.
+
+ // 2. Let successSteps given a byte sequence bytes be to queue a
+ // fetch task to run processBody given bytes, with taskDestination.
+ const successSteps = processBody
+
+ // 3. Let errorSteps be to queue a fetch task to run processBodyError,
+ // with taskDestination.
+ const errorSteps = processBodyError
+
+ // 4. Let reader be the result of getting a reader for body’s stream.
+ // If that threw an exception, then run errorSteps with that
+ // exception and return.
+ let reader
+
+ try {
+ reader = body.stream.getReader()
+ } catch (e) {
+ errorSteps(e)
+ return
+ }
+
+ // 5. Read all bytes from reader, given successSteps and errorSteps.
+ try {
+ const result = await readAllBytes(reader)
+ successSteps(result)
+ } catch (e) {
+ errorSteps(e)
+ }
+}
+
+/** @type {ReadableStream} */
+let ReadableStream = globalThis.ReadableStream
+
+function isReadableStreamLike (stream) {
+ if (!ReadableStream) {
+ ReadableStream = require('stream/web').ReadableStream
+ }
+
+ return stream instanceof ReadableStream || (
+ stream[Symbol.toStringTag] === 'ReadableStream' &&
+ typeof stream.tee === 'function'
+ )
+}
+
+const MAXIMUM_ARGUMENT_LENGTH = 65535
+
+/**
+ * @see https://infra.spec.whatwg.org/#isomorphic-decode
+ * @param {number[]|Uint8Array} input
+ */
+function isomorphicDecode (input) {
+ // 1. To isomorphic decode a byte sequence input, return a string whose code point
+ // length is equal to input’s length and whose code points have the same values
+ // as the values of input’s bytes, in the same order.
+
+ if (input.length < MAXIMUM_ARGUMENT_LENGTH) {
+ return String.fromCharCode(...input)
+ }
+
+ return input.reduce((previous, current) => previous + String.fromCharCode(current), '')
+}
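+// Illustrative example: isomorphicDecode(new Uint8Array([104, 101, 121])) // => 'hey'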
+
+/**
+ * @param {ReadableStreamController<Uint8Array>} controller
+ */
+function readableStreamClose (controller) {
+ try {
+ controller.close()
+ } catch (err) {
+ // TODO: add comment explaining why this error occurs.
+ if (!err.message.includes('Controller is already closed')) {
+ throw err
+ }
+ }
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#isomorphic-encode
+ * @param {string} input
+ */
+function isomorphicEncode (input) {
+ // 1. Assert: input contains no code points greater than U+00FF.
+ for (let i = 0; i < input.length; i++) {
+ assert(input.charCodeAt(i) <= 0xFF)
+ }
+
+ // 2. Return a byte sequence whose length is equal to input’s code
+ // point length and whose bytes have the same values as the
+ // values of input’s code points, in the same order
+ return input
+}
+
+/**
+ * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes
+ * @see https://streams.spec.whatwg.org/#read-loop
+ * @param {ReadableStreamDefaultReader} reader
+ */
+async function readAllBytes (reader) {
+ const bytes = []
+ let byteLength = 0
+
+ while (true) {
+ const { done, value: chunk } = await reader.read()
+
+ if (done) {
+ // 1. Call successSteps with bytes.
+ return Buffer.concat(bytes, byteLength)
+ }
+
+ // 1. If chunk is not a Uint8Array object, call failureSteps
+ // with a TypeError and abort these steps.
+ if (!isUint8Array(chunk)) {
+ throw new TypeError('Received non-Uint8Array chunk')
+ }
+
+ // 2. Append the bytes represented by chunk to bytes.
+ bytes.push(chunk)
+ byteLength += chunk.length
+
+ // 3. Read-loop given reader, bytes, successSteps, and failureSteps.
+ }
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#is-local
+ * @param {URL} url
+ */
+function urlIsLocal (url) {
+ assert('protocol' in url) // ensure it's a url object
+
+ const protocol = url.protocol
+
+ return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:'
+}
+
+/**
+ * @param {string|URL} url
+ */
+function urlHasHttpsScheme (url) {
+ if (typeof url === 'string') {
+ return url.startsWith('https:')
+ }
+
+ return url.protocol === 'https:'
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-scheme
+ * @param {URL} url
+ */
+function urlIsHttpHttpsScheme (url) {
+ assert('protocol' in url) // ensure it's a url object
+
+ const protocol = url.protocol
+
+ return protocol === 'http:' || protocol === 'https:'
+}
+
+/**
+ * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0.
+ */
+const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key))
+
+module.exports = {
+ isAborted,
+ isCancelled,
+ createDeferredPromise,
+ ReadableStreamFrom,
+ toUSVString,
+ tryUpgradeRequestToAPotentiallyTrustworthyURL,
+ coarsenedSharedCurrentTime,
+ determineRequestsReferrer,
+ makePolicyContainer,
+ clonePolicyContainer,
+ appendFetchMetadata,
+ appendRequestOriginHeader,
+ TAOCheck,
+ corsCheck,
+ crossOriginResourcePolicyCheck,
+ createOpaqueTimingInfo,
+ setRequestReferrerPolicyOnRedirect,
+ isValidHTTPToken,
+ requestBadPort,
+ requestCurrentURL,
+ responseURL,
+ responseLocationURL,
+ isBlobLike,
+ isURLPotentiallyTrustworthy,
+ isValidReasonPhrase,
+ sameOrigin,
+ normalizeMethod,
+ serializeJavascriptValueToJSONString,
+ makeIterator,
+ isValidHeaderName,
+ isValidHeaderValue,
+ hasOwn,
+ isErrorLike,
+ fullyReadBody,
+ bytesMatch,
+ isReadableStreamLike,
+ readableStreamClose,
+ isomorphicEncode,
+ isomorphicDecode,
+ urlIsLocal,
+ urlHasHttpsScheme,
+ urlIsHttpHttpsScheme,
+ readAllBytes,
+ normalizeMethodRecord
+}
diff --git a/lib/fetch/webidl.js b/lib/fetch/webidl.js
new file mode 100644
index 0000000..6fcf2ab
--- /dev/null
+++ b/lib/fetch/webidl.js
@@ -0,0 +1,646 @@
+'use strict'
+
+const { types } = require('util')
+const { hasOwn, toUSVString } = require('./util')
+
+/** @type {import('../../types/webidl').Webidl} */
+const webidl = {}
+webidl.converters = {}
+webidl.util = {}
+webidl.errors = {}
+
+webidl.errors.exception = function (message) {
+ return new TypeError(`${message.header}: ${message.message}`)
+}
+
+webidl.errors.conversionFailed = function (context) {
+ const plural = context.types.length === 1 ? '' : ' one of'
+ const message =
+ `${context.argument} could not be converted to` +
+ `${plural}: ${context.types.join(', ')}.`
+
+ return webidl.errors.exception({
+ header: context.prefix,
+ message
+ })
+}
+
+webidl.errors.invalidArgument = function (context) {
+ return webidl.errors.exception({
+ header: context.prefix,
+ message: `"${context.value}" is an invalid ${context.type}.`
+ })
+}
+
+// https://webidl.spec.whatwg.org/#implements
+webidl.brandCheck = function (V, I, opts = undefined) {
+ if (opts?.strict !== false && !(V instanceof I)) {
+ throw new TypeError('Illegal invocation')
+ } else {
+ return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag]
+ }
+}
+
+webidl.argumentLengthCheck = function ({ length }, min, ctx) {
+ if (length < min) {
+ throw webidl.errors.exception({
+ message: `${min} argument${min !== 1 ? 's' : ''} required, ` +
+ `but${length ? ' only' : ''} ${length} found.`,
+ ...ctx
+ })
+ }
+}
+
+webidl.illegalConstructor = function () {
+ throw webidl.errors.exception({
+ header: 'TypeError',
+ message: 'Illegal constructor'
+ })
+}
+
+// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values
+webidl.util.Type = function (V) {
+ switch (typeof V) {
+ case 'undefined': return 'Undefined'
+ case 'boolean': return 'Boolean'
+ case 'string': return 'String'
+ case 'symbol': return 'Symbol'
+ case 'number': return 'Number'
+ case 'bigint': return 'BigInt'
+ case 'function':
+ case 'object': {
+ if (V === null) {
+ return 'Null'
+ }
+
+ return 'Object'
+ }
+ }
+}
+
+// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint
+webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) {
+ let upperBound
+ let lowerBound
+
+ // 1. If bitLength is 64, then:
+ if (bitLength === 64) {
+ // 1. Let upperBound be 2^53 − 1.
+ upperBound = Math.pow(2, 53) - 1
+
+ // 2. If signedness is "unsigned", then let lowerBound be 0.
+ if (signedness === 'unsigned') {
+ lowerBound = 0
+ } else {
+ // 3. Otherwise let lowerBound be −2^53 + 1.
+ lowerBound = Math.pow(-2, 53) + 1
+ }
+ } else if (signedness === 'unsigned') {
+ // 2. Otherwise, if signedness is "unsigned", then:
+
+ // 1. Let lowerBound be 0.
+ lowerBound = 0
+
+ // 2. Let upperBound be 2^bitLength − 1.
+ upperBound = Math.pow(2, bitLength) - 1
+ } else {
+ // 3. Otherwise:
+
+    // 1. Let lowerBound be −2^(bitLength − 1).
+    lowerBound = -Math.pow(2, bitLength - 1)
+
+    // 2. Let upperBound be 2^(bitLength − 1) − 1.
+    upperBound = Math.pow(2, bitLength - 1) - 1
+ }
+
+ // 4. Let x be ? ToNumber(V).
+ let x = Number(V)
+
+ // 5. If x is −0, then set x to +0.
+ if (x === 0) {
+ x = 0
+ }
+
+ // 6. If the conversion is to an IDL type associated
+ // with the [EnforceRange] extended attribute, then:
+ if (opts.enforceRange === true) {
+ // 1. If x is NaN, +∞, or −∞, then throw a TypeError.
+ if (
+ Number.isNaN(x) ||
+ x === Number.POSITIVE_INFINITY ||
+ x === Number.NEGATIVE_INFINITY
+ ) {
+ throw webidl.errors.exception({
+ header: 'Integer conversion',
+ message: `Could not convert ${V} to an integer.`
+ })
+ }
+
+ // 2. Set x to IntegerPart(x).
+ x = webidl.util.IntegerPart(x)
+
+ // 3. If x < lowerBound or x > upperBound, then
+ // throw a TypeError.
+ if (x < lowerBound || x > upperBound) {
+ throw webidl.errors.exception({
+ header: 'Integer conversion',
+ message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.`
+ })
+ }
+
+ // 4. Return x.
+ return x
+ }
+
+ // 7. If x is not NaN and the conversion is to an IDL
+ // type associated with the [Clamp] extended
+ // attribute, then:
+ if (!Number.isNaN(x) && opts.clamp === true) {
+ // 1. Set x to min(max(x, lowerBound), upperBound).
+ x = Math.min(Math.max(x, lowerBound), upperBound)
+
+ // 2. Round x to the nearest integer, choosing the
+ // even integer if it lies halfway between two,
+ // and choosing +0 rather than −0.
+ if (Math.floor(x) % 2 === 0) {
+ x = Math.floor(x)
+ } else {
+ x = Math.ceil(x)
+ }
+
+ // 3. Return x.
+ return x
+ }
+
+ // 8. If x is NaN, +0, +∞, or −∞, then return +0.
+ if (
+ Number.isNaN(x) ||
+ (x === 0 && Object.is(0, x)) ||
+ x === Number.POSITIVE_INFINITY ||
+ x === Number.NEGATIVE_INFINITY
+ ) {
+ return 0
+ }
+
+ // 9. Set x to IntegerPart(x).
+ x = webidl.util.IntegerPart(x)
+
+ // 10. Set x to x modulo 2^bitLength.
+ x = x % Math.pow(2, bitLength)
+
+  // 11. If signedness is "signed" and x ≥ 2^(bitLength − 1),
+  //     then return x − 2^bitLength.
+  if (signedness === 'signed' && x >= Math.pow(2, bitLength - 1)) {
+ return x - Math.pow(2, bitLength)
+ }
+
+ // 12. Otherwise, return x.
+ return x
+}
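+// Illustrative examples:
+//   webidl.util.ConvertToInt(300, 8, 'unsigned') // => 44 (300 mod 256)
+//   webidl.util.ConvertToInt(300, 8, 'unsigned', { clamp: true }) // => 255
+//   webidl.util.ConvertToInt(300, 8, 'unsigned', { enforceRange: true }) // throws TypeError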
+
+// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart
+webidl.util.IntegerPart = function (n) {
+ // 1. Let r be floor(abs(n)).
+ const r = Math.floor(Math.abs(n))
+
+ // 2. If n < 0, then return -1 × r.
+ if (n < 0) {
+ return -1 * r
+ }
+
+ // 3. Otherwise, return r.
+ return r
+}
+
+// https://webidl.spec.whatwg.org/#es-sequence
+webidl.sequenceConverter = function (converter) {
+ return (V) => {
+ // 1. If Type(V) is not Object, throw a TypeError.
+ if (webidl.util.Type(V) !== 'Object') {
+ throw webidl.errors.exception({
+ header: 'Sequence',
+ message: `Value of type ${webidl.util.Type(V)} is not an Object.`
+ })
+ }
+
+ // 2. Let method be ? GetMethod(V, @@iterator).
+ /** @type {Generator} */
+ const method = V?.[Symbol.iterator]?.()
+ const seq = []
+
+ // 3. If method is undefined, throw a TypeError.
+ if (
+ method === undefined ||
+ typeof method.next !== 'function'
+ ) {
+ throw webidl.errors.exception({
+ header: 'Sequence',
+ message: 'Object is not an iterator.'
+ })
+ }
+
+ // https://webidl.spec.whatwg.org/#create-sequence-from-iterable
+ while (true) {
+ const { done, value } = method.next()
+
+ if (done) {
+ break
+ }
+
+ seq.push(converter(value))
+ }
+
+ return seq
+ }
+}
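+// Illustrative example: webidl.sequenceConverter(webidl.converters.DOMString)
+// maps ['a', 1] to ['a', '1'] and throws a TypeError for non-iterable input.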
+
+// https://webidl.spec.whatwg.org/#es-to-record
+webidl.recordConverter = function (keyConverter, valueConverter) {
+ return (O) => {
+ // 1. If Type(O) is not Object, throw a TypeError.
+ if (webidl.util.Type(O) !== 'Object') {
+ throw webidl.errors.exception({
+ header: 'Record',
+ message: `Value of type ${webidl.util.Type(O)} is not an Object.`
+ })
+ }
+
+ // 2. Let result be a new empty instance of record<K, V>.
+ const result = {}
+
+ if (!types.isProxy(O)) {
+ // Object.keys only returns enumerable properties
+ const keys = Object.keys(O)
+
+ for (const key of keys) {
+ // 1. Let typedKey be key converted to an IDL value of type K.
+ const typedKey = keyConverter(key)
+
+ // 2. Let value be ? Get(O, key).
+ // 3. Let typedValue be value converted to an IDL value of type V.
+ const typedValue = valueConverter(O[key])
+
+ // 4. Set result[typedKey] to typedValue.
+ result[typedKey] = typedValue
+ }
+
+ // 5. Return result.
+ return result
+ }
+
+ // 3. Let keys be ? O.[[OwnPropertyKeys]]().
+ const keys = Reflect.ownKeys(O)
+
+ // 4. For each key of keys.
+ for (const key of keys) {
+ // 1. Let desc be ? O.[[GetOwnProperty]](key).
+ const desc = Reflect.getOwnPropertyDescriptor(O, key)
+
+ // 2. If desc is not undefined and desc.[[Enumerable]] is true:
+ if (desc?.enumerable) {
+ // 1. Let typedKey be key converted to an IDL value of type K.
+ const typedKey = keyConverter(key)
+
+ // 2. Let value be ? Get(O, key).
+ // 3. Let typedValue be value converted to an IDL value of type V.
+ const typedValue = valueConverter(O[key])
+
+ // 4. Set result[typedKey] to typedValue.
+ result[typedKey] = typedValue
+ }
+ }
+
+ // 5. Return result.
+ return result
+ }
+}
+
+webidl.interfaceConverter = function (i) {
+ return (V, opts = {}) => {
+ if (opts.strict !== false && !(V instanceof i)) {
+ throw webidl.errors.exception({
+ header: i.name,
+ message: `Expected ${V} to be an instance of ${i.name}.`
+ })
+ }
+
+ return V
+ }
+}
+
+webidl.dictionaryConverter = function (converters) {
+ return (dictionary) => {
+ const type = webidl.util.Type(dictionary)
+ const dict = {}
+
+ if (type === 'Null' || type === 'Undefined') {
+ return dict
+ } else if (type !== 'Object') {
+ throw webidl.errors.exception({
+ header: 'Dictionary',
+ message: `Expected ${dictionary} to be one of: Null, Undefined, Object.`
+ })
+ }
+
+ for (const options of converters) {
+ const { key, defaultValue, required, converter } = options
+
+ if (required === true) {
+ if (!hasOwn(dictionary, key)) {
+ throw webidl.errors.exception({
+ header: 'Dictionary',
+ message: `Missing required key "${key}".`
+ })
+ }
+ }
+
+ let value = dictionary[key]
+ const hasDefault = hasOwn(options, 'defaultValue')
+
+ // Only use defaultValue if value is undefined and
+      // a defaultValue option was provided.
+ if (hasDefault && value !== null) {
+ value = value ?? defaultValue
+ }
+
+ // A key can be optional and have no default value.
+ // When this happens, do not perform a conversion,
+ // and do not assign the key a value.
+ if (required || hasDefault || value !== undefined) {
+ value = converter(value)
+
+ if (
+ options.allowedValues &&
+ !options.allowedValues.includes(value)
+ ) {
+ throw webidl.errors.exception({
+ header: 'Dictionary',
+ message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.`
+ })
+ }
+
+ dict[key] = value
+ }
+ }
+
+ return dict
+ }
+}
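+// Illustrative example: a converter built from
+//   [{ key: 'mode', converter: webidl.converters.DOMString, defaultValue: 'same-origin' }]
+// maps {} to { mode: 'same-origin' } and { mode: 1 } to { mode: '1' }.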
+
+webidl.nullableConverter = function (converter) {
+ return (V) => {
+ if (V === null) {
+ return V
+ }
+
+ return converter(V)
+ }
+}
+
+// https://webidl.spec.whatwg.org/#es-DOMString
+webidl.converters.DOMString = function (V, opts = {}) {
+ // 1. If V is null and the conversion is to an IDL type
+ // associated with the [LegacyNullToEmptyString]
+ // extended attribute, then return the DOMString value
+ // that represents the empty string.
+ if (V === null && opts.legacyNullToEmptyString) {
+ return ''
+ }
+
+ // 2. Let x be ? ToString(V).
+ if (typeof V === 'symbol') {
+ throw new TypeError('Could not convert argument of type symbol to string.')
+ }
+
+ // 3. Return the IDL DOMString value that represents the
+ // same sequence of code units as the one the
+ // ECMAScript String value x represents.
+ return String(V)
+}
+
+// https://webidl.spec.whatwg.org/#es-ByteString
+webidl.converters.ByteString = function (V) {
+ // 1. Let x be ? ToString(V).
+  // Note: the DOMString converter performs ? ToString(V)
+ const x = webidl.converters.DOMString(V)
+
+ // 2. If the value of any element of x is greater than
+ // 255, then throw a TypeError.
+ for (let index = 0; index < x.length; index++) {
+ if (x.charCodeAt(index) > 255) {
+ throw new TypeError(
+ 'Cannot convert argument to a ByteString because the character at ' +
+ `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.`
+ )
+ }
+ }
+
+ // 3. Return an IDL ByteString value whose length is the
+ // length of x, and where the value of each element is
+ // the value of the corresponding element of x.
+ return x
+}
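+// Illustrative examples: webidl.converters.ByteString('héllo') returns 'héllo'
+// (every code unit is <= 255), while 'π' (code unit 960) throws a TypeError.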
+
+// https://webidl.spec.whatwg.org/#es-USVString
+webidl.converters.USVString = toUSVString
+
+// https://webidl.spec.whatwg.org/#es-boolean
+webidl.converters.boolean = function (V) {
+ // 1. Let x be the result of computing ToBoolean(V).
+ const x = Boolean(V)
+
+ // 2. Return the IDL boolean value that is the one that represents
+ // the same truth value as the ECMAScript Boolean value x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-any
+webidl.converters.any = function (V) {
+ return V
+}
+
+// https://webidl.spec.whatwg.org/#es-long-long
+webidl.converters['long long'] = function (V) {
+ // 1. Let x be ? ConvertToInt(V, 64, "signed").
+ const x = webidl.util.ConvertToInt(V, 64, 'signed')
+
+ // 2. Return the IDL long long value that represents
+ // the same numeric value as x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-long-long
+webidl.converters['unsigned long long'] = function (V) {
+ // 1. Let x be ? ConvertToInt(V, 64, "unsigned").
+ const x = webidl.util.ConvertToInt(V, 64, 'unsigned')
+
+ // 2. Return the IDL unsigned long long value that
+ // represents the same numeric value as x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-long
+webidl.converters['unsigned long'] = function (V) {
+ // 1. Let x be ? ConvertToInt(V, 32, "unsigned").
+ const x = webidl.util.ConvertToInt(V, 32, 'unsigned')
+
+ // 2. Return the IDL unsigned long value that
+ // represents the same numeric value as x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-short
+webidl.converters['unsigned short'] = function (V, opts) {
+ // 1. Let x be ? ConvertToInt(V, 16, "unsigned").
+ const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts)
+
+ // 2. Return the IDL unsigned short value that represents
+ // the same numeric value as x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#idl-ArrayBuffer
+webidl.converters.ArrayBuffer = function (V, opts = {}) {
+ // 1. If Type(V) is not Object, or V does not have an
+ // [[ArrayBufferData]] internal slot, then throw a
+ // TypeError.
+ // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances
+ // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances
+ if (
+ webidl.util.Type(V) !== 'Object' ||
+ !types.isAnyArrayBuffer(V)
+ ) {
+ throw webidl.errors.conversionFailed({
+ prefix: `${V}`,
+ argument: `${V}`,
+ types: ['ArrayBuffer']
+ })
+ }
+
+ // 2. If the conversion is not to an IDL type associated
+ // with the [AllowShared] extended attribute, and
+ // IsSharedArrayBuffer(V) is true, then throw a
+ // TypeError.
+ if (opts.allowShared === false && types.isSharedArrayBuffer(V)) {
+ throw webidl.errors.exception({
+ header: 'ArrayBuffer',
+ message: 'SharedArrayBuffer is not allowed.'
+ })
+ }
+
+ // 3. If the conversion is not to an IDL type associated
+ // with the [AllowResizable] extended attribute, and
+ // IsResizableArrayBuffer(V) is true, then throw a
+ // TypeError.
+ // Note: resizable ArrayBuffers are currently a proposal.
+
+ // 4. Return the IDL ArrayBuffer value that is a
+ // reference to the same object as V.
+ return V
+}
+
+webidl.converters.TypedArray = function (V, T, opts = {}) {
+ // 1. Let T be the IDL type V is being converted to.
+
+ // 2. If Type(V) is not Object, or V does not have a
+ // [[TypedArrayName]] internal slot with a value
+ // equal to T’s name, then throw a TypeError.
+ if (
+ webidl.util.Type(V) !== 'Object' ||
+ !types.isTypedArray(V) ||
+ V.constructor.name !== T.name
+ ) {
+ throw webidl.errors.conversionFailed({
+ prefix: `${T.name}`,
+ argument: `${V}`,
+ types: [T.name]
+ })
+ }
+
+ // 3. If the conversion is not to an IDL type associated
+ // with the [AllowShared] extended attribute, and
+ // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is
+ // true, then throw a TypeError.
+ if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
+ throw webidl.errors.exception({
+ header: 'ArrayBuffer',
+ message: 'SharedArrayBuffer is not allowed.'
+ })
+ }
+
+ // 4. If the conversion is not to an IDL type associated
+ // with the [AllowResizable] extended attribute, and
+ // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
+ // true, then throw a TypeError.
+ // Note: resizable array buffers are currently a proposal
+
+ // 5. Return the IDL value of type T that is a reference
+ // to the same object as V.
+ return V
+}
+
+webidl.converters.DataView = function (V, opts = {}) {
+ // 1. If Type(V) is not Object, or V does not have a
+ // [[DataView]] internal slot, then throw a TypeError.
+ if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) {
+ throw webidl.errors.exception({
+ header: 'DataView',
+ message: 'Object is not a DataView.'
+ })
+ }
+
+ // 2. If the conversion is not to an IDL type associated
+ // with the [AllowShared] extended attribute, and
+ // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true,
+ // then throw a TypeError.
+ if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
+ throw webidl.errors.exception({
+ header: 'ArrayBuffer',
+ message: 'SharedArrayBuffer is not allowed.'
+ })
+ }
+
+ // 3. If the conversion is not to an IDL type associated
+ // with the [AllowResizable] extended attribute, and
+ // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
+ // true, then throw a TypeError.
+ // Note: resizable ArrayBuffers are currently a proposal
+
+ // 4. Return the IDL DataView value that is a reference
+ // to the same object as V.
+ return V
+}
+
+// https://webidl.spec.whatwg.org/#BufferSource
+webidl.converters.BufferSource = function (V, opts = {}) {
+ if (types.isAnyArrayBuffer(V)) {
+ return webidl.converters.ArrayBuffer(V, opts)
+ }
+
+ if (types.isTypedArray(V)) {
+ return webidl.converters.TypedArray(V, V.constructor)
+ }
+
+ if (types.isDataView(V)) {
+ return webidl.converters.DataView(V, opts)
+ }
+
+ throw new TypeError(`Could not convert ${V} to a BufferSource.`)
+}
+
+webidl.converters['sequence<ByteString>'] = webidl.sequenceConverter(
+ webidl.converters.ByteString
+)
+
+webidl.converters['sequence<sequence<ByteString>>'] = webidl.sequenceConverter(
+ webidl.converters['sequence<ByteString>']
+)
+
+webidl.converters['record<ByteString, ByteString>'] = webidl.recordConverter(
+ webidl.converters.ByteString,
+ webidl.converters.ByteString
+)
+
+module.exports = {
+ webidl
+}
diff --git a/lib/fileapi/encoding.js b/lib/fileapi/encoding.js
new file mode 100644
index 0000000..1d1d2b6
--- /dev/null
+++ b/lib/fileapi/encoding.js
@@ -0,0 +1,290 @@
+'use strict'
+
+/**
+ * @see https://encoding.spec.whatwg.org/#concept-encoding-get
+ * @param {string|undefined} label
+ */
+function getEncoding (label) {
+ if (!label) {
+ return 'failure'
+ }
+
+ // 1. Remove any leading and trailing ASCII whitespace from label.
+ // 2. If label is an ASCII case-insensitive match for any of the
+ // labels listed in the table below, then return the
+ // corresponding encoding; otherwise return failure.
+ switch (label.trim().toLowerCase()) {
+ case 'unicode-1-1-utf-8':
+ case 'unicode11utf8':
+ case 'unicode20utf8':
+ case 'utf-8':
+ case 'utf8':
+ case 'x-unicode20utf8':
+ return 'UTF-8'
+ case '866':
+ case 'cp866':
+ case 'csibm866':
+ case 'ibm866':
+ return 'IBM866'
+ case 'csisolatin2':
+ case 'iso-8859-2':
+ case 'iso-ir-101':
+ case 'iso8859-2':
+ case 'iso88592':
+ case 'iso_8859-2':
+ case 'iso_8859-2:1987':
+ case 'l2':
+ case 'latin2':
+ return 'ISO-8859-2'
+ case 'csisolatin3':
+ case 'iso-8859-3':
+ case 'iso-ir-109':
+ case 'iso8859-3':
+ case 'iso88593':
+ case 'iso_8859-3':
+ case 'iso_8859-3:1988':
+ case 'l3':
+ case 'latin3':
+ return 'ISO-8859-3'
+ case 'csisolatin4':
+ case 'iso-8859-4':
+ case 'iso-ir-110':
+ case 'iso8859-4':
+ case 'iso88594':
+ case 'iso_8859-4':
+ case 'iso_8859-4:1988':
+ case 'l4':
+ case 'latin4':
+ return 'ISO-8859-4'
+ case 'csisolatincyrillic':
+ case 'cyrillic':
+ case 'iso-8859-5':
+ case 'iso-ir-144':
+ case 'iso8859-5':
+ case 'iso88595':
+ case 'iso_8859-5':
+ case 'iso_8859-5:1988':
+ return 'ISO-8859-5'
+ case 'arabic':
+ case 'asmo-708':
+ case 'csiso88596e':
+ case 'csiso88596i':
+ case 'csisolatinarabic':
+ case 'ecma-114':
+ case 'iso-8859-6':
+ case 'iso-8859-6-e':
+ case 'iso-8859-6-i':
+ case 'iso-ir-127':
+ case 'iso8859-6':
+ case 'iso88596':
+ case 'iso_8859-6':
+ case 'iso_8859-6:1987':
+ return 'ISO-8859-6'
+ case 'csisolatingreek':
+ case 'ecma-118':
+ case 'elot_928':
+ case 'greek':
+ case 'greek8':
+ case 'iso-8859-7':
+ case 'iso-ir-126':
+ case 'iso8859-7':
+ case 'iso88597':
+ case 'iso_8859-7':
+ case 'iso_8859-7:1987':
+ case 'sun_eu_greek':
+ return 'ISO-8859-7'
+ case 'csiso88598e':
+ case 'csisolatinhebrew':
+ case 'hebrew':
+ case 'iso-8859-8':
+ case 'iso-8859-8-e':
+ case 'iso-ir-138':
+ case 'iso8859-8':
+ case 'iso88598':
+ case 'iso_8859-8':
+ case 'iso_8859-8:1988':
+ case 'visual':
+ return 'ISO-8859-8'
+ case 'csiso88598i':
+ case 'iso-8859-8-i':
+ case 'logical':
+ return 'ISO-8859-8-I'
+ case 'csisolatin6':
+ case 'iso-8859-10':
+ case 'iso-ir-157':
+ case 'iso8859-10':
+ case 'iso885910':
+ case 'l6':
+ case 'latin6':
+ return 'ISO-8859-10'
+ case 'iso-8859-13':
+ case 'iso8859-13':
+ case 'iso885913':
+ return 'ISO-8859-13'
+ case 'iso-8859-14':
+ case 'iso8859-14':
+ case 'iso885914':
+ return 'ISO-8859-14'
+ case 'csisolatin9':
+ case 'iso-8859-15':
+ case 'iso8859-15':
+ case 'iso885915':
+ case 'iso_8859-15':
+ case 'l9':
+ return 'ISO-8859-15'
+ case 'iso-8859-16':
+ return 'ISO-8859-16'
+ case 'cskoi8r':
+ case 'koi':
+ case 'koi8':
+ case 'koi8-r':
+ case 'koi8_r':
+ return 'KOI8-R'
+ case 'koi8-ru':
+ case 'koi8-u':
+ return 'KOI8-U'
+ case 'csmacintosh':
+ case 'mac':
+ case 'macintosh':
+ case 'x-mac-roman':
+ return 'macintosh'
+ case 'iso-8859-11':
+ case 'iso8859-11':
+ case 'iso885911':
+ case 'tis-620':
+ case 'windows-874':
+ return 'windows-874'
+ case 'cp1250':
+ case 'windows-1250':
+ case 'x-cp1250':
+ return 'windows-1250'
+ case 'cp1251':
+ case 'windows-1251':
+ case 'x-cp1251':
+ return 'windows-1251'
+ case 'ansi_x3.4-1968':
+ case 'ascii':
+ case 'cp1252':
+ case 'cp819':
+ case 'csisolatin1':
+ case 'ibm819':
+ case 'iso-8859-1':
+ case 'iso-ir-100':
+ case 'iso8859-1':
+ case 'iso88591':
+ case 'iso_8859-1':
+ case 'iso_8859-1:1987':
+ case 'l1':
+ case 'latin1':
+ case 'us-ascii':
+ case 'windows-1252':
+ case 'x-cp1252':
+ return 'windows-1252'
+ case 'cp1253':
+ case 'windows-1253':
+ case 'x-cp1253':
+ return 'windows-1253'
+ case 'cp1254':
+ case 'csisolatin5':
+ case 'iso-8859-9':
+ case 'iso-ir-148':
+ case 'iso8859-9':
+ case 'iso88599':
+ case 'iso_8859-9':
+ case 'iso_8859-9:1989':
+ case 'l5':
+ case 'latin5':
+ case 'windows-1254':
+ case 'x-cp1254':
+ return 'windows-1254'
+ case 'cp1255':
+ case 'windows-1255':
+ case 'x-cp1255':
+ return 'windows-1255'
+ case 'cp1256':
+ case 'windows-1256':
+ case 'x-cp1256':
+ return 'windows-1256'
+ case 'cp1257':
+ case 'windows-1257':
+ case 'x-cp1257':
+ return 'windows-1257'
+ case 'cp1258':
+ case 'windows-1258':
+ case 'x-cp1258':
+ return 'windows-1258'
+ case 'x-mac-cyrillic':
+ case 'x-mac-ukrainian':
+ return 'x-mac-cyrillic'
+ case 'chinese':
+ case 'csgb2312':
+ case 'csiso58gb231280':
+ case 'gb2312':
+ case 'gb_2312':
+ case 'gb_2312-80':
+ case 'gbk':
+ case 'iso-ir-58':
+ case 'x-gbk':
+ return 'GBK'
+ case 'gb18030':
+ return 'gb18030'
+ case 'big5':
+ case 'big5-hkscs':
+ case 'cn-big5':
+ case 'csbig5':
+ case 'x-x-big5':
+ return 'Big5'
+ case 'cseucpkdfmtjapanese':
+ case 'euc-jp':
+ case 'x-euc-jp':
+ return 'EUC-JP'
+ case 'csiso2022jp':
+ case 'iso-2022-jp':
+ return 'ISO-2022-JP'
+ case 'csshiftjis':
+ case 'ms932':
+ case 'ms_kanji':
+ case 'shift-jis':
+ case 'shift_jis':
+ case 'sjis':
+ case 'windows-31j':
+ case 'x-sjis':
+ return 'Shift_JIS'
+ case 'cseuckr':
+ case 'csksc56011987':
+ case 'euc-kr':
+ case 'iso-ir-149':
+ case 'korean':
+ case 'ks_c_5601-1987':
+ case 'ks_c_5601-1989':
+ case 'ksc5601':
+ case 'ksc_5601':
+ case 'windows-949':
+ return 'EUC-KR'
+ case 'csiso2022kr':
+ case 'hz-gb-2312':
+ case 'iso-2022-cn':
+ case 'iso-2022-cn-ext':
+ case 'iso-2022-kr':
+ case 'replacement':
+ return 'replacement'
+ case 'unicodefffe':
+ case 'utf-16be':
+ return 'UTF-16BE'
+ case 'csunicode':
+ case 'iso-10646-ucs-2':
+ case 'ucs-2':
+ case 'unicode':
+ case 'unicodefeff':
+ case 'utf-16':
+ case 'utf-16le':
+ return 'UTF-16LE'
+ case 'x-user-defined':
+ return 'x-user-defined'
+ default: return 'failure'
+ }
+}
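+// Illustrative examples:
+//   getEncoding(' UTF8 ')  // => 'UTF-8'
+//   getEncoding('latin1')  // => 'windows-1252'
+//   getEncoding('unknown') // => 'failure'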
+
+module.exports = {
+ getEncoding
+}
diff --git a/lib/fileapi/filereader.js b/lib/fileapi/filereader.js
new file mode 100644
index 0000000..cd36a22
--- /dev/null
+++ b/lib/fileapi/filereader.js
@@ -0,0 +1,344 @@
+'use strict'
+
+const {
+ staticPropertyDescriptors,
+ readOperation,
+ fireAProgressEvent
+} = require('./util')
+const {
+ kState,
+ kError,
+ kResult,
+ kEvents,
+ kAborted
+} = require('./symbols')
+const { webidl } = require('../fetch/webidl')
+const { kEnumerableProperty } = require('../core/util')
+
+class FileReader extends EventTarget {
+ constructor () {
+ super()
+
+ this[kState] = 'empty'
+ this[kResult] = null
+ this[kError] = null
+ this[kEvents] = {
+ loadend: null,
+ error: null,
+ abort: null,
+ load: null,
+ progress: null,
+ loadstart: null
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
+ * @param {import('buffer').Blob} blob
+ */
+ readAsArrayBuffer (blob) {
+ webidl.brandCheck(this, FileReader)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' })
+
+ blob = webidl.converters.Blob(blob, { strict: false })
+
+ // The readAsArrayBuffer(blob) method, when invoked,
+ // must initiate a read operation for blob with ArrayBuffer.
+ readOperation(this, blob, 'ArrayBuffer')
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#readAsBinaryString
+ * @param {import('buffer').Blob} blob
+ */
+ readAsBinaryString (blob) {
+ webidl.brandCheck(this, FileReader)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' })
+
+ blob = webidl.converters.Blob(blob, { strict: false })
+
+ // The readAsBinaryString(blob) method, when invoked,
+ // must initiate a read operation for blob with BinaryString.
+ readOperation(this, blob, 'BinaryString')
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#readAsDataText
+ * @param {import('buffer').Blob} blob
+ * @param {string?} encoding
+ */
+ readAsText (blob, encoding = undefined) {
+ webidl.brandCheck(this, FileReader)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' })
+
+ blob = webidl.converters.Blob(blob, { strict: false })
+
+ if (encoding !== undefined) {
+ encoding = webidl.converters.DOMString(encoding)
+ }
+
+ // The readAsText(blob, encoding) method, when invoked,
+ // must initiate a read operation for blob with Text and encoding.
+ readOperation(this, blob, 'Text', encoding)
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL
+ * @param {import('buffer').Blob} blob
+ */
+ readAsDataURL (blob) {
+ webidl.brandCheck(this, FileReader)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' })
+
+ blob = webidl.converters.Blob(blob, { strict: false })
+
+ // The readAsDataURL(blob) method, when invoked, must
+ // initiate a read operation for blob with DataURL.
+ readOperation(this, blob, 'DataURL')
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dfn-abort
+ */
+ abort () {
+ // 1. If this's state is "empty" or if this's state is
+ // "done" set this's result to null and terminate
+ // this algorithm.
+ if (this[kState] === 'empty' || this[kState] === 'done') {
+ this[kResult] = null
+ return
+ }
+
+ // 2. If this's state is "loading" set this's state to
+ // "done" and set this's result to null.
+ if (this[kState] === 'loading') {
+ this[kState] = 'done'
+ this[kResult] = null
+ }
+
+ // 3. If there are any tasks from this on the file reading
+ // task source in an affiliated task queue, then remove
+ // those tasks from that task queue.
+ this[kAborted] = true
+
+ // 4. Terminate the algorithm for the read method being processed.
+ // TODO
+
+ // 5. Fire a progress event called abort at this.
+ fireAProgressEvent('abort', this)
+
+ // 6. If this's state is not "loading", fire a progress
+ // event called loadend at this.
+ if (this[kState] !== 'loading') {
+ fireAProgressEvent('loadend', this)
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate
+ */
+ get readyState () {
+ webidl.brandCheck(this, FileReader)
+
+ switch (this[kState]) {
+ case 'empty': return this.EMPTY
+ case 'loading': return this.LOADING
+ case 'done': return this.DONE
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dom-filereader-result
+ */
+ get result () {
+ webidl.brandCheck(this, FileReader)
+
+ // The result attribute’s getter, when invoked, must return
+ // this's result.
+ return this[kResult]
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dom-filereader-error
+ */
+ get error () {
+ webidl.brandCheck(this, FileReader)
+
+ // The error attribute’s getter, when invoked, must return
+ // this's error.
+ return this[kError]
+ }
+
+ get onloadend () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].loadend
+ }
+
+ set onloadend (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].loadend) {
+ this.removeEventListener('loadend', this[kEvents].loadend)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].loadend = fn
+ this.addEventListener('loadend', fn)
+ } else {
+ this[kEvents].loadend = null
+ }
+ }
+
+ get onerror () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].error
+ }
+
+ set onerror (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].error) {
+ this.removeEventListener('error', this[kEvents].error)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].error = fn
+ this.addEventListener('error', fn)
+ } else {
+ this[kEvents].error = null
+ }
+ }
+
+ get onloadstart () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].loadstart
+ }
+
+ set onloadstart (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].loadstart) {
+ this.removeEventListener('loadstart', this[kEvents].loadstart)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].loadstart = fn
+ this.addEventListener('loadstart', fn)
+ } else {
+ this[kEvents].loadstart = null
+ }
+ }
+
+ get onprogress () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].progress
+ }
+
+ set onprogress (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].progress) {
+ this.removeEventListener('progress', this[kEvents].progress)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].progress = fn
+ this.addEventListener('progress', fn)
+ } else {
+ this[kEvents].progress = null
+ }
+ }
+
+ get onload () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].load
+ }
+
+ set onload (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].load) {
+ this.removeEventListener('load', this[kEvents].load)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].load = fn
+ this.addEventListener('load', fn)
+ } else {
+ this[kEvents].load = null
+ }
+ }
+
+ get onabort () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].abort
+ }
+
+ set onabort (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].abort) {
+ this.removeEventListener('abort', this[kEvents].abort)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].abort = fn
+ this.addEventListener('abort', fn)
+ } else {
+ this[kEvents].abort = null
+ }
+ }
+}
+
+// https://w3c.github.io/FileAPI/#dom-filereader-empty
+FileReader.EMPTY = FileReader.prototype.EMPTY = 0
+// https://w3c.github.io/FileAPI/#dom-filereader-loading
+FileReader.LOADING = FileReader.prototype.LOADING = 1
+// https://w3c.github.io/FileAPI/#dom-filereader-done
+FileReader.DONE = FileReader.prototype.DONE = 2
+
+Object.defineProperties(FileReader.prototype, {
+ EMPTY: staticPropertyDescriptors,
+ LOADING: staticPropertyDescriptors,
+ DONE: staticPropertyDescriptors,
+ readAsArrayBuffer: kEnumerableProperty,
+ readAsBinaryString: kEnumerableProperty,
+ readAsText: kEnumerableProperty,
+ readAsDataURL: kEnumerableProperty,
+ abort: kEnumerableProperty,
+ readyState: kEnumerableProperty,
+ result: kEnumerableProperty,
+ error: kEnumerableProperty,
+ onloadstart: kEnumerableProperty,
+ onprogress: kEnumerableProperty,
+ onload: kEnumerableProperty,
+ onabort: kEnumerableProperty,
+ onerror: kEnumerableProperty,
+ onloadend: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: 'FileReader',
+ writable: false,
+ enumerable: false,
+ configurable: true
+ }
+})
+
+Object.defineProperties(FileReader, {
+ EMPTY: staticPropertyDescriptors,
+ LOADING: staticPropertyDescriptors,
+ DONE: staticPropertyDescriptors
+})
+
+module.exports = {
+ FileReader
+}
diff --git a/lib/fileapi/progressevent.js b/lib/fileapi/progressevent.js
new file mode 100644
index 0000000..778cf22
--- /dev/null
+++ b/lib/fileapi/progressevent.js
@@ -0,0 +1,78 @@
+'use strict'
+
+const { webidl } = require('../fetch/webidl')
+
+const kState = Symbol('ProgressEvent state')
+
+/**
+ * @see https://xhr.spec.whatwg.org/#progressevent
+ */
+class ProgressEvent extends Event {
+ constructor (type, eventInitDict = {}) {
+ type = webidl.converters.DOMString(type)
+ eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {})
+
+ super(type, eventInitDict)
+
+ this[kState] = {
+ lengthComputable: eventInitDict.lengthComputable,
+ loaded: eventInitDict.loaded,
+ total: eventInitDict.total
+ }
+ }
+
+ get lengthComputable () {
+ webidl.brandCheck(this, ProgressEvent)
+
+ return this[kState].lengthComputable
+ }
+
+ get loaded () {
+ webidl.brandCheck(this, ProgressEvent)
+
+ return this[kState].loaded
+ }
+
+ get total () {
+ webidl.brandCheck(this, ProgressEvent)
+
+ return this[kState].total
+ }
+}
+
+webidl.converters.ProgressEventInit = webidl.dictionaryConverter([
+ {
+ key: 'lengthComputable',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'loaded',
+ converter: webidl.converters['unsigned long long'],
+ defaultValue: 0
+ },
+ {
+ key: 'total',
+ converter: webidl.converters['unsigned long long'],
+ defaultValue: 0
+ },
+ {
+ key: 'bubbles',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'cancelable',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'composed',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ }
+])
+
+module.exports = {
+ ProgressEvent
+}
diff --git a/lib/fileapi/symbols.js b/lib/fileapi/symbols.js
new file mode 100644
index 0000000..dd11746
--- /dev/null
+++ b/lib/fileapi/symbols.js
@@ -0,0 +1,10 @@
+'use strict'
+
+module.exports = {
+ kState: Symbol('FileReader state'),
+ kResult: Symbol('FileReader result'),
+ kError: Symbol('FileReader error'),
+ kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'),
+ kEvents: Symbol('FileReader events'),
+ kAborted: Symbol('FileReader aborted')
+}
diff --git a/lib/fileapi/util.js b/lib/fileapi/util.js
new file mode 100644
index 0000000..1d10899
--- /dev/null
+++ b/lib/fileapi/util.js
@@ -0,0 +1,392 @@
+'use strict'
+
+const {
+ kState,
+ kError,
+ kResult,
+ kAborted,
+ kLastProgressEventFired
+} = require('./symbols')
+const { ProgressEvent } = require('./progressevent')
+const { getEncoding } = require('./encoding')
+const { DOMException } = require('../fetch/constants')
+const { serializeAMimeType, parseMIMEType } = require('../fetch/dataURL')
+const { types } = require('util')
+const { StringDecoder } = require('string_decoder')
+const { btoa } = require('buffer')
+
+/** @type {PropertyDescriptor} */
+const staticPropertyDescriptors = {
+ enumerable: true,
+ writable: false,
+ configurable: false
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#readOperation
+ * @param {import('./filereader').FileReader} fr
+ * @param {import('buffer').Blob} blob
+ * @param {string} type
+ * @param {string?} encodingName
+ */
+function readOperation (fr, blob, type, encodingName) {
+ // 1. If fr’s state is "loading", throw an InvalidStateError
+ // DOMException.
+ if (fr[kState] === 'loading') {
+ throw new DOMException('Invalid state', 'InvalidStateError')
+ }
+
+ // 2. Set fr’s state to "loading".
+ fr[kState] = 'loading'
+
+ // 3. Set fr’s result to null.
+ fr[kResult] = null
+
+ // 4. Set fr’s error to null.
+ fr[kError] = null
+
+ // 5. Let stream be the result of calling get stream on blob.
+ /** @type {import('stream/web').ReadableStream} */
+ const stream = blob.stream()
+
+ // 6. Let reader be the result of getting a reader from stream.
+ const reader = stream.getReader()
+
+ // 7. Let bytes be an empty byte sequence.
+ /** @type {Uint8Array[]} */
+ const bytes = []
+
+ // 8. Let chunkPromise be the result of reading a chunk from
+ // stream with reader.
+ let chunkPromise = reader.read()
+
+ // 9. Let isFirstChunk be true.
+ let isFirstChunk = true
+
+ // 10. In parallel, while true:
+ // Note: "In parallel" just means non-blocking
+ // Note 2: readOperation itself cannot be async as double
+ // reading the body would then reject the promise, instead
+ // of throwing an error.
+ ;(async () => {
+ while (!fr[kAborted]) {
+ // 1. Wait for chunkPromise to be fulfilled or rejected.
+ try {
+ const { done, value } = await chunkPromise
+
+ // 2. If chunkPromise is fulfilled, and isFirstChunk is
+ // true, queue a task to fire a progress event called
+ // loadstart at fr.
+ if (isFirstChunk && !fr[kAborted]) {
+ queueMicrotask(() => {
+ fireAProgressEvent('loadstart', fr)
+ })
+ }
+
+ // 3. Set isFirstChunk to false.
+ isFirstChunk = false
+
+ // 4. If chunkPromise is fulfilled with an object whose
+ // done property is false and whose value property is
+ // a Uint8Array object, run these steps:
+ if (!done && types.isUint8Array(value)) {
+ // 1. Let bs be the byte sequence represented by the
+ // Uint8Array object.
+
+ // 2. Append bs to bytes.
+ bytes.push(value)
+
+ // 3. If roughly 50ms have passed since these steps
+ // were last invoked, queue a task to fire a
+ // progress event called progress at fr.
+ if (
+ (
+ fr[kLastProgressEventFired] === undefined ||
+ Date.now() - fr[kLastProgressEventFired] >= 50
+ ) &&
+ !fr[kAborted]
+ ) {
+ fr[kLastProgressEventFired] = Date.now()
+ queueMicrotask(() => {
+ fireAProgressEvent('progress', fr)
+ })
+ }
+
+ // 4. Set chunkPromise to the result of reading a
+ // chunk from stream with reader.
+ chunkPromise = reader.read()
+ } else if (done) {
+ // 5. Otherwise, if chunkPromise is fulfilled with an
+ // object whose done property is true, queue a task
+ // to run the following steps and abort this algorithm:
+ queueMicrotask(() => {
+ // 1. Set fr’s state to "done".
+ fr[kState] = 'done'
+
+ // 2. Let result be the result of package data given
+ // bytes, type, blob’s type, and encodingName.
+ try {
+ const result = packageData(bytes, type, blob.type, encodingName)
+
+ // 4. Else:
+
+ if (fr[kAborted]) {
+ return
+ }
+
+ // 1. Set fr’s result to result.
+ fr[kResult] = result
+
+ // 2. Fire a progress event called load at the fr.
+ fireAProgressEvent('load', fr)
+ } catch (error) {
+ // 3. If package data threw an exception error:
+
+ // 1. Set fr’s error to error.
+ fr[kError] = error
+
+ // 2. Fire a progress event called error at fr.
+ fireAProgressEvent('error', fr)
+ }
+
+ // 5. If fr’s state is not "loading", fire a progress
+ // event called loadend at the fr.
+ if (fr[kState] !== 'loading') {
+ fireAProgressEvent('loadend', fr)
+ }
+ })
+
+ break
+ }
+ } catch (error) {
+ if (fr[kAborted]) {
+ return
+ }
+
+ // 6. Otherwise, if chunkPromise is rejected with an
+ // error error, queue a task to run the following
+ // steps and abort this algorithm:
+ queueMicrotask(() => {
+ // 1. Set fr’s state to "done".
+ fr[kState] = 'done'
+
+ // 2. Set fr’s error to error.
+ fr[kError] = error
+
+ // 3. Fire a progress event called error at fr.
+ fireAProgressEvent('error', fr)
+
+ // 4. If fr’s state is not "loading", fire a progress
+ // event called loadend at fr.
+ if (fr[kState] !== 'loading') {
+ fireAProgressEvent('loadend', fr)
+ }
+ })
+
+ break
+ }
+ }
+ })()
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#fire-a-progress-event
+ * @see https://dom.spec.whatwg.org/#concept-event-fire
+ * @param {string} e The name of the event
+ * @param {import('./filereader').FileReader} reader
+ */
+function fireAProgressEvent (e, reader) {
+ // The progress event e does not bubble. e.bubbles must be false
+ // The progress event e is NOT cancelable. e.cancelable must be false
+ const event = new ProgressEvent(e, {
+ bubbles: false,
+ cancelable: false
+ })
+
+ reader.dispatchEvent(event)
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#blob-package-data
+ * @param {Uint8Array[]} bytes
+ * @param {string} type
+ * @param {string?} mimeType
+ * @param {string?} encodingName
+ */
+function packageData (bytes, type, mimeType, encodingName) {
+ // 1. A Blob has an associated package data algorithm, given
+ // bytes, a type, an optional mimeType, and an optional
+ // encodingName, which switches on type and runs the
+ // associated steps:
+
+ switch (type) {
+ case 'DataURL': {
+ // 1. Return bytes as a DataURL [RFC2397] subject to
+ // the considerations below:
+ // * Use mimeType as part of the Data URL if it is
+ // available in keeping with the Data URL
+ // specification [RFC2397].
+ // * If mimeType is not available return a Data URL
+ // without a media-type. [RFC2397].
+
+ // https://datatracker.ietf.org/doc/html/rfc2397#section-3
+ // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
+ // mediatype := [ type "/" subtype ] *( ";" parameter )
+ // data := *urlchar
+ // parameter := attribute "=" value
+ let dataURL = 'data:'
+
+ const parsed = parseMIMEType(mimeType || 'application/octet-stream')
+
+ if (parsed !== 'failure') {
+ dataURL += serializeAMimeType(parsed)
+ }
+
+ dataURL += ';base64,'
+
+ const decoder = new StringDecoder('latin1')
+
+ for (const chunk of bytes) {
+ dataURL += btoa(decoder.write(chunk))
+ }
+
+ dataURL += btoa(decoder.end())
+
+ return dataURL
+ }
+ case 'Text': {
+ // 1. Let encoding be failure
+ let encoding = 'failure'
+
+ // 2. If the encodingName is present, set encoding to the
+ // result of getting an encoding from encodingName.
+ if (encodingName) {
+ encoding = getEncoding(encodingName)
+ }
+
+ // 3. If encoding is failure, and mimeType is present:
+ if (encoding === 'failure' && mimeType) {
+ // 1. Let type be the result of parse a MIME type
+ // given mimeType.
+ const type = parseMIMEType(mimeType)
+
+ // 2. If type is not failure, set encoding to the result
+ // of getting an encoding from type’s parameters["charset"].
+ if (type !== 'failure') {
+ encoding = getEncoding(type.parameters.get('charset'))
+ }
+ }
+
+ // 4. If encoding is failure, then set encoding to UTF-8.
+ if (encoding === 'failure') {
+ encoding = 'UTF-8'
+ }
+
+ // 5. Decode bytes using fallback encoding encoding, and
+ // return the result.
+ return decode(bytes, encoding)
+ }
+ case 'ArrayBuffer': {
+ // Return a new ArrayBuffer whose contents are bytes.
+ const sequence = combineByteSequences(bytes)
+
+ return sequence.buffer
+ }
+ case 'BinaryString': {
+ // Return bytes as a binary string, in which every byte
+ // is represented by a code unit of equal value [0..255].
+ let binaryString = ''
+
+ const decoder = new StringDecoder('latin1')
+
+ for (const chunk of bytes) {
+ binaryString += decoder.write(chunk)
+ }
+
+ binaryString += decoder.end()
+
+ return binaryString
+ }
+ }
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#decode
+ * @param {Uint8Array[]} ioQueue
+ * @param {string} encoding
+ */
+function decode (ioQueue, encoding) {
+ const bytes = combineByteSequences(ioQueue)
+
+ // 1. Let BOMEncoding be the result of BOM sniffing ioQueue.
+ const BOMEncoding = BOMSniffing(bytes)
+
+ let slice = 0
+
+ // 2. If BOMEncoding is non-null:
+ if (BOMEncoding !== null) {
+ // 1. Set encoding to BOMEncoding.
+ encoding = BOMEncoding
+
+ // 2. Read three bytes from ioQueue, if BOMEncoding is
+ // UTF-8; otherwise read two bytes.
+ // (Do nothing with those bytes.)
+ slice = BOMEncoding === 'UTF-8' ? 3 : 2
+ }
+
+ // 3. Process a queue with an instance of encoding’s
+ // decoder, ioQueue, output, and "replacement".
+
+ // 4. Return output.
+
+ const sliced = bytes.slice(slice)
+ return new TextDecoder(encoding).decode(sliced)
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#bom-sniff
+ * @param {Uint8Array} ioQueue
+ */
+function BOMSniffing (ioQueue) {
+ // 1. Let BOM be the result of peeking 3 bytes from ioQueue,
+ // converted to a byte sequence.
+ const [a, b, c] = ioQueue
+
+ // 2. For each of the rows in the table below, starting with
+ // the first one and going down, if BOM starts with the
+ // bytes given in the first column, then return the
+ // encoding given in the cell in the second column of that
+ // row. Otherwise, return null.
+ if (a === 0xEF && b === 0xBB && c === 0xBF) {
+ return 'UTF-8'
+ } else if (a === 0xFE && b === 0xFF) {
+ return 'UTF-16BE'
+ } else if (a === 0xFF && b === 0xFE) {
+ return 'UTF-16LE'
+ }
+
+ return null
+}
+
+/**
+ * @param {Uint8Array[]} sequences
+ */
+function combineByteSequences (sequences) {
+ const size = sequences.reduce((a, b) => {
+ return a + b.byteLength
+ }, 0)
+
+ let offset = 0
+
+ return sequences.reduce((a, b) => {
+ a.set(b, offset)
+ offset += b.byteLength
+ return a
+ }, new Uint8Array(size))
+}
+
+module.exports = {
+ staticPropertyDescriptors,
+ readOperation,
+ fireAProgressEvent
+}
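
For orientation (a hypothetical snippet, not part of the patch; paths assume the repository root): readOperation and packageData are normally reached through the FileReader defined in lib/fileapi/filereader.js, and reading a Blob as a data URL exercises the 'DataURL' branch above.

const { Blob } = require('buffer')
const { FileReader } = require('./lib/fileapi/filereader')

const blob = new Blob(['hello world'], { type: 'text/plain' })
const reader = new FileReader()

reader.onload = () => {
  // Expected shape: 'data:text/plain;base64,aGVsbG8gd29ybGQ='
  console.log(reader.result)
}
reader.onerror = () => console.error(reader.error)

reader.readAsDataURL(blob) // drives readOperation(fr, blob, 'DataURL')
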
diff --git a/lib/global.js b/lib/global.js
new file mode 100644
index 0000000..18bfd73
--- /dev/null
+++ b/lib/global.js
@@ -0,0 +1,32 @@
+'use strict'
+
+// We include a version number for the Dispatcher API. In case of breaking changes,
+// this version number must be increased to avoid conflicts.
+const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
+const { InvalidArgumentError } = require('./core/errors')
+const Agent = require('./agent')
+
+if (getGlobalDispatcher() === undefined) {
+ setGlobalDispatcher(new Agent())
+}
+
+function setGlobalDispatcher (agent) {
+ if (!agent || typeof agent.dispatch !== 'function') {
+ throw new InvalidArgumentError('Argument agent must implement Agent')
+ }
+ Object.defineProperty(globalThis, globalDispatcher, {
+ value: agent,
+ writable: true,
+ enumerable: false,
+ configurable: false
+ })
+}
+
+function getGlobalDispatcher () {
+ return globalThis[globalDispatcher]
+}
+
+module.exports = {
+ setGlobalDispatcher,
+ getGlobalDispatcher
+}
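
For context (not part of the patch; assumes the module is consumed through the published undici entry point), this is how the lazily created global Agent above is typically replaced:

const { Agent, setGlobalDispatcher, getGlobalDispatcher } = require('undici')

// Route every request()/fetch() call that has no explicit dispatcher
// through an Agent with custom keep-alive settings.
const agent = new Agent({ keepAliveTimeout: 10_000, keepAliveMaxTimeout: 10_000 })
setGlobalDispatcher(agent)

console.log(getGlobalDispatcher() === agent) // true
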
diff --git a/lib/handler/DecoratorHandler.js b/lib/handler/DecoratorHandler.js
new file mode 100644
index 0000000..9d70a76
--- /dev/null
+++ b/lib/handler/DecoratorHandler.js
@@ -0,0 +1,35 @@
+'use strict'
+
+module.exports = class DecoratorHandler {
+ constructor (handler) {
+ this.handler = handler
+ }
+
+ onConnect (...args) {
+ return this.handler.onConnect(...args)
+ }
+
+ onError (...args) {
+ return this.handler.onError(...args)
+ }
+
+ onUpgrade (...args) {
+ return this.handler.onUpgrade(...args)
+ }
+
+ onHeaders (...args) {
+ return this.handler.onHeaders(...args)
+ }
+
+ onData (...args) {
+ return this.handler.onData(...args)
+ }
+
+ onComplete (...args) {
+ return this.handler.onComplete(...args)
+ }
+
+ onBodySent (...args) {
+ return this.handler.onBodySent(...args)
+ }
+}
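
DecoratorHandler is a pass-through base class: a wrapper only overrides the hooks it cares about and delegates the rest. A minimal sketch (hypothetical, not part of the patch; the require path assumes the repository root):

const DecoratorHandler = require('./lib/handler/DecoratorHandler')

// Logs response status codes while forwarding every event to the wrapped handler.
class LoggingHandler extends DecoratorHandler {
  onHeaders (statusCode, headers, resume, statusText) {
    console.log('response status:', statusCode)
    return super.onHeaders(statusCode, headers, resume, statusText)
  }
}

// Usage: client.dispatch(opts, new LoggingHandler(innerHandler))
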
diff --git a/lib/handler/RedirectHandler.js b/lib/handler/RedirectHandler.js
new file mode 100644
index 0000000..baca27e
--- /dev/null
+++ b/lib/handler/RedirectHandler.js
@@ -0,0 +1,216 @@
+'use strict'
+
+const util = require('../core/util')
+const { kBodyUsed } = require('../core/symbols')
+const assert = require('assert')
+const { InvalidArgumentError } = require('../core/errors')
+const EE = require('events')
+
+const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]
+
+const kBody = Symbol('body')
+
+class BodyAsyncIterable {
+ constructor (body) {
+ this[kBody] = body
+ this[kBodyUsed] = false
+ }
+
+ async * [Symbol.asyncIterator] () {
+ assert(!this[kBodyUsed], 'disturbed')
+ this[kBodyUsed] = true
+ yield * this[kBody]
+ }
+}
+
+class RedirectHandler {
+ constructor (dispatch, maxRedirections, opts, handler) {
+ if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
+ throw new InvalidArgumentError('maxRedirections must be a positive number')
+ }
+
+ util.validateHandler(handler, opts.method, opts.upgrade)
+
+ this.dispatch = dispatch
+ this.location = null
+ this.abort = null
+ this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy
+ this.maxRedirections = maxRedirections
+ this.handler = handler
+ this.history = []
+
+ if (util.isStream(this.opts.body)) {
+ // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
+ // so that it can be dispatched again?
+ // TODO (fix): Do we need 100-expect support to provide a way to do this properly?
+ if (util.bodyLength(this.opts.body) === 0) {
+ this.opts.body
+ .on('data', function () {
+ assert(false)
+ })
+ }
+
+ if (typeof this.opts.body.readableDidRead !== 'boolean') {
+ this.opts.body[kBodyUsed] = false
+ EE.prototype.on.call(this.opts.body, 'data', function () {
+ this[kBodyUsed] = true
+ })
+ }
+ } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {
+ // TODO (fix): We can't access ReadableStream internal state
+ // to determine whether or not it has been disturbed. This is just
+ // a workaround.
+ this.opts.body = new BodyAsyncIterable(this.opts.body)
+ } else if (
+ this.opts.body &&
+ typeof this.opts.body !== 'string' &&
+ !ArrayBuffer.isView(this.opts.body) &&
+ util.isIterable(this.opts.body)
+ ) {
+ // TODO: Should we allow re-using iterable if !this.opts.idempotent
+ // or through some other flag?
+ this.opts.body = new BodyAsyncIterable(this.opts.body)
+ }
+ }
+
+ onConnect (abort) {
+ this.abort = abort
+ this.handler.onConnect(abort, { history: this.history })
+ }
+
+ onUpgrade (statusCode, headers, socket) {
+ this.handler.onUpgrade(statusCode, headers, socket)
+ }
+
+ onError (error) {
+ this.handler.onError(error)
+ }
+
+ onHeaders (statusCode, headers, resume, statusText) {
+ this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)
+ ? null
+ : parseLocation(statusCode, headers)
+
+ if (this.opts.origin) {
+ this.history.push(new URL(this.opts.path, this.opts.origin))
+ }
+
+ if (!this.location) {
+ return this.handler.onHeaders(statusCode, headers, resume, statusText)
+ }
+
+ const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
+ const path = search ? `${pathname}${search}` : pathname
+
+ // Remove headers referring to the original URL.
+ // By default it is Host only, unless it's a 303 (see below), which also removes all Content-* headers.
+ // https://tools.ietf.org/html/rfc7231#section-6.4
+ this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)
+ this.opts.path = path
+ this.opts.origin = origin
+ this.opts.maxRedirections = 0
+ this.opts.query = null
+
+ // https://tools.ietf.org/html/rfc7231#section-6.4.4
+ // In case of HTTP 303, always replace method to be either HEAD or GET
+ if (statusCode === 303 && this.opts.method !== 'HEAD') {
+ this.opts.method = 'GET'
+ this.opts.body = null
+ }
+ }
+
+ onData (chunk) {
+ if (this.location) {
+ /*
+ https://tools.ietf.org/html/rfc7231#section-6.4
+
+ TLDR: undici always ignores 3xx response bodies.
+
+ Redirection is used to serve the requested resource from another URL, so it is assumed that
+ no body is generated (and can thus be ignored), even though generating one is not prohibited.
+
+ For statuses 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually
+ (meaning it's optional and not mandated) contains just a hyperlink to the value of
+ the Location response header, so the body can be safely ignored.
+
+ For status 300, which is "Multiple Choices", the spec mentions both generating a Location
+ response header AND a response body with the other possible locations to follow.
+ Since the spec explicitly chooses not to specify a format for such a body and leaves it to
+ server and browser implementors, we ignore the body as there is no specified way to parse it.
+ */
+ } else {
+ return this.handler.onData(chunk)
+ }
+ }
+
+ onComplete (trailers) {
+ if (this.location) {
+ /*
+ https://tools.ietf.org/html/rfc7231#section-6.4
+
+ TLDR: undici always ignores 3xx response trailers, as they are not expected in case of redirections
+ and are not useful even if present.
+
+ See the comment on the onData method above for more detailed information.
+ */
+
+ this.location = null
+ this.abort = null
+
+ this.dispatch(this.opts, this)
+ } else {
+ this.handler.onComplete(trailers)
+ }
+ }
+
+ onBodySent (chunk) {
+ if (this.handler.onBodySent) {
+ this.handler.onBodySent(chunk)
+ }
+ }
+}
+
+function parseLocation (statusCode, headers) {
+ if (redirectableStatusCodes.indexOf(statusCode) === -1) {
+ return null
+ }
+
+ for (let i = 0; i < headers.length; i += 2) {
+ if (headers[i].toString().toLowerCase() === 'location') {
+ return headers[i + 1]
+ }
+ }
+}
+
+// https://tools.ietf.org/html/rfc7231#section-6.4.4
+function shouldRemoveHeader (header, removeContent, unknownOrigin) {
+ return (
+ (header.length === 4 && header.toString().toLowerCase() === 'host') ||
+ (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
+ (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
+ (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
+ )
+}
+
+// https://tools.ietf.org/html/rfc7231#section-6.4
+function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
+ const ret = []
+ if (Array.isArray(headers)) {
+ for (let i = 0; i < headers.length; i += 2) {
+ if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) {
+ ret.push(headers[i], headers[i + 1])
+ }
+ }
+ } else if (headers && typeof headers === 'object') {
+ for (const key of Object.keys(headers)) {
+ if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {
+ ret.push(key, headers[key])
+ }
+ }
+ } else {
+ assert(headers == null, 'headers must be an object or an array')
+ }
+ return ret
+}
+
+module.exports = RedirectHandler
diff --git a/lib/handler/RetryHandler.js b/lib/handler/RetryHandler.js
new file mode 100644
index 0000000..3710447
--- /dev/null
+++ b/lib/handler/RetryHandler.js
@@ -0,0 +1,336 @@
+const assert = require('assert')
+
+const { kRetryHandlerDefaultRetry } = require('../core/symbols')
+const { RequestRetryError } = require('../core/errors')
+const { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util')
+
+function calculateRetryAfterHeader (retryAfter) {
+ const current = Date.now()
+ const diff = new Date(retryAfter).getTime() - current
+
+ return diff
+}
+
+class RetryHandler {
+ constructor (opts, handlers) {
+ const { retryOptions, ...dispatchOpts } = opts
+ const {
+ // Retry scoped
+ retry: retryFn,
+ maxRetries,
+ maxTimeout,
+ minTimeout,
+ timeoutFactor,
+ // Response scoped
+ methods,
+ errorCodes,
+ retryAfter,
+ statusCodes
+ } = retryOptions ?? {}
+
+ this.dispatch = handlers.dispatch
+ this.handler = handlers.handler
+ this.opts = dispatchOpts
+ this.abort = null
+ this.aborted = false
+ this.retryOpts = {
+ retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
+ retryAfter: retryAfter ?? true,
+ maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
+ timeout: minTimeout ?? 500, // .5s
+ timeoutFactor: timeoutFactor ?? 2,
+ maxRetries: maxRetries ?? 5,
+ // Which HTTP methods are safe to retry
+ methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
+ // Which response status codes should trigger a retry
+ statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
+ // Which connection-level error codes should trigger a retry
+ errorCodes: errorCodes ?? [
+ 'ECONNRESET',
+ 'ECONNREFUSED',
+ 'ENOTFOUND',
+ 'ENETDOWN',
+ 'ENETUNREACH',
+ 'EHOSTDOWN',
+ 'EHOSTUNREACH',
+ 'EPIPE'
+ ]
+ }
+
+ this.retryCount = 0
+ this.start = 0
+ this.end = null
+ this.etag = null
+ this.resume = null
+
+ // Handle possible onConnect duplication
+ this.handler.onConnect(reason => {
+ this.aborted = true
+ if (this.abort) {
+ this.abort(reason)
+ } else {
+ this.reason = reason
+ }
+ })
+ }
+
+ onRequestSent () {
+ if (this.handler.onRequestSent) {
+ this.handler.onRequestSent()
+ }
+ }
+
+ onUpgrade (statusCode, headers, socket) {
+ if (this.handler.onUpgrade) {
+ this.handler.onUpgrade(statusCode, headers, socket)
+ }
+ }
+
+ onConnect (abort) {
+ if (this.aborted) {
+ abort(this.reason)
+ } else {
+ this.abort = abort
+ }
+ }
+
+ onBodySent (chunk) {
+ if (this.handler.onBodySent) return this.handler.onBodySent(chunk)
+ }
+
+ static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
+ const { statusCode, code, headers } = err
+ const { method, retryOptions } = opts
+ const {
+ maxRetries,
+ timeout,
+ maxTimeout,
+ timeoutFactor,
+ statusCodes,
+ errorCodes,
+ methods
+ } = retryOptions
+ let { counter, currentTimeout } = state
+
+ currentTimeout =
+ currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout
+
+ // Any error code that is not undici-originated and not in the allowed errorCodes list should not be retried
+ if (
+ code &&
+ code !== 'UND_ERR_REQ_RETRY' &&
+ code !== 'UND_ERR_SOCKET' &&
+ !errorCodes.includes(code)
+ ) {
+ cb(err)
+ return
+ }
+
+ // If a set of methods is provided and the current method is not in the list
+ if (Array.isArray(methods) && !methods.includes(method)) {
+ cb(err)
+ return
+ }
+
+ // If a set of status codes is provided and the current status code is not in the list
+ if (
+ statusCode != null &&
+ Array.isArray(statusCodes) &&
+ !statusCodes.includes(statusCode)
+ ) {
+ cb(err)
+ return
+ }
+
+ // If we reached the max number of retries
+ if (counter > maxRetries) {
+ cb(err)
+ return
+ }
+
+ let retryAfterHeader = headers != null && headers['retry-after']
+ if (retryAfterHeader) {
+ const retryAfterSeconds = Number(retryAfterHeader)
+ retryAfterHeader = isNaN(retryAfterSeconds)
+ ? calculateRetryAfterHeader(retryAfterHeader) // Retry-After is an HTTP-date
+ : retryAfterSeconds * 1e3 // Retry-After is in seconds
+ }
+
+ const retryTimeout =
+ retryAfterHeader > 0
+ ? Math.min(retryAfterHeader, maxTimeout)
+ : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)
+
+ state.currentTimeout = retryTimeout
+
+ setTimeout(() => cb(null), retryTimeout)
+ }
+
+ onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+ const headers = parseHeaders(rawHeaders)
+
+ this.retryCount += 1
+
+ if (statusCode >= 300) {
+ this.abort(
+ new RequestRetryError('Request failed', statusCode, {
+ headers,
+ count: this.retryCount
+ })
+ )
+ return false
+ }
+
+ // Checkpoint to resume from where we left off
+ if (this.resume != null) {
+ this.resume = null
+
+ if (statusCode !== 206) {
+ return true
+ }
+
+ const contentRange = parseRangeHeader(headers['content-range'])
+ // If no content range
+ if (!contentRange) {
+ this.abort(
+ new RequestRetryError('Content-Range mismatch', statusCode, {
+ headers,
+ count: this.retryCount
+ })
+ )
+ return false
+ }
+
+ // Let's start with a weak etag check
+ if (this.etag != null && this.etag !== headers.etag) {
+ this.abort(
+ new RequestRetryError('ETag mismatch', statusCode, {
+ headers,
+ count: this.retryCount
+ })
+ )
+ return false
+ }
+
+ const { start, size, end = size } = contentRange
+
+ assert(this.start === start, 'content-range mismatch')
+ assert(this.end == null || this.end === end, 'content-range mismatch')
+
+ this.resume = resume
+ return true
+ }
+
+ if (this.end == null) {
+ if (statusCode === 206) {
+ // First time we receive 206
+ const range = parseRangeHeader(headers['content-range'])
+
+ if (range == null) {
+ return this.handler.onHeaders(
+ statusCode,
+ rawHeaders,
+ resume,
+ statusMessage
+ )
+ }
+
+ const { start, size, end = size } = range
+
+ assert(
+ start != null && Number.isFinite(start) && this.start !== start,
+ 'content-range mismatch'
+ )
+ assert(Number.isFinite(start))
+ assert(
+ end != null && Number.isFinite(end) && this.end !== end,
+ 'invalid content-length'
+ )
+
+ this.start = start
+ this.end = end
+ }
+
+ // We do our best to checkpoint the body for further range requests
+ if (this.end == null) {
+ const contentLength = headers['content-length']
+ this.end = contentLength != null ? Number(contentLength) : null
+ }
+
+ assert(Number.isFinite(this.start))
+ assert(
+ this.end == null || Number.isFinite(this.end),
+ 'invalid content-length'
+ )
+
+ this.resume = resume
+ this.etag = headers.etag != null ? headers.etag : null
+
+ return this.handler.onHeaders(
+ statusCode,
+ rawHeaders,
+ resume,
+ statusMessage
+ )
+ }
+
+ const err = new RequestRetryError('Request failed', statusCode, {
+ headers,
+ count: this.retryCount
+ })
+
+ this.abort(err)
+
+ return false
+ }
+
+ onData (chunk) {
+ this.start += chunk.length
+
+ return this.handler.onData(chunk)
+ }
+
+ onComplete (rawTrailers) {
+ this.retryCount = 0
+ return this.handler.onComplete(rawTrailers)
+ }
+
+ onError (err) {
+ if (this.aborted || isDisturbed(this.opts.body)) {
+ return this.handler.onError(err)
+ }
+
+ this.retryOpts.retry(
+ err,
+ {
+ state: { counter: this.retryCount++, currentTimeout: this.retryAfter },
+ opts: { retryOptions: this.retryOpts, ...this.opts }
+ },
+ onRetry.bind(this)
+ )
+
+ function onRetry (err) {
+ if (err != null || this.aborted || isDisturbed(this.opts.body)) {
+ return this.handler.onError(err)
+ }
+
+ if (this.start !== 0) {
+ this.opts = {
+ ...this.opts,
+ headers: {
+ ...this.opts.headers,
+ range: `bytes=${this.start}-${this.end ?? ''}`
+ }
+ }
+ }
+
+ try {
+ this.dispatch(this.opts, this)
+ } catch (err) {
+ this.handler.onError(err)
+ }
+ }
+ }
+}
+
+module.exports = RetryHandler
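
A sketch of how RetryHandler wraps a dispatch (hypothetical wiring, not part of the patch; the inner handler below is a bare-bones collector, the origin/path and retryOptions values are illustrative, and the require path assumes the repository root):

const { Client } = require('undici')
const RetryHandler = require('./lib/handler/RetryHandler')

const client = new Client('http://localhost:3000')

const opts = {
  method: 'GET',
  path: '/flaky',
  retryOptions: { maxRetries: 3, minTimeout: 250, statusCodes: [500, 503] }
}

const chunks = []
const handler = {
  onConnect () {},
  onHeaders (statusCode) { console.log('status:', statusCode); return true },
  onData (chunk) { chunks.push(chunk); return true },
  onComplete () { console.log(Buffer.concat(chunks).toString()) },
  onError (err) { console.error('failed after retries:', err) }
}

// RetryHandler re-dispatches through the provided dispatch function on retryable failures.
client.dispatch(opts, new RetryHandler(opts, {
  dispatch: client.dispatch.bind(client),
  handler
}))
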
diff --git a/lib/interceptor/redirectInterceptor.js b/lib/interceptor/redirectInterceptor.js
new file mode 100644
index 0000000..7cc035e
--- /dev/null
+++ b/lib/interceptor/redirectInterceptor.js
@@ -0,0 +1,21 @@
+'use strict'
+
+const RedirectHandler = require('../handler/RedirectHandler')
+
+function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {
+ return (dispatch) => {
+ return function Intercept (opts, handler) {
+ const { maxRedirections = defaultMaxRedirections } = opts
+
+ if (!maxRedirections) {
+ return dispatch(opts, handler)
+ }
+
+ const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler)
+ opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
+ return dispatch(opts, redirectHandler)
+ }
+ }
+}
+
+module.exports = createRedirectInterceptor
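
RedirectHandler is normally reached through the maxRedirections option rather than constructed directly; a sketch against the published undici API (the URLs and limits are illustrative):

const { request, Agent } = require('undici')

async function main () {
  // Per-request: follow up to 3 redirects for this call only.
  const { statusCode, body } = await request('http://example.com/old-path', {
    maxRedirections: 3
  })
  console.log(statusCode, await body.text())

  // Per-dispatcher: every request through this agent follows redirects by default.
  const agent = new Agent({ maxRedirections: 5 })
  const res = await request('http://example.com/old-path', { dispatcher: agent })
  console.log(res.statusCode)
}

main().catch(console.error)
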
diff --git a/lib/llhttp/constants.d.ts b/lib/llhttp/constants.d.ts
new file mode 100644
index 0000000..b75ab1b
--- /dev/null
+++ b/lib/llhttp/constants.d.ts
@@ -0,0 +1,199 @@
+import { IEnumMap } from './utils';
+export declare type HTTPMode = 'loose' | 'strict';
+export declare enum ERROR {
+ OK = 0,
+ INTERNAL = 1,
+ STRICT = 2,
+ LF_EXPECTED = 3,
+ UNEXPECTED_CONTENT_LENGTH = 4,
+ CLOSED_CONNECTION = 5,
+ INVALID_METHOD = 6,
+ INVALID_URL = 7,
+ INVALID_CONSTANT = 8,
+ INVALID_VERSION = 9,
+ INVALID_HEADER_TOKEN = 10,
+ INVALID_CONTENT_LENGTH = 11,
+ INVALID_CHUNK_SIZE = 12,
+ INVALID_STATUS = 13,
+ INVALID_EOF_STATE = 14,
+ INVALID_TRANSFER_ENCODING = 15,
+ CB_MESSAGE_BEGIN = 16,
+ CB_HEADERS_COMPLETE = 17,
+ CB_MESSAGE_COMPLETE = 18,
+ CB_CHUNK_HEADER = 19,
+ CB_CHUNK_COMPLETE = 20,
+ PAUSED = 21,
+ PAUSED_UPGRADE = 22,
+ PAUSED_H2_UPGRADE = 23,
+ USER = 24
+}
+export declare enum TYPE {
+ BOTH = 0,
+ REQUEST = 1,
+ RESPONSE = 2
+}
+export declare enum FLAGS {
+ CONNECTION_KEEP_ALIVE = 1,
+ CONNECTION_CLOSE = 2,
+ CONNECTION_UPGRADE = 4,
+ CHUNKED = 8,
+ UPGRADE = 16,
+ CONTENT_LENGTH = 32,
+ SKIPBODY = 64,
+ TRAILING = 128,
+ TRANSFER_ENCODING = 512
+}
+export declare enum LENIENT_FLAGS {
+ HEADERS = 1,
+ CHUNKED_LENGTH = 2,
+ KEEP_ALIVE = 4
+}
+export declare enum METHODS {
+ DELETE = 0,
+ GET = 1,
+ HEAD = 2,
+ POST = 3,
+ PUT = 4,
+ CONNECT = 5,
+ OPTIONS = 6,
+ TRACE = 7,
+ COPY = 8,
+ LOCK = 9,
+ MKCOL = 10,
+ MOVE = 11,
+ PROPFIND = 12,
+ PROPPATCH = 13,
+ SEARCH = 14,
+ UNLOCK = 15,
+ BIND = 16,
+ REBIND = 17,
+ UNBIND = 18,
+ ACL = 19,
+ REPORT = 20,
+ MKACTIVITY = 21,
+ CHECKOUT = 22,
+ MERGE = 23,
+ 'M-SEARCH' = 24,
+ NOTIFY = 25,
+ SUBSCRIBE = 26,
+ UNSUBSCRIBE = 27,
+ PATCH = 28,
+ PURGE = 29,
+ MKCALENDAR = 30,
+ LINK = 31,
+ UNLINK = 32,
+ SOURCE = 33,
+ PRI = 34,
+ DESCRIBE = 35,
+ ANNOUNCE = 36,
+ SETUP = 37,
+ PLAY = 38,
+ PAUSE = 39,
+ TEARDOWN = 40,
+ GET_PARAMETER = 41,
+ SET_PARAMETER = 42,
+ REDIRECT = 43,
+ RECORD = 44,
+ FLUSH = 45
+}
+export declare const METHODS_HTTP: METHODS[];
+export declare const METHODS_ICE: METHODS[];
+export declare const METHODS_RTSP: METHODS[];
+export declare const METHOD_MAP: IEnumMap;
+export declare const H_METHOD_MAP: IEnumMap;
+export declare enum FINISH {
+ SAFE = 0,
+ SAFE_WITH_CB = 1,
+ UNSAFE = 2
+}
+export declare type CharList = Array<string | number>;
+export declare const ALPHA: CharList;
+export declare const NUM_MAP: {
+ 0: number;
+ 1: number;
+ 2: number;
+ 3: number;
+ 4: number;
+ 5: number;
+ 6: number;
+ 7: number;
+ 8: number;
+ 9: number;
+};
+export declare const HEX_MAP: {
+ 0: number;
+ 1: number;
+ 2: number;
+ 3: number;
+ 4: number;
+ 5: number;
+ 6: number;
+ 7: number;
+ 8: number;
+ 9: number;
+ A: number;
+ B: number;
+ C: number;
+ D: number;
+ E: number;
+ F: number;
+ a: number;
+ b: number;
+ c: number;
+ d: number;
+ e: number;
+ f: number;
+};
+export declare const NUM: CharList;
+export declare const ALPHANUM: CharList;
+export declare const MARK: CharList;
+export declare const USERINFO_CHARS: CharList;
+export declare const STRICT_URL_CHAR: CharList;
+export declare const URL_CHAR: CharList;
+export declare const HEX: CharList;
+export declare const STRICT_TOKEN: CharList;
+export declare const TOKEN: CharList;
+export declare const HEADER_CHARS: CharList;
+export declare const CONNECTION_TOKEN_CHARS: CharList;
+export declare const MAJOR: {
+ 0: number;
+ 1: number;
+ 2: number;
+ 3: number;
+ 4: number;
+ 5: number;
+ 6: number;
+ 7: number;
+ 8: number;
+ 9: number;
+};
+export declare const MINOR: {
+ 0: number;
+ 1: number;
+ 2: number;
+ 3: number;
+ 4: number;
+ 5: number;
+ 6: number;
+ 7: number;
+ 8: number;
+ 9: number;
+};
+export declare enum HEADER_STATE {
+ GENERAL = 0,
+ CONNECTION = 1,
+ CONTENT_LENGTH = 2,
+ TRANSFER_ENCODING = 3,
+ UPGRADE = 4,
+ CONNECTION_KEEP_ALIVE = 5,
+ CONNECTION_CLOSE = 6,
+ CONNECTION_UPGRADE = 7,
+ TRANSFER_ENCODING_CHUNKED = 8
+}
+export declare const SPECIAL_HEADERS: {
+ connection: HEADER_STATE;
+ 'content-length': HEADER_STATE;
+ 'proxy-connection': HEADER_STATE;
+ 'transfer-encoding': HEADER_STATE;
+ upgrade: HEADER_STATE;
+};
diff --git a/lib/llhttp/constants.js b/lib/llhttp/constants.js
new file mode 100644
index 0000000..fb0b5a2
--- /dev/null
+++ b/lib/llhttp/constants.js
@@ -0,0 +1,278 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
+const utils_1 = require("./utils");
+// C headers
+var ERROR;
+(function (ERROR) {
+ ERROR[ERROR["OK"] = 0] = "OK";
+ ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL";
+ ERROR[ERROR["STRICT"] = 2] = "STRICT";
+ ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED";
+ ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH";
+ ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION";
+ ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD";
+ ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL";
+ ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT";
+ ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION";
+ ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN";
+ ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH";
+ ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE";
+ ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS";
+ ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE";
+ ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING";
+ ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN";
+ ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE";
+ ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE";
+ ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER";
+ ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE";
+ ERROR[ERROR["PAUSED"] = 21] = "PAUSED";
+ ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE";
+ ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE";
+ ERROR[ERROR["USER"] = 24] = "USER";
+})(ERROR = exports.ERROR || (exports.ERROR = {}));
+var TYPE;
+(function (TYPE) {
+ TYPE[TYPE["BOTH"] = 0] = "BOTH";
+ TYPE[TYPE["REQUEST"] = 1] = "REQUEST";
+ TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE";
+})(TYPE = exports.TYPE || (exports.TYPE = {}));
+var FLAGS;
+(function (FLAGS) {
+ FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE";
+ FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE";
+ FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE";
+ FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED";
+ FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE";
+ FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH";
+ FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY";
+ FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING";
+ // 1 << 8 is unused
+ FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING";
+})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));
+var LENIENT_FLAGS;
+(function (LENIENT_FLAGS) {
+ LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS";
+ LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH";
+ LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE";
+})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));
+var METHODS;
+(function (METHODS) {
+ METHODS[METHODS["DELETE"] = 0] = "DELETE";
+ METHODS[METHODS["GET"] = 1] = "GET";
+ METHODS[METHODS["HEAD"] = 2] = "HEAD";
+ METHODS[METHODS["POST"] = 3] = "POST";
+ METHODS[METHODS["PUT"] = 4] = "PUT";
+ /* pathological */
+ METHODS[METHODS["CONNECT"] = 5] = "CONNECT";
+ METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS";
+ METHODS[METHODS["TRACE"] = 7] = "TRACE";
+ /* WebDAV */
+ METHODS[METHODS["COPY"] = 8] = "COPY";
+ METHODS[METHODS["LOCK"] = 9] = "LOCK";
+ METHODS[METHODS["MKCOL"] = 10] = "MKCOL";
+ METHODS[METHODS["MOVE"] = 11] = "MOVE";
+ METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND";
+ METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH";
+ METHODS[METHODS["SEARCH"] = 14] = "SEARCH";
+ METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK";
+ METHODS[METHODS["BIND"] = 16] = "BIND";
+ METHODS[METHODS["REBIND"] = 17] = "REBIND";
+ METHODS[METHODS["UNBIND"] = 18] = "UNBIND";
+ METHODS[METHODS["ACL"] = 19] = "ACL";
+ /* subversion */
+ METHODS[METHODS["REPORT"] = 20] = "REPORT";
+ METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY";
+ METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT";
+ METHODS[METHODS["MERGE"] = 23] = "MERGE";
+ /* upnp */
+ METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH";
+ METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY";
+ METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE";
+ METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE";
+ /* RFC-5789 */
+ METHODS[METHODS["PATCH"] = 28] = "PATCH";
+ METHODS[METHODS["PURGE"] = 29] = "PURGE";
+ /* CalDAV */
+ METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR";
+ /* RFC-2068, section 19.6.1.2 */
+ METHODS[METHODS["LINK"] = 31] = "LINK";
+ METHODS[METHODS["UNLINK"] = 32] = "UNLINK";
+ /* icecast */
+ METHODS[METHODS["SOURCE"] = 33] = "SOURCE";
+ /* RFC-7540, section 11.6 */
+ METHODS[METHODS["PRI"] = 34] = "PRI";
+ /* RFC-2326 RTSP */
+ METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE";
+ METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE";
+ METHODS[METHODS["SETUP"] = 37] = "SETUP";
+ METHODS[METHODS["PLAY"] = 38] = "PLAY";
+ METHODS[METHODS["PAUSE"] = 39] = "PAUSE";
+ METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN";
+ METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER";
+ METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER";
+ METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT";
+ METHODS[METHODS["RECORD"] = 44] = "RECORD";
+ /* RAOP */
+ METHODS[METHODS["FLUSH"] = 45] = "FLUSH";
+})(METHODS = exports.METHODS || (exports.METHODS = {}));
+exports.METHODS_HTTP = [
+ METHODS.DELETE,
+ METHODS.GET,
+ METHODS.HEAD,
+ METHODS.POST,
+ METHODS.PUT,
+ METHODS.CONNECT,
+ METHODS.OPTIONS,
+ METHODS.TRACE,
+ METHODS.COPY,
+ METHODS.LOCK,
+ METHODS.MKCOL,
+ METHODS.MOVE,
+ METHODS.PROPFIND,
+ METHODS.PROPPATCH,
+ METHODS.SEARCH,
+ METHODS.UNLOCK,
+ METHODS.BIND,
+ METHODS.REBIND,
+ METHODS.UNBIND,
+ METHODS.ACL,
+ METHODS.REPORT,
+ METHODS.MKACTIVITY,
+ METHODS.CHECKOUT,
+ METHODS.MERGE,
+ METHODS['M-SEARCH'],
+ METHODS.NOTIFY,
+ METHODS.SUBSCRIBE,
+ METHODS.UNSUBSCRIBE,
+ METHODS.PATCH,
+ METHODS.PURGE,
+ METHODS.MKCALENDAR,
+ METHODS.LINK,
+ METHODS.UNLINK,
+ METHODS.PRI,
+ // TODO(indutny): should we allow it with HTTP?
+ METHODS.SOURCE,
+];
+exports.METHODS_ICE = [
+ METHODS.SOURCE,
+];
+exports.METHODS_RTSP = [
+ METHODS.OPTIONS,
+ METHODS.DESCRIBE,
+ METHODS.ANNOUNCE,
+ METHODS.SETUP,
+ METHODS.PLAY,
+ METHODS.PAUSE,
+ METHODS.TEARDOWN,
+ METHODS.GET_PARAMETER,
+ METHODS.SET_PARAMETER,
+ METHODS.REDIRECT,
+ METHODS.RECORD,
+ METHODS.FLUSH,
+ // For AirPlay
+ METHODS.GET,
+ METHODS.POST,
+];
+exports.METHOD_MAP = utils_1.enumToMap(METHODS);
+exports.H_METHOD_MAP = {};
+Object.keys(exports.METHOD_MAP).forEach((key) => {
+ if (/^H/.test(key)) {
+ exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];
+ }
+});
+var FINISH;
+(function (FINISH) {
+ FINISH[FINISH["SAFE"] = 0] = "SAFE";
+ FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB";
+ FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE";
+})(FINISH = exports.FINISH || (exports.FINISH = {}));
+exports.ALPHA = [];
+for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
+ // Upper case
+ exports.ALPHA.push(String.fromCharCode(i));
+ // Lower case
+ exports.ALPHA.push(String.fromCharCode(i + 0x20));
+}
+exports.NUM_MAP = {
+ 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
+ 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
+};
+exports.HEX_MAP = {
+ 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
+ 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
+ A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,
+ a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,
+};
+exports.NUM = [
+ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+];
+exports.ALPHANUM = exports.ALPHA.concat(exports.NUM);
+exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')'];
+exports.USERINFO_CHARS = exports.ALPHANUM
+ .concat(exports.MARK)
+ .concat(['%', ';', ':', '&', '=', '+', '$', ',']);
+// TODO(indutny): use RFC
+exports.STRICT_URL_CHAR = [
+ '!', '"', '$', '%', '&', '\'',
+ '(', ')', '*', '+', ',', '-', '.', '/',
+ ':', ';', '<', '=', '>',
+ '@', '[', '\\', ']', '^', '_',
+ '`',
+ '{', '|', '}', '~',
+].concat(exports.ALPHANUM);
+exports.URL_CHAR = exports.STRICT_URL_CHAR
+ .concat(['\t', '\f']);
+// All characters with 0x80 bit set to 1
+for (let i = 0x80; i <= 0xff; i++) {
+ exports.URL_CHAR.push(i);
+}
+exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
+/* Tokens as defined by rfc 2616. Also lowercases them.
+ * token = 1*<any CHAR except CTLs or separators>
+ * separators = "(" | ")" | "<" | ">" | "@"
+ * | "," | ";" | ":" | "\" | <">
+ * | "/" | "[" | "]" | "?" | "="
+ * | "{" | "}" | SP | HT
+ */
+exports.STRICT_TOKEN = [
+ '!', '#', '$', '%', '&', '\'',
+ '*', '+', '-', '.',
+ '^', '_', '`',
+ '|', '~',
+].concat(exports.ALPHANUM);
+exports.TOKEN = exports.STRICT_TOKEN.concat([' ']);
+/*
+ * Verify that a char is a valid visible (printable) US-ASCII
+ * character or %x80-FF
+ */
+exports.HEADER_CHARS = ['\t'];
+for (let i = 32; i <= 255; i++) {
+ if (i !== 127) {
+ exports.HEADER_CHARS.push(i);
+ }
+}
+// ',' = 44 (0x2C)
+exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);
+exports.MAJOR = exports.NUM_MAP;
+exports.MINOR = exports.MAJOR;
+var HEADER_STATE;
+(function (HEADER_STATE) {
+ HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL";
+ HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION";
+ HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH";
+ HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING";
+ HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE";
+ HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE";
+ HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE";
+ HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE";
+ HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED";
+})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {}));
+exports.SPECIAL_HEADERS = {
+ 'connection': HEADER_STATE.CONNECTION,
+ 'content-length': HEADER_STATE.CONTENT_LENGTH,
+ 'proxy-connection': HEADER_STATE.CONNECTION,
+ 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING,
+ 'upgrade': HEADER_STATE.UPGRADE,
+};
+//# sourceMappingURL=constants.js.map \ No newline at end of file
diff --git a/lib/llhttp/utils.d.ts b/lib/llhttp/utils.d.ts
new file mode 100644
index 0000000..15497f3
--- /dev/null
+++ b/lib/llhttp/utils.d.ts
@@ -0,0 +1,4 @@
+export interface IEnumMap {
+ [key: string]: number;
+}
+export declare function enumToMap(obj: any): IEnumMap;
diff --git a/lib/llhttp/utils.js b/lib/llhttp/utils.js
new file mode 100644
index 0000000..8a32e56
--- /dev/null
+++ b/lib/llhttp/utils.js
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.enumToMap = void 0;
+function enumToMap(obj) {
+ const res = {};
+ Object.keys(obj).forEach((key) => {
+ const value = obj[key];
+ if (typeof value === 'number') {
+ res[key] = value;
+ }
+ });
+ return res;
+}
+exports.enumToMap = enumToMap;
+//# sourceMappingURL=utils.js.map \ No newline at end of file
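
A small illustration of enumToMap (hypothetical snippet; the require path assumes the repository root): it keeps only the forward, numeric entries of a TypeScript-style reverse-mapped enum object.

const { enumToMap } = require('./lib/llhttp/utils')

// A compiled TypeScript numeric enum carries both forward and reverse entries.
const TYPE = { BOTH: 0, REQUEST: 1, RESPONSE: 2, 0: 'BOTH', 1: 'REQUEST', 2: 'RESPONSE' }

console.log(enumToMap(TYPE)) // { BOTH: 0, REQUEST: 1, RESPONSE: 2 }
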
diff --git a/lib/llhttp/wasm_build_env.txt b/lib/llhttp/wasm_build_env.txt
new file mode 100644
index 0000000..5f478b5
--- /dev/null
+++ b/lib/llhttp/wasm_build_env.txt
@@ -0,0 +1,32 @@
+alpine-baselayout-data-3.4.0-r0
+musl-1.2.3-r4
+busybox-1.35.0-r29
+busybox-binsh-1.35.0-r29
+alpine-baselayout-3.4.0-r0
+alpine-keys-2.4-r1
+ca-certificates-bundle-20220614-r4
+libcrypto3-3.0.8-r3
+libssl3-3.0.8-r3
+ssl_client-1.35.0-r29
+zlib-1.2.13-r0
+apk-tools-2.12.10-r1
+scanelf-1.3.5-r1
+musl-utils-1.2.3-r4
+libc-utils-0.7.2-r3
+libgcc-12.2.1_git20220924-r4
+libstdc++-12.2.1_git20220924-r4
+libffi-3.4.4-r0
+xz-libs-5.2.9-r0
+libxml2-2.10.4-r0
+zstd-libs-1.5.5-r0
+llvm15-libs-15.0.7-r0
+clang15-libs-15.0.7-r0
+libstdc++-dev-12.2.1_git20220924-r4
+clang15-15.0.7-r0
+lld-libs-15.0.7-r0
+lld-15.0.7-r0
+wasi-libc-0.20220525-r1
+wasi-libcxx-15.0.7-r0
+wasi-libcxxabi-15.0.7-r0
+wasi-compiler-rt-15.0.7-r0
+wasi-sdk-16-r0
diff --git a/lib/mock/mock-agent.js b/lib/mock/mock-agent.js
new file mode 100644
index 0000000..828e8af
--- /dev/null
+++ b/lib/mock/mock-agent.js
@@ -0,0 +1,171 @@
+'use strict'
+
+const { kClients } = require('../core/symbols')
+const Agent = require('../agent')
+const {
+ kAgent,
+ kMockAgentSet,
+ kMockAgentGet,
+ kDispatches,
+ kIsMockActive,
+ kNetConnect,
+ kGetNetConnect,
+ kOptions,
+ kFactory
+} = require('./mock-symbols')
+const MockClient = require('./mock-client')
+const MockPool = require('./mock-pool')
+const { matchValue, buildMockOptions } = require('./mock-utils')
+const { InvalidArgumentError, UndiciError } = require('../core/errors')
+const Dispatcher = require('../dispatcher')
+const Pluralizer = require('./pluralizer')
+const PendingInterceptorsFormatter = require('./pending-interceptors-formatter')
+
+class FakeWeakRef {
+ constructor (value) {
+ this.value = value
+ }
+
+ deref () {
+ return this.value
+ }
+}
+
+class MockAgent extends Dispatcher {
+ constructor (opts) {
+ super(opts)
+
+ this[kNetConnect] = true
+ this[kIsMockActive] = true
+
+ // Instantiate Agent and encapsulate
+ if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) {
+ throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+ }
+ const agent = opts && opts.agent ? opts.agent : new Agent(opts)
+ this[kAgent] = agent
+
+ this[kClients] = agent[kClients]
+ this[kOptions] = buildMockOptions(opts)
+ }
+
+ get (origin) {
+ let dispatcher = this[kMockAgentGet](origin)
+
+ if (!dispatcher) {
+ dispatcher = this[kFactory](origin)
+ this[kMockAgentSet](origin, dispatcher)
+ }
+ return dispatcher
+ }
+
+ dispatch (opts, handler) {
+ // Call MockAgent.get to perform additional setup before dispatching as normal
+ this.get(opts.origin)
+ return this[kAgent].dispatch(opts, handler)
+ }
+
+ async close () {
+ await this[kAgent].close()
+ this[kClients].clear()
+ }
+
+ deactivate () {
+ this[kIsMockActive] = false
+ }
+
+ activate () {
+ this[kIsMockActive] = true
+ }
+
+ enableNetConnect (matcher) {
+ if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {
+ if (Array.isArray(this[kNetConnect])) {
+ this[kNetConnect].push(matcher)
+ } else {
+ this[kNetConnect] = [matcher]
+ }
+ } else if (typeof matcher === 'undefined') {
+ this[kNetConnect] = true
+ } else {
+ throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')
+ }
+ }
+
+ disableNetConnect () {
+ this[kNetConnect] = false
+ }
+
+ // This is required to bypass issues caused by using global symbols - see:
+ // https://github.com/nodejs/undici/issues/1447
+ get isMockActive () {
+ return this[kIsMockActive]
+ }
+
+ [kMockAgentSet] (origin, dispatcher) {
+ this[kClients].set(origin, new FakeWeakRef(dispatcher))
+ }
+
+ [kFactory] (origin) {
+ const mockOptions = Object.assign({ agent: this }, this[kOptions])
+ return this[kOptions] && this[kOptions].connections === 1
+ ? new MockClient(origin, mockOptions)
+ : new MockPool(origin, mockOptions)
+ }
+
+ [kMockAgentGet] (origin) {
+ // First check if we can immediately find it
+ const ref = this[kClients].get(origin)
+ if (ref) {
+ return ref.deref()
+ }
+
+ // If the origin is not a string, create a dummy parent pool and return it to the user
+ if (typeof origin !== 'string') {
+ const dispatcher = this[kFactory]('http://localhost:9999')
+ this[kMockAgentSet](origin, dispatcher)
+ return dispatcher
+ }
+
+ // If we match, create a pool and assign the same dispatches
+ for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) {
+ const nonExplicitDispatcher = nonExplicitRef.deref()
+ if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
+ const dispatcher = this[kFactory](origin)
+ this[kMockAgentSet](origin, dispatcher)
+ dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches]
+ return dispatcher
+ }
+ }
+ }
+
+ [kGetNetConnect] () {
+ return this[kNetConnect]
+ }
+
+ pendingInterceptors () {
+ const mockAgentClients = this[kClients]
+
+ return Array.from(mockAgentClients.entries())
+ .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin })))
+ .filter(({ pending }) => pending)
+ }
+
+ assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {
+ const pending = this.pendingInterceptors()
+
+ if (pending.length === 0) {
+ return
+ }
+
+ const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length)
+
+ throw new UndiciError(`
+${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending:
+
+${pendingInterceptorsFormatter.format(pending)}
+`.trim())
+ }
+}
+
+module.exports = MockAgent
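
Typical MockAgent usage, for reference (not part of the patch; assumes the published undici package, with illustrative origin and payload):

const { MockAgent, setGlobalDispatcher, request } = require('undici')

async function main () {
  const mockAgent = new MockAgent()
  mockAgent.disableNetConnect() // fail fast on any unmocked request
  setGlobalDispatcher(mockAgent)

  mockAgent
    .get('https://api.example.com')
    .intercept({ path: '/users/1', method: 'GET' })
    .reply(200, { id: 1, name: 'Ada' })

  const { statusCode, body } = await request('https://api.example.com/users/1')
  console.log(statusCode, await body.json()) // 200 { id: 1, name: 'Ada' }

  mockAgent.assertNoPendingInterceptors()
}

main().catch(console.error)
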
diff --git a/lib/mock/mock-client.js b/lib/mock/mock-client.js
new file mode 100644
index 0000000..5f31215
--- /dev/null
+++ b/lib/mock/mock-client.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const { promisify } = require('util')
+const Client = require('../client')
+const { buildMockDispatch } = require('./mock-utils')
+const {
+ kDispatches,
+ kMockAgent,
+ kClose,
+ kOriginalClose,
+ kOrigin,
+ kOriginalDispatch,
+ kConnected
+} = require('./mock-symbols')
+const { MockInterceptor } = require('./mock-interceptor')
+const Symbols = require('../core/symbols')
+const { InvalidArgumentError } = require('../core/errors')
+
+/**
+ * MockClient provides an API that extends the Client to influence the mockDispatches.
+ */
+class MockClient extends Client {
+ constructor (origin, opts) {
+ super(origin, opts)
+
+ if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
+ throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+ }
+
+ this[kMockAgent] = opts.agent
+ this[kOrigin] = origin
+ this[kDispatches] = []
+ this[kConnected] = 1
+ this[kOriginalDispatch] = this.dispatch
+ this[kOriginalClose] = this.close.bind(this)
+
+ this.dispatch = buildMockDispatch.call(this)
+ this.close = this[kClose]
+ }
+
+ get [Symbols.kConnected] () {
+ return this[kConnected]
+ }
+
+ /**
+ * Sets up the base interceptor for mocking replies from undici.
+ */
+ intercept (opts) {
+ return new MockInterceptor(opts, this[kDispatches])
+ }
+
+ async [kClose] () {
+ await promisify(this[kOriginalClose])()
+ this[kConnected] = 0
+ this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
+ }
+}
+
+module.exports = MockClient
diff --git a/lib/mock/mock-errors.js b/lib/mock/mock-errors.js
new file mode 100644
index 0000000..5442c0e
--- /dev/null
+++ b/lib/mock/mock-errors.js
@@ -0,0 +1,17 @@
+'use strict'
+
+const { UndiciError } = require('../core/errors')
+
+class MockNotMatchedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, MockNotMatchedError)
+ this.name = 'MockNotMatchedError'
+ this.message = message || 'The request does not match any registered mock dispatches'
+ this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
+ }
+}
+
+module.exports = {
+ MockNotMatchedError
+}
diff --git a/lib/mock/mock-interceptor.js b/lib/mock/mock-interceptor.js
new file mode 100644
index 0000000..781e477
--- /dev/null
+++ b/lib/mock/mock-interceptor.js
@@ -0,0 +1,206 @@
+'use strict'
+
+const { getResponseData, buildKey, addMockDispatch } = require('./mock-utils')
+const {
+ kDispatches,
+ kDispatchKey,
+ kDefaultHeaders,
+ kDefaultTrailers,
+ kContentLength,
+ kMockDispatch
+} = require('./mock-symbols')
+const { InvalidArgumentError } = require('../core/errors')
+const { buildURL } = require('../core/util')
+
+/**
+ * Defines the scope API for an interceptor reply
+ */
+class MockScope {
+ constructor (mockDispatch) {
+ this[kMockDispatch] = mockDispatch
+ }
+
+ /**
+ * Delay a reply by a set amount in ms.
+ */
+ delay (waitInMs) {
+ if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) {
+ throw new InvalidArgumentError('waitInMs must be a valid integer > 0')
+ }
+
+ this[kMockDispatch].delay = waitInMs
+ return this
+ }
+
+ /**
+ * For a defined reply, never mark as consumed.
+ */
+ persist () {
+ this[kMockDispatch].persist = true
+ return this
+ }
+
+ /**
+ * Allow one to define a reply for a set amount of matching requests.
+ */
+ times (repeatTimes) {
+ if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) {
+ throw new InvalidArgumentError('repeatTimes must be a valid integer > 0')
+ }
+
+ this[kMockDispatch].times = repeatTimes
+ return this
+ }
+}
+
+/**
+ * Defines an interceptor for a Mock
+ */
+class MockInterceptor {
+ constructor (opts, mockDispatches) {
+ if (typeof opts !== 'object') {
+ throw new InvalidArgumentError('opts must be an object')
+ }
+ if (typeof opts.path === 'undefined') {
+ throw new InvalidArgumentError('opts.path must be defined')
+ }
+ if (typeof opts.method === 'undefined') {
+ opts.method = 'GET'
+ }
+ // See https://github.com/nodejs/undici/issues/1245
+ // As per RFC 3986, clients are not supposed to send URI
+ // fragments to servers when they retrieve a document.
+ if (typeof opts.path === 'string') {
+ if (opts.query) {
+ opts.path = buildURL(opts.path, opts.query)
+ } else {
+ // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811
+ const parsedURL = new URL(opts.path, 'data://')
+ opts.path = parsedURL.pathname + parsedURL.search
+ }
+ }
+ if (typeof opts.method === 'string') {
+ opts.method = opts.method.toUpperCase()
+ }
+
+ this[kDispatchKey] = buildKey(opts)
+ this[kDispatches] = mockDispatches
+ this[kDefaultHeaders] = {}
+ this[kDefaultTrailers] = {}
+ this[kContentLength] = false
+ }
+
+ createMockScopeDispatchData (statusCode, data, responseOptions = {}) {
+ const responseData = getResponseData(data)
+ const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {}
+ const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }
+ const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }
+
+ return { statusCode, data, headers, trailers }
+ }
+
+ validateReplyParameters (statusCode, data, responseOptions) {
+ if (typeof statusCode === 'undefined') {
+ throw new InvalidArgumentError('statusCode must be defined')
+ }
+ if (typeof data === 'undefined') {
+ throw new InvalidArgumentError('data must be defined')
+ }
+ if (typeof responseOptions !== 'object') {
+ throw new InvalidArgumentError('responseOptions must be an object')
+ }
+ }
+
+ /**
+ * Mock an undici request with a defined reply.
+ */
+ reply (replyData) {
+ // The values of the reply aren't available right now, as they
+ // only become available when the reply callback is invoked.
+ if (typeof replyData === 'function') {
+ // We'll first wrap the provided callback in another function;
+ // when invoked, this function will properly resolve the data
+ // from the callback.
+ const wrappedDefaultsCallback = (opts) => {
+ // The reply options callback resolves to an object with statusCode, data and responseOptions.
+ const resolvedData = replyData(opts)
+
+ // Check if it is in the right format
+ if (typeof resolvedData !== 'object') {
+ throw new InvalidArgumentError('reply options callback must return an object')
+ }
+
+ const { statusCode, data = '', responseOptions = {} } = resolvedData
+ this.validateReplyParameters(statusCode, data, responseOptions)
+ // Once the callback is invoked the values can be obtained immediately,
+ // so we return them from this higher-order function, which is resolved later.
+ return {
+ ...this.createMockScopeDispatchData(statusCode, data, responseOptions)
+ }
+ }
+
+ // Add the usual dispatch data, but this time set the data parameter to a function that will eventually provide the data.
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback)
+ return new MockScope(newMockDispatch)
+ }
+
+ // If we get here, we should have between one and three parameters.
+ // We spread the arguments of this function to obtain them,
+ // since in this case replyData is always just
+ // the statusCode.
+ const [statusCode, data = '', responseOptions = {}] = [...arguments]
+ this.validateReplyParameters(statusCode, data, responseOptions)
+
+ // Send the already-provided data as usual
+ const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions)
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)
+ return new MockScope(newMockDispatch)
+ }
+
+ /**
+ * Mock an undici request with a defined error.
+ */
+ replyWithError (error) {
+ if (typeof error === 'undefined') {
+ throw new InvalidArgumentError('error must be defined')
+ }
+
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error })
+ return new MockScope(newMockDispatch)
+ }
+
+ /**
+ * Set default reply headers on the interceptor for subsequent replies
+ */
+ defaultReplyHeaders (headers) {
+ if (typeof headers === 'undefined') {
+ throw new InvalidArgumentError('headers must be defined')
+ }
+
+ this[kDefaultHeaders] = headers
+ return this
+ }
+
+ /**
+ * Set default reply trailers on the interceptor for subsequent replies
+ */
+ defaultReplyTrailers (trailers) {
+ if (typeof trailers === 'undefined') {
+ throw new InvalidArgumentError('trailers must be defined')
+ }
+
+ this[kDefaultTrailers] = trailers
+ return this
+ }
+
+ /**
+ * Set reply content length header for replies on the interceptor
+ */
+ replyContentLength () {
+ this[kContentLength] = true
+ return this
+ }
+}
+
+module.exports.MockInterceptor = MockInterceptor
+module.exports.MockScope = MockScope
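
A sketch of the interceptor and scope chaining defined above (not part of the patch; assumes the published undici package, with illustrative origins and paths):

const { MockAgent } = require('undici')

const mockAgent = new MockAgent()
const mockPool = mockAgent.get('https://api.example.com')

// Fixed reply: interceptor options, default headers, then MockScope modifiers.
mockPool
  .intercept({ path: '/slow', method: 'GET' })
  .defaultReplyHeaders({ 'content-type': 'application/json' })
  .reply(200, { ok: true })
  .delay(50)   // wait 50 ms before replying
  .persist()   // never mark this interceptor as consumed

// Callback reply: statusCode/data/responseOptions resolved per matching request.
mockPool
  .intercept({ path: '/echo', method: 'POST' })
  .reply((opts) => ({ statusCode: 200, data: { method: opts.method } }))
  .times(2)    // consumed after two matching requests
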
diff --git a/lib/mock/mock-pool.js b/lib/mock/mock-pool.js
new file mode 100644
index 0000000..0a3a7cd
--- /dev/null
+++ b/lib/mock/mock-pool.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const { promisify } = require('util')
+const Pool = require('../pool')
+const { buildMockDispatch } = require('./mock-utils')
+const {
+ kDispatches,
+ kMockAgent,
+ kClose,
+ kOriginalClose,
+ kOrigin,
+ kOriginalDispatch,
+ kConnected
+} = require('./mock-symbols')
+const { MockInterceptor } = require('./mock-interceptor')
+const Symbols = require('../core/symbols')
+const { InvalidArgumentError } = require('../core/errors')
+
+/**
+ * MockPool extends Pool with an API for defining and matching mock dispatches.
+ */
+class MockPool extends Pool {
+ constructor (origin, opts) {
+ super(origin, opts)
+
+ if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
+ throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+ }
+
+ this[kMockAgent] = opts.agent
+ this[kOrigin] = origin
+ this[kDispatches] = []
+ this[kConnected] = 1
+ this[kOriginalDispatch] = this.dispatch
+ this[kOriginalClose] = this.close.bind(this)
+
+ this.dispatch = buildMockDispatch.call(this)
+ this.close = this[kClose]
+ }
+
+ get [Symbols.kConnected] () {
+ return this[kConnected]
+ }
+
+ /**
+ * Sets up the base interceptor for mocking replies from undici.
+ */
+ intercept (opts) {
+ return new MockInterceptor(opts, this[kDispatches])
+ }
+
+ async [kClose] () {
+ await promisify(this[kOriginalClose])()
+ this[kConnected] = 0
+ this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
+ }
+}
+
+module.exports = MockPool
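Editor's note: in normal use a MockPool is not constructed directly; MockAgent.get(origin) creates one with opts.agent wired up, which satisfies the constructor check above. A small lifecycle sketch with a hypothetical origin:

const { MockAgent } = require('undici')

const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://example.local') // a MockPool for this origin

mockPool.intercept({ path: '/ping', method: 'GET' }).reply(200, 'pong')

// close() resolves the wrapped original close, flips the mocked connected
// flag to 0 and removes the pool from the agent's client list.
mockPool.close().then(() => console.log('mock pool closed'))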
diff --git a/lib/mock/mock-symbols.js b/lib/mock/mock-symbols.js
new file mode 100644
index 0000000..8c4cbb6
--- /dev/null
+++ b/lib/mock/mock-symbols.js
@@ -0,0 +1,23 @@
+'use strict'
+
+module.exports = {
+ kAgent: Symbol('agent'),
+ kOptions: Symbol('options'),
+ kFactory: Symbol('factory'),
+ kDispatches: Symbol('dispatches'),
+ kDispatchKey: Symbol('dispatch key'),
+ kDefaultHeaders: Symbol('default headers'),
+ kDefaultTrailers: Symbol('default trailers'),
+ kContentLength: Symbol('content length'),
+ kMockAgent: Symbol('mock agent'),
+ kMockAgentSet: Symbol('mock agent set'),
+ kMockAgentGet: Symbol('mock agent get'),
+ kMockDispatch: Symbol('mock dispatch'),
+ kClose: Symbol('close'),
+ kOriginalClose: Symbol('original agent close'),
+ kOrigin: Symbol('origin'),
+ kIsMockActive: Symbol('is mock active'),
+ kNetConnect: Symbol('net connect'),
+ kGetNetConnect: Symbol('get net connect'),
+ kConnected: Symbol('connected')
+}
diff --git a/lib/mock/mock-utils.js b/lib/mock/mock-utils.js
new file mode 100644
index 0000000..42ea185
--- /dev/null
+++ b/lib/mock/mock-utils.js
@@ -0,0 +1,351 @@
+'use strict'
+
+const { MockNotMatchedError } = require('./mock-errors')
+const {
+ kDispatches,
+ kMockAgent,
+ kOriginalDispatch,
+ kOrigin,
+ kGetNetConnect
+} = require('./mock-symbols')
+const { buildURL, nop } = require('../core/util')
+const { STATUS_CODES } = require('http')
+const {
+ types: {
+ isPromise
+ }
+} = require('util')
+
+function matchValue (match, value) {
+ if (typeof match === 'string') {
+ return match === value
+ }
+ if (match instanceof RegExp) {
+ return match.test(value)
+ }
+ if (typeof match === 'function') {
+ return match(value) === true
+ }
+ return false
+}
+
+function lowerCaseEntries (headers) {
+ return Object.fromEntries(
+ Object.entries(headers).map(([headerName, headerValue]) => {
+ return [headerName.toLocaleLowerCase(), headerValue]
+ })
+ )
+}
+
+/**
+ * @param {import('../../index').Headers|string[]|Record<string, string>} headers
+ * @param {string} key
+ */
+function getHeaderByName (headers, key) {
+ if (Array.isArray(headers)) {
+ for (let i = 0; i < headers.length; i += 2) {
+ if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) {
+ return headers[i + 1]
+ }
+ }
+
+ return undefined
+ } else if (typeof headers.get === 'function') {
+ return headers.get(key)
+ } else {
+ return lowerCaseEntries(headers)[key.toLocaleLowerCase()]
+ }
+}
+
+/** @param {string[]} headers */
+function buildHeadersFromArray (headers) { // fetch HeadersList
+ const clone = headers.slice()
+ const entries = []
+ for (let index = 0; index < clone.length; index += 2) {
+ entries.push([clone[index], clone[index + 1]])
+ }
+ return Object.fromEntries(entries)
+}
+
+function matchHeaders (mockDispatch, headers) {
+ if (typeof mockDispatch.headers === 'function') {
+ if (Array.isArray(headers)) { // fetch HeadersList
+ headers = buildHeadersFromArray(headers)
+ }
+ return mockDispatch.headers(headers ? lowerCaseEntries(headers) : {})
+ }
+ if (typeof mockDispatch.headers === 'undefined') {
+ return true
+ }
+ if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') {
+ return false
+ }
+
+ for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) {
+ const headerValue = getHeaderByName(headers, matchHeaderName)
+
+ if (!matchValue(matchHeaderValue, headerValue)) {
+ return false
+ }
+ }
+ return true
+}
+
+function safeUrl (path) {
+ if (typeof path !== 'string') {
+ return path
+ }
+
+ const pathSegments = path.split('?')
+
+ if (pathSegments.length !== 2) {
+ return path
+ }
+
+ const qp = new URLSearchParams(pathSegments.pop())
+ qp.sort()
+ return [...pathSegments, qp.toString()].join('?')
+}
+
+function matchKey (mockDispatch, { path, method, body, headers }) {
+ const pathMatch = matchValue(mockDispatch.path, path)
+ const methodMatch = matchValue(mockDispatch.method, method)
+ const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true
+ const headersMatch = matchHeaders(mockDispatch, headers)
+ return pathMatch && methodMatch && bodyMatch && headersMatch
+}
+
+function getResponseData (data) {
+ if (Buffer.isBuffer(data)) {
+ return data
+ } else if (typeof data === 'object') {
+ return JSON.stringify(data)
+ } else {
+ return data.toString()
+ }
+}
+
+function getMockDispatch (mockDispatches, key) {
+ const basePath = key.query ? buildURL(key.path, key.query) : key.path
+ const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath
+
+ // Match path
+ let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))
+ if (matchedMockDispatches.length === 0) {
+ throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
+ }
+
+ // Match method
+ matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))
+ if (matchedMockDispatches.length === 0) {
+ throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)
+ }
+
+ // Match body
+ matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
+ if (matchedMockDispatches.length === 0) {
+ throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)
+ }
+
+ // Match headers
+ matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
+ if (matchedMockDispatches.length === 0) {
+ throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)
+ }
+
+ return matchedMockDispatches[0]
+}
+
+function addMockDispatch (mockDispatches, key, data) {
+ const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false }
+ const replyData = typeof data === 'function' ? { callback: data } : { ...data }
+ const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } }
+ mockDispatches.push(newMockDispatch)
+ return newMockDispatch
+}
+
+function deleteMockDispatch (mockDispatches, key) {
+ const index = mockDispatches.findIndex(dispatch => {
+ if (!dispatch.consumed) {
+ return false
+ }
+ return matchKey(dispatch, key)
+ })
+ if (index !== -1) {
+ mockDispatches.splice(index, 1)
+ }
+}
+
+function buildKey (opts) {
+ const { path, method, body, headers, query } = opts
+ return {
+ path,
+ method,
+ body,
+ headers,
+ query
+ }
+}
+
+function generateKeyValues (data) {
+ return Object.entries(data).reduce((keyValuePairs, [key, value]) => [
+ ...keyValuePairs,
+ Buffer.from(`${key}`),
+ Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)
+ ], [])
+}
+
+/**
+ * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
+ * @param {number} statusCode
+ */
+function getStatusText (statusCode) {
+ return STATUS_CODES[statusCode] || 'unknown'
+}
+
+async function getResponse (body) {
+ const buffers = []
+ for await (const data of body) {
+ buffers.push(data)
+ }
+ return Buffer.concat(buffers).toString('utf8')
+}
+
+/**
+ * Mock dispatch function used to simulate undici dispatches
+ */
+function mockDispatch (opts, handler) {
+ // Get mock dispatch from built key
+ const key = buildKey(opts)
+ const mockDispatch = getMockDispatch(this[kDispatches], key)
+
+ mockDispatch.timesInvoked++
+
+  // Resolve the callback if one is present for the dispatch data.
+ if (mockDispatch.data.callback) {
+ mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) }
+ }
+
+ // Parse mockDispatch data
+ const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch
+ const { timesInvoked, times } = mockDispatch
+
+ // If it's used up and not persistent, mark as consumed
+ mockDispatch.consumed = !persist && timesInvoked >= times
+ mockDispatch.pending = timesInvoked < times
+
+ // If specified, trigger dispatch error
+ if (error !== null) {
+ deleteMockDispatch(this[kDispatches], key)
+ handler.onError(error)
+ return true
+ }
+
+ // Handle the request with a delay if necessary
+ if (typeof delay === 'number' && delay > 0) {
+ setTimeout(() => {
+ handleReply(this[kDispatches])
+ }, delay)
+ } else {
+ handleReply(this[kDispatches])
+ }
+
+ function handleReply (mockDispatches, _data = data) {
+ // fetch's HeadersList is a 1D string array
+ const optsHeaders = Array.isArray(opts.headers)
+ ? buildHeadersFromArray(opts.headers)
+ : opts.headers
+ const body = typeof _data === 'function'
+ ? _data({ ...opts, headers: optsHeaders })
+ : _data
+
+ // util.types.isPromise is likely needed for jest.
+ if (isPromise(body)) {
+      // If handleReply is asynchronous, throwing an error in the callback
+      // rejects the promise rather than throwing synchronously, which
+      // breaks some tests. Instead, we wait for the callback to resolve if
+      // it is a promise, and then re-run handleReply with the new body.
+ body.then((newData) => handleReply(mockDispatches, newData))
+ return
+ }
+
+ const responseData = getResponseData(body)
+ const responseHeaders = generateKeyValues(headers)
+ const responseTrailers = generateKeyValues(trailers)
+
+ handler.abort = nop
+ handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode))
+ handler.onData(Buffer.from(responseData))
+ handler.onComplete(responseTrailers)
+ deleteMockDispatch(mockDispatches, key)
+ }
+
+ function resume () {}
+
+ return true
+}
+
+function buildMockDispatch () {
+ const agent = this[kMockAgent]
+ const origin = this[kOrigin]
+ const originalDispatch = this[kOriginalDispatch]
+
+ return function dispatch (opts, handler) {
+ if (agent.isMockActive) {
+ try {
+ mockDispatch.call(this, opts, handler)
+ } catch (error) {
+ if (error instanceof MockNotMatchedError) {
+ const netConnect = agent[kGetNetConnect]()
+ if (netConnect === false) {
+ throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)
+ }
+ if (checkNetConnect(netConnect, origin)) {
+ originalDispatch.call(this, opts, handler)
+ } else {
+ throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)
+ }
+ } else {
+ throw error
+ }
+ }
+ } else {
+ originalDispatch.call(this, opts, handler)
+ }
+ }
+}
+
+function checkNetConnect (netConnect, origin) {
+ const url = new URL(origin)
+ if (netConnect === true) {
+ return true
+ } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) {
+ return true
+ }
+ return false
+}
+
+function buildMockOptions (opts) {
+ if (opts) {
+ const { agent, ...mockOptions } = opts
+ return mockOptions
+ }
+}
+
+module.exports = {
+ getResponseData,
+ getMockDispatch,
+ addMockDispatch,
+ deleteMockDispatch,
+ buildKey,
+ generateKeyValues,
+ matchValue,
+ getResponse,
+ getStatusText,
+ mockDispatch,
+ buildMockDispatch,
+ checkNetConnect,
+ buildMockOptions,
+ getHeaderByName
+}
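Editor's note: a sketch of the matcher semantics used by getMockDispatch above; the relative require path assumes the repository layout shown in this diff.

const { matchValue, addMockDispatch, getMockDispatch } = require('./lib/mock/mock-utils')

matchValue('/ping', '/ping')                        // true: exact string match
matchValue(/^\/users\/\d+$/, '/users/7')            // true: RegExp test
matchValue((p) => p.startsWith('/api'), '/api/v1')  // true: predicate returning true

// safeUrl() sorts query parameters, so key order does not affect matching.
const dispatches = []
addMockDispatch(dispatches, { path: '/search?b=2&a=1', method: 'GET' }, { statusCode: 200, data: 'ok' })
const match = getMockDispatch(dispatches, { path: '/search?a=1&b=2', method: 'GET' })
console.log(match.data.statusCode) // 200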
diff --git a/lib/mock/pending-interceptors-formatter.js b/lib/mock/pending-interceptors-formatter.js
new file mode 100644
index 0000000..1bc7539
--- /dev/null
+++ b/lib/mock/pending-interceptors-formatter.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const { Transform } = require('stream')
+const { Console } = require('console')
+
+/**
+ * Gets the output of `console.table(…)` as a string.
+ */
+module.exports = class PendingInterceptorsFormatter {
+ constructor ({ disableColors } = {}) {
+ this.transform = new Transform({
+ transform (chunk, _enc, cb) {
+ cb(null, chunk)
+ }
+ })
+
+ this.logger = new Console({
+ stdout: this.transform,
+ inspectOptions: {
+ colors: !disableColors && !process.env.CI
+ }
+ })
+ }
+
+ format (pendingInterceptors) {
+ const withPrettyHeaders = pendingInterceptors.map(
+ ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
+ Method: method,
+ Origin: origin,
+ Path: path,
+ 'Status code': statusCode,
+ Persistent: persist ? '✅' : '❌',
+ Invocations: timesInvoked,
+ Remaining: persist ? Infinity : times - timesInvoked
+ }))
+
+ this.logger.table(withPrettyHeaders)
+ return this.transform.read().toString()
+ }
+}
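Editor's note: a sketch of the formatter in isolation; the interceptor shape is taken from the destructuring in format() above, and the require path assumes this diff's layout.

const PendingInterceptorsFormatter = require('./lib/mock/pending-interceptors-formatter')

const formatter = new PendingInterceptorsFormatter({ disableColors: true })
const table = formatter.format([{
  method: 'GET',
  origin: 'http://example.local',
  path: '/ping',
  data: { statusCode: 200 },
  persist: false,
  times: 1,
  timesInvoked: 0
}])
console.log(table) // the console.table output, captured as a string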
diff --git a/lib/mock/pluralizer.js b/lib/mock/pluralizer.js
new file mode 100644
index 0000000..47f150b
--- /dev/null
+++ b/lib/mock/pluralizer.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const singulars = {
+ pronoun: 'it',
+ is: 'is',
+ was: 'was',
+ this: 'this'
+}
+
+const plurals = {
+ pronoun: 'they',
+ is: 'are',
+ was: 'were',
+ this: 'these'
+}
+
+module.exports = class Pluralizer {
+ constructor (singular, plural) {
+ this.singular = singular
+ this.plural = plural
+ }
+
+ pluralize (count) {
+ const one = count === 1
+ const keys = one ? singulars : plurals
+ const noun = one ? this.singular : this.plural
+ return { ...keys, count, noun }
+ }
+}
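Editor's note: the pluralized fragments are meant to be interpolated into messages (for example when reporting pending interceptors); a minimal sketch, with the require path assuming this diff's layout.

const Pluralizer = require('./lib/mock/pluralizer')

const pluralizer = new Pluralizer('interceptor', 'interceptors')

const one = pluralizer.pluralize(1)
// { pronoun: 'it', is: 'is', was: 'was', this: 'this', count: 1, noun: 'interceptor' }

const many = pluralizer.pluralize(3)
console.log(`${many.count} ${many.noun} ${many.is} still pending`) // "3 interceptors are still pending"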
diff --git a/lib/node/fixed-queue.js b/lib/node/fixed-queue.js
new file mode 100644
index 0000000..3572681
--- /dev/null
+++ b/lib/node/fixed-queue.js
@@ -0,0 +1,117 @@
+/* eslint-disable */
+
+'use strict'
+
+// Extracted from node/lib/internal/fixed_queue.js
+
+// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be a power of two.
+const kSize = 2048;
+const kMask = kSize - 1;
+
+// The FixedQueue is implemented as a singly-linked list of fixed-size
+// circular buffers. It looks something like this:
+//
+// head tail
+// | |
+// v v
+// +-----------+ <-----\ +-----------+ <------\ +-----------+
+// | [null] | \----- | next | \------- | next |
+// +-----------+ +-----------+ +-----------+
+// | item | <-- bottom | item | <-- bottom | [empty] |
+// | item | | item | | [empty] |
+// | item | | item | | [empty] |
+// | item | | item | | [empty] |
+// | item | | item | bottom --> | item |
+// | item | | item | | item |
+// | ... | | ... | | ... |
+// | item | | item | | item |
+// | item | | item | | item |
+// | [empty] | <-- top | item | | item |
+// | [empty] | | item | | item |
+// | [empty] | | [empty] | <-- top top --> | [empty] |
+// +-----------+ +-----------+ +-----------+
+//
+// Or, if there is only one circular buffer, it looks something
+// like either of these:
+//
+// head tail head tail
+// | | | |
+// v v v v
+// +-----------+ +-----------+
+// | [null] | | [null] |
+// +-----------+ +-----------+
+// | [empty] | | item |
+// | [empty] | | item |
+// | item | <-- bottom top --> | [empty] |
+// | item | | [empty] |
+// | [empty] | <-- top bottom --> | item |
+// | [empty] | | item |
+// +-----------+ +-----------+
+//
+// Adding a value means moving `top` forward by one, removing means
+// moving `bottom` forward by one. After reaching the end, the queue
+// wraps around.
+//
+// When `top === bottom` the current queue is empty and when
+// `top + 1 === bottom` it's full. This wastes a single space of storage
+// but allows much quicker checks.
+
+class FixedCircularBuffer {
+ constructor() {
+ this.bottom = 0;
+ this.top = 0;
+ this.list = new Array(kSize);
+ this.next = null;
+ }
+
+ isEmpty() {
+ return this.top === this.bottom;
+ }
+
+ isFull() {
+ return ((this.top + 1) & kMask) === this.bottom;
+ }
+
+ push(data) {
+ this.list[this.top] = data;
+ this.top = (this.top + 1) & kMask;
+ }
+
+ shift() {
+ const nextItem = this.list[this.bottom];
+ if (nextItem === undefined)
+ return null;
+ this.list[this.bottom] = undefined;
+ this.bottom = (this.bottom + 1) & kMask;
+ return nextItem;
+ }
+}
+
+module.exports = class FixedQueue {
+ constructor() {
+ this.head = this.tail = new FixedCircularBuffer();
+ }
+
+ isEmpty() {
+ return this.head.isEmpty();
+ }
+
+ push(data) {
+ if (this.head.isFull()) {
+      // Head is full: create a new buffer, set the old head's `.next` to it,
+      // and make it the new head.
+ this.head = this.head.next = new FixedCircularBuffer();
+ }
+ this.head.push(data);
+ }
+
+ shift() {
+ const tail = this.tail;
+ const next = tail.shift();
+ if (tail.isEmpty() && tail.next !== null) {
+ // If there is another queue, it forms the new tail.
+ this.tail = tail.next;
+ }
+ return next;
+ }
+};
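Editor's note: a quick sketch showing that FIFO order is preserved when the queue grows past a single 2048-slot circular buffer; the require path assumes this diff's layout.

const FixedQueue = require('./lib/node/fixed-queue')

const queue = new FixedQueue()
for (let i = 0; i < 3000; i++) {
  queue.push(i) // crossing kSize (2048) allocates a second circular buffer
}

console.log(queue.shift())   // 0
console.log(queue.shift())   // 1
console.log(queue.isEmpty()) // false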
diff --git a/lib/pool-base.js b/lib/pool-base.js
new file mode 100644
index 0000000..2a909ee
--- /dev/null
+++ b/lib/pool-base.js
@@ -0,0 +1,194 @@
+'use strict'
+
+const DispatcherBase = require('./dispatcher-base')
+const FixedQueue = require('./node/fixed-queue')
+const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('./core/symbols')
+const PoolStats = require('./pool-stats')
+
+const kClients = Symbol('clients')
+const kNeedDrain = Symbol('needDrain')
+const kQueue = Symbol('queue')
+const kClosedResolve = Symbol('closed resolve')
+const kOnDrain = Symbol('onDrain')
+const kOnConnect = Symbol('onConnect')
+const kOnDisconnect = Symbol('onDisconnect')
+const kOnConnectionError = Symbol('onConnectionError')
+const kGetDispatcher = Symbol('get dispatcher')
+const kAddClient = Symbol('add client')
+const kRemoveClient = Symbol('remove client')
+const kStats = Symbol('stats')
+
+class PoolBase extends DispatcherBase {
+ constructor () {
+ super()
+
+ this[kQueue] = new FixedQueue()
+ this[kClients] = []
+ this[kQueued] = 0
+
+ const pool = this
+
+ this[kOnDrain] = function onDrain (origin, targets) {
+ const queue = pool[kQueue]
+
+ let needDrain = false
+
+ while (!needDrain) {
+ const item = queue.shift()
+ if (!item) {
+ break
+ }
+ pool[kQueued]--
+ needDrain = !this.dispatch(item.opts, item.handler)
+ }
+
+ this[kNeedDrain] = needDrain
+
+ if (!this[kNeedDrain] && pool[kNeedDrain]) {
+ pool[kNeedDrain] = false
+ pool.emit('drain', origin, [pool, ...targets])
+ }
+
+ if (pool[kClosedResolve] && queue.isEmpty()) {
+ Promise
+ .all(pool[kClients].map(c => c.close()))
+ .then(pool[kClosedResolve])
+ }
+ }
+
+ this[kOnConnect] = (origin, targets) => {
+ pool.emit('connect', origin, [pool, ...targets])
+ }
+
+ this[kOnDisconnect] = (origin, targets, err) => {
+ pool.emit('disconnect', origin, [pool, ...targets], err)
+ }
+
+ this[kOnConnectionError] = (origin, targets, err) => {
+ pool.emit('connectionError', origin, [pool, ...targets], err)
+ }
+
+ this[kStats] = new PoolStats(this)
+ }
+
+ get [kBusy] () {
+ return this[kNeedDrain]
+ }
+
+ get [kConnected] () {
+ return this[kClients].filter(client => client[kConnected]).length
+ }
+
+ get [kFree] () {
+ return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length
+ }
+
+ get [kPending] () {
+ let ret = this[kQueued]
+ for (const { [kPending]: pending } of this[kClients]) {
+ ret += pending
+ }
+ return ret
+ }
+
+ get [kRunning] () {
+ let ret = 0
+ for (const { [kRunning]: running } of this[kClients]) {
+ ret += running
+ }
+ return ret
+ }
+
+ get [kSize] () {
+ let ret = this[kQueued]
+ for (const { [kSize]: size } of this[kClients]) {
+ ret += size
+ }
+ return ret
+ }
+
+ get stats () {
+ return this[kStats]
+ }
+
+ async [kClose] () {
+ if (this[kQueue].isEmpty()) {
+ return Promise.all(this[kClients].map(c => c.close()))
+ } else {
+ return new Promise((resolve) => {
+ this[kClosedResolve] = resolve
+ })
+ }
+ }
+
+ async [kDestroy] (err) {
+ while (true) {
+ const item = this[kQueue].shift()
+ if (!item) {
+ break
+ }
+ item.handler.onError(err)
+ }
+
+ return Promise.all(this[kClients].map(c => c.destroy(err)))
+ }
+
+ [kDispatch] (opts, handler) {
+ const dispatcher = this[kGetDispatcher]()
+
+ if (!dispatcher) {
+ this[kNeedDrain] = true
+ this[kQueue].push({ opts, handler })
+ this[kQueued]++
+ } else if (!dispatcher.dispatch(opts, handler)) {
+ dispatcher[kNeedDrain] = true
+ this[kNeedDrain] = !this[kGetDispatcher]()
+ }
+
+ return !this[kNeedDrain]
+ }
+
+ [kAddClient] (client) {
+ client
+ .on('drain', this[kOnDrain])
+ .on('connect', this[kOnConnect])
+ .on('disconnect', this[kOnDisconnect])
+ .on('connectionError', this[kOnConnectionError])
+
+ this[kClients].push(client)
+
+ if (this[kNeedDrain]) {
+ process.nextTick(() => {
+ if (this[kNeedDrain]) {
+ this[kOnDrain](client[kUrl], [this, client])
+ }
+ })
+ }
+
+ return this
+ }
+
+ [kRemoveClient] (client) {
+ client.close(() => {
+ const idx = this[kClients].indexOf(client)
+ if (idx !== -1) {
+ this[kClients].splice(idx, 1)
+ }
+ })
+
+ this[kNeedDrain] = this[kClients].some(dispatcher => (
+ !dispatcher[kNeedDrain] &&
+ dispatcher.closed !== true &&
+ dispatcher.destroyed !== true
+ ))
+ }
+}
+
+module.exports = {
+ PoolBase,
+ kClients,
+ kNeedDrain,
+ kAddClient,
+ kRemoveClient,
+ kGetDispatcher
+}
diff --git a/lib/pool-stats.js b/lib/pool-stats.js
new file mode 100644
index 0000000..b4af8ae
--- /dev/null
+++ b/lib/pool-stats.js
@@ -0,0 +1,34 @@
+const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('./core/symbols')
+const kPool = Symbol('pool')
+
+class PoolStats {
+ constructor (pool) {
+ this[kPool] = pool
+ }
+
+ get connected () {
+ return this[kPool][kConnected]
+ }
+
+ get free () {
+ return this[kPool][kFree]
+ }
+
+ get pending () {
+ return this[kPool][kPending]
+ }
+
+ get queued () {
+ return this[kPool][kQueued]
+ }
+
+ get running () {
+ return this[kPool][kRunning]
+ }
+
+ get size () {
+ return this[kPool][kSize]
+ }
+}
+
+module.exports = PoolStats
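Editor's note: the getters above simply proxy the pool's symbol-keyed counters; a sketch using the public Pool export, with a hypothetical origin and no request in flight.

const { Pool } = require('undici')

const pool = new Pool('http://example.local', { connections: 2 })
const { connected, free, pending, queued, running, size } = pool.stats
console.log({ connected, free, pending, queued, running, size })
// { connected: 0, free: 0, pending: 0, queued: 0, running: 0, size: 0 } before any dispatch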
diff --git a/lib/pool.js b/lib/pool.js
new file mode 100644
index 0000000..e3cd339
--- /dev/null
+++ b/lib/pool.js
@@ -0,0 +1,94 @@
+'use strict'
+
+const {
+ PoolBase,
+ kClients,
+ kNeedDrain,
+ kAddClient,
+ kGetDispatcher
+} = require('./pool-base')
+const Client = require('./client')
+const {
+ InvalidArgumentError
+} = require('./core/errors')
+const util = require('./core/util')
+const { kUrl, kInterceptors } = require('./core/symbols')
+const buildConnector = require('./core/connect')
+
+const kOptions = Symbol('options')
+const kConnections = Symbol('connections')
+const kFactory = Symbol('factory')
+
+function defaultFactory (origin, opts) {
+ return new Client(origin, opts)
+}
+
+class Pool extends PoolBase {
+ constructor (origin, {
+ connections,
+ factory = defaultFactory,
+ connect,
+ connectTimeout,
+ tls,
+ maxCachedSessions,
+ socketPath,
+ autoSelectFamily,
+ autoSelectFamilyAttemptTimeout,
+ allowH2,
+ ...options
+ } = {}) {
+ super()
+
+ if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
+ throw new InvalidArgumentError('invalid connections')
+ }
+
+ if (typeof factory !== 'function') {
+ throw new InvalidArgumentError('factory must be a function.')
+ }
+
+ if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+ throw new InvalidArgumentError('connect must be a function or an object')
+ }
+
+ if (typeof connect !== 'function') {
+ connect = buildConnector({
+ ...tls,
+ maxCachedSessions,
+ allowH2,
+ socketPath,
+ timeout: connectTimeout,
+ ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
+ ...connect
+ })
+ }
+
+ this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool)
+ ? options.interceptors.Pool
+ : []
+ this[kConnections] = connections || null
+ this[kUrl] = util.parseOrigin(origin)
+ this[kOptions] = { ...util.deepClone(options), connect, allowH2 }
+ this[kOptions].interceptors = options.interceptors
+ ? { ...options.interceptors }
+ : undefined
+ this[kFactory] = factory
+ }
+
+ [kGetDispatcher] () {
+ let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain])
+
+ if (dispatcher) {
+ return dispatcher
+ }
+
+ if (!this[kConnections] || this[kClients].length < this[kConnections]) {
+ dispatcher = this[kFactory](this[kUrl], this[kOptions])
+ this[kAddClient](dispatcher)
+ }
+
+ return dispatcher
+ }
+}
+
+module.exports = Pool
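Editor's note: typical Pool usage via the package entry point (which attaches the request() helper to dispatchers); the origin is hypothetical. Clients are created lazily by kGetDispatcher, up to the connections limit.

const { Pool } = require('undici')

const pool = new Pool('http://localhost:3000', { connections: 4 })

pool.request({ path: '/', method: 'GET' })
  .then(async ({ statusCode, body }) => {
    console.log(statusCode, await body.text())
    await pool.close()
  })
  .catch(console.error)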
diff --git a/lib/proxy-agent.js b/lib/proxy-agent.js
new file mode 100644
index 0000000..e3c0f6f
--- /dev/null
+++ b/lib/proxy-agent.js
@@ -0,0 +1,189 @@
+'use strict'
+
+const { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols')
+const { URL } = require('url')
+const Agent = require('./agent')
+const Pool = require('./pool')
+const DispatcherBase = require('./dispatcher-base')
+const { InvalidArgumentError, RequestAbortedError } = require('./core/errors')
+const buildConnector = require('./core/connect')
+
+const kAgent = Symbol('proxy agent')
+const kClient = Symbol('proxy client')
+const kProxyHeaders = Symbol('proxy headers')
+const kRequestTls = Symbol('request tls settings')
+const kProxyTls = Symbol('proxy tls settings')
+const kConnectEndpoint = Symbol('connect endpoint function')
+
+function defaultProtocolPort (protocol) {
+ return protocol === 'https:' ? 443 : 80
+}
+
+function buildProxyOptions (opts) {
+ if (typeof opts === 'string') {
+ opts = { uri: opts }
+ }
+
+ if (!opts || !opts.uri) {
+ throw new InvalidArgumentError('Proxy opts.uri is mandatory')
+ }
+
+ return {
+ uri: opts.uri,
+ protocol: opts.protocol || 'https'
+ }
+}
+
+function defaultFactory (origin, opts) {
+ return new Pool(origin, opts)
+}
+
+class ProxyAgent extends DispatcherBase {
+ constructor (opts) {
+ super(opts)
+ this[kProxy] = buildProxyOptions(opts)
+ this[kAgent] = new Agent(opts)
+ this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
+ ? opts.interceptors.ProxyAgent
+ : []
+
+ if (typeof opts === 'string') {
+ opts = { uri: opts }
+ }
+
+ if (!opts || !opts.uri) {
+ throw new InvalidArgumentError('Proxy opts.uri is mandatory')
+ }
+
+ const { clientFactory = defaultFactory } = opts
+
+ if (typeof clientFactory !== 'function') {
+ throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
+ }
+
+ this[kRequestTls] = opts.requestTls
+ this[kProxyTls] = opts.proxyTls
+ this[kProxyHeaders] = opts.headers || {}
+
+ const resolvedUrl = new URL(opts.uri)
+ const { origin, port, host, username, password } = resolvedUrl
+
+ if (opts.auth && opts.token) {
+ throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
+ } else if (opts.auth) {
+ /* @deprecated in favour of opts.token */
+ this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
+ } else if (opts.token) {
+ this[kProxyHeaders]['proxy-authorization'] = opts.token
+ } else if (username && password) {
+ this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
+ }
+
+ const connect = buildConnector({ ...opts.proxyTls })
+ this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
+ this[kClient] = clientFactory(resolvedUrl, { connect })
+ this[kAgent] = new Agent({
+ ...opts,
+ connect: async (opts, callback) => {
+ let requestedHost = opts.host
+ if (!opts.port) {
+ requestedHost += `:${defaultProtocolPort(opts.protocol)}`
+ }
+ try {
+ const { socket, statusCode } = await this[kClient].connect({
+ origin,
+ port,
+ path: requestedHost,
+ signal: opts.signal,
+ headers: {
+ ...this[kProxyHeaders],
+ host
+ }
+ })
+ if (statusCode !== 200) {
+ socket.on('error', () => {}).destroy()
+ callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
+ }
+ if (opts.protocol !== 'https:') {
+ callback(null, socket)
+ return
+ }
+ let servername
+ if (this[kRequestTls]) {
+ servername = this[kRequestTls].servername
+ } else {
+ servername = opts.servername
+ }
+ this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
+ } catch (err) {
+ callback(err)
+ }
+ }
+ })
+ }
+
+ dispatch (opts, handler) {
+ const { host } = new URL(opts.origin)
+ const headers = buildHeaders(opts.headers)
+ throwIfProxyAuthIsSent(headers)
+ return this[kAgent].dispatch(
+ {
+ ...opts,
+ headers: {
+ ...headers,
+ host
+ }
+ },
+ handler
+ )
+ }
+
+ async [kClose] () {
+ await this[kAgent].close()
+ await this[kClient].close()
+ }
+
+ async [kDestroy] () {
+ await this[kAgent].destroy()
+ await this[kClient].destroy()
+ }
+}
+
+/**
+ * @param {string[] | Record<string, string>} headers
+ * @returns {Record<string, string>}
+ */
+function buildHeaders (headers) {
+ // When using undici.fetch, the headers list is stored
+ // as an array.
+ if (Array.isArray(headers)) {
+ /** @type {Record<string, string>} */
+ const headersPair = {}
+
+ for (let i = 0; i < headers.length; i += 2) {
+ headersPair[headers[i]] = headers[i + 1]
+ }
+
+ return headersPair
+ }
+
+ return headers
+}
+
+/**
+ * @param {Record<string, string>} headers
+ *
+ * Previous versions of ProxyAgent accepted the Proxy-Authorization header in
+ * the request headers. That behaviour was changed, and this check was added to
+ * keep end users from reintroducing the resulting security vulnerability.
+ * It should be removed in the next major version for performance reasons.
+ */
+function throwIfProxyAuthIsSent (headers) {
+ const existProxyAuth = headers && Object.keys(headers)
+ .find((key) => key.toLowerCase() === 'proxy-authorization')
+ if (existProxyAuth) {
+ throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor')
+ }
+}
+
+module.exports = ProxyAgent
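Editor's note: a sketch of routing requests through an HTTP proxy with the ProxyAgent above; both URIs and the token are hypothetical. Proxy credentials must be supplied here rather than per request, or throwIfProxyAuthIsSent rejects the dispatch.

const { ProxyAgent, setGlobalDispatcher, request } = require('undici')

const proxyAgent = new ProxyAgent({
  uri: 'http://proxy.example.local:8000',
  token: 'Bearer proxy-token' // sent as the proxy-authorization header
})
setGlobalDispatcher(proxyAgent)

request('http://origin.example.local/resource')
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)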
diff --git a/lib/timers.js b/lib/timers.js
new file mode 100644
index 0000000..5782217
--- /dev/null
+++ b/lib/timers.js
@@ -0,0 +1,97 @@
+'use strict'
+
+let fastNow = Date.now()
+let fastNowTimeout
+
+const fastTimers = []
+
+function onTimeout () {
+ fastNow = Date.now()
+
+ let len = fastTimers.length
+ let idx = 0
+ while (idx < len) {
+ const timer = fastTimers[idx]
+
+ if (timer.state === 0) {
+ timer.state = fastNow + timer.delay
+ } else if (timer.state > 0 && fastNow >= timer.state) {
+ timer.state = -1
+ timer.callback(timer.opaque)
+ }
+
+ if (timer.state === -1) {
+ timer.state = -2
+ if (idx !== len - 1) {
+ fastTimers[idx] = fastTimers.pop()
+ } else {
+ fastTimers.pop()
+ }
+ len -= 1
+ } else {
+ idx += 1
+ }
+ }
+
+ if (fastTimers.length > 0) {
+ refreshTimeout()
+ }
+}
+
+function refreshTimeout () {
+ if (fastNowTimeout && fastNowTimeout.refresh) {
+ fastNowTimeout.refresh()
+ } else {
+ clearTimeout(fastNowTimeout)
+ fastNowTimeout = setTimeout(onTimeout, 1e3)
+ if (fastNowTimeout.unref) {
+ fastNowTimeout.unref()
+ }
+ }
+}
+
+class Timeout {
+ constructor (callback, delay, opaque) {
+ this.callback = callback
+ this.delay = delay
+ this.opaque = opaque
+
+    // -2 not in the timer list
+    // -1 in the timer list but inactive
+    //  0 in the timer list, waiting for the timer loop to assign an expiry time
+    // > 0 in the timer list, waiting for the assigned expiry time to pass
+ this.state = -2
+
+ this.refresh()
+ }
+
+ refresh () {
+ if (this.state === -2) {
+ fastTimers.push(this)
+ if (!fastNowTimeout || fastTimers.length === 1) {
+ refreshTimeout()
+ }
+ }
+
+ this.state = 0
+ }
+
+ clear () {
+ this.state = -1
+ }
+}
+
+module.exports = {
+ setTimeout (callback, delay, opaque) {
+ return delay < 1e3
+ ? setTimeout(callback, delay, opaque)
+ : new Timeout(callback, delay, opaque)
+ },
+ clearTimeout (timeout) {
+ if (timeout instanceof Timeout) {
+ timeout.clear()
+ } else {
+ clearTimeout(timeout)
+ }
+ }
+}
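Editor's note: the module mirrors the timers API; delays under one second fall through to the native setTimeout, while longer delays share a single ~1-second timer that is refreshed while fast timers are pending. A sketch, with the require path assuming this diff's layout:

const timers = require('./lib/timers')

const handle = timers.setTimeout((opaque) => {
  console.log('fired with', opaque)
}, 5000, { id: 1 }) // >= 1s, so this is a coarse Timeout checked roughly once per second

timers.clearTimeout(handle) // accepts either a Timeout or a native handle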
diff --git a/lib/websocket/connection.js b/lib/websocket/connection.js
new file mode 100644
index 0000000..e0fa697
--- /dev/null
+++ b/lib/websocket/connection.js
@@ -0,0 +1,291 @@
+'use strict'
+
+const diagnosticsChannel = require('diagnostics_channel')
+const { uid, states } = require('./constants')
+const {
+ kReadyState,
+ kSentClose,
+ kByteParser,
+ kReceivedClose
+} = require('./symbols')
+const { fireEvent, failWebsocketConnection } = require('./util')
+const { CloseEvent } = require('./events')
+const { makeRequest } = require('../fetch/request')
+const { fetching } = require('../fetch/index')
+const { Headers } = require('../fetch/headers')
+const { getGlobalDispatcher } = require('../global')
+const { kHeadersList } = require('../core/symbols')
+
+const channels = {}
+channels.open = diagnosticsChannel.channel('undici:websocket:open')
+channels.close = diagnosticsChannel.channel('undici:websocket:close')
+channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error')
+
+/** @type {import('crypto')} */
+let crypto
+try {
+ crypto = require('crypto')
+} catch {
+
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#concept-websocket-establish
+ * @param {URL} url
+ * @param {string|string[]} protocols
+ * @param {import('./websocket').WebSocket} ws
+ * @param {(response: any) => void} onEstablish
+ * @param {Partial<import('../../types/websocket').WebSocketInit>} options
+ */
+function establishWebSocketConnection (url, protocols, ws, onEstablish, options) {
+ // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s
+ // scheme is "ws", and to "https" otherwise.
+ const requestURL = url
+
+ requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:'
+
+ // 2. Let request be a new request, whose URL is requestURL, client is client,
+ // service-workers mode is "none", referrer is "no-referrer", mode is
+ // "websocket", credentials mode is "include", cache mode is "no-store" ,
+ // and redirect mode is "error".
+ const request = makeRequest({
+ urlList: [requestURL],
+ serviceWorkers: 'none',
+ referrer: 'no-referrer',
+ mode: 'websocket',
+ credentials: 'include',
+ cache: 'no-store',
+ redirect: 'error'
+ })
+
+ // Note: undici extension, allow setting custom headers.
+ if (options.headers) {
+ const headersList = new Headers(options.headers)[kHeadersList]
+
+ request.headersList = headersList
+ }
+
+ // 3. Append (`Upgrade`, `websocket`) to request’s header list.
+ // 4. Append (`Connection`, `Upgrade`) to request’s header list.
+ // Note: both of these are handled by undici currently.
+ // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397
+
+ // 5. Let keyValue be a nonce consisting of a randomly selected
+ // 16-byte value that has been forgiving-base64-encoded and
+ // isomorphic encoded.
+ const keyValue = crypto.randomBytes(16).toString('base64')
+
+ // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s
+ // header list.
+ request.headersList.append('sec-websocket-key', keyValue)
+
+ // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s
+ // header list.
+ request.headersList.append('sec-websocket-version', '13')
+
+ // 8. For each protocol in protocols, combine
+ // (`Sec-WebSocket-Protocol`, protocol) in request’s header
+ // list.
+ for (const protocol of protocols) {
+ request.headersList.append('sec-websocket-protocol', protocol)
+ }
+
+ // 9. Let permessageDeflate be a user-agent defined
+ // "permessage-deflate" extension header value.
+ // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673
+ // TODO: enable once permessage-deflate is supported
+ const permessageDeflate = '' // 'permessage-deflate; 15'
+
+ // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to
+ // request’s header list.
+ // request.headersList.append('sec-websocket-extensions', permessageDeflate)
+
+ // 11. Fetch request with useParallelQueue set to true, and
+ // processResponse given response being these steps:
+ const controller = fetching({
+ request,
+ useParallelQueue: true,
+ dispatcher: options.dispatcher ?? getGlobalDispatcher(),
+ processResponse (response) {
+ // 1. If response is a network error or its status is not 101,
+ // fail the WebSocket connection.
+ if (response.type === 'error' || response.status !== 101) {
+ failWebsocketConnection(ws, 'Received network error or non-101 status code.')
+ return
+ }
+
+ // 2. If protocols is not the empty list and extracting header
+ // list values given `Sec-WebSocket-Protocol` and response’s
+ // header list results in null, failure, or the empty byte
+ // sequence, then fail the WebSocket connection.
+ if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) {
+ failWebsocketConnection(ws, 'Server did not respond with sent protocols.')
+ return
+ }
+
+ // 3. Follow the requirements stated step 2 to step 6, inclusive,
+ // of the last set of steps in section 4.1 of The WebSocket
+ // Protocol to validate response. This either results in fail
+ // the WebSocket connection or the WebSocket connection is
+ // established.
+
+ // 2. If the response lacks an |Upgrade| header field or the |Upgrade|
+ // header field contains a value that is not an ASCII case-
+ // insensitive match for the value "websocket", the client MUST
+ // _Fail the WebSocket Connection_.
+ if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') {
+ failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".')
+ return
+ }
+
+ // 3. If the response lacks a |Connection| header field or the
+ // |Connection| header field doesn't contain a token that is an
+ // ASCII case-insensitive match for the value "Upgrade", the client
+ // MUST _Fail the WebSocket Connection_.
+ if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') {
+ failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".')
+ return
+ }
+
+ // 4. If the response lacks a |Sec-WebSocket-Accept| header field or
+ // the |Sec-WebSocket-Accept| contains a value other than the
+ // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket-
+ // Key| (as a string, not base64-decoded) with the string "258EAFA5-
+ // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and
+ // trailing whitespace, the client MUST _Fail the WebSocket
+ // Connection_.
+ const secWSAccept = response.headersList.get('Sec-WebSocket-Accept')
+ const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64')
+ if (secWSAccept !== digest) {
+ failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.')
+ return
+ }
+
+ // 5. If the response includes a |Sec-WebSocket-Extensions| header
+ // field and this header field indicates the use of an extension
+ // that was not present in the client's handshake (the server has
+ // indicated an extension not requested by the client), the client
+ // MUST _Fail the WebSocket Connection_. (The parsing of this
+ // header field to determine which extensions are requested is
+ // discussed in Section 9.1.)
+ const secExtension = response.headersList.get('Sec-WebSocket-Extensions')
+
+ if (secExtension !== null && secExtension !== permessageDeflate) {
+ failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.')
+ return
+ }
+
+ // 6. If the response includes a |Sec-WebSocket-Protocol| header field
+ // and this header field indicates the use of a subprotocol that was
+ // not present in the client's handshake (the server has indicated a
+ // subprotocol not requested by the client), the client MUST _Fail
+ // the WebSocket Connection_.
+ const secProtocol = response.headersList.get('Sec-WebSocket-Protocol')
+
+ if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) {
+ failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.')
+ return
+ }
+
+ response.socket.on('data', onSocketData)
+ response.socket.on('close', onSocketClose)
+ response.socket.on('error', onSocketError)
+
+ if (channels.open.hasSubscribers) {
+ channels.open.publish({
+ address: response.socket.address(),
+ protocol: secProtocol,
+ extensions: secExtension
+ })
+ }
+
+ onEstablish(response)
+ }
+ })
+
+ return controller
+}
+
+/**
+ * @param {Buffer} chunk
+ */
+function onSocketData (chunk) {
+ if (!this.ws[kByteParser].write(chunk)) {
+ this.pause()
+ }
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+ * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4
+ */
+function onSocketClose () {
+ const { ws } = this
+
+ // If the TCP connection was closed after the
+ // WebSocket closing handshake was completed, the WebSocket connection
+ // is said to have been closed _cleanly_.
+ const wasClean = ws[kSentClose] && ws[kReceivedClose]
+
+ let code = 1005
+ let reason = ''
+
+ const result = ws[kByteParser].closingInfo
+
+ if (result) {
+ code = result.code ?? 1005
+ reason = result.reason
+ } else if (!ws[kSentClose]) {
+ // If _The WebSocket
+ // Connection is Closed_ and no Close control frame was received by the
+ // endpoint (such as could occur if the underlying transport connection
+ // is lost), _The WebSocket Connection Close Code_ is considered to be
+ // 1006.
+ code = 1006
+ }
+
+ // 1. Change the ready state to CLOSED (3).
+ ws[kReadyState] = states.CLOSED
+
+ // 2. If the user agent was required to fail the WebSocket
+ // connection, or if the WebSocket connection was closed
+ // after being flagged as full, fire an event named error
+ // at the WebSocket object.
+ // TODO
+
+ // 3. Fire an event named close at the WebSocket object,
+ // using CloseEvent, with the wasClean attribute
+ // initialized to true if the connection closed cleanly
+ // and false otherwise, the code attribute initialized to
+ // the WebSocket connection close code, and the reason
+ // attribute initialized to the result of applying UTF-8
+ // decode without BOM to the WebSocket connection close
+ // reason.
+ fireEvent('close', ws, CloseEvent, {
+ wasClean, code, reason
+ })
+
+ if (channels.close.hasSubscribers) {
+ channels.close.publish({
+ websocket: ws,
+ code,
+ reason
+ })
+ }
+}
+
+function onSocketError (error) {
+ const { ws } = this
+
+ ws[kReadyState] = states.CLOSING
+
+ if (channels.socketError.hasSubscribers) {
+ channels.socketError.publish(error)
+ }
+
+ this.destroy()
+}
+
+module.exports = {
+ establishWebSocketConnection
+}
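Editor's note: the handshake above publishes to diagnostics channels; a sketch of observing them with Node's diagnostics_channel API.

const diagnosticsChannel = require('diagnostics_channel')

diagnosticsChannel.channel('undici:websocket:open').subscribe(({ address, protocol, extensions }) => {
  console.log('websocket open', address, protocol, extensions)
})

diagnosticsChannel.channel('undici:websocket:close').subscribe(({ code, reason }) => {
  console.log('websocket closed', code, reason)
})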
diff --git a/lib/websocket/constants.js b/lib/websocket/constants.js
new file mode 100644
index 0000000..406b8e3
--- /dev/null
+++ b/lib/websocket/constants.js
@@ -0,0 +1,51 @@
+'use strict'
+
+// This is a Globally Unique Identifier (GUID) used
+// to validate that the endpoint accepts websocket
+// connections.
+// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3
+const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
+
+/** @type {PropertyDescriptor} */
+const staticPropertyDescriptors = {
+ enumerable: true,
+ writable: false,
+ configurable: false
+}
+
+const states = {
+ CONNECTING: 0,
+ OPEN: 1,
+ CLOSING: 2,
+ CLOSED: 3
+}
+
+const opcodes = {
+ CONTINUATION: 0x0,
+ TEXT: 0x1,
+ BINARY: 0x2,
+ CLOSE: 0x8,
+ PING: 0x9,
+ PONG: 0xA
+}
+
+const maxUnsigned16Bit = 2 ** 16 - 1 // 65535
+
+const parserStates = {
+ INFO: 0,
+ PAYLOADLENGTH_16: 2,
+ PAYLOADLENGTH_64: 3,
+ READ_DATA: 4
+}
+
+const emptyBuffer = Buffer.allocUnsafe(0)
+
+module.exports = {
+ uid,
+ staticPropertyDescriptors,
+ states,
+ opcodes,
+ maxUnsigned16Bit,
+ parserStates,
+ emptyBuffer
+}
diff --git a/lib/websocket/events.js b/lib/websocket/events.js
new file mode 100644
index 0000000..621a226
--- /dev/null
+++ b/lib/websocket/events.js
@@ -0,0 +1,303 @@
+'use strict'
+
+const { webidl } = require('../fetch/webidl')
+const { kEnumerableProperty } = require('../core/util')
+const { MessagePort } = require('worker_threads')
+
+/**
+ * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent
+ */
+class MessageEvent extends Event {
+ #eventInit
+
+ constructor (type, eventInitDict = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' })
+
+ type = webidl.converters.DOMString(type)
+ eventInitDict = webidl.converters.MessageEventInit(eventInitDict)
+
+ super(type, eventInitDict)
+
+ this.#eventInit = eventInitDict
+ }
+
+ get data () {
+ webidl.brandCheck(this, MessageEvent)
+
+ return this.#eventInit.data
+ }
+
+ get origin () {
+ webidl.brandCheck(this, MessageEvent)
+
+ return this.#eventInit.origin
+ }
+
+ get lastEventId () {
+ webidl.brandCheck(this, MessageEvent)
+
+ return this.#eventInit.lastEventId
+ }
+
+ get source () {
+ webidl.brandCheck(this, MessageEvent)
+
+ return this.#eventInit.source
+ }
+
+ get ports () {
+ webidl.brandCheck(this, MessageEvent)
+
+ if (!Object.isFrozen(this.#eventInit.ports)) {
+ Object.freeze(this.#eventInit.ports)
+ }
+
+ return this.#eventInit.ports
+ }
+
+ initMessageEvent (
+ type,
+ bubbles = false,
+ cancelable = false,
+ data = null,
+ origin = '',
+ lastEventId = '',
+ source = null,
+ ports = []
+ ) {
+ webidl.brandCheck(this, MessageEvent)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' })
+
+ return new MessageEvent(type, {
+ bubbles, cancelable, data, origin, lastEventId, source, ports
+ })
+ }
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#the-closeevent-interface
+ */
+class CloseEvent extends Event {
+ #eventInit
+
+ constructor (type, eventInitDict = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' })
+
+ type = webidl.converters.DOMString(type)
+ eventInitDict = webidl.converters.CloseEventInit(eventInitDict)
+
+ super(type, eventInitDict)
+
+ this.#eventInit = eventInitDict
+ }
+
+ get wasClean () {
+ webidl.brandCheck(this, CloseEvent)
+
+ return this.#eventInit.wasClean
+ }
+
+ get code () {
+ webidl.brandCheck(this, CloseEvent)
+
+ return this.#eventInit.code
+ }
+
+ get reason () {
+ webidl.brandCheck(this, CloseEvent)
+
+ return this.#eventInit.reason
+ }
+}
+
+// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface
+class ErrorEvent extends Event {
+ #eventInit
+
+ constructor (type, eventInitDict) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' })
+
+ super(type, eventInitDict)
+
+ type = webidl.converters.DOMString(type)
+ eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {})
+
+ this.#eventInit = eventInitDict
+ }
+
+ get message () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.message
+ }
+
+ get filename () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.filename
+ }
+
+ get lineno () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.lineno
+ }
+
+ get colno () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.colno
+ }
+
+ get error () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.error
+ }
+}
+
+Object.defineProperties(MessageEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'MessageEvent',
+ configurable: true
+ },
+ data: kEnumerableProperty,
+ origin: kEnumerableProperty,
+ lastEventId: kEnumerableProperty,
+ source: kEnumerableProperty,
+ ports: kEnumerableProperty,
+ initMessageEvent: kEnumerableProperty
+})
+
+Object.defineProperties(CloseEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'CloseEvent',
+ configurable: true
+ },
+ reason: kEnumerableProperty,
+ code: kEnumerableProperty,
+ wasClean: kEnumerableProperty
+})
+
+Object.defineProperties(ErrorEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'ErrorEvent',
+ configurable: true
+ },
+ message: kEnumerableProperty,
+ filename: kEnumerableProperty,
+ lineno: kEnumerableProperty,
+ colno: kEnumerableProperty,
+ error: kEnumerableProperty
+})
+
+webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort)
+
+webidl.converters['sequence<MessagePort>'] = webidl.sequenceConverter(
+ webidl.converters.MessagePort
+)
+
+const eventInit = [
+ {
+ key: 'bubbles',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'cancelable',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'composed',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ }
+]
+
+webidl.converters.MessageEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: 'data',
+ converter: webidl.converters.any,
+ defaultValue: null
+ },
+ {
+ key: 'origin',
+ converter: webidl.converters.USVString,
+ defaultValue: ''
+ },
+ {
+ key: 'lastEventId',
+ converter: webidl.converters.DOMString,
+ defaultValue: ''
+ },
+ {
+ key: 'source',
+ // Node doesn't implement WindowProxy or ServiceWorker, so the only
+ // valid value for source is a MessagePort.
+ converter: webidl.nullableConverter(webidl.converters.MessagePort),
+ defaultValue: null
+ },
+ {
+ key: 'ports',
+ converter: webidl.converters['sequence<MessagePort>'],
+ get defaultValue () {
+ return []
+ }
+ }
+])
+
+webidl.converters.CloseEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: 'wasClean',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'code',
+ converter: webidl.converters['unsigned short'],
+ defaultValue: 0
+ },
+ {
+ key: 'reason',
+ converter: webidl.converters.USVString,
+ defaultValue: ''
+ }
+])
+
+webidl.converters.ErrorEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: 'message',
+ converter: webidl.converters.DOMString,
+ defaultValue: ''
+ },
+ {
+ key: 'filename',
+ converter: webidl.converters.USVString,
+ defaultValue: ''
+ },
+ {
+ key: 'lineno',
+ converter: webidl.converters['unsigned long'],
+ defaultValue: 0
+ },
+ {
+ key: 'colno',
+ converter: webidl.converters['unsigned long'],
+ defaultValue: 0
+ },
+ {
+ key: 'error',
+ converter: webidl.converters.any
+ }
+])
+
+module.exports = {
+ MessageEvent,
+ CloseEvent,
+ ErrorEvent
+}
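Editor's note: a sketch constructing a CloseEvent directly; this assumes a Node version that exposes the global Event constructor, and the relative path from this diff.

const { CloseEvent } = require('./lib/websocket/events')

const event = new CloseEvent('close', { wasClean: true, code: 1000, reason: 'normal closure' })
console.log(event.type, event.code, event.wasClean) // 'close' 1000 true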
diff --git a/lib/websocket/frame.js b/lib/websocket/frame.js
new file mode 100644
index 0000000..d867ad1
--- /dev/null
+++ b/lib/websocket/frame.js
@@ -0,0 +1,73 @@
+'use strict'
+
+const { maxUnsigned16Bit } = require('./constants')
+
+/** @type {import('crypto')} */
+let crypto
+try {
+ crypto = require('crypto')
+} catch {
+
+}
+
+class WebsocketFrameSend {
+ /**
+ * @param {Buffer|undefined} data
+ */
+ constructor (data) {
+ this.frameData = data
+ this.maskKey = crypto.randomBytes(4)
+ }
+
+ createFrame (opcode) {
+ const bodyLength = this.frameData?.byteLength ?? 0
+
+ /** @type {number} */
+ let payloadLength = bodyLength // 0-125
+ let offset = 6
+
+ if (bodyLength > maxUnsigned16Bit) {
+ offset += 8 // payload length is next 8 bytes
+ payloadLength = 127
+ } else if (bodyLength > 125) {
+ offset += 2 // payload length is next 2 bytes
+ payloadLength = 126
+ }
+
+ const buffer = Buffer.allocUnsafe(bodyLength + offset)
+
+ // Clear first 2 bytes, everything else is overwritten
+ buffer[0] = buffer[1] = 0
+ buffer[0] |= 0x80 // FIN
+ buffer[0] = (buffer[0] & 0xF0) + opcode // opcode
+
+ /*! ws. MIT License. Einar Otto Stangvik <einaros@gmail.com> */
+ buffer[offset - 4] = this.maskKey[0]
+ buffer[offset - 3] = this.maskKey[1]
+ buffer[offset - 2] = this.maskKey[2]
+ buffer[offset - 1] = this.maskKey[3]
+
+ buffer[1] = payloadLength
+
+ if (payloadLength === 126) {
+ buffer.writeUInt16BE(bodyLength, 2)
+ } else if (payloadLength === 127) {
+ // Clear extended payload length
+ buffer[2] = buffer[3] = 0
+ buffer.writeUIntBE(bodyLength, 4, 6)
+ }
+
+ buffer[1] |= 0x80 // MASK
+
+ // mask body
+ for (let i = 0; i < bodyLength; i++) {
+ buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4]
+ }
+
+ return buffer
+ }
+}
+
+module.exports = {
+ WebsocketFrameSend
+}
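Editor's note: a sketch of the frame layout produced above for a small text frame: 2 header bytes, 4 mask-key bytes, then the masked payload. Require paths assume this diff's layout.

const { WebsocketFrameSend } = require('./lib/websocket/frame')
const { opcodes } = require('./lib/websocket/constants')

const frame = new WebsocketFrameSend(Buffer.from('hello'))
const bytes = frame.createFrame(opcodes.TEXT)

console.log(bytes[0].toString(2).padStart(8, '0')) // '10000001': FIN set, opcode 0x1 (TEXT)
console.log((bytes[1] & 0x80) !== 0)               // true: client frames are always masked
console.log(bytes.length)                          // 11 = 2 header + 4 mask key + 5 payload bytes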
diff --git a/lib/websocket/receiver.js b/lib/websocket/receiver.js
new file mode 100644
index 0000000..bdd2031
--- /dev/null
+++ b/lib/websocket/receiver.js
@@ -0,0 +1,344 @@
+'use strict'
+
+const { Writable } = require('stream')
+const diagnosticsChannel = require('diagnostics_channel')
+const { parserStates, opcodes, states, emptyBuffer } = require('./constants')
+const { kReadyState, kSentClose, kResponse, kReceivedClose } = require('./symbols')
+const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = require('./util')
+const { WebsocketFrameSend } = require('./frame')
+
+// This code was influenced by ws released under the MIT license.
+// Copyright (c) 2011 Einar Otto Stangvik <einaros@gmail.com>
+// Copyright (c) 2013 Arnout Kazemier and contributors
+// Copyright (c) 2016 Luigi Pinca and contributors
+
+const channels = {}
+channels.ping = diagnosticsChannel.channel('undici:websocket:ping')
+channels.pong = diagnosticsChannel.channel('undici:websocket:pong')
+
+class ByteParser extends Writable {
+ #buffers = []
+ #byteOffset = 0
+
+ #state = parserStates.INFO
+
+ #info = {}
+ #fragments = []
+
+ constructor (ws) {
+ super()
+
+ this.ws = ws
+ }
+
+ /**
+ * @param {Buffer} chunk
+ * @param {() => void} callback
+ */
+ _write (chunk, _, callback) {
+ this.#buffers.push(chunk)
+ this.#byteOffset += chunk.length
+
+ this.run(callback)
+ }
+
+ /**
+   * Runs whenever a new chunk is received.
+   * The callback is called when there are no more chunks to process,
+   * or when not enough bytes are buffered to continue parsing.
+ */
+ run (callback) {
+ while (true) {
+ if (this.#state === parserStates.INFO) {
+ // If there aren't enough bytes to parse the payload length, etc.
+ if (this.#byteOffset < 2) {
+ return callback()
+ }
+
+ const buffer = this.consume(2)
+
+ this.#info.fin = (buffer[0] & 0x80) !== 0
+ this.#info.opcode = buffer[0] & 0x0F
+
+        // If we receive a fragmented message, we use the opcode of the first
+        // frame to parse the full message as binary/text once it is terminated
+ this.#info.originalOpcode ??= this.#info.opcode
+
+ this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION
+
+ if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) {
+ // Only text and binary frames can be fragmented
+ failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.')
+ return
+ }
+
+ const payloadLength = buffer[1] & 0x7F
+
+ if (payloadLength <= 125) {
+ this.#info.payloadLength = payloadLength
+ this.#state = parserStates.READ_DATA
+ } else if (payloadLength === 126) {
+ this.#state = parserStates.PAYLOADLENGTH_16
+ } else if (payloadLength === 127) {
+ this.#state = parserStates.PAYLOADLENGTH_64
+ }
+
+ if (this.#info.fragmented && payloadLength > 125) {
+ // A fragmented frame can't be fragmented itself
+ failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.')
+ return
+ } else if (
+ (this.#info.opcode === opcodes.PING ||
+ this.#info.opcode === opcodes.PONG ||
+ this.#info.opcode === opcodes.CLOSE) &&
+ payloadLength > 125
+ ) {
+ // Control frames can have a payload length of 125 bytes MAX
+ failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.')
+ return
+ } else if (this.#info.opcode === opcodes.CLOSE) {
+ if (payloadLength === 1) {
+ failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.')
+ return
+ }
+
+ const body = this.consume(payloadLength)
+
+ this.#info.closeInfo = this.parseCloseBody(false, body)
+
+ if (!this.ws[kSentClose]) {
+ // If an endpoint receives a Close frame and did not previously send a
+ // Close frame, the endpoint MUST send a Close frame in response. (When
+ // sending a Close frame in response, the endpoint typically echos the
+ // status code it received.)
+ const body = Buffer.allocUnsafe(2)
+ body.writeUInt16BE(this.#info.closeInfo.code, 0)
+ const closeFrame = new WebsocketFrameSend(body)
+
+ this.ws[kResponse].socket.write(
+ closeFrame.createFrame(opcodes.CLOSE),
+ (err) => {
+ if (!err) {
+ this.ws[kSentClose] = true
+ }
+ }
+ )
+ }
+
+ // Upon either sending or receiving a Close control frame, it is said
+ // that _The WebSocket Closing Handshake is Started_ and that the
+ // WebSocket connection is in the CLOSING state.
+ this.ws[kReadyState] = states.CLOSING
+ this.ws[kReceivedClose] = true
+
+ this.end()
+
+ return
+ } else if (this.#info.opcode === opcodes.PING) {
+ // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in
+ // response, unless it already received a Close frame.
+ // A Pong frame sent in response to a Ping frame must have identical
+ // "Application data"
+
+ const body = this.consume(payloadLength)
+
+ if (!this.ws[kReceivedClose]) {
+ const frame = new WebsocketFrameSend(body)
+
+ this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG))
+
+ if (channels.ping.hasSubscribers) {
+ channels.ping.publish({
+ payload: body
+ })
+ }
+ }
+
+ this.#state = parserStates.INFO
+
+ if (this.#byteOffset > 0) {
+ continue
+ } else {
+ callback()
+ return
+ }
+ } else if (this.#info.opcode === opcodes.PONG) {
+ // A Pong frame MAY be sent unsolicited. This serves as a
+ // unidirectional heartbeat. A response to an unsolicited Pong frame is
+ // not expected.
+
+ const body = this.consume(payloadLength)
+
+ if (channels.pong.hasSubscribers) {
+ channels.pong.publish({
+ payload: body
+ })
+ }
+
+ if (this.#byteOffset > 0) {
+ continue
+ } else {
+ callback()
+ return
+ }
+ }
+ } else if (this.#state === parserStates.PAYLOADLENGTH_16) {
+ if (this.#byteOffset < 2) {
+ return callback()
+ }
+
+ const buffer = this.consume(2)
+
+ this.#info.payloadLength = buffer.readUInt16BE(0)
+ this.#state = parserStates.READ_DATA
+ } else if (this.#state === parserStates.PAYLOADLENGTH_64) {
+ if (this.#byteOffset < 8) {
+ return callback()
+ }
+
+ const buffer = this.consume(8)
+ const upper = buffer.readUInt32BE(0)
+
+ // 2^31 is the maximum number of bytes an ArrayBuffer can contain
+ // on 32-bit systems; on 64-bit systems the limit is 2^53-1 bytes.
+ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length
+ // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275
+ // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e
+ if (upper > 2 ** 31 - 1) {
+ failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.')
+ return
+ }
+
+ const lower = buffer.readUInt32BE(4)
+
+ // Recombine the two big-endian 32-bit halves into the full 64-bit length
+ this.#info.payloadLength = (upper * 2 ** 32) + lower
+ this.#state = parserStates.READ_DATA
+ } else if (this.#state === parserStates.READ_DATA) {
+ if (this.#byteOffset < this.#info.payloadLength) {
+ // Not enough bytes are buffered yet to read the full payload; wait for more data
+ return callback()
+ } else if (this.#byteOffset >= this.#info.payloadLength) {
+ // Enough bytes are buffered for the whole payload (the peer may have sent multiple frames in one chunk)
+
+ const body = this.consume(this.#info.payloadLength)
+
+ this.#fragments.push(body)
+
+ // If the frame is unfragmented, or a fragmented frame was terminated,
+ // a message was received
+ if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) {
+ const fullMessage = Buffer.concat(this.#fragments)
+
+ websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage)
+
+ this.#info = {}
+ this.#fragments.length = 0
+ }
+
+ this.#state = parserStates.INFO
+ }
+ }
+
+ if (this.#byteOffset > 0) {
+ continue
+ } else {
+ callback()
+ break
+ }
+ }
+ }
+
+ /**
+ * Take n bytes from the buffered Buffers
+ * @param {number} n
+ * @returns {Buffer|null}
+ */
+ consume (n) {
+ if (n > this.#byteOffset) {
+ return null
+ } else if (n === 0) {
+ return emptyBuffer
+ }
+
+ if (this.#buffers[0].length === n) {
+ this.#byteOffset -= this.#buffers[0].length
+ return this.#buffers.shift()
+ }
+
+ const buffer = Buffer.allocUnsafe(n)
+ let offset = 0
+
+ while (offset !== n) {
+ const next = this.#buffers[0]
+ const { length } = next
+
+ if (length + offset === n) {
+ buffer.set(this.#buffers.shift(), offset)
+ break
+ } else if (length + offset > n) {
+ buffer.set(next.subarray(0, n - offset), offset)
+ this.#buffers[0] = next.subarray(n - offset)
+ break
+ } else {
+ buffer.set(this.#buffers.shift(), offset)
+ offset += next.length
+ }
+ }
+
+ this.#byteOffset -= n
+
+ return buffer
+ }
+
+ parseCloseBody (onlyCode, data) {
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5
+ /** @type {number|undefined} */
+ let code
+
+ if (data.length >= 2) {
+ // _The WebSocket Connection Close Code_ is
+ // defined as the status code (Section 7.4) contained in the first Close
+ // control frame received by the application
+ code = data.readUInt16BE(0)
+ }
+
+ if (onlyCode) {
+ if (!isValidStatusCode(code)) {
+ return null
+ }
+
+ return { code }
+ }
+
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6
+ /** @type {Buffer} */
+ let reason = data.subarray(2)
+
+ // Remove BOM
+ if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) {
+ reason = reason.subarray(3)
+ }
+
+ if (code !== undefined && !isValidStatusCode(code)) {
+ return null
+ }
+
+ try {
+ // TODO: optimize this
+ reason = new TextDecoder('utf-8', { fatal: true }).decode(reason)
+ } catch {
+ return null
+ }
+
+ return { code, reason }
+ }
+
+ get closingInfo () {
+ return this.#info.closeInfo
+ }
+}
+
+module.exports = {
+ ByteParser
+}
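
A minimal stand-alone sketch of the frame-header math ByteParser applies above
(illustrative only, not part of the patch; every name below is local to the example):

'use strict'

// Encode the first two bytes of an unmasked text frame:
// FIN = 1, opcode = 0x1 (text), payload length = 5.
const header = Buffer.from([0x80 | 0x01, 0x05])

// Decode with the same masks the INFO state uses.
const fin = (header[0] & 0x80) !== 0   // true
const opcode = header[0] & 0x0F        // 1 (text)
const payloadLength = header[1] & 0x7F // 5

console.log({ fin, opcode, payloadLength })

// When the initial length byte is 127, the parser reads an 8-byte extended
// length as two big-endian 32-bit halves and recombines them.
const extended = Buffer.alloc(8)
extended.writeUInt32BE(0, 0)      // upper half
extended.writeUInt32BE(70000, 4)  // lower half

const upper = extended.readUInt32BE(0)
const lower = extended.readUInt32BE(4)
console.log((upper * 2 ** 32) + lower) // 70000
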
diff --git a/lib/websocket/symbols.js b/lib/websocket/symbols.js
new file mode 100644
index 0000000..11d03e3
--- /dev/null
+++ b/lib/websocket/symbols.js
@@ -0,0 +1,12 @@
+'use strict'
+
+module.exports = {
+ kWebSocketURL: Symbol('url'),
+ kReadyState: Symbol('ready state'),
+ kController: Symbol('controller'),
+ kResponse: Symbol('response'),
+ kBinaryType: Symbol('binary type'),
+ kSentClose: Symbol('sent close'),
+ kReceivedClose: Symbol('received close'),
+ kByteParser: Symbol('byte parser')
+}
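
A small, hypothetical illustration (not part of the patch) of why per-instance
state is keyed by symbols rather than string properties: symbol keys cannot
collide with, or be enumerated as, ordinary properties on the public object.

'use strict'

const kReadyState = Symbol('ready state')

class Example {
  constructor () {
    this[kReadyState] = 0 // e.g. CONNECTING
  }
}

const ex = new Example()
console.log(Object.keys(ex))  // [] – symbol-keyed state is hidden from enumeration
console.log(ex[kReadyState])  // 0  – reachable only by holding the symbol itself
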
diff --git a/lib/websocket/util.js b/lib/websocket/util.js
new file mode 100644
index 0000000..6c59b2c
--- /dev/null
+++ b/lib/websocket/util.js
@@ -0,0 +1,200 @@
+'use strict'
+
+const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = require('./symbols')
+const { states, opcodes } = require('./constants')
+const { MessageEvent, ErrorEvent } = require('./events')
+
+/* globals Blob */
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isEstablished (ws) {
+ // If the server's response is validated as provided for above, it is
+ // said that _The WebSocket Connection is Established_ and that the
+ // WebSocket Connection is in the OPEN state.
+ return ws[kReadyState] === states.OPEN
+}
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isClosing (ws) {
+ // Upon either sending or receiving a Close control frame, it is said
+ // that _The WebSocket Closing Handshake is Started_ and that the
+ // WebSocket connection is in the CLOSING state.
+ return ws[kReadyState] === states.CLOSING
+}
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isClosed (ws) {
+ return ws[kReadyState] === states.CLOSED
+}
+
+/**
+ * @see https://dom.spec.whatwg.org/#concept-event-fire
+ * @param {string} e
+ * @param {EventTarget} target
+ * @param {EventInit | undefined} eventInitDict
+ */
+function fireEvent (e, target, eventConstructor = Event, eventInitDict) {
+ // 1. If eventConstructor is not given, then let eventConstructor be Event.
+
+ // 2. Let event be the result of creating an event given eventConstructor,
+ // in the relevant realm of target.
+ // 3. Initialize event’s type attribute to e.
+ const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap
+
+ // 4. Initialize any other IDL attributes of event as described in the
+ // invocation of this algorithm.
+
+ // 5. Return the result of dispatching event at target, with legacy target
+ // override flag set if set.
+ target.dispatchEvent(event)
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+ * @param {import('./websocket').WebSocket} ws
+ * @param {number} type Opcode
+ * @param {Buffer} data application data
+ */
+function websocketMessageReceived (ws, type, data) {
+ // 1. If ready state is not OPEN (1), then return.
+ if (ws[kReadyState] !== states.OPEN) {
+ return
+ }
+
+ // 2. Let dataForEvent be determined by switching on type and binary type:
+ let dataForEvent
+
+ if (type === opcodes.TEXT) {
+ // -> type indicates that the data is Text
+ // a new DOMString containing data
+ try {
+ dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data)
+ } catch {
+ failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.')
+ return
+ }
+ } else if (type === opcodes.BINARY) {
+ if (ws[kBinaryType] === 'blob') {
+ // -> type indicates that the data is Binary and binary type is "blob"
+ // a new Blob object, created in the relevant Realm of the WebSocket
+ // object, that represents data as its raw data
+ dataForEvent = new Blob([data])
+ } else {
+ // -> type indicates that the data is Binary and binary type is "arraybuffer"
+ // a new ArrayBuffer object, created in the relevant Realm of the
+ // WebSocket object, whose contents are data
+ dataForEvent = new Uint8Array(data).buffer
+ }
+ }
+
+ // 3. Fire an event named message at the WebSocket object, using MessageEvent,
+ // with the origin attribute initialized to the serialization of the WebSocket
+ // object’s url's origin, and the data attribute initialized to dataForEvent.
+ fireEvent('message', ws, MessageEvent, {
+ origin: ws[kWebSocketURL].origin,
+ data: dataForEvent
+ })
+}
+
+/**
+ * @see https://datatracker.ietf.org/doc/html/rfc6455
+ * @see https://datatracker.ietf.org/doc/html/rfc2616
+ * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407
+ * @param {string} protocol
+ */
+function isValidSubprotocol (protocol) {
+ // If present, this value indicates one
+ // or more comma-separated subprotocol the client wishes to speak,
+ // ordered by preference. The elements that comprise this value
+ // MUST be non-empty strings with characters in the range U+0021 to
+ // U+007E not including separator characters as defined in
+ // [RFC2616] and MUST all be unique strings.
+ if (protocol.length === 0) {
+ return false
+ }
+
+ for (const char of protocol) {
+ const code = char.charCodeAt(0)
+
+ if (
+ code < 0x21 ||
+ code > 0x7E ||
+ char === '(' ||
+ char === ')' ||
+ char === '<' ||
+ char === '>' ||
+ char === '@' ||
+ char === ',' ||
+ char === ';' ||
+ char === ':' ||
+ char === '\\' ||
+ char === '"' ||
+ char === '/' ||
+ char === '[' ||
+ char === ']' ||
+ char === '?' ||
+ char === '=' ||
+ char === '{' ||
+ char === '}' ||
+ code === 32 || // SP
+ code === 9 // HT
+ ) {
+ return false
+ }
+ }
+
+ return true
+}
+
+/**
+ * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.4
+ * @param {number} code
+ */
+function isValidStatusCode (code) {
+ if (code >= 1000 && code < 1015) {
+ return (
+ code !== 1004 && // reserved
+ code !== 1005 && // "MUST NOT be set as a status code"
+ code !== 1006 // "MUST NOT be set as a status code"
+ )
+ }
+
+ return code >= 3000 && code <= 4999
+}
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ * @param {string|undefined} reason
+ */
+function failWebsocketConnection (ws, reason) {
+ const { [kController]: controller, [kResponse]: response } = ws
+
+ controller.abort()
+
+ if (response?.socket && !response.socket.destroyed) {
+ response.socket.destroy()
+ }
+
+ if (reason) {
+ fireEvent('error', ws, ErrorEvent, {
+ error: new Error(reason)
+ })
+ }
+}
+
+module.exports = {
+ isEstablished,
+ isClosing,
+ isClosed,
+ fireEvent,
+ isValidSubprotocol,
+ isValidStatusCode,
+ failWebsocketConnection,
+ websocketMessageReceived
+}
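
A few spot checks of the validation helpers above (illustrative only; the
require path assumes the sketch is run from the repository root, where this
file lives at lib/websocket/util.js):

'use strict'

const { isValidStatusCode, isValidSubprotocol } = require('./lib/websocket/util')

console.log(isValidStatusCode(1000)) // true  – normal closure
console.log(isValidStatusCode(1005)) // false – "MUST NOT be set as a status code"
console.log(isValidStatusCode(2999)) // false – below the 3000-4999 application range
console.log(isValidStatusCode(4999)) // true  – top of the private-use range

console.log(isValidSubprotocol('chat'))      // true
console.log(isValidSubprotocol('bad token')) // false – SP is a separator character
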
diff --git a/lib/websocket/websocket.js b/lib/websocket/websocket.js
new file mode 100644
index 0000000..e4aa58f
--- /dev/null
+++ b/lib/websocket/websocket.js
@@ -0,0 +1,641 @@
+'use strict'
+
+const { webidl } = require('../fetch/webidl')
+const { DOMException } = require('../fetch/constants')
+const { URLSerializer } = require('../fetch/dataURL')
+const { getGlobalOrigin } = require('../fetch/global')
+const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = require('./constants')
+const {
+ kWebSocketURL,
+ kReadyState,
+ kController,
+ kBinaryType,
+ kResponse,
+ kSentClose,
+ kByteParser
+} = require('./symbols')
+const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = require('./util')
+const { establishWebSocketConnection } = require('./connection')
+const { WebsocketFrameSend } = require('./frame')
+const { ByteParser } = require('./receiver')
+const { kEnumerableProperty, isBlobLike } = require('../core/util')
+const { getGlobalDispatcher } = require('../global')
+const { types } = require('util')
+
+let experimentalWarned = false
+
+// https://websockets.spec.whatwg.org/#interface-definition
+class WebSocket extends EventTarget {
+ #events = {
+ open: null,
+ error: null,
+ close: null,
+ message: null
+ }
+
+ #bufferedAmount = 0
+ #protocol = ''
+ #extensions = ''
+
+ /**
+ * @param {string} url
+ * @param {string|string[]} protocols
+ */
+ constructor (url, protocols = []) {
+ super()
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' })
+
+ if (!experimentalWarned) {
+ experimentalWarned = true
+ process.emitWarning('WebSockets are experimental, expect them to change at any time.', {
+ code: 'UNDICI-WS'
+ })
+ }
+
+ const options = webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'](protocols)
+
+ url = webidl.converters.USVString(url)
+ protocols = options.protocols
+
+ // 1. Let baseURL be this's relevant settings object's API base URL.
+ const baseURL = getGlobalOrigin()
+
+ // 1. Let urlRecord be the result of applying the URL parser to url with baseURL.
+ let urlRecord
+
+ try {
+ urlRecord = new URL(url, baseURL)
+ } catch (e) {
+ // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException.
+ throw new DOMException(e, 'SyntaxError')
+ }
+
+ // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws".
+ if (urlRecord.protocol === 'http:') {
+ urlRecord.protocol = 'ws:'
+ } else if (urlRecord.protocol === 'https:') {
+ // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss".
+ urlRecord.protocol = 'wss:'
+ }
+
+ // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException.
+ if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') {
+ throw new DOMException(
+ `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`,
+ 'SyntaxError'
+ )
+ }
+
+ // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError"
+ // DOMException.
+ if (urlRecord.hash || urlRecord.href.endsWith('#')) {
+ throw new DOMException('Got fragment', 'SyntaxError')
+ }
+
+ // 8. If protocols is a string, set protocols to a sequence consisting
+ // of just that string.
+ if (typeof protocols === 'string') {
+ protocols = [protocols]
+ }
+
+ // 9. If any of the values in protocols occur more than once or otherwise
+ // fail to match the requirements for elements that comprise the value
+ // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket
+ // protocol, then throw a "SyntaxError" DOMException.
+ if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) {
+ throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')
+ }
+
+ if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) {
+ throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')
+ }
+
+ // 10. Set this's url to urlRecord.
+ this[kWebSocketURL] = new URL(urlRecord.href)
+
+ // 11. Let client be this's relevant settings object.
+
+ // 12. Run this step in parallel:
+
+ // 1. Establish a WebSocket connection given urlRecord, protocols,
+ // and client.
+ this[kController] = establishWebSocketConnection(
+ urlRecord,
+ protocols,
+ this,
+ (response) => this.#onConnectionEstablished(response),
+ options
+ )
+
+ // Each WebSocket object has an associated ready state, which is a
+ // number representing the state of the connection. Initially it must
+ // be CONNECTING (0).
+ this[kReadyState] = WebSocket.CONNECTING
+
+ // The extensions attribute must initially return the empty string.
+
+ // The protocol attribute must initially return the empty string.
+
+ // Each WebSocket object has an associated binary type, which is a
+ // BinaryType. Initially it must be "blob".
+ this[kBinaryType] = 'blob'
+ }
+
+ /**
+ * @see https://websockets.spec.whatwg.org/#dom-websocket-close
+ * @param {number|undefined} code
+ * @param {string|undefined} reason
+ */
+ close (code = undefined, reason = undefined) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (code !== undefined) {
+ code = webidl.converters['unsigned short'](code, { clamp: true })
+ }
+
+ if (reason !== undefined) {
+ reason = webidl.converters.USVString(reason)
+ }
+
+ // 1. If code is present, but is neither an integer equal to 1000 nor an
+ // integer in the range 3000 to 4999, inclusive, throw an
+ // "InvalidAccessError" DOMException.
+ if (code !== undefined) {
+ if (code !== 1000 && (code < 3000 || code > 4999)) {
+ throw new DOMException('invalid code', 'InvalidAccessError')
+ }
+ }
+
+ let reasonByteLength = 0
+
+ // 2. If reason is present, then run these substeps:
+ if (reason !== undefined) {
+ // 1. Let reasonBytes be the result of encoding reason.
+ // 2. If reasonBytes is longer than 123 bytes, then throw a
+ // "SyntaxError" DOMException.
+ reasonByteLength = Buffer.byteLength(reason)
+
+ if (reasonByteLength > 123) {
+ throw new DOMException(
+ `Reason must not be greater than 123 bytes; received ${reasonByteLength}`,
+ 'SyntaxError'
+ )
+ }
+ }
+
+ // 3. Run the first matching steps from the following list:
+ if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) {
+ // If this's ready state is CLOSING (2) or CLOSED (3)
+ // Do nothing.
+ } else if (!isEstablished(this)) {
+ // If the WebSocket connection is not yet established
+ // Fail the WebSocket connection and set this's ready state
+ // to CLOSING (2).
+ failWebsocketConnection(this, 'Connection was closed before it was established.')
+ this[kReadyState] = WebSocket.CLOSING
+ } else if (!isClosing(this)) {
+ // If the WebSocket closing handshake has not yet been started
+ // Start the WebSocket closing handshake and set this's ready
+ // state to CLOSING (2).
+ // - If neither code nor reason is present, the WebSocket Close
+ // message must not have a body.
+ // - If code is present, then the status code to use in the
+ // WebSocket Close message must be the integer given by code.
+ // - If reason is also present, then reasonBytes must be
+ // provided in the Close message after the status code.
+
+ const frame = new WebsocketFrameSend()
+
+ // If neither code nor reason is present, the WebSocket Close
+ // message must not have a body.
+
+ // If code is present, then the status code to use in the
+ // WebSocket Close message must be the integer given by code.
+ if (code !== undefined && reason === undefined) {
+ frame.frameData = Buffer.allocUnsafe(2)
+ frame.frameData.writeUInt16BE(code, 0)
+ } else if (code !== undefined && reason !== undefined) {
+ // If reason is also present, then reasonBytes must be
+ // provided in the Close message after the status code.
+ frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength)
+ frame.frameData.writeUInt16BE(code, 0)
+ // the body MAY contain UTF-8-encoded data with value /reason/
+ frame.frameData.write(reason, 2, 'utf-8')
+ } else {
+ frame.frameData = emptyBuffer
+ }
+
+ /** @type {import('stream').Duplex} */
+ const socket = this[kResponse].socket
+
+ socket.write(frame.createFrame(opcodes.CLOSE), (err) => {
+ if (!err) {
+ this[kSentClose] = true
+ }
+ })
+
+ // Upon either sending or receiving a Close control frame, it is said
+ // that _The WebSocket Closing Handshake is Started_ and that the
+ // WebSocket connection is in the CLOSING state.
+ this[kReadyState] = states.CLOSING
+ } else {
+ // Otherwise
+ // Set this's ready state to CLOSING (2).
+ this[kReadyState] = WebSocket.CLOSING
+ }
+ }
+
+ /**
+ * @see https://websockets.spec.whatwg.org/#dom-websocket-send
+ * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data
+ */
+ send (data) {
+ webidl.brandCheck(this, WebSocket)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' })
+
+ data = webidl.converters.WebSocketSendData(data)
+
+ // 1. If this's ready state is CONNECTING, then throw an
+ // "InvalidStateError" DOMException.
+ if (this[kReadyState] === WebSocket.CONNECTING) {
+ throw new DOMException('Sent before connected.', 'InvalidStateError')
+ }
+
+ // 2. Run the appropriate set of steps from the following list:
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
+
+ if (!isEstablished(this) || isClosing(this)) {
+ return
+ }
+
+ /** @type {import('stream').Duplex} */
+ const socket = this[kResponse].socket
+
+ // If data is a string
+ if (typeof data === 'string') {
+ // If the WebSocket connection is established and the WebSocket
+ // closing handshake has not yet started, then the user agent
+ // must send a WebSocket Message comprised of the data argument
+ // using a text frame opcode; if the data cannot be sent, e.g.
+ // because it would need to be buffered but the buffer is full,
+ // the user agent must flag the WebSocket as full and then close
+ // the WebSocket connection. Any invocation of this method with a
+ // string argument that does not throw an exception must increase
+ // the bufferedAmount attribute by the number of bytes needed to
+ // express the argument as UTF-8.
+
+ const value = Buffer.from(data)
+ const frame = new WebsocketFrameSend(value)
+ const buffer = frame.createFrame(opcodes.TEXT)
+
+ this.#bufferedAmount += value.byteLength
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength
+ })
+ } else if (types.isArrayBuffer(data)) {
+ // If the WebSocket connection is established, and the WebSocket
+ // closing handshake has not yet started, then the user agent must
+ // send a WebSocket Message comprised of data using a binary frame
+ // opcode; if the data cannot be sent, e.g. because it would need
+ // to be buffered but the buffer is full, the user agent must flag
+ // the WebSocket as full and then close the WebSocket connection.
+ // The data to be sent is the data stored in the buffer described
+ // by the ArrayBuffer object. Any invocation of this method with an
+ // ArrayBuffer argument that does not throw an exception must
+ // increase the bufferedAmount attribute by the length of the
+ // ArrayBuffer in bytes.
+
+ const value = Buffer.from(data)
+ const frame = new WebsocketFrameSend(value)
+ const buffer = frame.createFrame(opcodes.BINARY)
+
+ this.#bufferedAmount += value.byteLength
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength
+ })
+ } else if (ArrayBuffer.isView(data)) {
+ // If the WebSocket connection is established, and the WebSocket
+ // closing handshake has not yet started, then the user agent must
+ // send a WebSocket Message comprised of data using a binary frame
+ // opcode; if the data cannot be sent, e.g. because it would need to
+ // be buffered but the buffer is full, the user agent must flag the
+ // WebSocket as full and then close the WebSocket connection. The
+ // data to be sent is the data stored in the section of the buffer
+ // described by the ArrayBuffer object that data references. Any
+ // invocation of this method with this kind of argument that does
+ // not throw an exception must increase the bufferedAmount attribute
+ // by the length of data’s buffer in bytes.
+
+ const ab = Buffer.from(data, data.byteOffset, data.byteLength)
+
+ const frame = new WebsocketFrameSend(ab)
+ const buffer = frame.createFrame(opcodes.BINARY)
+
+ this.#bufferedAmount += ab.byteLength
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= ab.byteLength
+ })
+ } else if (isBlobLike(data)) {
+ // If the WebSocket connection is established, and the WebSocket
+ // closing handshake has not yet started, then the user agent must
+ // send a WebSocket Message comprised of data using a binary frame
+ // opcode; if the data cannot be sent, e.g. because it would need to
+ // be buffered but the buffer is full, the user agent must flag the
+ // WebSocket as full and then close the WebSocket connection. The data
+ // to be sent is the raw data represented by the Blob object. Any
+ // invocation of this method with a Blob argument that does not throw
+ // an exception must increase the bufferedAmount attribute by the size
+ // of the Blob object’s raw data, in bytes.
+
+ const frame = new WebsocketFrameSend()
+
+ data.arrayBuffer().then((ab) => {
+ const value = Buffer.from(ab)
+ frame.frameData = value
+ const buffer = frame.createFrame(opcodes.BINARY)
+
+ this.#bufferedAmount += value.byteLength
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength
+ })
+ })
+ }
+ }
+
+ get readyState () {
+ webidl.brandCheck(this, WebSocket)
+
+ // The readyState getter steps are to return this's ready state.
+ return this[kReadyState]
+ }
+
+ get bufferedAmount () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#bufferedAmount
+ }
+
+ get url () {
+ webidl.brandCheck(this, WebSocket)
+
+ // The url getter steps are to return this's url, serialized.
+ return URLSerializer(this[kWebSocketURL])
+ }
+
+ get extensions () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#extensions
+ }
+
+ get protocol () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#protocol
+ }
+
+ get onopen () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#events.open
+ }
+
+ set onopen (fn) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (this.#events.open) {
+ this.removeEventListener('open', this.#events.open)
+ }
+
+ if (typeof fn === 'function') {
+ this.#events.open = fn
+ this.addEventListener('open', fn)
+ } else {
+ this.#events.open = null
+ }
+ }
+
+ get onerror () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#events.error
+ }
+
+ set onerror (fn) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (this.#events.error) {
+ this.removeEventListener('error', this.#events.error)
+ }
+
+ if (typeof fn === 'function') {
+ this.#events.error = fn
+ this.addEventListener('error', fn)
+ } else {
+ this.#events.error = null
+ }
+ }
+
+ get onclose () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#events.close
+ }
+
+ set onclose (fn) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (this.#events.close) {
+ this.removeEventListener('close', this.#events.close)
+ }
+
+ if (typeof fn === 'function') {
+ this.#events.close = fn
+ this.addEventListener('close', fn)
+ } else {
+ this.#events.close = null
+ }
+ }
+
+ get onmessage () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#events.message
+ }
+
+ set onmessage (fn) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (this.#events.message) {
+ this.removeEventListener('message', this.#events.message)
+ }
+
+ if (typeof fn === 'function') {
+ this.#events.message = fn
+ this.addEventListener('message', fn)
+ } else {
+ this.#events.message = null
+ }
+ }
+
+ get binaryType () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this[kBinaryType]
+ }
+
+ set binaryType (type) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (type !== 'blob' && type !== 'arraybuffer') {
+ this[kBinaryType] = 'blob'
+ } else {
+ this[kBinaryType] = type
+ }
+ }
+
+ /**
+ * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+ */
+ #onConnectionEstablished (response) {
+ // processResponse is called when the "response’s header list has been received and initialized."
+ // Once that happens, the connection is open.
+ this[kResponse] = response
+
+ const parser = new ByteParser(this)
+ parser.on('drain', function onParserDrain () {
+ this.ws[kResponse].socket.resume()
+ })
+
+ response.socket.ws = this
+ this[kByteParser] = parser
+
+ // 1. Change the ready state to OPEN (1).
+ this[kReadyState] = states.OPEN
+
+ // 2. Change the extensions attribute’s value to the extensions in use, if
+ // it is not the null value.
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1
+ const extensions = response.headersList.get('sec-websocket-extensions')
+
+ if (extensions !== null) {
+ this.#extensions = extensions
+ }
+
+ // 3. Change the protocol attribute’s value to the subprotocol in use, if
+ // it is not the null value.
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9
+ const protocol = response.headersList.get('sec-websocket-protocol')
+
+ if (protocol !== null) {
+ this.#protocol = protocol
+ }
+
+ // 4. Fire an event named open at the WebSocket object.
+ fireEvent('open', this)
+ }
+}
+
+// https://websockets.spec.whatwg.org/#dom-websocket-connecting
+WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING
+// https://websockets.spec.whatwg.org/#dom-websocket-open
+WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN
+// https://websockets.spec.whatwg.org/#dom-websocket-closing
+WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING
+// https://websockets.spec.whatwg.org/#dom-websocket-closed
+WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED
+
+Object.defineProperties(WebSocket.prototype, {
+ CONNECTING: staticPropertyDescriptors,
+ OPEN: staticPropertyDescriptors,
+ CLOSING: staticPropertyDescriptors,
+ CLOSED: staticPropertyDescriptors,
+ url: kEnumerableProperty,
+ readyState: kEnumerableProperty,
+ bufferedAmount: kEnumerableProperty,
+ onopen: kEnumerableProperty,
+ onerror: kEnumerableProperty,
+ onclose: kEnumerableProperty,
+ close: kEnumerableProperty,
+ onmessage: kEnumerableProperty,
+ binaryType: kEnumerableProperty,
+ send: kEnumerableProperty,
+ extensions: kEnumerableProperty,
+ protocol: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: 'WebSocket',
+ writable: false,
+ enumerable: false,
+ configurable: true
+ }
+})
+
+Object.defineProperties(WebSocket, {
+ CONNECTING: staticPropertyDescriptors,
+ OPEN: staticPropertyDescriptors,
+ CLOSING: staticPropertyDescriptors,
+ CLOSED: staticPropertyDescriptors
+})
+
+webidl.converters['sequence<DOMString>'] = webidl.sequenceConverter(
+ webidl.converters.DOMString
+)
+
+webidl.converters['DOMString or sequence<DOMString>'] = function (V) {
+ if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) {
+ return webidl.converters['sequence<DOMString>'](V)
+ }
+
+ return webidl.converters.DOMString(V)
+}
+
+// This implements the proposal made in https://github.com/whatwg/websockets/issues/42
+webidl.converters.WebSocketInit = webidl.dictionaryConverter([
+ {
+ key: 'protocols',
+ converter: webidl.converters['DOMString or sequence<DOMString>'],
+ get defaultValue () {
+ return []
+ }
+ },
+ {
+ key: 'dispatcher',
+ converter: (V) => V,
+ get defaultValue () {
+ return getGlobalDispatcher()
+ }
+ },
+ {
+ key: 'headers',
+ converter: webidl.nullableConverter(webidl.converters.HeadersInit)
+ }
+])
+
+webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'] = function (V) {
+ if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) {
+ return webidl.converters.WebSocketInit(V)
+ }
+
+ return { protocols: webidl.converters['DOMString or sequence<DOMString>'](V) }
+}
+
+webidl.converters.WebSocketSendData = function (V) {
+ if (webidl.util.Type(V) === 'Object') {
+ if (isBlobLike(V)) {
+ return webidl.converters.Blob(V, { strict: false })
+ }
+
+ if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) {
+ return webidl.converters.BufferSource(V)
+ }
+ }
+
+ return webidl.converters.USVString(V)
+}
+
+module.exports = {
+ WebSocket
+}
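
A minimal usage sketch of the class above (illustrative only; it assumes
undici's top-level entry point re-exports this class as WebSocket and that a
server is listening at the URL shown):

'use strict'

const { WebSocket } = require('undici')

const ws = new WebSocket('ws://localhost:8080', ['chat'])

ws.onopen = () => {
  ws.send('hello')                   // strings are sent as text frames
  ws.send(new Uint8Array([1, 2, 3])) // typed-array views are sent as binary frames
}

ws.onmessage = (event) => {
  // event.data is a string for text frames; for binary frames it is a Blob
  // or an ArrayBuffer depending on ws.binaryType (default: 'blob').
  console.log('received', event.data)
  ws.close(1000, 'done')
}

ws.onclose = (event) => {
  console.log('closed', event.code, event.reason)
}

Note that, per the constructor above, the first construction emits a one-time
experimental warning with code UNDICI-WS.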