Initial commit: MangaReader iOS App
✨ Features: - App iOS completa para leer manga sin publicidad - Scraper con WKWebView para manhwaweb.com - Sistema de descargas offline - Lector con zoom y navegación - Favoritos y progreso de lectura - Compatible con iOS 15+ y Sideloadly/3uTools 📦 Contenido: - Backend Node.js con Puppeteer (opcional) - App iOS con SwiftUI - Scraper de capítulos e imágenes - Sistema de almacenamiento local - Testing completo - Documentación exhaustiva 🧪 Prueba: Capítulo 789 de One Piece descargado exitosamente - 21 páginas descargadas - 4.68 MB total - URLs verificadas y funcionales 🎉 Generated with Claude Code (https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
59
backend/node_modules/undici/lib/api/abort-signal.js
generated
vendored
Normal file
59
backend/node_modules/undici/lib/api/abort-signal.js
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
'use strict'
|
||||
|
||||
const { addAbortListener } = require('../core/util')
|
||||
const { RequestAbortedError } = require('../core/errors')
|
||||
|
||||
const kListener = Symbol('kListener')
|
||||
const kSignal = Symbol('kSignal')
|
||||
|
||||
/**
 * Aborts the handler `self`: prefers the dispatcher-provided abort
 * callback when one has been attached, otherwise records the abort
 * reason on the handler; finally detaches the signal listener.
 *
 * @param {object} self - handler carrying the kSignal/kListener state
 */
function abort (self) {
  const reason = self[kSignal]?.reason
  if (self.abort) {
    self.abort(reason)
  } else {
    self.reason = reason ?? new RequestAbortedError()
  }
  removeSignal(self)
}
|
||||
|
||||
/**
 * Attaches an optional abort signal (EventTarget or EventEmitter) to the
 * handler. If the signal is already aborted the handler aborts right away;
 * otherwise a listener is registered that aborts on the 'abort' event.
 *
 * @param {object} self - handler to wire up
 * @param {AbortSignal|EventEmitter|null|undefined} signal
 */
function addSignal (self, signal) {
  // Reset any previous signal state on the handler.
  self.reason = null
  self[kSignal] = null
  self[kListener] = null

  if (!signal) {
    return
  }

  // Already aborted: run the abort path immediately and skip registration.
  if (signal.aborted) {
    abort(self)
    return
  }

  const onAbort = () => {
    abort(self)
  }

  self[kSignal] = signal
  self[kListener] = onAbort

  addAbortListener(signal, onAbort)
}
|
||||
|
||||
/**
 * Detaches the previously attached abort signal listener (if any) from the
 * handler and clears the stored signal/listener references.
 *
 * @param {object} self - handler carrying the kSignal/kListener state
 */
function removeSignal (self) {
  const signal = self[kSignal]
  if (!signal) {
    return
  }

  const listener = self[kListener]

  // EventTarget (AbortSignal) exposes removeEventListener; plain Node
  // EventEmitters expose removeListener instead.
  if ('removeEventListener' in signal) {
    signal.removeEventListener('abort', listener)
  } else {
    signal.removeListener('abort', listener)
  }

  self[kSignal] = null
  self[kListener] = null
}
|
||||
|
||||
module.exports = {
|
||||
addSignal,
|
||||
removeSignal
|
||||
}
|
||||
110
backend/node_modules/undici/lib/api/api-connect.js
generated
vendored
Normal file
110
backend/node_modules/undici/lib/api/api-connect.js
generated
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { AsyncResource } = require('node:async_hooks')
|
||||
const { InvalidArgumentError, SocketError } = require('../core/errors')
|
||||
const util = require('../core/util')
|
||||
const { addSignal, removeSignal } = require('./abort-signal')
|
||||
|
||||
/**
 * Dispatcher handler for HTTP CONNECT requests. Validates options up
 * front, tracks the dispatcher-provided abort callback, and delivers the
 * upgraded socket to the user callback via onUpgrade. Extends
 * AsyncResource so the callback runs in the caller's async context.
 */
class ConnectHandler extends AsyncResource {
  constructor (opts, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    const { signal, opaque, responseHeaders } = opts

    // Accept either an EventEmitter-style or EventTarget-style signal.
    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
    }

    super('UNDICI_CONNECT')

    this.opaque = opaque || null
    this.responseHeaders = responseHeaders || null
    this.callback = callback
    this.abort = null

    // Registers the abort listener; may set this.reason if already aborted.
    addSignal(this, signal)
  }

  // Called by the dispatcher once a connection is available.
  onConnect (abort, context) {
    // Signal fired before the connection was made: abort immediately.
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(this.callback)

    this.abort = abort
    this.context = context
  }

  // A CONNECT must be answered with an upgrade, never plain headers.
  onHeaders () {
    throw new SocketError('bad connect', null)
  }

  onUpgrade (statusCode, rawHeaders, socket) {
    const { callback, opaque, context } = this

    removeSignal(this)

    this.callback = null

    let headers = rawHeaders
    // Indicates is an HTTP2Session
    if (headers != null) {
      headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
    }

    // Deliver the tunnel socket to the user in the original async scope.
    this.runInAsyncScope(callback, null, null, {
      statusCode,
      headers,
      socket,
      opaque,
      context
    })
  }

  onError (err) {
    const { callback, opaque } = this

    removeSignal(this)

    if (callback) {
      this.callback = null
      // Defer so the error is reported asynchronously, never re-entrantly.
      queueMicrotask(() => {
        this.runInAsyncScope(callback, null, err, { opaque })
      })
    }
  }
}
|
||||
|
||||
/**
 * Issues an HTTP CONNECT request on this dispatcher.
 * Callback form: callback(err, { statusCode, headers, socket, opaque, context }).
 * With no callback, returns a Promise resolving to that result object.
 */
function connect (opts, callback) {
  // Promise mode: bridge onto the callback form.
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      connect.call(this, opts, (err, data) => (
        err ? reject(err) : resolve(data)
      ))
    })
  }

  try {
    const handler = new ConnectHandler(opts, callback)
    this.dispatch({ ...opts, method: 'CONNECT' }, handler)
  } catch (err) {
    // Without a usable callback there is no async channel; rethrow.
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts?.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
|
||||
|
||||
module.exports = connect
|
||||
252
backend/node_modules/undici/lib/api/api-pipeline.js
generated
vendored
Normal file
252
backend/node_modules/undici/lib/api/api-pipeline.js
generated
vendored
Normal file
@@ -0,0 +1,252 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
Readable,
|
||||
Duplex,
|
||||
PassThrough
|
||||
} = require('node:stream')
|
||||
const assert = require('node:assert')
|
||||
const { AsyncResource } = require('node:async_hooks')
|
||||
const {
|
||||
InvalidArgumentError,
|
||||
InvalidReturnValueError,
|
||||
RequestAbortedError
|
||||
} = require('../core/errors')
|
||||
const util = require('../core/util')
|
||||
const { addSignal, removeSignal } = require('./abort-signal')
|
||||
|
||||
// Shared no-op listener used to swallow expected 'error' events.
function noop () {}
|
||||
|
||||
const kResume = Symbol('resume')
|
||||
|
||||
/**
 * Readable side feeding the request body: holds at most one pending
 * write-callback (under kResume) and releases it when downstream demand
 * arrives via _read.
 */
class PipelineRequest extends Readable {
  constructor () {
    super({ autoDestroy: true })

    this[kResume] = null
  }

  _read () {
    const resume = this[kResume]

    if (resume) {
      // Clear before invoking so a re-entrant write can park a new callback.
      this[kResume] = null
      resume()
    }
  }

  _destroy (err, callback) {
    // Flush any parked write-callback so the writer is not left hanging.
    this._read()

    callback(err)
  }
}
|
||||
|
||||
/**
 * Readable wrapper over the response body; downstream demand is forwarded
 * straight to the dispatcher's resume callback stored under kResume.
 */
class PipelineResponse extends Readable {
  constructor (resume) {
    super({ autoDestroy: true })
    this[kResume] = resume
  }

  _read () {
    this[kResume]()
  }

  _destroy (err, callback) {
    // Being destroyed before 'end' was emitted counts as an aborted request.
    const reason = (!err && !this._readableState.endEmitted)
      ? new RequestAbortedError()
      : err

    callback(reason)
  }
}
|
||||
|
||||
/**
 * Dispatcher handler backing Client#pipeline. Exposes a Duplex (`ret`):
 * data written to it is pushed into `req` (the request body Readable),
 * and the Readable returned by the user's handler becomes the readable
 * side. The wiring between ret/req/res/body is statement-order sensitive.
 */
class PipelineHandler extends AsyncResource {
  constructor (opts, handler) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (typeof handler !== 'function') {
      throw new InvalidArgumentError('invalid handler')
    }

    const { signal, method, opaque, onInfo, responseHeaders } = opts

    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
    }

    // CONNECT has no body semantics compatible with pipelining.
    if (method === 'CONNECT') {
      throw new InvalidArgumentError('invalid method')
    }

    if (onInfo && typeof onInfo !== 'function') {
      throw new InvalidArgumentError('invalid onInfo callback')
    }

    super('UNDICI_PIPELINE')

    this.opaque = opaque || null
    this.responseHeaders = responseHeaders || null
    this.handler = handler
    this.abort = null
    this.context = null
    this.onInfo = onInfo || null

    // Request-body stream; errors are expected when aborting, so swallow.
    this.req = new PipelineRequest().on('error', noop)

    // User-facing duplex: writes feed the request, reads drain the body.
    this.ret = new Duplex({
      readableObjectMode: opts.objectMode,
      autoDestroy: true,
      read: () => {
        const { body } = this

        if (body?.resume) {
          body.resume()
        }
      },
      write: (chunk, encoding, callback) => {
        const { req } = this

        // Ack immediately when req has capacity (or is gone); otherwise
        // park the callback until req._read releases it.
        if (req.push(chunk, encoding) || req._readableState.destroyed) {
          callback()
        } else {
          req[kResume] = callback
        }
      },
      destroy: (err, callback) => {
        const { body, req, res, ret, abort } = this

        // Early teardown without 'end' is treated as an abort.
        if (!err && !ret._readableState.endEmitted) {
          err = new RequestAbortedError()
        }

        if (abort && err) {
          abort()
        }

        util.destroy(body, err)
        util.destroy(req, err)
        util.destroy(res, err)

        removeSignal(this)

        callback(err)
      }
    }).on('prefinish', () => {
      const { req } = this

      // Node < 15 does not call _final in same tick.
      req.push(null)
    })

    this.res = null

    addSignal(this, signal)
  }

  onConnect (abort, context) {
    const { res } = this

    // Signal already fired: abort before any work happens.
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(!res, 'pipeline cannot be retried')

    this.abort = abort
    this.context = context
  }

  onHeaders (statusCode, rawHeaders, resume) {
    const { opaque, handler, context } = this

    // 1xx informational responses: surface via onInfo, keep waiting.
    if (statusCode < 200) {
      if (this.onInfo) {
        const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
        this.onInfo({ statusCode, headers })
      }
      return
    }

    this.res = new PipelineResponse(resume)

    let body
    try {
      // Clear handler first so a retry cannot invoke it twice.
      this.handler = null
      const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
      body = this.runInAsyncScope(handler, null, {
        statusCode,
        headers,
        opaque,
        body: this.res,
        context
      })
    } catch (err) {
      // Ensure the orphaned response stream cannot emit an unhandled error.
      this.res.on('error', noop)
      throw err
    }

    if (!body || typeof body.on !== 'function') {
      throw new InvalidReturnValueError('expected Readable')
    }

    // Pipe the user's body stream into ret with backpressure.
    body
      .on('data', (chunk) => {
        const { ret, body } = this

        if (!ret.push(chunk) && body.pause) {
          body.pause()
        }
      })
      .on('error', (err) => {
        const { ret } = this

        util.destroy(ret, err)
      })
      .on('end', () => {
        const { ret } = this

        ret.push(null)
      })
      .on('close', () => {
        const { ret } = this

        // Closed without 'end' means the body was cut short.
        if (!ret._readableState.ended) {
          util.destroy(ret, new RequestAbortedError())
        }
      })

    this.body = body
  }

  onData (chunk) {
    const { res } = this
    return res.push(chunk)
  }

  onComplete (trailers) {
    const { res } = this
    res.push(null)
  }

  onError (err) {
    const { ret } = this
    this.handler = null
    util.destroy(ret, err)
  }
}
|
||||
|
||||
/**
 * Creates a duplex pipeline on this dispatcher: bytes written to the
 * returned stream become the request body, and the Readable returned by
 * `handler` becomes the stream's readable side.
 */
function pipeline (opts, handler) {
  try {
    const ph = new PipelineHandler(opts, handler)
    const dispatchOpts = { ...opts, body: ph.req }

    this.dispatch(dispatchOpts, ph)

    return ph.ret
  } catch (err) {
    // Surface construction/dispatch failures as an already-destroyed stream.
    return new PassThrough().destroy(err)
  }
}
|
||||
|
||||
module.exports = pipeline
|
||||
214
backend/node_modules/undici/lib/api/api-request.js
generated
vendored
Normal file
214
backend/node_modules/undici/lib/api/api-request.js
generated
vendored
Normal file
@@ -0,0 +1,214 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { AsyncResource } = require('node:async_hooks')
|
||||
const { Readable } = require('./readable')
|
||||
const { InvalidArgumentError, RequestAbortedError } = require('../core/errors')
|
||||
const util = require('../core/util')
|
||||
|
||||
// Shared no-op listener used to swallow expected 'error' events.
function noop () {}
|
||||
|
||||
/**
 * Dispatcher handler backing Client#request. Validates options, manages
 * the abort-signal listener lifecycle itself (not via addSignal — the
 * listener must outlive the constructor and transfer to the response
 * stream), and hands the caller a BodyReadable once headers arrive.
 */
class RequestHandler extends AsyncResource {
  constructor (opts, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    const { signal, method, opaque, body, onInfo, responseHeaders, highWaterMark } = opts

    // Validation happens inside try/catch so a provided body stream is
    // destroyed (not leaked) when the options are rejected.
    try {
      if (typeof callback !== 'function') {
        throw new InvalidArgumentError('invalid callback')
      }

      if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
        throw new InvalidArgumentError('invalid highWaterMark')
      }

      if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
        throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
      }

      if (method === 'CONNECT') {
        throw new InvalidArgumentError('invalid method')
      }

      if (onInfo && typeof onInfo !== 'function') {
        throw new InvalidArgumentError('invalid onInfo callback')
      }

      super('UNDICI_REQUEST')
    } catch (err) {
      if (util.isStream(body)) {
        util.destroy(body.on('error', noop), err)
      }
      throw err
    }

    this.method = method
    this.responseHeaders = responseHeaders || null
    this.opaque = opaque || null
    this.callback = callback
    this.res = null
    this.abort = null
    this.body = body
    this.trailers = {}
    this.context = null
    this.onInfo = onInfo || null
    this.highWaterMark = highWaterMark
    this.reason = null
    this.removeAbortListener = null

    if (signal?.aborted) {
      // Already aborted before dispatch: record reason, abort in onConnect.
      this.reason = signal.reason ?? new RequestAbortedError()
    } else if (signal) {
      this.removeAbortListener = util.addAbortListener(signal, () => {
        this.reason = signal.reason ?? new RequestAbortedError()
        if (this.res) {
          // Response already delivered: tear down the body stream.
          util.destroy(this.res.on('error', noop), this.reason)
        } else if (this.abort) {
          // Still in flight: cancel via the dispatcher's abort callback.
          this.abort(this.reason)
        }
      })
    }
  }

  onConnect (abort, context) {
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(this.callback)

    this.abort = abort
    this.context = context
  }

  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this

    const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)

    // 1xx informational responses: report via onInfo and keep waiting.
    if (statusCode < 200) {
      if (this.onInfo) {
        this.onInfo({ statusCode, headers })
      }
      return
    }

    // In 'raw' mode `headers` is the raw array, so parse again to get the
    // object form needed for content-type/content-length lookups.
    const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
    const contentType = parsedHeaders['content-type']
    const contentLength = parsedHeaders['content-length']
    const res = new Readable({
      resume,
      abort,
      contentType,
      // HEAD responses advertise a length but carry no body.
      contentLength: this.method !== 'HEAD' && contentLength
        ? Number(contentLength)
        : null,
      highWaterMark
    })

    // Transfer abort-listener cleanup to the response stream's lifetime.
    if (this.removeAbortListener) {
      res.on('close', this.removeAbortListener)
      this.removeAbortListener = null
    }

    this.callback = null
    this.res = res
    if (callback !== null) {
      try {
        this.runInAsyncScope(callback, null, null, {
          statusCode,
          statusText: statusMessage,
          headers,
          trailers: this.trailers,
          opaque,
          body: res,
          context
        })
      } catch (err) {
        // If the callback throws synchronously, we need to handle it
        // Remove reference to res to allow res being garbage collected
        this.res = null

        // Destroy the response stream
        util.destroy(res.on('error', noop), err)

        // Use queueMicrotask to re-throw the error so it reaches uncaughtException
        queueMicrotask(() => {
          throw err
        })
      }
    }
  }

  onData (chunk) {
    return this.res.push(chunk)
  }

  onComplete (trailers) {
    // Populate the trailers object handed to the callback, then end the body.
    util.parseHeaders(trailers, this.trailers)
    this.res.push(null)
  }

  onError (err) {
    const { res, callback, body, opaque } = this

    if (callback) {
      // TODO: Does this need queueMicrotask?
      this.callback = null
      queueMicrotask(() => {
        this.runInAsyncScope(callback, null, err, { opaque })
      })
    }

    if (res) {
      this.res = null
      // Ensure all queued handlers are invoked before destroying res.
      queueMicrotask(() => {
        util.destroy(res.on('error', noop), err)
      })
    }

    if (body) {
      this.body = null

      if (util.isStream(body)) {
        body.on('error', noop)
        util.destroy(body, err)
      }
    }

    if (this.removeAbortListener) {
      this.removeAbortListener()
      this.removeAbortListener = null
    }
  }
}
|
||||
|
||||
/**
 * Performs an HTTP request on this dispatcher.
 * Callback form: callback(err, { statusCode, headers, body, ... }).
 * With no callback, returns a Promise resolving to that result object.
 */
function request (opts, callback) {
  // Promise mode: bridge onto the callback form.
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      request.call(this, opts, (err, data) => (
        err ? reject(err) : resolve(data)
      ))
    })
  }

  try {
    this.dispatch(opts, new RequestHandler(opts, callback))
  } catch (err) {
    // Without a usable callback there is no async channel; rethrow.
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts?.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
|
||||
|
||||
module.exports = request
|
||||
module.exports.RequestHandler = RequestHandler
|
||||
209
backend/node_modules/undici/lib/api/api-stream.js
generated
vendored
Normal file
209
backend/node_modules/undici/lib/api/api-stream.js
generated
vendored
Normal file
@@ -0,0 +1,209 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { finished } = require('node:stream')
|
||||
const { AsyncResource } = require('node:async_hooks')
|
||||
const { InvalidArgumentError, InvalidReturnValueError } = require('../core/errors')
|
||||
const util = require('../core/util')
|
||||
const { addSignal, removeSignal } = require('./abort-signal')
|
||||
|
||||
// Shared no-op listener used to swallow expected 'error' events.
function noop () {}
|
||||
|
||||
/**
 * Dispatcher handler backing Client#stream. The user-supplied `factory`
 * is invoked once headers arrive and must return a Writable into which
 * the response body is written; the callback fires when that Writable
 * finishes or an error occurs.
 */
class StreamHandler extends AsyncResource {
  constructor (opts, factory, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    const { signal, method, opaque, body, onInfo, responseHeaders } = opts

    // Validation happens inside try/catch so a provided body stream is
    // destroyed (not leaked) when the options are rejected.
    try {
      if (typeof callback !== 'function') {
        throw new InvalidArgumentError('invalid callback')
      }

      if (typeof factory !== 'function') {
        throw new InvalidArgumentError('invalid factory')
      }

      if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
        throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
      }

      if (method === 'CONNECT') {
        throw new InvalidArgumentError('invalid method')
      }

      if (onInfo && typeof onInfo !== 'function') {
        throw new InvalidArgumentError('invalid onInfo callback')
      }

      super('UNDICI_STREAM')
    } catch (err) {
      if (util.isStream(body)) {
        util.destroy(body.on('error', noop), err)
      }
      throw err
    }

    this.responseHeaders = responseHeaders || null
    this.opaque = opaque || null
    this.factory = factory
    this.callback = callback
    this.res = null
    this.abort = null
    this.context = null
    this.trailers = null
    this.body = body
    this.onInfo = onInfo || null

    // Request-body failures are routed through the common error path.
    if (util.isStream(body)) {
      body.on('error', (err) => {
        this.onError(err)
      })
    }

    addSignal(this, signal)
  }

  onConnect (abort, context) {
    // Signal already fired: abort before any work happens.
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(this.callback)

    this.abort = abort
    this.context = context
  }

  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    const { factory, opaque, context, responseHeaders } = this

    const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)

    // 1xx informational responses: report via onInfo and keep waiting.
    if (statusCode < 200) {
      if (this.onInfo) {
        this.onInfo({ statusCode, headers })
      }
      return
    }

    // Clear first so the factory cannot be invoked twice (e.g. on retry).
    this.factory = null

    if (factory === null) {
      return
    }

    const res = this.runInAsyncScope(factory, null, {
      statusCode,
      headers,
      opaque,
      context
    })

    if (
      !res ||
      typeof res.write !== 'function' ||
      typeof res.end !== 'function' ||
      typeof res.on !== 'function'
    ) {
      throw new InvalidReturnValueError('expected Writable')
    }

    // TODO: Avoid finished. It registers an unnecessary amount of listeners.
    finished(res, { readable: false }, (err) => {
      // NOTE: inner `res` shadows the outer one on purpose — it re-reads
      // this.res, which may have been cleared by onError in the meantime.
      const { callback, res, opaque, trailers, abort } = this

      this.res = null
      if (err || !res?.readable) {
        util.destroy(res, err)
      }

      this.callback = null
      this.runInAsyncScope(callback, null, err || null, { opaque, trailers })

      if (err) {
        abort()
      }
    })

    // Propagate downstream demand back to the dispatcher.
    res.on('drain', resume)

    this.res = res

    const needDrain = res.writableNeedDrain !== undefined
      ? res.writableNeedDrain
      : res._writableState?.needDrain

    // Returning false applies backpressure to the dispatcher.
    return needDrain !== true
  }

  onData (chunk) {
    const { res } = this

    return res ? res.write(chunk) : true
  }

  onComplete (trailers) {
    const { res } = this

    removeSignal(this)

    if (!res) {
      return
    }

    this.trailers = util.parseHeaders(trailers)

    res.end()
  }

  onError (err) {
    const { res, callback, opaque, body } = this

    removeSignal(this)

    this.factory = null

    if (res) {
      // The finished() callback above delivers the error to the user.
      this.res = null
      util.destroy(res, err)
    } else if (callback) {
      this.callback = null
      queueMicrotask(() => {
        this.runInAsyncScope(callback, null, err, { opaque })
      })
    }

    if (body) {
      this.body = null
      util.destroy(body, err)
    }
  }
}
|
||||
|
||||
/**
 * Performs an HTTP request and writes the response body into the Writable
 * returned by `factory`. Callback form: callback(err, { opaque, trailers }).
 * With no callback, returns a Promise resolving to that result object.
 */
function stream (opts, factory, callback) {
  // Promise mode: bridge onto the callback form.
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      stream.call(this, opts, factory, (err, data) => (
        err ? reject(err) : resolve(data)
      ))
    })
  }

  try {
    this.dispatch(opts, new StreamHandler(opts, factory, callback))
  } catch (err) {
    // Without a usable callback there is no async channel; rethrow.
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts?.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
|
||||
|
||||
module.exports = stream
|
||||
111
backend/node_modules/undici/lib/api/api-upgrade.js
generated
vendored
Normal file
111
backend/node_modules/undici/lib/api/api-upgrade.js
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
'use strict'
|
||||
|
||||
const { InvalidArgumentError, SocketError } = require('../core/errors')
|
||||
const { AsyncResource } = require('node:async_hooks')
|
||||
const assert = require('node:assert')
|
||||
const util = require('../core/util')
|
||||
const { kHTTP2Stream } = require('../core/symbols')
|
||||
const { addSignal, removeSignal } = require('./abort-signal')
|
||||
|
||||
/**
 * Dispatcher handler for protocol-upgrade requests (e.g. WebSocket).
 * Validates options, tracks the abort callback, and delivers the upgraded
 * socket plus headers to the user callback via onUpgrade.
 */
class UpgradeHandler extends AsyncResource {
  constructor (opts, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    const { signal, opaque, responseHeaders } = opts

    // Accept either an EventEmitter-style or EventTarget-style signal.
    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
    }

    super('UNDICI_UPGRADE')

    this.responseHeaders = responseHeaders || null
    this.opaque = opaque || null
    this.callback = callback
    this.abort = null
    this.context = null

    addSignal(this, signal)
  }

  onConnect (abort, context) {
    // Signal already fired: abort before any work happens.
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(this.callback)

    this.abort = abort
    // NOTE(review): context parameter is discarded here (unlike the other
    // handlers, which store it) — presumably intentional; confirm upstream.
    this.context = null
  }

  // An upgrade request must be answered with an upgrade, never plain headers.
  onHeaders () {
    throw new SocketError('bad upgrade', null)
  }

  onUpgrade (statusCode, rawHeaders, socket) {
    // HTTP/2 CONNECT-style upgrades answer 200; HTTP/1.1 upgrades answer 101.
    assert(socket[kHTTP2Stream] === true ? statusCode === 200 : statusCode === 101)

    const { callback, opaque, context } = this

    removeSignal(this)

    this.callback = null
    const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
    this.runInAsyncScope(callback, null, null, {
      headers,
      socket,
      opaque,
      context
    })
  }

  onError (err) {
    const { callback, opaque } = this

    removeSignal(this)

    if (callback) {
      this.callback = null
      // Defer so the error is reported asynchronously, never re-entrantly.
      queueMicrotask(() => {
        this.runInAsyncScope(callback, null, err, { opaque })
      })
    }
  }
}
|
||||
|
||||
/**
 * Issues a protocol-upgrade request on this dispatcher.
 * Callback form: callback(err, { headers, socket, opaque, context }).
 * With no callback, returns a Promise resolving to that result object.
 */
function upgrade (opts, callback) {
  // Promise mode: bridge onto the callback form.
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      upgrade.call(this, opts, (err, data) => (
        err ? reject(err) : resolve(data)
      ))
    })
  }

  try {
    const handler = new UpgradeHandler(opts, callback)
    const dispatchOpts = {
      ...opts,
      method: opts.method || 'GET',
      upgrade: opts.protocol || 'Websocket'
    }

    this.dispatch(dispatchOpts, handler)
  } catch (err) {
    // Without a usable callback there is no async channel; rethrow.
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts?.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
|
||||
|
||||
module.exports = upgrade
|
||||
7
backend/node_modules/undici/lib/api/index.js
generated
vendored
Normal file
7
backend/node_modules/undici/lib/api/index.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
module.exports.request = require('./api-request')
|
||||
module.exports.stream = require('./api-stream')
|
||||
module.exports.pipeline = require('./api-pipeline')
|
||||
module.exports.upgrade = require('./api-upgrade')
|
||||
module.exports.connect = require('./api-connect')
|
||||
580
backend/node_modules/undici/lib/api/readable.js
generated
vendored
Normal file
580
backend/node_modules/undici/lib/api/readable.js
generated
vendored
Normal file
@@ -0,0 +1,580 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { Readable } = require('node:stream')
|
||||
const { RequestAbortedError, NotSupportedError, InvalidArgumentError, AbortError } = require('../core/errors')
|
||||
const util = require('../core/util')
|
||||
const { ReadableStreamFrom } = require('../core/util')
|
||||
|
||||
const kConsume = Symbol('kConsume')
|
||||
const kReading = Symbol('kReading')
|
||||
const kBody = Symbol('kBody')
|
||||
const kAbort = Symbol('kAbort')
|
||||
const kContentType = Symbol('kContentType')
|
||||
const kContentLength = Symbol('kContentLength')
|
||||
const kUsed = Symbol('kUsed')
|
||||
const kBytesRead = Symbol('kBytesRead')
|
||||
|
||||
const noop = () => {}
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @extends {Readable}
|
||||
* @see https://fetch.spec.whatwg.org/#body
|
||||
*/
|
||||
class BodyReadable extends Readable {
|
||||
/**
|
||||
* @param {object} opts
|
||||
* @param {(this: Readable, size: number) => void} opts.resume
|
||||
* @param {() => (void | null)} opts.abort
|
||||
* @param {string} [opts.contentType = '']
|
||||
* @param {number} [opts.contentLength]
|
||||
* @param {number} [opts.highWaterMark = 64 * 1024]
|
||||
*/
|
||||
constructor ({
|
||||
resume,
|
||||
abort,
|
||||
contentType = '',
|
||||
contentLength,
|
||||
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
|
||||
}) {
|
||||
super({
|
||||
autoDestroy: true,
|
||||
read: resume,
|
||||
highWaterMark
|
||||
})
|
||||
|
||||
this._readableState.dataEmitted = false
|
||||
|
||||
this[kAbort] = abort
|
||||
|
||||
/** @type {Consume | null} */
|
||||
this[kConsume] = null
|
||||
|
||||
/** @type {number} */
|
||||
this[kBytesRead] = 0
|
||||
|
||||
/** @type {ReadableStream|null} */
|
||||
this[kBody] = null
|
||||
|
||||
/** @type {boolean} */
|
||||
this[kUsed] = false
|
||||
|
||||
/** @type {string} */
|
||||
this[kContentType] = contentType
|
||||
|
||||
/** @type {number|null} */
|
||||
this[kContentLength] = Number.isFinite(contentLength) ? contentLength : null
|
||||
|
||||
/**
|
||||
* Is stream being consumed through Readable API?
|
||||
* This is an optimization so that we avoid checking
|
||||
* for 'data' and 'readable' listeners in the hot path
|
||||
* inside push().
|
||||
*
|
||||
* @type {boolean}
|
||||
*/
|
||||
this[kReading] = false
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Error|null} err
|
||||
* @param {(error:(Error|null)) => void} callback
|
||||
* @returns {void}
|
||||
*/
|
||||
_destroy (err, callback) {
|
||||
if (!err && !this._readableState.endEmitted) {
|
||||
err = new RequestAbortedError()
|
||||
}
|
||||
|
||||
if (err) {
|
||||
this[kAbort]()
|
||||
}
|
||||
|
||||
// Workaround for Node "bug". If the stream is destroyed in same
|
||||
// tick as it is created, then a user who is waiting for a
|
||||
// promise (i.e micro tick) for installing an 'error' listener will
|
||||
// never get a chance and will always encounter an unhandled exception.
|
||||
if (!this[kUsed]) {
|
||||
setImmediate(callback, err)
|
||||
} else {
|
||||
callback(err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string|symbol} event
|
||||
* @param {(...args: any[]) => void} listener
|
||||
* @returns {this}
|
||||
*/
|
||||
on (event, listener) {
|
||||
if (event === 'data' || event === 'readable') {
|
||||
this[kReading] = true
|
||||
this[kUsed] = true
|
||||
}
|
||||
return super.on(event, listener)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string|symbol} event
|
||||
* @param {(...args: any[]) => void} listener
|
||||
* @returns {this}
|
||||
*/
|
||||
addListener (event, listener) {
|
||||
return this.on(event, listener)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string|symbol} event
|
||||
* @param {(...args: any[]) => void} listener
|
||||
* @returns {this}
|
||||
*/
|
||||
off (event, listener) {
|
||||
const ret = super.off(event, listener)
|
||||
if (event === 'data' || event === 'readable') {
|
||||
this[kReading] = (
|
||||
this.listenerCount('data') > 0 ||
|
||||
this.listenerCount('readable') > 0
|
||||
)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
/**
 * Alias of off() so both removal APIs recompute the kReading flag.
 *
 * @param {string|symbol} event
 * @param {(...args: any[]) => void} listener
 * @returns {this}
 */
removeListener (event, listener) {
  return this.off(event, listener)
}
|
||||
|
||||
/**
|
||||
* @param {Buffer|null} chunk
|
||||
* @returns {boolean}
|
||||
*/
|
||||
push (chunk) {
|
||||
if (chunk) {
|
||||
this[kBytesRead] += chunk.length
|
||||
if (this[kConsume]) {
|
||||
consumePush(this[kConsume], chunk)
|
||||
return this[kReading] ? super.push(chunk) : true
|
||||
}
|
||||
}
|
||||
|
||||
return super.push(chunk)
|
||||
}
|
||||
|
||||
/**
 * Consumes and returns the body as a string.
 *
 * Rejects via consume() if the body is already locked or disturbed.
 *
 * @see https://fetch.spec.whatwg.org/#dom-body-text
 * @returns {Promise<string>}
 */
text () {
  return consume(this, 'text')
}
|
||||
|
||||
/**
 * Consumes and returns the body as a JavaScript Object.
 *
 * Rejects via consume() if the body is already locked or disturbed, and
 * destroys the stream if the payload is not valid JSON (see consumeEnd).
 *
 * @see https://fetch.spec.whatwg.org/#dom-body-json
 * @returns {Promise<unknown>}
 */
json () {
  return consume(this, 'json')
}
|
||||
|
||||
/**
 * Consumes and returns the body as a Blob, typed with the response's
 * content-type (kContentType — see consumeEnd).
 *
 * @see https://fetch.spec.whatwg.org/#dom-body-blob
 * @returns {Promise<Blob>}
 */
blob () {
  return consume(this, 'blob')
}
|
||||
|
||||
/**
 * Consumes and returns the body as an Uint8Array.
 *
 * @see https://fetch.spec.whatwg.org/#dom-body-bytes
 * @returns {Promise<Uint8Array>}
 */
bytes () {
  return consume(this, 'bytes')
}
|
||||
|
||||
/**
 * Consumes and returns the body as an ArrayBuffer.
 *
 * @see https://fetch.spec.whatwg.org/#dom-body-arraybuffer
 * @returns {Promise<ArrayBuffer>}
 */
arrayBuffer () {
  return consume(this, 'arrayBuffer')
}
|
||||
|
||||
/**
 * Not implemented — multipart/form-data parsing is not supported on this
 * body type; the method exists only for fetch-Body API parity.
 *
 * @see https://fetch.spec.whatwg.org/#dom-body-formdata
 * @throws {NotSupportedError} always
 */
async formData () {
  // TODO: Implement.
  throw new NotSupportedError()
}
|
||||
|
||||
/**
 * Returns true if the body is not null and the body has been consumed.
 * Otherwise, returns false.
 *
 * @see https://fetch.spec.whatwg.org/#dom-body-bodyused
 * @readonly
 * @returns {boolean}
 */
get bodyUsed () {
  // Delegates to the stream "disturbed" check (read from or errored).
  return util.isDisturbed(this)
}
|
||||
|
||||
/**
 * Lazily-created web ReadableStream view of this body; cached so every
 * access returns the same stream object.
 *
 * @see https://fetch.spec.whatwg.org/#dom-body-body
 * @readonly
 * @returns {ReadableStream}
 */
get body () {
  if (!this[kBody]) {
    this[kBody] = ReadableStreamFrom(this)
    if (this[kConsume]) {
      // A consumer (text()/json()/...) already owns the data, so the web
      // stream must appear locked to callers.
      // TODO: Is this the best way to force a lock?
      this[kBody].getReader() // Ensure stream is locked.
      assert(this[kBody].locked)
    }
  }
  return this[kBody]
}
|
||||
|
||||
/**
 * Dumps the response body by reading `limit` number of bytes.
 *
 * Resolves null once the stream closes; destroys the stream (and, with a
 * signal, rejects) when the body exceeds `limit` or the signal aborts.
 *
 * @param {object} opts
 * @param {number} [opts.limit = 131072] Number of bytes to read.
 * @param {AbortSignal} [opts.signal] An AbortSignal to cancel the dump.
 * @returns {Promise<null>}
 */
dump (opts) {
  const signal = opts?.signal

  // Duck-type the signal ('aborted' property) rather than instanceof, so
  // cross-realm AbortSignals are accepted.
  if (signal != null && (typeof signal !== 'object' || !('aborted' in signal))) {
    return Promise.reject(new InvalidArgumentError('signal must be an AbortSignal'))
  }

  // Non-finite or falsy limits (including 0) fall back to 128 KiB.
  const limit = opts?.limit && Number.isFinite(opts.limit)
    ? opts.limit
    : 128 * 1024

  if (signal?.aborted) {
    return Promise.reject(signal.reason ?? new AbortError())
  }

  // Already fully closed: nothing left to drain.
  if (this._readableState.closeEmitted) {
    return Promise.resolve(null)
  }

  return new Promise((resolve, reject) => {
    // Known-oversized body (content-length header or bytes already read):
    // abort immediately instead of draining.
    if (
      (this[kContentLength] && (this[kContentLength] > limit)) ||
      this[kBytesRead] > limit
    ) {
      this.destroy(new AbortError())
    }

    if (signal) {
      const onAbort = () => {
        this.destroy(signal.reason ?? new AbortError())
      }
      signal.addEventListener('abort', onAbort)
      this
        .on('close', function () {
          signal.removeEventListener('abort', onAbort)
          if (signal.aborted) {
            reject(signal.reason ?? new AbortError())
          } else {
            resolve(null)
          }
        })
    } else {
      // No signal: always resolve on close, even after a limit-triggered
      // destroy (resolve is called with no args).
      this.on('close', resolve)
    }

    this
      // Swallow stream errors; 'close' settles the promise either way.
      .on('error', noop)
      .on('data', () => {
        if (this[kBytesRead] > limit) {
          this.destroy()
        }
      })
      .resume()
  })
}
|
||||
|
||||
/**
|
||||
* @param {BufferEncoding} encoding
|
||||
* @returns {this}
|
||||
*/
|
||||
setEncoding (encoding) {
|
||||
if (Buffer.isEncoding(encoding)) {
|
||||
this._readableState.encoding = encoding
|
||||
}
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * @see https://streams.spec.whatwg.org/#readablestream-locked
 * @param {BodyReadable} bodyReadable
 * @returns {boolean}
 */
function isLocked (bodyReadable) {
  // An in-progress consume() counts as an implicit lock.
  if (bodyReadable[kConsume] !== null) {
    return true
  }
  return bodyReadable[kBody]?.locked === true
}
|
||||
|
||||
/**
 * @see https://fetch.spec.whatwg.org/#body-unusable
 * @param {BodyReadable} bodyReadable
 * @returns {boolean}
 */
function isUnusable (bodyReadable) {
  if (util.isDisturbed(bodyReadable)) {
    return true
  }
  return isLocked(bodyReadable)
}
|
||||
|
||||
/**
|
||||
* @typedef {'text' | 'json' | 'blob' | 'bytes' | 'arrayBuffer'} ConsumeType
|
||||
*/
|
||||
|
||||
/**
|
||||
* @template {ConsumeType} T
|
||||
* @typedef {T extends 'text' ? string :
|
||||
* T extends 'json' ? unknown :
|
||||
* T extends 'blob' ? Blob :
|
||||
* T extends 'arrayBuffer' ? ArrayBuffer :
|
||||
* T extends 'bytes' ? Uint8Array :
|
||||
* never
|
||||
* } ConsumeReturnType
|
||||
*/
|
||||
/**
|
||||
* @typedef {object} Consume
|
||||
* @property {ConsumeType} type
|
||||
* @property {BodyReadable} stream
|
||||
* @property {((value?: any) => void)} resolve
|
||||
* @property {((err: Error) => void)} reject
|
||||
* @property {number} length
|
||||
* @property {Buffer[]} body
|
||||
*/
|
||||
|
||||
/**
 * Begins consuming the stream body as `type`. At most one consumer may
 * ever be attached (asserted below). The returned promise resolves with
 * the converted body once the stream ends, or rejects if the stream is
 * unusable, errors, or closes before 'end'.
 *
 * @template {ConsumeType} T
 * @param {BodyReadable} stream
 * @param {T} type
 * @returns {Promise<ConsumeReturnType<T>>}
 */
function consume (stream, type) {
  assert(!stream[kConsume])

  return new Promise((resolve, reject) => {
    if (isUnusable(stream)) {
      const rState = stream._readableState
      if (rState.destroyed && rState.closeEmitted === false) {
        // Destroyed but not yet closed: wait for the terminal event so a
        // real error can win over the generic TypeError.
        stream
          .on('error', reject)
          .on('close', () => {
            reject(new TypeError('unusable'))
          })
      } else {
        reject(rState.errored ?? new TypeError('unusable'))
      }
    } else {
      // Install the consume state one microtask later, after the current
      // synchronous execution has completed.
      queueMicrotask(() => {
        stream[kConsume] = {
          type,
          stream,
          resolve,
          reject,
          length: 0,
          body: []
        }

        stream
          .on('error', function (err) {
            consumeFinish(this[kConsume], err)
          })
          .on('close', function () {
            // body !== null means consumeFinish has not run yet: closed
            // before 'end', i.e. the request was aborted.
            if (this[kConsume].body !== null) {
              consumeFinish(this[kConsume], new RequestAbortedError())
            }
          })

        consumeStart(stream[kConsume])
      })
    }
  })
}
|
||||
|
||||
/**
 * Starts draining the stream into the consume accumulator: replays any
 * chunks already sitting in the Readable buffer, arranges for
 * consumeEnd() to run when the stream ends, and switches the stream to
 * flowing mode.
 *
 * @param {Consume} consume
 * @returns {void}
 */
function consumeStart (consume) {
  if (consume.body === null) {
    // Consumption already finished (or was torn down); nothing to start.
    return
  }

  const { _readableState: state } = consume.stream

  // Replay whatever is already buffered. Newer Node streams keep a
  // bufferIndex cursor into the buffer array; older ones are iterated
  // directly.
  if (state.bufferIndex) {
    const start = state.bufferIndex
    const end = state.buffer.length
    for (let n = start; n < end; n++) {
      consumePush(consume, state.buffer[n])
    }
  } else {
    for (const chunk of state.buffer) {
      consumePush(consume, chunk)
    }
  }

  if (state.endEmitted) {
    // Bug fix: this branch used to call
    // consumeEnd(this[kConsume], this._readableState.encoding), but
    // `this` is undefined inside a plain strict-mode function, so an
    // already-ended stream threw a TypeError instead of resolving the
    // consumer. Use the local consume state and its stream's encoding.
    consumeEnd(consume, state.encoding)
  } else {
    consume.stream.on('end', function () {
      consumeEnd(this[kConsume], this._readableState.encoding)
    })
  }

  consume.stream.resume()

  // Synchronously drain any data that becomes readable right away.
  while (consume.stream.read() != null) {
    // Loop
  }
}
|
||||
|
||||
/**
 * Decodes a list of body chunks into a string, skipping a leading UTF-8
 * byte-order mark if present.
 *
 * @param {Buffer[]} chunks
 * @param {number} length total byte length of `chunks`
 * @param {BufferEncoding} [encoding='utf8']
 * @returns {string}
 */
function chunksDecode (chunks, length, encoding) {
  if (length === 0 || chunks.length === 0) {
    return ''
  }

  const buffer = chunks.length === 1
    ? chunks[0]
    : Buffer.concat(chunks, length)

  // Skip a UTF-8 BOM (EF BB BF) at the start of the payload.
  let start = 0
  if (
    buffer.length > 2 &&
    buffer[0] === 0xef &&
    buffer[1] === 0xbb &&
    buffer[2] === 0xbf
  ) {
    start = 3
  }

  const isUtf8 = !encoding || encoding === 'utf8' || encoding === 'utf-8'
  if (isUtf8) {
    // utf8Slice avoids the subarray allocation on the common path.
    return buffer.utf8Slice(start, buffer.length)
  }
  return buffer.subarray(start, buffer.length).toString(encoding)
}
|
||||
|
||||
/**
 * Concatenates body chunks into a single Uint8Array of exactly `length`
 * bytes.
 *
 * @param {Buffer[]} chunks
 * @param {number} length total byte length of `chunks`
 * @returns {Uint8Array}
 */
function chunksConcat (chunks, length) {
  if (chunks.length === 0 || length === 0) {
    return new Uint8Array(0)
  }
  if (chunks.length === 1) {
    // fast-path: a single chunk just needs one copy
    return new Uint8Array(chunks[0])
  }

  // Allocate without zero-filling; every byte is overwritten below.
  const out = new Uint8Array(Buffer.allocUnsafeSlow(length).buffer)

  let cursor = 0
  for (const chunk of chunks) {
    out.set(chunk, cursor)
    cursor += chunk.length
  }

  return out
}
|
||||
|
||||
/**
 * Converts the accumulated chunks into the representation requested by
 * the consumer (string, parsed JSON, ArrayBuffer, Blob or Uint8Array),
 * resolves the consumer promise and tears down the consume state.
 *
 * @param {Consume} consume
 * @param {BufferEncoding} encoding
 * @returns {void}
 */
function consumeEnd (consume, encoding) {
  const { type, body, resolve, stream, length } = consume

  try {
    if (type === 'text') {
      resolve(chunksDecode(body, length, encoding))
    } else if (type === 'json') {
      resolve(JSON.parse(chunksDecode(body, length, encoding)))
    } else if (type === 'arrayBuffer') {
      resolve(chunksConcat(body, length).buffer)
    } else if (type === 'blob') {
      // Blob carries the response content-type captured on the stream.
      resolve(new Blob(body, { type: stream[kContentType] }))
    } else if (type === 'bytes') {
      resolve(chunksConcat(body, length))
    }

    consumeFinish(consume)
  } catch (err) {
    // A failed conversion (e.g. invalid JSON) destroys the stream; the
    // 'error'/'close' handlers installed in consume() then reject.
    stream.destroy(err)
  }
}
|
||||
|
||||
/**
 * Appends one chunk to the consume accumulator and grows its running
 * byte count.
 *
 * @param {Consume} consume
 * @param {Buffer} chunk
 * @returns {void}
 */
function consumePush (consume, chunk) {
  consume.body.push(chunk)
  consume.length += chunk.length
}
|
||||
|
||||
/**
 * Settles the consumer promise (reject with `err`, otherwise resolve)
 * and clears the consume state. Safe to call more than once: a state
 * whose body is already null is a no-op.
 *
 * @param {Consume} consume
 * @param {Error} [err]
 * @returns {void}
 */
function consumeFinish (consume, err) {
  if (consume.body === null) {
    // Already finished.
    return
  }

  err ? consume.reject(err) : consume.resolve()

  // Reset the consume object to allow for garbage collection.
  Object.assign(consume, {
    type: null,
    stream: null,
    resolve: null,
    reject: null,
    length: 0,
    body: null
  })
}
|
||||
|
||||
// Public surface: the body stream class (exported under the generic name
// `Readable`) and the chunk decoder reused by other undici modules.
module.exports = {
  Readable: BodyReadable,
  chunksDecode
}
|
||||
234
backend/node_modules/undici/lib/cache/memory-cache-store.js
generated
vendored
Normal file
234
backend/node_modules/undici/lib/cache/memory-cache-store.js
generated
vendored
Normal file
@@ -0,0 +1,234 @@
|
||||
'use strict'
|
||||
|
||||
const { Writable } = require('node:stream')
|
||||
const { EventEmitter } = require('node:events')
|
||||
const { assertCacheKey, assertCacheValue } = require('../util/cache.js')
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/cache-interceptor.d.ts').default.CacheKey} CacheKey
|
||||
* @typedef {import('../../types/cache-interceptor.d.ts').default.CacheValue} CacheValue
|
||||
* @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
|
||||
* @typedef {import('../../types/cache-interceptor.d.ts').default.GetResult} GetResult
|
||||
*/
|
||||
|
||||
/**
 * Bounded in-memory cache store for undici's cache interceptor.
 *
 * Entries are bucketed by `origin:path` and matched within a bucket by
 * method, expiry and Vary headers (see findEntry). When either the byte
 * budget or the entry-count budget is exceeded, a 'maxSizeExceeded'
 * event is emitted once and roughly half of every bucket is evicted.
 *
 * @implements {CacheStore}
 * @extends {EventEmitter}
 */
class MemoryCacheStore extends EventEmitter {
  #maxCount = 1024
  #maxSize = 104857600 // 100MB
  #maxEntrySize = 5242880 // 5MB

  #size = 0
  #count = 0
  // Map of `${origin}:${path}` -> array of entry objects.
  #entries = new Map()
  // Prevents emitting 'maxSizeExceeded' repeatedly for one overflow.
  #hasEmittedMaxSizeEvent = false

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.MemoryCacheStoreOpts | undefined} [opts]
   * @throws {TypeError} if opts or any provided limit is invalid
   */
  constructor (opts) {
    super()
    if (opts) {
      if (typeof opts !== 'object') {
        throw new TypeError('MemoryCacheStore options must be an object')
      }

      if (opts.maxCount !== undefined) {
        if (
          typeof opts.maxCount !== 'number' ||
          !Number.isInteger(opts.maxCount) ||
          opts.maxCount < 0
        ) {
          throw new TypeError('MemoryCacheStore options.maxCount must be a non-negative integer')
        }
        this.#maxCount = opts.maxCount
      }

      if (opts.maxSize !== undefined) {
        if (
          typeof opts.maxSize !== 'number' ||
          !Number.isInteger(opts.maxSize) ||
          opts.maxSize < 0
        ) {
          throw new TypeError('MemoryCacheStore options.maxSize must be a non-negative integer')
        }
        this.#maxSize = opts.maxSize
      }

      if (opts.maxEntrySize !== undefined) {
        if (
          typeof opts.maxEntrySize !== 'number' ||
          !Number.isInteger(opts.maxEntrySize) ||
          opts.maxEntrySize < 0
        ) {
          throw new TypeError('MemoryCacheStore options.maxEntrySize must be a non-negative integer')
        }
        this.#maxEntrySize = opts.maxEntrySize
      }
    }
  }

  /**
   * Get the current size of the cache in bytes
   * @returns {number} The current size of the cache in bytes
   */
  get size () {
    return this.#size
  }

  /**
   * Check if the cache is full (either max size or max count reached)
   * @returns {boolean} True if the cache is full, false otherwise
   */
  isFull () {
    return this.#size >= this.#maxSize || this.#count >= this.#maxCount
  }

  /**
   * Looks up a live entry matching the key's method and Vary headers.
   *
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @returns {import('../../types/cache-interceptor.d.ts').default.GetResult | undefined}
   */
  get (key) {
    assertCacheKey(key)

    const topLevelKey = `${key.origin}:${key.path}`

    const now = Date.now()
    const entries = this.#entries.get(topLevelKey)

    const entry = entries ? findEntry(key, entries, now) : null

    // Return a fresh result object so callers cannot mutate cached state
    // (except through the shared headers/body references).
    return entry == null
      ? undefined
      : {
          statusMessage: entry.statusMessage,
          statusCode: entry.statusCode,
          headers: entry.headers,
          body: entry.body,
          vary: entry.vary ? entry.vary : undefined,
          etag: entry.etag,
          cacheControlDirectives: entry.cacheControlDirectives,
          cachedAt: entry.cachedAt,
          staleAt: entry.staleAt,
          deleteAt: entry.deleteAt
        }
  }

  /**
   * Returns a Writable that buffers the response body and commits the
   * entry on finish. Bodies reaching #maxEntrySize destroy the stream
   * and are never committed.
   *
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue} val
   * @returns {Writable | undefined}
   */
  createWriteStream (key, val) {
    assertCacheKey(key)
    assertCacheValue(val)

    const topLevelKey = `${key.origin}:${key.path}`

    const store = this
    const entry = { ...key, ...val, body: [], size: 0 }

    return new Writable({
      write (chunk, encoding, callback) {
        if (typeof chunk === 'string') {
          chunk = Buffer.from(chunk, encoding)
        }

        entry.size += chunk.byteLength

        if (entry.size >= store.#maxEntrySize) {
          // Oversized entry: abort the write stream; final() never runs.
          this.destroy()
        } else {
          entry.body.push(chunk)
        }

        callback(null)
      },
      final (callback) {
        // Commit: replace a matching previous entry in place, otherwise
        // append, then update the global size/count accounting.
        let entries = store.#entries.get(topLevelKey)
        if (!entries) {
          entries = []
          store.#entries.set(topLevelKey, entries)
        }
        const previousEntry = findEntry(key, entries, Date.now())
        if (previousEntry) {
          const index = entries.indexOf(previousEntry)
          entries.splice(index, 1, entry)
          store.#size -= previousEntry.size
        } else {
          entries.push(entry)
          store.#count += 1
        }

        store.#size += entry.size

        // Check if cache is full and emit event if needed
        if (store.#size > store.#maxSize || store.#count > store.#maxCount) {
          // Emit maxSizeExceeded event if we haven't already
          if (!store.#hasEmittedMaxSizeEvent) {
            store.emit('maxSizeExceeded', {
              size: store.#size,
              maxSize: store.#maxSize,
              count: store.#count,
              maxCount: store.#maxCount
            })
            store.#hasEmittedMaxSizeEvent = true
          }

          // Perform eviction: drop the older half of every bucket
          for (const [key, entries] of store.#entries) {
            for (const entry of entries.splice(0, entries.length / 2)) {
              store.#size -= entry.size
              store.#count -= 1
            }
            if (entries.length === 0) {
              store.#entries.delete(key)
            }
          }

          // Reset the event flag after eviction
          if (store.#size < store.#maxSize && store.#count < store.#maxCount) {
            store.#hasEmittedMaxSizeEvent = false
          }
        }

        callback(null)
      }
    })
  }

  /**
   * Removes every entry under the key's `origin:path` bucket, regardless
   * of method or Vary headers.
   *
   * @param {CacheKey} key
   * @throws {TypeError} if key is not an object
   */
  delete (key) {
    if (typeof key !== 'object') {
      throw new TypeError(`expected key to be object, got ${typeof key}`)
    }

    const topLevelKey = `${key.origin}:${key.path}`

    for (const entry of this.#entries.get(topLevelKey) ?? []) {
      this.#size -= entry.size
      this.#count -= 1
    }
    this.#entries.delete(topLevelKey)
  }
}
|
||||
|
||||
/**
 * Finds the first entry in a bucket that is still live (deleteAt in the
 * future), uses the same method, and whose Vary constraints match the
 * request headers. A vary value of null requires the request header to
 * be absent.
 *
 * @param {object} key request cache key ({ method, headers, ... })
 * @param {object[]} entries bucket of cached entries
 * @param {number} now current timestamp (ms)
 * @returns {object | undefined}
 */
function findEntry (key, entries, now) {
  for (const entry of entries) {
    if (entry.deleteAt <= now || entry.method !== key.method) {
      continue
    }

    if (entry.vary == null) {
      return entry
    }

    let varyMatches = true
    for (const headerName of Object.keys(entry.vary)) {
      const expected = entry.vary[headerName]
      const actual = key.headers[headerName]
      const matched = expected === null
        ? actual === undefined
        : expected === actual
      if (!matched) {
        varyMatches = false
        break
      }
    }

    if (varyMatches) {
      return entry
    }
  }

  return undefined
}
|
||||
|
||||
// Sole export: the in-memory CacheStore implementation.
module.exports = MemoryCacheStore
|
||||
461
backend/node_modules/undici/lib/cache/sqlite-cache-store.js
generated
vendored
Normal file
461
backend/node_modules/undici/lib/cache/sqlite-cache-store.js
generated
vendored
Normal file
@@ -0,0 +1,461 @@
|
||||
'use strict'
|
||||
|
||||
const { Writable } = require('node:stream')
|
||||
const { assertCacheKey, assertCacheValue } = require('../util/cache.js')
|
||||
|
||||
let DatabaseSync
|
||||
|
||||
const VERSION = 3
|
||||
|
||||
// 2gb
|
||||
const MAX_ENTRY_SIZE = 2 * 1000 * 1000 * 1000
|
||||
|
||||
/**
 * SQLite-backed cache store for undici's cache interceptor, built on the
 * synchronous node:sqlite API. All statements are prepared once in the
 * constructor; rows are keyed by `origin/path` + method and matched
 * against Vary headers at read time.
 *
 * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
 * @implements {CacheStore}
 *
 * @typedef {{
 *   id: Readonly<number>,
 *   body?: Uint8Array
 *   statusCode: number
 *   statusMessage: string
 *   headers?: string
 *   vary?: string
 *   etag?: string
 *   cacheControlDirectives?: string
 *   cachedAt: number
 *   staleAt: number
 *   deleteAt: number
 * }} SqliteStoreValue
 */
module.exports = class SqliteCacheStore {
  #maxEntrySize = MAX_ENTRY_SIZE
  #maxCount = Infinity

  /**
   * @type {import('node:sqlite').DatabaseSync}
   */
  #db

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #getValuesQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #updateValueQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #insertValueQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #deleteExpiredValuesQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #deleteByUrlQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #countEntriesQuery

  /**
   * Null when maxCount is Infinity (no count-based pruning needed).
   * @type {import('node:sqlite').StatementSync | null}
   */
  #deleteOldValuesQuery

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.SqliteCacheStoreOpts | undefined} opts
   * @throws {TypeError} on invalid options
   */
  constructor (opts) {
    if (opts) {
      if (typeof opts !== 'object') {
        throw new TypeError('SqliteCacheStore options must be an object')
      }

      if (opts.maxEntrySize !== undefined) {
        if (
          typeof opts.maxEntrySize !== 'number' ||
          !Number.isInteger(opts.maxEntrySize) ||
          opts.maxEntrySize < 0
        ) {
          throw new TypeError('SqliteCacheStore options.maxEntrySize must be a non-negative integer')
        }

        if (opts.maxEntrySize > MAX_ENTRY_SIZE) {
          throw new TypeError('SqliteCacheStore options.maxEntrySize must be less than 2gb')
        }

        this.#maxEntrySize = opts.maxEntrySize
      }

      if (opts.maxCount !== undefined) {
        if (
          typeof opts.maxCount !== 'number' ||
          !Number.isInteger(opts.maxCount) ||
          opts.maxCount < 0
        ) {
          throw new TypeError('SqliteCacheStore options.maxCount must be a non-negative integer')
        }
        this.#maxCount = opts.maxCount
      }
    }

    // Lazy-require node:sqlite so merely loading this module doesn't fail
    // on runtimes without it.
    if (!DatabaseSync) {
      DatabaseSync = require('node:sqlite').DatabaseSync
    }
    this.#db = new DatabaseSync(opts?.location ?? ':memory:')

    this.#db.exec(`
      PRAGMA journal_mode = WAL;
      PRAGMA synchronous = NORMAL;
      PRAGMA temp_store = memory;
      PRAGMA optimize;

      CREATE TABLE IF NOT EXISTS cacheInterceptorV${VERSION} (
        -- Data specific to us
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        url TEXT NOT NULL,
        method TEXT NOT NULL,

        -- Data returned to the interceptor
        body BUF NULL,
        deleteAt INTEGER NOT NULL,
        statusCode INTEGER NOT NULL,
        statusMessage TEXT NOT NULL,
        headers TEXT NULL,
        cacheControlDirectives TEXT NULL,
        etag TEXT NULL,
        vary TEXT NULL,
        cachedAt INTEGER NOT NULL,
        staleAt INTEGER NOT NULL
      );

      CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_getValuesQuery ON cacheInterceptorV${VERSION}(url, method, deleteAt);
      CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_deleteByUrlQuery ON cacheInterceptorV${VERSION}(deleteAt);
    `)

    this.#getValuesQuery = this.#db.prepare(`
      SELECT
        id,
        body,
        deleteAt,
        statusCode,
        statusMessage,
        headers,
        etag,
        cacheControlDirectives,
        vary,
        cachedAt,
        staleAt
      FROM cacheInterceptorV${VERSION}
      WHERE
        url = ?
        AND method = ?
      ORDER BY
        deleteAt ASC
    `)

    this.#updateValueQuery = this.#db.prepare(`
      UPDATE cacheInterceptorV${VERSION} SET
        body = ?,
        deleteAt = ?,
        statusCode = ?,
        statusMessage = ?,
        headers = ?,
        etag = ?,
        cacheControlDirectives = ?,
        cachedAt = ?,
        staleAt = ?
      WHERE
        id = ?
    `)

    this.#insertValueQuery = this.#db.prepare(`
      INSERT INTO cacheInterceptorV${VERSION} (
        url,
        method,
        body,
        deleteAt,
        statusCode,
        statusMessage,
        headers,
        etag,
        cacheControlDirectives,
        vary,
        cachedAt,
        staleAt
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `)

    this.#deleteByUrlQuery = this.#db.prepare(
      `DELETE FROM cacheInterceptorV${VERSION} WHERE url = ?`
    )

    this.#countEntriesQuery = this.#db.prepare(
      `SELECT COUNT(*) AS total FROM cacheInterceptorV${VERSION}`
    )

    this.#deleteExpiredValuesQuery = this.#db.prepare(
      `DELETE FROM cacheInterceptorV${VERSION} WHERE deleteAt <= ?`
    )

    this.#deleteOldValuesQuery = this.#maxCount === Infinity
      ? null
      : this.#db.prepare(`
        DELETE FROM cacheInterceptorV${VERSION}
        WHERE id IN (
          SELECT
            id
          FROM cacheInterceptorV${VERSION}
          ORDER BY cachedAt DESC
          LIMIT ?
        )
      `)
  }

  // Closes the underlying SQLite database handle.
  close () {
    this.#db.close()
  }

  /**
   * Finds a live matching row and rehydrates the JSON-encoded columns.
   *
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @returns {(import('../../types/cache-interceptor.d.ts').default.GetResult & { body?: Buffer }) | undefined}
   */
  get (key) {
    assertCacheKey(key)

    const value = this.#findValue(key)
    return value
      ? {
          // Wrap the stored Uint8Array view without copying.
          body: value.body ? Buffer.from(value.body.buffer, value.body.byteOffset, value.body.byteLength) : undefined,
          statusCode: value.statusCode,
          statusMessage: value.statusMessage,
          headers: value.headers ? JSON.parse(value.headers) : undefined,
          etag: value.etag ? value.etag : undefined,
          vary: value.vary ? JSON.parse(value.vary) : undefined,
          cacheControlDirectives: value.cacheControlDirectives
            ? JSON.parse(value.cacheControlDirectives)
            : undefined,
          cachedAt: value.cachedAt,
          staleAt: value.staleAt,
          deleteAt: value.deleteAt
        }
      : undefined
  }

  /**
   * Inserts or overwrites a cached response. Bodies larger than
   * #maxEntrySize are silently dropped.
   *
   * NOTE(review): the UPDATE path does not write the `vary` column, so an
   * overwrite keeps the previous row's Vary value — confirm intended.
   *
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue & { body: null | Buffer | Array<Buffer>}} value
   */
  set (key, value) {
    assertCacheKey(key)

    const url = this.#makeValueUrl(key)
    const body = Array.isArray(value.body) ? Buffer.concat(value.body) : value.body
    const size = body?.byteLength

    if (size && size > this.#maxEntrySize) {
      return
    }

    // Match even expired rows so a stale row is reused instead of leaked.
    const existingValue = this.#findValue(key, true)
    if (existingValue) {
      // Updating an existing response, let's overwrite it
      this.#updateValueQuery.run(
        body,
        value.deleteAt,
        value.statusCode,
        value.statusMessage,
        value.headers ? JSON.stringify(value.headers) : null,
        value.etag ? value.etag : null,
        value.cacheControlDirectives ? JSON.stringify(value.cacheControlDirectives) : null,
        value.cachedAt,
        value.staleAt,
        existingValue.id
      )
    } else {
      this.#prune()
      // New response, let's insert it
      this.#insertValueQuery.run(
        url,
        key.method,
        body,
        value.deleteAt,
        value.statusCode,
        value.statusMessage,
        value.headers ? JSON.stringify(value.headers) : null,
        value.etag ? value.etag : null,
        value.cacheControlDirectives ? JSON.stringify(value.cacheControlDirectives) : null,
        value.vary ? JSON.stringify(value.vary) : null,
        value.cachedAt,
        value.staleAt
      )
    }
  }

  /**
   * Returns a Writable that buffers the body in memory and commits via
   * set() on finish; oversized bodies destroy the stream uncommitted.
   *
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue} value
   * @returns {Writable | undefined}
   */
  createWriteStream (key, value) {
    assertCacheKey(key)
    assertCacheValue(value)

    let size = 0
    /**
     * @type {Buffer[] | null}
     */
    const body = []
    const store = this

    return new Writable({
      decodeStrings: true,
      write (chunk, encoding, callback) {
        size += chunk.byteLength

        if (size < store.#maxEntrySize) {
          body.push(chunk)
        } else {
          this.destroy()
        }

        callback()
      },
      final (callback) {
        store.set(key, { ...value, body })
        callback()
      }
    })
  }

  /**
   * Deletes every row for the key's URL, regardless of method.
   *
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @throws {TypeError} if key is not an object
   */
  delete (key) {
    if (typeof key !== 'object') {
      throw new TypeError(`expected key to be object, got ${typeof key}`)
    }

    this.#deleteByUrlQuery.run(this.#makeValueUrl(key))
  }

  // Frees space before an insert: first drop expired rows, then (when a
  // finite maxCount is configured) the oldest ~10% by cachedAt.
  // Returns the number of rows removed.
  #prune () {
    if (Number.isFinite(this.#maxCount) && this.size <= this.#maxCount) {
      return 0
    }

    {
      const removed = this.#deleteExpiredValuesQuery.run(Date.now()).changes
      if (removed) {
        return removed
      }
    }

    {
      const removed = this.#deleteOldValuesQuery?.run(Math.max(Math.floor(this.#maxCount * 0.1), 1)).changes
      if (removed) {
        return removed
      }
    }

    return 0
  }

  /**
   * Counts the number of rows in the cache
   * @returns {Number}
   */
  get size () {
    const { total } = this.#countEntriesQuery.get()
    return total
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @returns {string}
   */
  #makeValueUrl (key) {
    return `${key.origin}/${key.path}`
  }

  /**
   * Scans candidate rows (ordered by deleteAt) for one whose Vary
   * constraints match the request headers.
   *
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {boolean} [canBeExpired=false] accept rows past their deleteAt
   * @returns {SqliteStoreValue | undefined}
   */
  #findValue (key, canBeExpired = false) {
    const url = this.#makeValueUrl(key)
    const { headers, method } = key

    /**
     * @type {SqliteStoreValue[]}
     */
    const values = this.#getValuesQuery.all(url, method)

    if (values.length === 0) {
      return undefined
    }

    const now = Date.now()
    for (const value of values) {
      // Rows are ordered by deleteAt ASC, so the first expired row means
      // all remaining rows are expired too.
      if (now >= value.deleteAt && !canBeExpired) {
        return undefined
      }

      let matches = true

      if (value.vary) {
        const vary = JSON.parse(value.vary)

        for (const header in vary) {
          if (!headerValueEquals(headers[header], vary[header])) {
            matches = false
            break
          }
        }
      }

      if (matches) {
        return value
      }
    }

    return undefined
  }
}
|
||||
|
||||
/**
 * Compares two header values for Vary matching. Treats null and
 * undefined as equal "absent" values; arrays must match element-wise in
 * order; anything else is compared with strict equality (a string never
 * equals an array).
 *
 * @param {string|string[]|null|undefined} lhs
 * @param {string|string[]|null|undefined} rhs
 * @returns {boolean}
 */
function headerValueEquals (lhs, rhs) {
  const lhsMissing = lhs == null
  const rhsMissing = rhs == null

  if (lhsMissing || rhsMissing) {
    // Equal only when both sides are absent.
    return lhsMissing && rhsMissing
  }

  if (Array.isArray(lhs) && Array.isArray(rhs)) {
    return lhs.length === rhs.length &&
      lhs.every((value, i) => value === rhs[i])
  }

  return lhs === rhs
}
|
||||
137
backend/node_modules/undici/lib/core/connect.js
generated
vendored
Normal file
137
backend/node_modules/undici/lib/core/connect.js
generated
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
'use strict'
|
||||
|
||||
const net = require('node:net')
|
||||
const assert = require('node:assert')
|
||||
const util = require('./util')
|
||||
const { InvalidArgumentError } = require('./errors')
|
||||
|
||||
let tls // include tls conditionally since it is not always available
|
||||
|
||||
// TODO: session re-use does not wait for the first
|
||||
// connection to resolve the session and might therefore
|
||||
// resolve the same servername multiple times even when
|
||||
// re-use is enabled.
|
||||
|
||||
/**
 * Cache of TLS sessions keyed by servername/hostname, used for TLS session
 * resumption. Sessions are held through WeakRef so the GC can reclaim them;
 * a FinalizationRegistry removes dead entries once the cache has grown to
 * its configured capacity.
 */
const SessionCache = class WeakSessionCache {
  constructor (maxCachedSessions) {
    this._maxCachedSessions = maxCachedSessions
    this._sessionCache = new Map()
    this._sessionRegistry = new FinalizationRegistry((key) => {
      // Only prune once the cache is at/over capacity; below that,
      // stale WeakRef entries are harmless.
      if (this._sessionCache.size < this._maxCachedSessions) {
        return
      }

      // Delete the entry only if its session really has been collected
      // (the key may have been re-used for a newer, live session).
      const ref = this._sessionCache.get(key)
      if (ref !== undefined && ref.deref() === undefined) {
        this._sessionCache.delete(key)
      }
    })
  }

  /**
   * @param {string} sessionKey
   * @returns {object|null|undefined} the cached session, `null` when no
   *   entry exists, `undefined` when the entry was garbage-collected
   */
  get (sessionKey) {
    const ref = this._sessionCache.get(sessionKey)
    if (ref === undefined) {
      return null
    }
    return ref.deref()
  }

  /**
   * @param {string} sessionKey
   * @param {object} session
   */
  set (sessionKey, session) {
    // A capacity of zero disables caching entirely.
    if (this._maxCachedSessions === 0) {
      return
    }

    this._sessionCache.set(sessionKey, new WeakRef(session))
    this._sessionRegistry.register(session, sessionKey)
  }
}
|
||||
|
||||
/**
 * Builds the connector function undici clients use to open TCP or TLS
 * sockets. The returned function dials the target, wires up keep-alive and
 * a connect timeout, and invokes the callback once connected (or errored).
 *
 * @param {object} opts
 * @param {boolean} [opts.allowH2=false] advertise 'h2' alongside 'http/1.1' via ALPN (TLS only)
 * @param {boolean} [opts.useH2c] mark plaintext sockets as h2 (HTTP/2 cleartext)
 * @param {number} [opts.maxCachedSessions=100] max TLS sessions cached for resumption (0 disables)
 * @param {string} [opts.socketPath] unix socket path (forwarded as `path`)
 * @param {number} [opts.timeout=10000] connect timeout in milliseconds
 * @param {object} [opts.session] fixed TLS session to reuse instead of the cache
 * @returns {Function} connect({ hostname, host, protocol, port, ... }, callback) -> socket
 */
function buildConnector ({ allowH2, useH2c, maxCachedSessions, socketPath, timeout, session: customSession, ...opts }) {
  if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
    throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
  }

  const options = { path: socketPath, ...opts }
  const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
  timeout = timeout == null ? 10e3 : timeout
  allowH2 = allowH2 != null ? allowH2 : false
  return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
    let socket
    if (protocol === 'https:') {
      // tls is required lazily since it is not available in every build.
      if (!tls) {
        tls = require('node:tls')
      }
      servername = servername || options.servername || util.getServerName(host) || null

      // TLS sessions are cached per servername (falling back to hostname).
      const sessionKey = servername || hostname
      assert(sessionKey)

      const session = customSession || sessionCache.get(sessionKey) || null

      port = port || 443

      socket = tls.connect({
        highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
        ...options,
        servername,
        session,
        localAddress,
        ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
        socket: httpSocket, // upgrade socket connection
        port,
        host: hostname
      })

      socket
        .on('session', function (session) {
          // TODO (fix): Can a session become invalid once established? Don't think so?
          sessionCache.set(sessionKey, session)
        })
    } else {
      assert(!httpSocket, 'httpSocket can only be sent on TLS update')

      port = port || 80

      socket = net.connect({
        highWaterMark: 64 * 1024, // Same as nodejs fs streams.
        ...options,
        localAddress,
        port,
        host: hostname
      })
      if (useH2c === true) {
        socket.alpnProtocol = 'h2'
      }
    }

    // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
    if (options.keepAlive == null || options.keepAlive) {
      const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
      socket.setKeepAlive(true, keepAliveInitialDelay)
    }

    // WeakRef lets the timeout machinery avoid keeping a dead socket alive.
    const clearConnectTimeout = util.setupConnectTimeout(new WeakRef(socket), { timeout, hostname, port })

    socket
      .setNoDelay(true)
      .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
        queueMicrotask(clearConnectTimeout)

        // callback is nulled after the first invocation so the 'error'
        // handler and this handler can never both fire it.
        if (callback) {
          const cb = callback
          callback = null
          cb(null, this)
        }
      })
      .on('error', function (err) {
        queueMicrotask(clearConnectTimeout)

        if (callback) {
          const cb = callback
          callback = null
          cb(err)
        }
      })

    return socket
  }
}
|
||||
|
||||
module.exports = buildConnector
|
||||
143
backend/node_modules/undici/lib/core/constants.js
generated
vendored
Normal file
143
backend/node_modules/undici/lib/core/constants.js
generated
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* @see https://developer.mozilla.org/docs/Web/HTTP/Headers
|
||||
*/
|
||||
const wellknownHeaderNames = /** @type {const} */ ([
|
||||
'Accept',
|
||||
'Accept-Encoding',
|
||||
'Accept-Language',
|
||||
'Accept-Ranges',
|
||||
'Access-Control-Allow-Credentials',
|
||||
'Access-Control-Allow-Headers',
|
||||
'Access-Control-Allow-Methods',
|
||||
'Access-Control-Allow-Origin',
|
||||
'Access-Control-Expose-Headers',
|
||||
'Access-Control-Max-Age',
|
||||
'Access-Control-Request-Headers',
|
||||
'Access-Control-Request-Method',
|
||||
'Age',
|
||||
'Allow',
|
||||
'Alt-Svc',
|
||||
'Alt-Used',
|
||||
'Authorization',
|
||||
'Cache-Control',
|
||||
'Clear-Site-Data',
|
||||
'Connection',
|
||||
'Content-Disposition',
|
||||
'Content-Encoding',
|
||||
'Content-Language',
|
||||
'Content-Length',
|
||||
'Content-Location',
|
||||
'Content-Range',
|
||||
'Content-Security-Policy',
|
||||
'Content-Security-Policy-Report-Only',
|
||||
'Content-Type',
|
||||
'Cookie',
|
||||
'Cross-Origin-Embedder-Policy',
|
||||
'Cross-Origin-Opener-Policy',
|
||||
'Cross-Origin-Resource-Policy',
|
||||
'Date',
|
||||
'Device-Memory',
|
||||
'Downlink',
|
||||
'ECT',
|
||||
'ETag',
|
||||
'Expect',
|
||||
'Expect-CT',
|
||||
'Expires',
|
||||
'Forwarded',
|
||||
'From',
|
||||
'Host',
|
||||
'If-Match',
|
||||
'If-Modified-Since',
|
||||
'If-None-Match',
|
||||
'If-Range',
|
||||
'If-Unmodified-Since',
|
||||
'Keep-Alive',
|
||||
'Last-Modified',
|
||||
'Link',
|
||||
'Location',
|
||||
'Max-Forwards',
|
||||
'Origin',
|
||||
'Permissions-Policy',
|
||||
'Pragma',
|
||||
'Proxy-Authenticate',
|
||||
'Proxy-Authorization',
|
||||
'RTT',
|
||||
'Range',
|
||||
'Referer',
|
||||
'Referrer-Policy',
|
||||
'Refresh',
|
||||
'Retry-After',
|
||||
'Sec-WebSocket-Accept',
|
||||
'Sec-WebSocket-Extensions',
|
||||
'Sec-WebSocket-Key',
|
||||
'Sec-WebSocket-Protocol',
|
||||
'Sec-WebSocket-Version',
|
||||
'Server',
|
||||
'Server-Timing',
|
||||
'Service-Worker-Allowed',
|
||||
'Service-Worker-Navigation-Preload',
|
||||
'Set-Cookie',
|
||||
'SourceMap',
|
||||
'Strict-Transport-Security',
|
||||
'Supports-Loading-Mode',
|
||||
'TE',
|
||||
'Timing-Allow-Origin',
|
||||
'Trailer',
|
||||
'Transfer-Encoding',
|
||||
'Upgrade',
|
||||
'Upgrade-Insecure-Requests',
|
||||
'User-Agent',
|
||||
'Vary',
|
||||
'Via',
|
||||
'WWW-Authenticate',
|
||||
'X-Content-Type-Options',
|
||||
'X-DNS-Prefetch-Control',
|
||||
'X-Frame-Options',
|
||||
'X-Permitted-Cross-Domain-Policies',
|
||||
'X-Powered-By',
|
||||
'X-Requested-With',
|
||||
'X-XSS-Protection'
|
||||
])
|
||||
|
||||
/** @type {Record<typeof wellknownHeaderNames[number]|Lowercase<typeof wellknownHeaderNames[number]>, string>} */
|
||||
const headerNameLowerCasedRecord = {}
|
||||
|
||||
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
|
||||
Object.setPrototypeOf(headerNameLowerCasedRecord, null)
|
||||
|
||||
/**
|
||||
* @type {Record<Lowercase<typeof wellknownHeaderNames[number]>, Buffer>}
|
||||
*/
|
||||
const wellknownHeaderNameBuffers = {}
|
||||
|
||||
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
|
||||
Object.setPrototypeOf(wellknownHeaderNameBuffers, null)
|
||||
|
||||
/**
|
||||
* @param {string} header Lowercased header
|
||||
* @returns {Buffer}
|
||||
*/
|
||||
/**
 * Returns the Buffer form of a lowercased header name, using the
 * precomputed well-known-header table when possible. Misses are not
 * added back into the table.
 *
 * @param {string} header Lowercased header
 * @returns {Buffer}
 */
function getHeaderNameAsBuffer (header) {
  const cached = wellknownHeaderNameBuffers[header]

  if (cached !== undefined) {
    return cached
  }

  return Buffer.from(header)
}
|
||||
|
||||
// Populate headerNameLowerCasedRecord so that both the canonical spelling
// ("Content-Type") and the lowercased spelling ("content-type") map to the
// lowercased name, giving O(1) case-normalization lookups.
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
  const key = wellknownHeaderNames[i]
  const lowerCasedKey = key.toLowerCase()
  headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
    lowerCasedKey
}
|
||||
|
||||
module.exports = {
|
||||
wellknownHeaderNames,
|
||||
headerNameLowerCasedRecord,
|
||||
getHeaderNameAsBuffer
|
||||
}
|
||||
225
backend/node_modules/undici/lib/core/diagnostics.js
generated
vendored
Normal file
225
backend/node_modules/undici/lib/core/diagnostics.js
generated
vendored
Normal file
@@ -0,0 +1,225 @@
|
||||
'use strict'
|
||||
|
||||
const diagnosticsChannel = require('node:diagnostics_channel')
|
||||
const util = require('node:util')
|
||||
|
||||
const undiciDebugLog = util.debuglog('undici')
|
||||
const fetchDebuglog = util.debuglog('fetch')
|
||||
const websocketDebuglog = util.debuglog('websocket')
|
||||
|
||||
const channels = {
|
||||
// Client
|
||||
beforeConnect: diagnosticsChannel.channel('undici:client:beforeConnect'),
|
||||
connected: diagnosticsChannel.channel('undici:client:connected'),
|
||||
connectError: diagnosticsChannel.channel('undici:client:connectError'),
|
||||
sendHeaders: diagnosticsChannel.channel('undici:client:sendHeaders'),
|
||||
// Request
|
||||
create: diagnosticsChannel.channel('undici:request:create'),
|
||||
bodySent: diagnosticsChannel.channel('undici:request:bodySent'),
|
||||
bodyChunkSent: diagnosticsChannel.channel('undici:request:bodyChunkSent'),
|
||||
bodyChunkReceived: diagnosticsChannel.channel('undici:request:bodyChunkReceived'),
|
||||
headers: diagnosticsChannel.channel('undici:request:headers'),
|
||||
trailers: diagnosticsChannel.channel('undici:request:trailers'),
|
||||
error: diagnosticsChannel.channel('undici:request:error'),
|
||||
// WebSocket
|
||||
open: diagnosticsChannel.channel('undici:websocket:open'),
|
||||
close: diagnosticsChannel.channel('undici:websocket:close'),
|
||||
socketError: diagnosticsChannel.channel('undici:websocket:socket_error'),
|
||||
ping: diagnosticsChannel.channel('undici:websocket:ping'),
|
||||
pong: diagnosticsChannel.channel('undici:websocket:pong'),
|
||||
// ProxyAgent
|
||||
proxyConnected: diagnosticsChannel.channel('undici:proxy:connected')
|
||||
}
|
||||
|
||||
let isTrackingClientEvents = false
|
||||
|
||||
/**
 * Subscribes a debug logger to the client-level diagnostics channels
 * (beforeConnect / connected / connectError / sendHeaders).
 *
 * Idempotent: a module-level flag guards against repeat calls, and
 * subscription is skipped entirely when the channels already have
 * subscribers — which happens when both the Node.js built-in undici and
 * undici-as-a-dependency are loaded in the same process.
 *
 * @param {Function} [debugLog=undiciDebugLog] util.debuglog-style logger
 */
function trackClientEvents (debugLog = undiciDebugLog) {
  if (isTrackingClientEvents) {
    return
  }

  // Check if any of the channels already have subscribers to prevent duplicate subscriptions
  // This can happen when both Node.js built-in undici and undici as a dependency are present
  if (channels.beforeConnect.hasSubscribers || channels.connected.hasSubscribers ||
    channels.connectError.hasSubscribers || channels.sendHeaders.hasSubscribers) {
    isTrackingClientEvents = true
    return
  }

  isTrackingClientEvents = true

  diagnosticsChannel.subscribe('undici:client:beforeConnect',
    evt => {
      const {
        connectParams: { version, protocol, port, host }
      } = evt
      debugLog(
        'connecting to %s%s using %s%s',
        host,
        port ? `:${port}` : '',
        protocol,
        version
      )
    })

  diagnosticsChannel.subscribe('undici:client:connected',
    evt => {
      const {
        connectParams: { version, protocol, port, host }
      } = evt
      debugLog(
        'connected to %s%s using %s%s',
        host,
        port ? `:${port}` : '',
        protocol,
        version
      )
    })

  diagnosticsChannel.subscribe('undici:client:connectError',
    evt => {
      const {
        connectParams: { version, protocol, port, host },
        error
      } = evt
      debugLog(
        'connection to %s%s using %s%s errored - %s',
        host,
        port ? `:${port}` : '',
        protocol,
        version,
        error.message
      )
    })

  diagnosticsChannel.subscribe('undici:client:sendHeaders',
    evt => {
      const {
        request: { method, path, origin }
      } = evt
      debugLog('sending request to %s %s%s', method, origin, path)
    })
}
|
||||
|
||||
let isTrackingRequestEvents = false
|
||||
|
||||
/**
 * Subscribes a debug logger to the request-level diagnostics channels
 * (headers / trailers / error). Idempotent: guarded by a module-level flag,
 * and skipped when the channels already have subscribers (e.g. when both
 * the built-in and the dependency copy of undici are loaded).
 *
 * @param {Function} [debugLog=undiciDebugLog] util.debuglog-style logger
 */
function trackRequestEvents (debugLog = undiciDebugLog) {
  if (isTrackingRequestEvents) {
    return
  }

  // Check if any of the channels already have subscribers to prevent duplicate subscriptions
  // This can happen when both Node.js built-in undici and undici as a dependency are present
  const alreadySubscribed =
    channels.headers.hasSubscribers ||
    channels.trailers.hasSubscribers ||
    channels.error.hasSubscribers

  isTrackingRequestEvents = true

  if (alreadySubscribed) {
    return
  }

  diagnosticsChannel.subscribe('undici:request:headers', (evt) => {
    const { request, response } = evt
    debugLog(
      'received response to %s %s%s - HTTP %d',
      request.method,
      request.origin,
      request.path,
      response.statusCode
    )
  })

  diagnosticsChannel.subscribe('undici:request:trailers', (evt) => {
    const { request } = evt
    debugLog('trailers received from %s %s%s', request.method, request.origin, request.path)
  })

  diagnosticsChannel.subscribe('undici:request:error', (evt) => {
    const { request, error } = evt
    debugLog(
      'request to %s %s%s errored - %s',
      request.method,
      request.origin,
      request.path,
      error.message
    )
  })
}
|
||||
|
||||
let isTrackingWebSocketEvents = false
|
||||
|
||||
/**
 * Subscribes a debug logger to the WebSocket diagnostics channels
 * (open / close / socket_error / ping / pong).
 *
 * Idempotent: a module-level flag guards against repeat calls, and
 * subscription is skipped when the channels already have subscribers
 * (both the built-in and the dependency copy of undici may be loaded).
 *
 * @param {Function} [debugLog=websocketDebuglog] util.debuglog-style logger
 */
function trackWebSocketEvents (debugLog = websocketDebuglog) {
  if (isTrackingWebSocketEvents) {
    return
  }

  // Check if any of the channels already have subscribers to prevent duplicate subscriptions
  // This can happen when both Node.js built-in undici and undici as a dependency are present
  if (channels.open.hasSubscribers || channels.close.hasSubscribers ||
    channels.socketError.hasSubscribers || channels.ping.hasSubscribers ||
    channels.pong.hasSubscribers) {
    isTrackingWebSocketEvents = true
    return
  }

  isTrackingWebSocketEvents = true

  diagnosticsChannel.subscribe('undici:websocket:open',
    evt => {
      const {
        address: { address, port }
      } = evt
      debugLog('connection opened %s%s', address, port ? `:${port}` : '')
    })

  diagnosticsChannel.subscribe('undici:websocket:close',
    evt => {
      const { websocket, code, reason } = evt
      debugLog(
        'closed connection to %s - %s %s',
        websocket.url,
        code,
        reason
      )
    })

  diagnosticsChannel.subscribe('undici:websocket:socket_error',
    err => {
      debugLog('connection errored - %s', err.message)
    })

  diagnosticsChannel.subscribe('undici:websocket:ping',
    evt => {
      debugLog('ping received')
    })

  diagnosticsChannel.subscribe('undici:websocket:pong',
    evt => {
      debugLog('pong received')
    })
}
|
||||
|
||||
if (undiciDebugLog.enabled || fetchDebuglog.enabled) {
|
||||
trackClientEvents(fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog)
|
||||
trackRequestEvents(fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog)
|
||||
}
|
||||
|
||||
if (websocketDebuglog.enabled) {
|
||||
trackClientEvents(undiciDebugLog.enabled ? undiciDebugLog : websocketDebuglog)
|
||||
trackWebSocketEvents(websocketDebuglog)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
channels
|
||||
}
|
||||
448
backend/node_modules/undici/lib/core/errors.js
generated
vendored
Normal file
448
backend/node_modules/undici/lib/core/errors.js
generated
vendored
Normal file
@@ -0,0 +1,448 @@
|
||||
'use strict'
|
||||
|
||||
// Brand symbol used for instanceof checks that survive multiple copies
// of undici being loaded (Symbol.for is registry-global).
const kUndiciError = Symbol.for('undici.error.UND_ERR')

/**
 * Base class for all undici errors.
 *
 * `instanceof` is implemented with a Symbol brand via Symbol.hasInstance
 * rather than the prototype chain, so instances created by a different
 * copy of undici in the same process still match.
 */
class UndiciError extends Error {
  constructor (message, options) {
    super(message, options)
    this.name = 'UndiciError'
    this.code = 'UND_ERR'
  }

  get [kUndiciError] () {
    return true
  }

  static [Symbol.hasInstance] (instance) {
    return instance && instance[kUndiciError] === true
  }
}
|
||||
|
||||
const kConnectTimeoutError = Symbol.for('undici.error.UND_ERR_CONNECT_TIMEOUT')
|
||||
class ConnectTimeoutError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'ConnectTimeoutError'
|
||||
this.message = message || 'Connect Timeout Error'
|
||||
this.code = 'UND_ERR_CONNECT_TIMEOUT'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kConnectTimeoutError] === true
|
||||
}
|
||||
|
||||
get [kConnectTimeoutError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kHeadersTimeoutError = Symbol.for('undici.error.UND_ERR_HEADERS_TIMEOUT')
|
||||
class HeadersTimeoutError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'HeadersTimeoutError'
|
||||
this.message = message || 'Headers Timeout Error'
|
||||
this.code = 'UND_ERR_HEADERS_TIMEOUT'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kHeadersTimeoutError] === true
|
||||
}
|
||||
|
||||
get [kHeadersTimeoutError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kHeadersOverflowError = Symbol.for('undici.error.UND_ERR_HEADERS_OVERFLOW')
|
||||
class HeadersOverflowError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'HeadersOverflowError'
|
||||
this.message = message || 'Headers Overflow Error'
|
||||
this.code = 'UND_ERR_HEADERS_OVERFLOW'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kHeadersOverflowError] === true
|
||||
}
|
||||
|
||||
get [kHeadersOverflowError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kBodyTimeoutError = Symbol.for('undici.error.UND_ERR_BODY_TIMEOUT')
|
||||
class BodyTimeoutError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'BodyTimeoutError'
|
||||
this.message = message || 'Body Timeout Error'
|
||||
this.code = 'UND_ERR_BODY_TIMEOUT'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kBodyTimeoutError] === true
|
||||
}
|
||||
|
||||
get [kBodyTimeoutError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kInvalidArgumentError = Symbol.for('undici.error.UND_ERR_INVALID_ARG')
|
||||
class InvalidArgumentError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'InvalidArgumentError'
|
||||
this.message = message || 'Invalid Argument Error'
|
||||
this.code = 'UND_ERR_INVALID_ARG'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kInvalidArgumentError] === true
|
||||
}
|
||||
|
||||
get [kInvalidArgumentError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kInvalidReturnValueError = Symbol.for('undici.error.UND_ERR_INVALID_RETURN_VALUE')
|
||||
class InvalidReturnValueError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'InvalidReturnValueError'
|
||||
this.message = message || 'Invalid Return Value Error'
|
||||
this.code = 'UND_ERR_INVALID_RETURN_VALUE'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kInvalidReturnValueError] === true
|
||||
}
|
||||
|
||||
get [kInvalidReturnValueError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kAbortError = Symbol.for('undici.error.UND_ERR_ABORT')
|
||||
class AbortError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'AbortError'
|
||||
this.message = message || 'The operation was aborted'
|
||||
this.code = 'UND_ERR_ABORT'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kAbortError] === true
|
||||
}
|
||||
|
||||
get [kAbortError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kRequestAbortedError = Symbol.for('undici.error.UND_ERR_ABORTED')
|
||||
class RequestAbortedError extends AbortError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'AbortError'
|
||||
this.message = message || 'Request aborted'
|
||||
this.code = 'UND_ERR_ABORTED'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kRequestAbortedError] === true
|
||||
}
|
||||
|
||||
get [kRequestAbortedError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kInformationalError = Symbol.for('undici.error.UND_ERR_INFO')
|
||||
class InformationalError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'InformationalError'
|
||||
this.message = message || 'Request information'
|
||||
this.code = 'UND_ERR_INFO'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kInformationalError] === true
|
||||
}
|
||||
|
||||
get [kInformationalError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kRequestContentLengthMismatchError = Symbol.for('undici.error.UND_ERR_REQ_CONTENT_LENGTH_MISMATCH')
|
||||
class RequestContentLengthMismatchError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'RequestContentLengthMismatchError'
|
||||
this.message = message || 'Request body length does not match content-length header'
|
||||
this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kRequestContentLengthMismatchError] === true
|
||||
}
|
||||
|
||||
get [kRequestContentLengthMismatchError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kResponseContentLengthMismatchError = Symbol.for('undici.error.UND_ERR_RES_CONTENT_LENGTH_MISMATCH')
|
||||
class ResponseContentLengthMismatchError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'ResponseContentLengthMismatchError'
|
||||
this.message = message || 'Response body length does not match content-length header'
|
||||
this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kResponseContentLengthMismatchError] === true
|
||||
}
|
||||
|
||||
get [kResponseContentLengthMismatchError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kClientDestroyedError = Symbol.for('undici.error.UND_ERR_DESTROYED')
|
||||
class ClientDestroyedError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'ClientDestroyedError'
|
||||
this.message = message || 'The client is destroyed'
|
||||
this.code = 'UND_ERR_DESTROYED'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kClientDestroyedError] === true
|
||||
}
|
||||
|
||||
get [kClientDestroyedError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kClientClosedError = Symbol.for('undici.error.UND_ERR_CLOSED')
|
||||
class ClientClosedError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'ClientClosedError'
|
||||
this.message = message || 'The client is closed'
|
||||
this.code = 'UND_ERR_CLOSED'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kClientClosedError] === true
|
||||
}
|
||||
|
||||
get [kClientClosedError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kSocketError = Symbol.for('undici.error.UND_ERR_SOCKET')
|
||||
class SocketError extends UndiciError {
|
||||
constructor (message, socket) {
|
||||
super(message)
|
||||
this.name = 'SocketError'
|
||||
this.message = message || 'Socket error'
|
||||
this.code = 'UND_ERR_SOCKET'
|
||||
this.socket = socket
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kSocketError] === true
|
||||
}
|
||||
|
||||
get [kSocketError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kNotSupportedError = Symbol.for('undici.error.UND_ERR_NOT_SUPPORTED')
|
||||
class NotSupportedError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'NotSupportedError'
|
||||
this.message = message || 'Not supported error'
|
||||
this.code = 'UND_ERR_NOT_SUPPORTED'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kNotSupportedError] === true
|
||||
}
|
||||
|
||||
get [kNotSupportedError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kBalancedPoolMissingUpstreamError = Symbol.for('undici.error.UND_ERR_BPL_MISSING_UPSTREAM')
|
||||
class BalancedPoolMissingUpstreamError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'MissingUpstreamError'
|
||||
this.message = message || 'No upstream has been added to the BalancedPool'
|
||||
this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kBalancedPoolMissingUpstreamError] === true
|
||||
}
|
||||
|
||||
get [kBalancedPoolMissingUpstreamError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kHTTPParserError = Symbol.for('undici.error.UND_ERR_HTTP_PARSER')
|
||||
class HTTPParserError extends Error {
|
||||
constructor (message, code, data) {
|
||||
super(message)
|
||||
this.name = 'HTTPParserError'
|
||||
this.code = code ? `HPE_${code}` : undefined
|
||||
this.data = data ? data.toString() : undefined
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kHTTPParserError] === true
|
||||
}
|
||||
|
||||
get [kHTTPParserError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kResponseExceededMaxSizeError = Symbol.for('undici.error.UND_ERR_RES_EXCEEDED_MAX_SIZE')
|
||||
class ResponseExceededMaxSizeError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'ResponseExceededMaxSizeError'
|
||||
this.message = message || 'Response content exceeded max size'
|
||||
this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kResponseExceededMaxSizeError] === true
|
||||
}
|
||||
|
||||
get [kResponseExceededMaxSizeError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kRequestRetryError = Symbol.for('undici.error.UND_ERR_REQ_RETRY')

/**
 * Error produced when a request ultimately fails after retrying.
 *
 * @param {string} [message] defaults to 'Request retry error'
 * @param {number} code HTTP status code of the final response, stored as `statusCode`
 * @param {object} [info] response details; previously a required argument,
 *   now defaulted so constructing the error without it cannot itself throw
 * @param {object} [info.headers] response headers
 * @param {*} [info.data] response data
 */
class RequestRetryError extends UndiciError {
  // `= {}` guards against a TypeError from destructuring when the third
  // argument is omitted; callers passing the object are unaffected.
  constructor (message, code, { headers, data } = {}) {
    super(message)
    this.name = 'RequestRetryError'
    this.message = message || 'Request retry error'
    this.code = 'UND_ERR_REQ_RETRY'
    this.statusCode = code
    this.data = data
    this.headers = headers
  }

  static [Symbol.hasInstance] (instance) {
    return instance && instance[kRequestRetryError] === true
  }

  get [kRequestRetryError] () {
    return true
  }
}
|
||||
|
||||
const kResponseError = Symbol.for('undici.error.UND_ERR_RESPONSE')

/**
 * Error representing a non-successful HTTP response surfaced as a failure.
 *
 * @param {string} [message] defaults to 'Response error'
 * @param {number} code HTTP status code, stored as `statusCode`
 * @param {object} [info] response details; previously a required argument,
 *   now defaulted so constructing the error without it cannot itself throw
 * @param {object} [info.headers] response headers
 * @param {*} [info.body] response body
 */
class ResponseError extends UndiciError {
  // `= {}` guards against a TypeError from destructuring when the third
  // argument is omitted; callers passing the object are unaffected.
  constructor (message, code, { headers, body } = {}) {
    super(message)
    this.name = 'ResponseError'
    this.message = message || 'Response error'
    this.code = 'UND_ERR_RESPONSE'
    this.statusCode = code
    this.body = body
    this.headers = headers
  }

  static [Symbol.hasInstance] (instance) {
    return instance && instance[kResponseError] === true
  }

  get [kResponseError] () {
    return true
  }
}
|
||||
|
||||
const kSecureProxyConnectionError = Symbol.for('undici.error.UND_ERR_PRX_TLS')
|
||||
class SecureProxyConnectionError extends UndiciError {
|
||||
constructor (cause, message, options = {}) {
|
||||
super(message, { cause, ...options })
|
||||
this.name = 'SecureProxyConnectionError'
|
||||
this.message = message || 'Secure Proxy Connection failed'
|
||||
this.code = 'UND_ERR_PRX_TLS'
|
||||
this.cause = cause
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kSecureProxyConnectionError] === true
|
||||
}
|
||||
|
||||
get [kSecureProxyConnectionError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const kMaxOriginsReachedError = Symbol.for('undici.error.UND_ERR_MAX_ORIGINS_REACHED')
|
||||
class MaxOriginsReachedError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'MaxOriginsReachedError'
|
||||
this.message = message || 'Maximum allowed origins reached'
|
||||
this.code = 'UND_ERR_MAX_ORIGINS_REACHED'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kMaxOriginsReachedError] === true
|
||||
}
|
||||
|
||||
get [kMaxOriginsReachedError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
AbortError,
|
||||
HTTPParserError,
|
||||
UndiciError,
|
||||
HeadersTimeoutError,
|
||||
HeadersOverflowError,
|
||||
BodyTimeoutError,
|
||||
RequestContentLengthMismatchError,
|
||||
ConnectTimeoutError,
|
||||
InvalidArgumentError,
|
||||
InvalidReturnValueError,
|
||||
RequestAbortedError,
|
||||
ClientDestroyedError,
|
||||
ClientClosedError,
|
||||
InformationalError,
|
||||
SocketError,
|
||||
NotSupportedError,
|
||||
ResponseContentLengthMismatchError,
|
||||
BalancedPoolMissingUpstreamError,
|
||||
ResponseExceededMaxSizeError,
|
||||
RequestRetryError,
|
||||
ResponseError,
|
||||
SecureProxyConnectionError,
|
||||
MaxOriginsReachedError
|
||||
}
|
||||
412
backend/node_modules/undici/lib/core/request.js
generated
vendored
Normal file
412
backend/node_modules/undici/lib/core/request.js
generated
vendored
Normal file
@@ -0,0 +1,412 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
InvalidArgumentError,
|
||||
NotSupportedError
|
||||
} = require('./errors')
|
||||
const assert = require('node:assert')
|
||||
const {
|
||||
isValidHTTPToken,
|
||||
isValidHeaderValue,
|
||||
isStream,
|
||||
destroy,
|
||||
isBuffer,
|
||||
isFormDataLike,
|
||||
isIterable,
|
||||
isBlobLike,
|
||||
serializePathWithQuery,
|
||||
assertRequestHandler,
|
||||
getServerName,
|
||||
normalizedMethodRecords,
|
||||
getProtocolFromUrlString
|
||||
} = require('./util')
|
||||
const { channels } = require('./diagnostics.js')
|
||||
const { headerNameLowerCasedRecord } = require('./constants')
|
||||
|
||||
// Verifies that a given path is valid does not contain control chars \x00 to \x20
|
||||
const invalidPathRegex = /[^\u0021-\u00ff]/
|
||||
|
||||
const kHandler = Symbol('handler')
|
||||
|
||||
/**
 * Internal representation of a single dispatched HTTP request. Validates all
 * user-supplied options up front, normalizes the body into one of the
 * supported forms, and forwards lifecycle events to the user handler.
 */
class Request {
  constructor (origin, {
    path,
    method,
    body,
    headers,
    query,
    idempotent,
    blocking,
    upgrade,
    headersTimeout,
    bodyTimeout,
    reset,
    expectContinue,
    servername,
    throwOnError,
    maxRedirections
  }, handler) {
    if (typeof path !== 'string') {
      throw new InvalidArgumentError('path must be a string')
    } else if (
      path[0] !== '/' &&
      !(path.startsWith('http://') || path.startsWith('https://')) &&
      method !== 'CONNECT'
    ) {
      throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
    } else if (invalidPathRegex.test(path)) {
      throw new InvalidArgumentError('invalid request path')
    }

    if (typeof method !== 'string') {
      throw new InvalidArgumentError('method must be a string')
    } else if (normalizedMethodRecords[method] === undefined && !isValidHTTPToken(method)) {
      throw new InvalidArgumentError('invalid request method')
    }

    if (upgrade && typeof upgrade !== 'string') {
      throw new InvalidArgumentError('upgrade must be a string')
    }

    if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
      throw new InvalidArgumentError('invalid headersTimeout')
    }

    if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
      throw new InvalidArgumentError('invalid bodyTimeout')
    }

    if (reset != null && typeof reset !== 'boolean') {
      throw new InvalidArgumentError('invalid reset')
    }

    if (expectContinue != null && typeof expectContinue !== 'boolean') {
      throw new InvalidArgumentError('invalid expectContinue')
    }

    // These two options were removed; rejecting them loudly is better than
    // silently ignoring them.
    if (throwOnError != null) {
      throw new InvalidArgumentError('invalid throwOnError')
    }

    if (maxRedirections != null && maxRedirections !== 0) {
      throw new InvalidArgumentError('maxRedirections is not supported, use the redirect interceptor')
    }

    this.headersTimeout = headersTimeout

    this.bodyTimeout = bodyTimeout

    this.method = method

    this.abort = null

    // Normalize the body: streams are monitored for errors/end, buffer-like
    // inputs are converted to Buffer (empty ones become null), and
    // FormData/iterable/blob bodies pass through untouched.
    if (body == null) {
      this.body = null
    } else if (isStream(body)) {
      this.body = body

      const readableState = this.body._readableState
      if (!readableState || !readableState.autoDestroy) {
        // Older streams are not auto-destroyed on 'end'; do it manually.
        this.endHandler = function autoDestroy () {
          destroy(this)
        }
        this.body.on('end', this.endHandler)
      }

      // Errors emitted before dispatch are stashed and replayed in onConnect.
      this.errorHandler = err => {
        if (this.abort) {
          this.abort(err)
        } else {
          this.error = err
        }
      }
      this.body.on('error', this.errorHandler)
    } else if (isBuffer(body)) {
      this.body = body.byteLength ? body : null
    } else if (ArrayBuffer.isView(body)) {
      this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
    } else if (body instanceof ArrayBuffer) {
      this.body = body.byteLength ? Buffer.from(body) : null
    } else if (typeof body === 'string') {
      this.body = body.length ? Buffer.from(body) : null
    } else if (isFormDataLike(body) || isIterable(body) || isBlobLike(body)) {
      this.body = body
    } else {
      throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
    }

    this.completed = false
    this.aborted = false

    this.upgrade = upgrade || null

    this.path = query ? serializePathWithQuery(path, query) : path

    // TODO: shall we maybe standardize it to an URL object?
    this.origin = origin

    this.protocol = getProtocolFromUrlString(origin)

    // GET/HEAD default to idempotent unless the caller says otherwise.
    this.idempotent = idempotent == null
      ? method === 'HEAD' || method === 'GET'
      : idempotent

    this.blocking = blocking ?? this.method !== 'HEAD'

    this.reset = reset == null ? null : reset

    this.host = null

    this.contentLength = null

    this.contentType = null

    this.headers = []

    // Only for H2
    this.expectContinue = expectContinue != null ? expectContinue : false

    // Headers may arrive as a flat [k, v, k, v] array, an iterable of
    // [k, v] pairs, or a plain object.
    if (Array.isArray(headers)) {
      if (headers.length % 2 !== 0) {
        throw new InvalidArgumentError('headers array must be even')
      }
      for (let i = 0; i < headers.length; i += 2) {
        processHeader(this, headers[i], headers[i + 1])
      }
    } else if (headers && typeof headers === 'object') {
      if (headers[Symbol.iterator]) {
        for (const header of headers) {
          if (!Array.isArray(header) || header.length !== 2) {
            throw new InvalidArgumentError('headers must be in key-value pair format')
          }
          processHeader(this, header[0], header[1])
        }
      } else {
        const keys = Object.keys(headers)
        for (let i = 0; i < keys.length; ++i) {
          processHeader(this, keys[i], headers[keys[i]])
        }
      }
    } else if (headers != null) {
      throw new InvalidArgumentError('headers must be an object or an array')
    }

    assertRequestHandler(handler, method, upgrade)

    this.servername = servername || getServerName(this.host) || null

    this[kHandler] = handler

    if (channels.create.hasSubscribers) {
      channels.create.publish({ request: this })
    }
  }

  // Forwards a sent body chunk to diagnostics and the handler.
  onBodySent (chunk) {
    if (channels.bodyChunkSent.hasSubscribers) {
      channels.bodyChunkSent.publish({ request: this, chunk })
    }
    if (this[kHandler].onBodySent) {
      try {
        return this[kHandler].onBodySent(chunk)
      } catch (err) {
        this.abort(err)
      }
    }
  }

  // Called once the full request (headers + body) has been written out.
  onRequestSent () {
    if (channels.bodySent.hasSubscribers) {
      channels.bodySent.publish({ request: this })
    }

    if (this[kHandler].onRequestSent) {
      try {
        return this[kHandler].onRequestSent()
      } catch (err) {
        this.abort(err)
      }
    }
  }

  // Wires the abort callback; replays any error a stream body emitted
  // before the connection was established.
  onConnect (abort) {
    assert(!this.aborted)
    assert(!this.completed)

    if (this.error) {
      abort(this.error)
    } else {
      this.abort = abort
      return this[kHandler].onConnect(abort)
    }
  }

  onResponseStarted () {
    return this[kHandler].onResponseStarted?.()
  }

  // Delivers response status + headers; handler errors abort the request.
  onHeaders (statusCode, headers, resume, statusText) {
    assert(!this.aborted)
    assert(!this.completed)

    if (channels.headers.hasSubscribers) {
      channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
    }

    try {
      return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
    } catch (err) {
      this.abort(err)
    }
  }

  // Delivers a response body chunk; returns false on handler failure so the
  // caller stops reading.
  onData (chunk) {
    assert(!this.aborted)
    assert(!this.completed)

    if (channels.bodyChunkReceived.hasSubscribers) {
      channels.bodyChunkReceived.publish({ request: this, chunk })
    }
    try {
      return this[kHandler].onData(chunk)
    } catch (err) {
      this.abort(err)
      return false
    }
  }

  onUpgrade (statusCode, headers, socket) {
    assert(!this.aborted)
    assert(!this.completed)

    return this[kHandler].onUpgrade(statusCode, headers, socket)
  }

  // Marks the request complete and forwards trailers to the handler.
  onComplete (trailers) {
    this.onFinally()

    assert(!this.aborted)
    assert(!this.completed)

    this.completed = true
    if (channels.trailers.hasSubscribers) {
      channels.trailers.publish({ request: this, trailers })
    }

    try {
      return this[kHandler].onComplete(trailers)
    } catch (err) {
      // TODO (fix): This might be a bad idea?
      this.onError(err)
    }
  }

  // Terminal error path; idempotent with respect to repeated aborts.
  onError (error) {
    this.onFinally()

    if (channels.error.hasSubscribers) {
      channels.error.publish({ request: this, error })
    }

    if (this.aborted) {
      return
    }
    this.aborted = true

    return this[kHandler].onError(error)
  }

  // Detaches the listeners attached to a stream body in the constructor.
  onFinally () {
    if (this.errorHandler) {
      this.body.off('error', this.errorHandler)
      this.errorHandler = null
    }

    if (this.endHandler) {
      this.body.off('end', this.endHandler)
      this.endHandler = null
    }
  }

  addHeader (key, value) {
    processHeader(this, key, value)
    return this
  }
}
|
||||
|
||||
/**
 * Validates one header and records it on `request.headers`, with special
 * handling for host, content-length, content-type, connection and a set of
 * forbidden hop-by-hop headers.
 * @param {Request} request
 * @param {string} key raw header name as supplied by the caller
 * @param {*} val header value (string, array, null, or stringifiable scalar)
 * @throws {InvalidArgumentError} on malformed names/values or forbidden headers
 * @throws {NotSupportedError} for the `expect` header
 */
function processHeader (request, key, val) {
  // Reject non-array objects outright; `undefined` values are skipped.
  if (val && (typeof val === 'object' && !Array.isArray(val))) {
    throw new InvalidArgumentError(`invalid ${key} header`)
  } else if (val === undefined) {
    return
  }

  // Fast path: well-known names resolve via lookup table without lowercasing.
  let headerName = headerNameLowerCasedRecord[key]

  if (headerName === undefined) {
    headerName = key.toLowerCase()
    if (headerNameLowerCasedRecord[headerName] === undefined && !isValidHTTPToken(headerName)) {
      throw new InvalidArgumentError('invalid header key')
    }
  }

  // Normalize the value: arrays element-wise, null -> '', scalars stringified.
  if (Array.isArray(val)) {
    const normalized = []
    for (let i = 0; i < val.length; i++) {
      const item = val[i]
      if (typeof item === 'string') {
        if (!isValidHeaderValue(item)) {
          throw new InvalidArgumentError(`invalid ${key} header`)
        }
        normalized.push(item)
      } else if (item === null) {
        normalized.push('')
      } else if (typeof item === 'object') {
        throw new InvalidArgumentError(`invalid ${key} header`)
      } else {
        normalized.push(`${item}`)
      }
    }
    val = normalized
  } else if (typeof val === 'string') {
    if (!isValidHeaderValue(val)) {
      throw new InvalidArgumentError(`invalid ${key} header`)
    }
  } else if (val === null) {
    val = ''
  } else {
    val = `${val}`
  }

  if (request.host === null && headerName === 'host') {
    if (typeof val !== 'string') {
      throw new InvalidArgumentError('invalid host header')
    }
    // Consumed by Client
    request.host = val
  } else if (request.contentLength === null && headerName === 'content-length') {
    request.contentLength = parseInt(val, 10)
    if (!Number.isFinite(request.contentLength)) {
      throw new InvalidArgumentError('invalid content-length header')
    }
  } else if (request.contentType === null && headerName === 'content-type') {
    request.contentType = val
    request.headers.push(key, val)
  } else if (headerName === 'transfer-encoding' || headerName === 'keep-alive' || headerName === 'upgrade') {
    // Hop-by-hop headers are managed by undici itself.
    throw new InvalidArgumentError(`invalid ${headerName} header`)
  } else if (headerName === 'connection') {
    const connection = typeof val === 'string' ? val.toLowerCase() : null
    if (connection !== 'close' && connection !== 'keep-alive') {
      throw new InvalidArgumentError('invalid connection header')
    }

    if (connection === 'close') {
      request.reset = true
    }
  } else if (headerName === 'expect') {
    throw new NotSupportedError('expect header not supported')
  } else {
    request.headers.push(key, val)
  }
}

module.exports = Request
|
||||
73
backend/node_modules/undici/lib/core/symbols.js
generated
vendored
Normal file
73
backend/node_modules/undici/lib/core/symbols.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = {
|
||||
kClose: Symbol('close'),
|
||||
kDestroy: Symbol('destroy'),
|
||||
kDispatch: Symbol('dispatch'),
|
||||
kUrl: Symbol('url'),
|
||||
kWriting: Symbol('writing'),
|
||||
kResuming: Symbol('resuming'),
|
||||
kQueue: Symbol('queue'),
|
||||
kConnect: Symbol('connect'),
|
||||
kConnecting: Symbol('connecting'),
|
||||
kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
|
||||
kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
|
||||
kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
|
||||
kKeepAliveTimeoutValue: Symbol('keep alive timeout'),
|
||||
kKeepAlive: Symbol('keep alive'),
|
||||
kHeadersTimeout: Symbol('headers timeout'),
|
||||
kBodyTimeout: Symbol('body timeout'),
|
||||
kServerName: Symbol('server name'),
|
||||
kLocalAddress: Symbol('local address'),
|
||||
kHost: Symbol('host'),
|
||||
kNoRef: Symbol('no ref'),
|
||||
kBodyUsed: Symbol('used'),
|
||||
kBody: Symbol('abstracted request body'),
|
||||
kRunning: Symbol('running'),
|
||||
kBlocking: Symbol('blocking'),
|
||||
kPending: Symbol('pending'),
|
||||
kSize: Symbol('size'),
|
||||
kBusy: Symbol('busy'),
|
||||
kQueued: Symbol('queued'),
|
||||
kFree: Symbol('free'),
|
||||
kConnected: Symbol('connected'),
|
||||
kClosed: Symbol('closed'),
|
||||
kNeedDrain: Symbol('need drain'),
|
||||
kReset: Symbol('reset'),
|
||||
kDestroyed: Symbol.for('nodejs.stream.destroyed'),
|
||||
kResume: Symbol('resume'),
|
||||
kOnError: Symbol('on error'),
|
||||
kMaxHeadersSize: Symbol('max headers size'),
|
||||
kRunningIdx: Symbol('running index'),
|
||||
kPendingIdx: Symbol('pending index'),
|
||||
kError: Symbol('error'),
|
||||
kClients: Symbol('clients'),
|
||||
kClient: Symbol('client'),
|
||||
kParser: Symbol('parser'),
|
||||
kOnDestroyed: Symbol('destroy callbacks'),
|
||||
kPipelining: Symbol('pipelining'),
|
||||
kSocket: Symbol('socket'),
|
||||
kHostHeader: Symbol('host header'),
|
||||
kConnector: Symbol('connector'),
|
||||
kStrictContentLength: Symbol('strict content length'),
|
||||
kMaxRedirections: Symbol('maxRedirections'),
|
||||
kMaxRequests: Symbol('maxRequestsPerClient'),
|
||||
kProxy: Symbol('proxy agent options'),
|
||||
kCounter: Symbol('socket request counter'),
|
||||
kMaxResponseSize: Symbol('max response size'),
|
||||
kHTTP2Session: Symbol('http2Session'),
|
||||
kHTTP2SessionState: Symbol('http2Session state'),
|
||||
kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
|
||||
kConstruct: Symbol('constructable'),
|
||||
kListeners: Symbol('listeners'),
|
||||
kHTTPContext: Symbol('http context'),
|
||||
kMaxConcurrentStreams: Symbol('max concurrent streams'),
|
||||
kHTTP2InitialWindowSize: Symbol('http2 initial window size'),
|
||||
kHTTP2ConnectionWindowSize: Symbol('http2 connection window size'),
|
||||
kEnableConnectProtocol: Symbol('http2session connect protocol'),
|
||||
kRemoteSettings: Symbol('http2session remote settings'),
|
||||
kHTTP2Stream: Symbol('http2session client stream'),
|
||||
kNoProxyAgent: Symbol('no proxy agent'),
|
||||
kHttpProxyAgent: Symbol('http proxy agent'),
|
||||
kHttpsProxyAgent: Symbol('https proxy agent')
|
||||
}
|
||||
160
backend/node_modules/undici/lib/core/tree.js
generated
vendored
Normal file
160
backend/node_modules/undici/lib/core/tree.js
generated
vendored
Normal file
@@ -0,0 +1,160 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
wellknownHeaderNames,
|
||||
headerNameLowerCasedRecord
|
||||
} = require('./constants')
|
||||
|
||||
class TstNode {
|
||||
/** @type {any} */
|
||||
value = null
|
||||
/** @type {null | TstNode} */
|
||||
left = null
|
||||
/** @type {null | TstNode} */
|
||||
middle = null
|
||||
/** @type {null | TstNode} */
|
||||
right = null
|
||||
/** @type {number} */
|
||||
code
|
||||
/**
|
||||
* @param {string} key
|
||||
* @param {any} value
|
||||
* @param {number} index
|
||||
*/
|
||||
constructor (key, value, index) {
|
||||
if (index === undefined || index >= key.length) {
|
||||
throw new TypeError('Unreachable')
|
||||
}
|
||||
const code = this.code = key.charCodeAt(index)
|
||||
// check code is ascii string
|
||||
if (code > 0x7F) {
|
||||
throw new TypeError('key must be ascii string')
|
||||
}
|
||||
if (key.length !== ++index) {
|
||||
this.middle = new TstNode(key, value, index)
|
||||
} else {
|
||||
this.value = value
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} key
|
||||
* @param {any} value
|
||||
* @returns {void}
|
||||
*/
|
||||
add (key, value) {
|
||||
const length = key.length
|
||||
if (length === 0) {
|
||||
throw new TypeError('Unreachable')
|
||||
}
|
||||
let index = 0
|
||||
/**
|
||||
* @type {TstNode}
|
||||
*/
|
||||
let node = this
|
||||
while (true) {
|
||||
const code = key.charCodeAt(index)
|
||||
// check code is ascii string
|
||||
if (code > 0x7F) {
|
||||
throw new TypeError('key must be ascii string')
|
||||
}
|
||||
if (node.code === code) {
|
||||
if (length === ++index) {
|
||||
node.value = value
|
||||
break
|
||||
} else if (node.middle !== null) {
|
||||
node = node.middle
|
||||
} else {
|
||||
node.middle = new TstNode(key, value, index)
|
||||
break
|
||||
}
|
||||
} else if (node.code < code) {
|
||||
if (node.left !== null) {
|
||||
node = node.left
|
||||
} else {
|
||||
node.left = new TstNode(key, value, index)
|
||||
break
|
||||
}
|
||||
} else if (node.right !== null) {
|
||||
node = node.right
|
||||
} else {
|
||||
node.right = new TstNode(key, value, index)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} key
|
||||
* @returns {TstNode | null}
|
||||
*/
|
||||
search (key) {
|
||||
const keylength = key.length
|
||||
let index = 0
|
||||
/**
|
||||
* @type {TstNode|null}
|
||||
*/
|
||||
let node = this
|
||||
while (node !== null && index < keylength) {
|
||||
let code = key[index]
|
||||
// A-Z
|
||||
// First check if it is bigger than 0x5a.
|
||||
// Lowercase letters have higher char codes than uppercase ones.
|
||||
// Also we assume that headers will mostly contain lowercase characters.
|
||||
if (code <= 0x5a && code >= 0x41) {
|
||||
// Lowercase for uppercase.
|
||||
code |= 32
|
||||
}
|
||||
while (node !== null) {
|
||||
if (code === node.code) {
|
||||
if (keylength === ++index) {
|
||||
// Returns Node since it is the last key.
|
||||
return node
|
||||
}
|
||||
node = node.middle
|
||||
break
|
||||
}
|
||||
node = node.code < code ? node.left : node.right
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Thin wrapper around TstNode providing an empty-tree root and a
 * null-safe lookup.
 */
class TernarySearchTree {
  /** @type {TstNode | null} */
  node = null

  /**
   * Inserts `key` with `value`, creating the root node lazily.
   * @param {string} key
   * @param {any} value
   * @returns {void}
   * */
  insert (key, value) {
    if (this.node === null) {
      this.node = new TstNode(key, value, 0)
    } else {
      this.node.add(key, value)
    }
  }

  /**
   * @param {Uint8Array} key
   * @returns {any} the stored value, or null when the key is absent
   */
  lookup (key) {
    return this.node?.search(key)?.value ?? null
  }
}
|
||||
|
||||
const tree = new TernarySearchTree()
|
||||
|
||||
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
|
||||
const key = headerNameLowerCasedRecord[wellknownHeaderNames[i]]
|
||||
tree.insert(key, key)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
TernarySearchTree,
|
||||
tree
|
||||
}
|
||||
957
backend/node_modules/undici/lib/core/util.js
generated
vendored
Normal file
957
backend/node_modules/undici/lib/core/util.js
generated
vendored
Normal file
@@ -0,0 +1,957 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { kDestroyed, kBodyUsed, kListeners, kBody } = require('./symbols')
|
||||
const { IncomingMessage } = require('node:http')
|
||||
const stream = require('node:stream')
|
||||
const net = require('node:net')
|
||||
const { stringify } = require('node:querystring')
|
||||
const { EventEmitter: EE } = require('node:events')
|
||||
const timers = require('../util/timers')
|
||||
const { InvalidArgumentError, ConnectTimeoutError } = require('./errors')
|
||||
const { headerNameLowerCasedRecord } = require('./constants')
|
||||
const { tree } = require('./tree')
|
||||
|
||||
const [nodeMajor, nodeMinor] = process.versions.node.split('.', 2).map(v => Number(v))
|
||||
|
||||
/**
 * Wraps a body in a single-use async iterable. Iterating a second time
 * trips the 'disturbed' assertion, mirroring WHATWG stream semantics.
 */
class BodyAsyncIterable {
  constructor (body) {
    this[kBody] = body
    this[kBodyUsed] = false
  }

  async * [Symbol.asyncIterator] () {
    assert(!this[kBodyUsed], 'disturbed')
    this[kBodyUsed] = true
    yield * this[kBody]
  }
}
|
||||
|
||||
// Shared no-op callback, used where a listener must exist but do nothing.
function noop () {}
|
||||
|
||||
/**
|
||||
* @param {*} body
|
||||
* @returns {*}
|
||||
*/
|
||||
/**
 * Prepares a request body for dispatch: instruments Node streams with
 * used/empty tracking, and wraps web streams and generic iterables in a
 * single-use BodyAsyncIterable. Strings, views and FormData pass through.
 * @param {*} body
 * @returns {*}
 */
function wrapRequestBody (body) {
  if (isStream(body)) {
    // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
    // so that it can be dispatched again?
    // TODO (fix): Do we need 100-expect support to provide a way to do this properly?
    if (bodyLength(body) === 0) {
      // A zero-length stream must never actually produce data.
      body
        .on('data', function () {
          assert(false)
        })
    }

    if (typeof body.readableDidRead !== 'boolean') {
      // Older streams lack readableDidRead; emulate it with kBodyUsed.
      body[kBodyUsed] = false
      EE.prototype.on.call(body, 'data', function () {
        this[kBodyUsed] = true
      })
    }

    return body
  }

  if (body && typeof body.pipeTo === 'function') {
    // TODO (fix): We can't access ReadableStream internal state
    // to determine whether or not it has been disturbed. This is just
    // a workaround.
    return new BodyAsyncIterable(body)
  }

  if (body && isFormDataLike(body)) {
    return body
  }

  if (
    body &&
    typeof body !== 'string' &&
    !ArrayBuffer.isView(body) &&
    isIterable(body)
  ) {
    // TODO: Should we allow re-using iterable if !this.opts.idempotent
    // or through some other flag?
    return new BodyAsyncIterable(body)
  }

  return body
}
|
||||
|
||||
/**
|
||||
* @param {*} obj
|
||||
* @returns {obj is import('node:stream').Stream}
|
||||
*/
|
||||
/**
 * Duck-type check for a Node stream: any object exposing both `pipe` and
 * `on` functions. Note: a falsy input is returned as-is (short-circuit of
 * `&&`), so callers must treat the result as truthy/falsy, not boolean.
 * @param {*} obj
 * @returns {obj is import('node:stream').Stream}
 */
function isStream (obj) {
  return obj &&
    typeof obj === 'object' &&
    typeof obj.pipe === 'function' &&
    typeof obj.on === 'function'
}
|
||||
|
||||
/**
|
||||
* @param {*} object
|
||||
* @returns {object is Blob}
|
||||
* based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
|
||||
*/
|
||||
/**
 * Detects Blob/File instances and cross-realm blob-likes (objects whose
 * toStringTag is 'Blob'/'File' with a stream or arrayBuffer method).
 * Based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
 * @param {*} object
 * @returns {object is Blob}
 */
function isBlobLike (object) {
  if (object === null) {
    return false
  }
  if (object instanceof Blob) {
    return true
  }
  if (typeof object !== 'object') {
    return false
  }

  const tag = object[Symbol.toStringTag]
  const taggedAsBlob = tag === 'Blob' || tag === 'File'
  const hasStream = 'stream' in object && typeof object.stream === 'function'
  const hasArrayBuffer = 'arrayBuffer' in object && typeof object.arrayBuffer === 'function'
  return taggedAsBlob && (hasStream || hasArrayBuffer)
}
|
||||
|
||||
/**
|
||||
* @param {string} url The path to check for query strings or fragments.
|
||||
* @returns {boolean} Returns true if the path contains a query string or fragment.
|
||||
*/
|
||||
/**
 * @param {string} url The path to check.
 * @returns {boolean} true when the path already carries a query string or fragment.
 */
function pathHasQueryOrFragment (url) {
  return url.includes('?') || url.includes('#')
}

/**
 * Appends serialized query params to a path.
 * @param {string} url The URL to add the query params to
 * @param {import('node:querystring').ParsedUrlQueryInput} queryParams The object to serialize into a URL query string
 * @returns {string} The URL with the query params added
 * @throws {Error} when `url` already contains "?" or "#"
 */
function serializePathWithQuery (url, queryParams) {
  if (pathHasQueryOrFragment(url)) {
    throw new Error('Query params cannot be passed when url already contains "?" or "#".')
  }

  const stringified = stringify(queryParams)
  return stringified ? `${url}?${stringified}` : url
}
|
||||
|
||||
/**
 * A port is valid when it is an integer-valued number (or exact numeric
 * string) in the inclusive range 0..65535.
 * @param {number|string|undefined} port
 * @returns {boolean}
 */
function isValidPort (port) {
  const value = parseInt(port, 10)
  // `value === Number(port)` rejects trailing garbage ("80x") and
  // non-integers, since parseInt truncates while Number does not.
  return value === Number(port) && value >= 0 && value <= 65535
}
|
||||
|
||||
/**
|
||||
* Check if the value is a valid http or https prefixed string.
|
||||
*
|
||||
* @param {string} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
/**
 * Check if the value starts with `http:` or `https:` using a char-by-char
 * comparison (avoids the allocations of calling startsWith twice).
 * @param {string} value
 * @returns {boolean}
 */
function isHttpOrHttpsPrefixed (value) {
  return (
    value != null &&
    value[0] === 'h' &&
    value[1] === 't' &&
    value[2] === 't' &&
    value[3] === 'p' &&
    (
      value[4] === ':' ||
      (value[4] === 's' && value[5] === ':')
    )
  )
}
|
||||
|
||||
/**
|
||||
* @param {string|URL|Record<string,string>} url
|
||||
* @returns {URL}
|
||||
*/
|
||||
/**
 * Normalizes a string, URL instance, or URL-shaped plain object into a URL,
 * enforcing an http(s) protocol throughout.
 * @param {string|URL|Record<string,string>} url
 * @returns {URL}
 * @throws {InvalidArgumentError} on non-http(s) protocols or malformed parts
 */
function parseURL (url) {
  if (typeof url === 'string') {
    /** @type {URL} */
    url = new URL(url)

    if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
      throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
    }

    return url
  }

  if (!url || typeof url !== 'object') {
    throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
  }

  if (!(url instanceof URL)) {
    // Plain-object input: validate each field before assembling a real URL.
    if (url.port != null && url.port !== '' && isValidPort(url.port) === false) {
      throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
    }

    if (url.path != null && typeof url.path !== 'string') {
      throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
    }

    if (url.pathname != null && typeof url.pathname !== 'string') {
      throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
    }

    if (url.hostname != null && typeof url.hostname !== 'string') {
      throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
    }

    if (url.origin != null && typeof url.origin !== 'string') {
      throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
    }

    if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
      throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
    }

    const port = url.port != null
      ? url.port
      : (url.protocol === 'https:' ? 443 : 80)
    let origin = url.origin != null
      ? url.origin
      : `${url.protocol || ''}//${url.hostname || ''}:${port}`
    let path = url.path != null
      ? url.path
      : `${url.pathname || ''}${url.search || ''}`

    if (origin[origin.length - 1] === '/') {
      origin = origin.slice(0, origin.length - 1)
    }

    if (path && path[0] !== '/') {
      path = `/${path}`
    }
    // new URL(path, origin) is unsafe when `path` contains an absolute URL
    // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
    // If first parameter is a relative URL, second param is required, and will be used as the base URL.
    // If first parameter is an absolute URL, a given second param will be ignored.
    return new URL(`${origin}${path}`)
  }

  if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
    throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
  }

  return url
}
|
||||
|
||||
/**
|
||||
* @param {string|URL|Record<string, string>} url
|
||||
* @returns {URL}
|
||||
*/
|
||||
/**
 * Like parseURL, but additionally rejects URLs that carry anything beyond
 * the origin (a non-root pathname, a query, or a fragment).
 * @param {string|URL|Record<string, string>} url
 * @returns {URL}
 * @throws {InvalidArgumentError}
 */
function parseOrigin (url) {
  url = parseURL(url)

  if (url.pathname !== '/' || url.search || url.hash) {
    throw new InvalidArgumentError('invalid url')
  }

  return url
}
|
||||
|
||||
/**
 * Extracts the hostname portion of a host header value, handling bracketed
 * IPv6 literals ("[::1]:80" -> "::1") and optional ports.
 * @param {string} host
 * @returns {string}
 */
function getHostname (host) {
  if (host[0] === '[') {
    const closing = host.indexOf(']')

    assert(closing !== -1)
    return host.substring(1, closing)
  }

  const colon = host.indexOf(':')
  if (colon === -1) return host

  return host.substring(0, colon)
}

/**
 * IP addresses are not valid server names per RFC6066
 * Currently, the only server names supported are DNS hostnames
 * @param {string|null} host
 * @returns {string|null} '' for IP literals, null for a nullish host
 */
function getServerName (host) {
  if (!host) {
    return null
  }

  assert(typeof host === 'string')

  const servername = getHostname(host)
  if (net.isIP(servername)) {
    return ''
  }

  return servername
}
|
||||
|
||||
/**
|
||||
* @function
|
||||
* @template T
|
||||
* @param {T} obj
|
||||
* @returns {T}
|
||||
*/
|
||||
/**
 * Deep-clones a JSON-serializable value via a stringify/parse round trip.
 * NOTE: by design this drops functions/undefined and stringifies Dates —
 * callers rely on plain-JSON semantics, so do not swap in structuredClone.
 * @function
 * @template T
 * @param {T} obj
 * @returns {T}
 */
function deepClone (obj) {
  return JSON.parse(JSON.stringify(obj))
}
|
||||
|
||||
/**
|
||||
* @param {*} obj
|
||||
* @returns {obj is AsyncIterable}
|
||||
*/
|
||||
/**
 * @param {*} obj
 * @returns {obj is AsyncIterable} true only when obj implements the async
 * iteration protocol (Symbol.asyncIterator).
 */
function isAsyncIterable (obj) {
  return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
}
|
||||
|
||||
/**
|
||||
* @param {*} obj
|
||||
* @returns {obj is Iterable}
|
||||
*/
|
||||
/**
 * @param {*} obj
 * @returns {obj is Iterable} true when obj implements either the sync or
 * the async iteration protocol.
 */
function isIterable (obj) {
  if (obj == null) {
    return false
  }
  return !!(typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')
}
|
||||
|
||||
/**
|
||||
* @param {Blob|Buffer|import ('stream').Stream} body
|
||||
* @returns {number|null}
|
||||
*/
|
||||
/**
 * Best-effort byte length of a request body: 0 for nullish bodies, the
 * buffered length for fully-ended non-objectMode streams, blob size, or
 * buffer byteLength. Returns null when the length cannot be known.
 * @param {Blob|Buffer|import ('stream').Stream} body
 * @returns {number|null}
 */
function bodyLength (body) {
  if (body == null) {
    return 0
  }

  if (isStream(body)) {
    const state = body._readableState
    const knowable = state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
    return knowable ? state.length : null
  }

  if (isBlobLike(body)) {
    return body.size != null ? body.size : null
  }

  if (isBuffer(body)) {
    return body.byteLength
  }

  return null
}
|
||||
|
||||
/**
|
||||
* @param {import ('stream').Stream} body
|
||||
* @returns {boolean}
|
||||
*/
|
||||
/**
 * Checks whether a stream has been destroyed, via its own flag, undici's
 * kDestroyed marker, or Node's stream.isDestroyed helper when available.
 * Note: a falsy input is returned as-is (short-circuit of `&&`).
 * @param {import ('stream').Stream} body
 * @returns {boolean}
 */
function isDestroyed (body) {
  return body && !!(body.destroyed || body[kDestroyed] || (stream.isDestroyed?.(body)))
}
|
||||
|
||||
/**
|
||||
* @param {import ('stream').Stream} stream
|
||||
* @param {Error} [err]
|
||||
* @returns {void}
|
||||
*/
|
||||
/**
 * Destroys a stream exactly once, optionally with an error. Streams without
 * a destroy method get the error re-emitted on a microtask instead. The
 * kDestroyed marker covers streams that don't flip `destroyed` themselves.
 * @param {import ('stream').Stream} stream
 * @param {Error} [err]
 * @returns {void}
 */
function destroy (stream, err) {
  if (stream == null || !isStream(stream) || isDestroyed(stream)) {
    return
  }

  if (typeof stream.destroy === 'function') {
    if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {
      // See: https://github.com/nodejs/node/pull/38505/files
      stream.socket = null
    }

    stream.destroy(err)
  } else if (err) {
    queueMicrotask(() => {
      stream.emit('error', err)
    })
  }

  if (stream.destroyed !== true) {
    stream[kDestroyed] = true
  }
}
|
||||
|
||||
// Matches the `timeout=<seconds>` directive of a Keep-Alive header value.
const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/

/**
 * Parses the timeout directive (seconds) out of a Keep-Alive header value
 * and converts it to milliseconds.
 * @param {string} val
 * @returns {number | null} null when no timeout directive is present
 */
function parseKeepAliveTimeout (val) {
  const match = KEEPALIVE_TIMEOUT_EXPR.exec(val)
  return match ? parseInt(match[1], 10) * 1000 : null
}
|
||||
|
||||
/**
|
||||
* Retrieves a header name and returns its lowercase value.
|
||||
* @param {string | Buffer} value Header name
|
||||
* @returns {string}
|
||||
*/
|
||||
/**
 * Retrieves a header name (string or raw Buffer) and returns its lowercase
 * string form, using the precomputed record / TST for well-known names.
 * @param {string | Buffer} value Header name
 * @returns {string}
 */
function headerNameToString (value) {
  if (typeof value === 'string') {
    return headerNameLowerCasedRecord[value] ?? value.toLowerCase()
  }
  return tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}
|
||||
|
||||
/**
|
||||
* Receive the buffer as a string and return its lowercase value.
|
||||
* @param {Buffer} value Header name
|
||||
* @returns {string}
|
||||
*/
|
||||
function bufferToLowerCasedHeaderName (value) {
|
||||
return tree.lookup(value) ?? value.toString('latin1').toLowerCase()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {(Buffer | string)[]} headers
|
||||
* @param {Record<string, string | string[]>} [obj]
|
||||
* @returns {Record<string, string | string[]>}
|
||||
*/
|
||||
function parseHeaders (headers, obj) {
|
||||
if (obj === undefined) obj = {}
|
||||
|
||||
for (let i = 0; i < headers.length; i += 2) {
|
||||
const key = headerNameToString(headers[i])
|
||||
let val = obj[key]
|
||||
|
||||
if (val) {
|
||||
if (typeof val === 'string') {
|
||||
val = [val]
|
||||
obj[key] = val
|
||||
}
|
||||
val.push(headers[i + 1].toString('latin1'))
|
||||
} else {
|
||||
const headersValue = headers[i + 1]
|
||||
if (typeof headersValue === 'string') {
|
||||
obj[key] = headersValue
|
||||
} else {
|
||||
obj[key] = Array.isArray(headersValue) ? headersValue.map(x => x.toString('latin1')) : headersValue.toString('latin1')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return obj
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Buffer[]} headers
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function parseRawHeaders (headers) {
|
||||
const headersLength = headers.length
|
||||
/**
|
||||
* @type {string[]}
|
||||
*/
|
||||
const ret = new Array(headersLength)
|
||||
|
||||
let key
|
||||
let val
|
||||
|
||||
for (let n = 0; n < headersLength; n += 2) {
|
||||
key = headers[n]
|
||||
val = headers[n + 1]
|
||||
|
||||
typeof key !== 'string' && (key = key.toString())
|
||||
typeof val !== 'string' && (val = val.toString('latin1'))
|
||||
|
||||
ret[n] = key
|
||||
ret[n + 1] = val
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string[]} headers
|
||||
* @param {Buffer[]} headers
|
||||
*/
|
||||
function encodeRawHeaders (headers) {
|
||||
if (!Array.isArray(headers)) {
|
||||
throw new TypeError('expected headers to be an array')
|
||||
}
|
||||
return headers.map(x => Buffer.from(x))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {*} buffer
|
||||
* @returns {buffer is Buffer}
|
||||
*/
|
||||
function isBuffer (buffer) {
|
||||
// See, https://github.com/mcollina/undici/pull/319
|
||||
return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the handler object is a request handler.
|
||||
*
|
||||
* @param {object} handler
|
||||
* @param {string} method
|
||||
* @param {string} [upgrade]
|
||||
* @returns {asserts handler is import('../api/api-request').RequestHandler}
|
||||
*/
|
||||
function assertRequestHandler (handler, method, upgrade) {
|
||||
if (!handler || typeof handler !== 'object') {
|
||||
throw new InvalidArgumentError('handler must be an object')
|
||||
}
|
||||
|
||||
if (typeof handler.onRequestStart === 'function') {
|
||||
// TODO (fix): More checks...
|
||||
return
|
||||
}
|
||||
|
||||
if (typeof handler.onConnect !== 'function') {
|
||||
throw new InvalidArgumentError('invalid onConnect method')
|
||||
}
|
||||
|
||||
if (typeof handler.onError !== 'function') {
|
||||
throw new InvalidArgumentError('invalid onError method')
|
||||
}
|
||||
|
||||
if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
|
||||
throw new InvalidArgumentError('invalid onBodySent method')
|
||||
}
|
||||
|
||||
if (upgrade || method === 'CONNECT') {
|
||||
if (typeof handler.onUpgrade !== 'function') {
|
||||
throw new InvalidArgumentError('invalid onUpgrade method')
|
||||
}
|
||||
} else {
|
||||
if (typeof handler.onHeaders !== 'function') {
|
||||
throw new InvalidArgumentError('invalid onHeaders method')
|
||||
}
|
||||
|
||||
if (typeof handler.onData !== 'function') {
|
||||
throw new InvalidArgumentError('invalid onData method')
|
||||
}
|
||||
|
||||
if (typeof handler.onComplete !== 'function') {
|
||||
throw new InvalidArgumentError('invalid onComplete method')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A body is disturbed if it has been read from and it cannot be re-used without
|
||||
* losing state or data.
|
||||
* @param {import('node:stream').Readable} body
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isDisturbed (body) {
|
||||
// TODO (fix): Why is body[kBodyUsed] needed?
|
||||
return !!(body && (stream.isDisturbed(body) || body[kBodyUsed]))
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {object} SocketInfo
|
||||
* @property {string} [localAddress]
|
||||
* @property {number} [localPort]
|
||||
* @property {string} [remoteAddress]
|
||||
* @property {number} [remotePort]
|
||||
* @property {string} [remoteFamily]
|
||||
* @property {number} [timeout]
|
||||
* @property {number} bytesWritten
|
||||
* @property {number} bytesRead
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {import('net').Socket} socket
|
||||
* @returns {SocketInfo}
|
||||
*/
|
||||
function getSocketInfo (socket) {
|
||||
return {
|
||||
localAddress: socket.localAddress,
|
||||
localPort: socket.localPort,
|
||||
remoteAddress: socket.remoteAddress,
|
||||
remotePort: socket.remotePort,
|
||||
remoteFamily: socket.remoteFamily,
|
||||
timeout: socket.timeout,
|
||||
bytesWritten: socket.bytesWritten,
|
||||
bytesRead: socket.bytesRead
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Iterable} iterable
|
||||
* @returns {ReadableStream}
|
||||
*/
|
||||
function ReadableStreamFrom (iterable) {
|
||||
// We cannot use ReadableStream.from here because it does not return a byte stream.
|
||||
|
||||
let iterator
|
||||
return new ReadableStream(
|
||||
{
|
||||
start () {
|
||||
iterator = iterable[Symbol.asyncIterator]()
|
||||
},
|
||||
pull (controller) {
|
||||
return iterator.next().then(({ done, value }) => {
|
||||
if (done) {
|
||||
return queueMicrotask(() => {
|
||||
controller.close()
|
||||
controller.byobRequest?.respond(0)
|
||||
})
|
||||
} else {
|
||||
const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
|
||||
if (buf.byteLength) {
|
||||
return controller.enqueue(new Uint8Array(buf))
|
||||
} else {
|
||||
return this.pull(controller)
|
||||
}
|
||||
}
|
||||
})
|
||||
},
|
||||
cancel () {
|
||||
return iterator.return()
|
||||
},
|
||||
type: 'bytes'
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* The object should be a FormData instance and contains all the required
|
||||
* methods.
|
||||
* @param {*} object
|
||||
* @returns {object is FormData}
|
||||
*/
|
||||
function isFormDataLike (object) {
|
||||
return (
|
||||
object &&
|
||||
typeof object === 'object' &&
|
||||
typeof object.append === 'function' &&
|
||||
typeof object.delete === 'function' &&
|
||||
typeof object.get === 'function' &&
|
||||
typeof object.getAll === 'function' &&
|
||||
typeof object.has === 'function' &&
|
||||
typeof object.set === 'function' &&
|
||||
object[Symbol.toStringTag] === 'FormData'
|
||||
)
|
||||
}
|
||||
|
||||
function addAbortListener (signal, listener) {
|
||||
if ('addEventListener' in signal) {
|
||||
signal.addEventListener('abort', listener, { once: true })
|
||||
return () => signal.removeEventListener('abort', listener)
|
||||
}
|
||||
signal.once('abort', listener)
|
||||
return () => signal.removeListener('abort', listener)
|
||||
}
|
||||
|
||||
const validTokenChars = new Uint8Array([
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0-15
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16-31
|
||||
0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, // 32-47 (!"#$%&'()*+,-./)
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48-63 (0-9:;<=>?)
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 64-79 (@A-O)
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, // 80-95 (P-Z[\]^_)
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 96-111 (`a-o)
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, // 112-127 (p-z{|}~)
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 128-143
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 144-159
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 160-175
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 176-191
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 192-207
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 208-223
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 224-239
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 // 240-255
|
||||
])
|
||||
|
||||
/**
|
||||
* @see https://tools.ietf.org/html/rfc7230#section-3.2.6
|
||||
* @param {number} c
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isTokenCharCode (c) {
|
||||
return (validTokenChars[c] === 1)
|
||||
}
|
||||
|
||||
const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/
|
||||
|
||||
/**
|
||||
* @param {string} characters
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isValidHTTPToken (characters) {
|
||||
if (characters.length >= 12) return tokenRegExp.test(characters)
|
||||
if (characters.length === 0) return false
|
||||
|
||||
for (let i = 0; i < characters.length; i++) {
|
||||
if (validTokenChars[characters.charCodeAt(i)] !== 1) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// headerCharRegex have been lifted from
|
||||
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
|
||||
|
||||
/**
|
||||
* Matches if val contains an invalid field-vchar
|
||||
* field-value = *( field-content / obs-fold )
|
||||
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
|
||||
* field-vchar = VCHAR / obs-text
|
||||
*/
|
||||
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
|
||||
|
||||
/**
|
||||
* @param {string} characters
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isValidHeaderValue (characters) {
|
||||
return !headerCharRegex.test(characters)
|
||||
}
|
||||
|
||||
const rangeHeaderRegex = /^bytes (\d+)-(\d+)\/(\d+)?$/
|
||||
|
||||
/**
|
||||
* @typedef {object} RangeHeader
|
||||
* @property {number} start
|
||||
* @property {number | null} end
|
||||
* @property {number | null} size
|
||||
*/
|
||||
|
||||
/**
|
||||
* Parse accordingly to RFC 9110
|
||||
* @see https://www.rfc-editor.org/rfc/rfc9110#field.content-range
|
||||
* @param {string} [range]
|
||||
* @returns {RangeHeader|null}
|
||||
*/
|
||||
function parseRangeHeader (range) {
|
||||
if (range == null || range === '') return { start: 0, end: null, size: null }
|
||||
|
||||
const m = range ? range.match(rangeHeaderRegex) : null
|
||||
return m
|
||||
? {
|
||||
start: parseInt(m[1]),
|
||||
end: m[2] ? parseInt(m[2]) : null,
|
||||
size: m[3] ? parseInt(m[3]) : null
|
||||
}
|
||||
: null
|
||||
}
|
||||
|
||||
/**
|
||||
* @template {import("events").EventEmitter} T
|
||||
* @param {T} obj
|
||||
* @param {string} name
|
||||
* @param {(...args: any[]) => void} listener
|
||||
* @returns {T}
|
||||
*/
|
||||
function addListener (obj, name, listener) {
|
||||
const listeners = (obj[kListeners] ??= [])
|
||||
listeners.push([name, listener])
|
||||
obj.on(name, listener)
|
||||
return obj
|
||||
}
|
||||
|
||||
/**
|
||||
* @template {import("events").EventEmitter} T
|
||||
* @param {T} obj
|
||||
* @returns {T}
|
||||
*/
|
||||
function removeAllListeners (obj) {
|
||||
if (obj[kListeners] != null) {
|
||||
for (const [name, listener] of obj[kListeners]) {
|
||||
obj.removeListener(name, listener)
|
||||
}
|
||||
obj[kListeners] = null
|
||||
}
|
||||
return obj
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {import ('../dispatcher/client')} client
|
||||
* @param {import ('../core/request')} request
|
||||
* @param {Error} err
|
||||
*/
|
||||
function errorRequest (client, request, err) {
|
||||
try {
|
||||
request.onError(err)
|
||||
assert(request.aborted)
|
||||
} catch (err) {
|
||||
client.emit('error', err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {WeakRef<net.Socket>} socketWeakRef
|
||||
* @param {object} opts
|
||||
* @param {number} opts.timeout
|
||||
* @param {string} opts.hostname
|
||||
* @param {number} opts.port
|
||||
* @returns {() => void}
|
||||
*/
|
||||
const setupConnectTimeout = process.platform === 'win32'
|
||||
? (socketWeakRef, opts) => {
|
||||
if (!opts.timeout) {
|
||||
return noop
|
||||
}
|
||||
|
||||
let s1 = null
|
||||
let s2 = null
|
||||
const fastTimer = timers.setFastTimeout(() => {
|
||||
// setImmediate is added to make sure that we prioritize socket error events over timeouts
|
||||
s1 = setImmediate(() => {
|
||||
// Windows needs an extra setImmediate probably due to implementation differences in the socket logic
|
||||
s2 = setImmediate(() => onConnectTimeout(socketWeakRef.deref(), opts))
|
||||
})
|
||||
}, opts.timeout)
|
||||
return () => {
|
||||
timers.clearFastTimeout(fastTimer)
|
||||
clearImmediate(s1)
|
||||
clearImmediate(s2)
|
||||
}
|
||||
}
|
||||
: (socketWeakRef, opts) => {
|
||||
if (!opts.timeout) {
|
||||
return noop
|
||||
}
|
||||
|
||||
let s1 = null
|
||||
const fastTimer = timers.setFastTimeout(() => {
|
||||
// setImmediate is added to make sure that we prioritize socket error events over timeouts
|
||||
s1 = setImmediate(() => {
|
||||
onConnectTimeout(socketWeakRef.deref(), opts)
|
||||
})
|
||||
}, opts.timeout)
|
||||
return () => {
|
||||
timers.clearFastTimeout(fastTimer)
|
||||
clearImmediate(s1)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {net.Socket} socket
|
||||
* @param {object} opts
|
||||
* @param {number} opts.timeout
|
||||
* @param {string} opts.hostname
|
||||
* @param {number} opts.port
|
||||
*/
|
||||
function onConnectTimeout (socket, opts) {
|
||||
// The socket could be already garbage collected
|
||||
if (socket == null) {
|
||||
return
|
||||
}
|
||||
|
||||
let message = 'Connect Timeout Error'
|
||||
if (Array.isArray(socket.autoSelectFamilyAttemptedAddresses)) {
|
||||
message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')},`
|
||||
} else {
|
||||
message += ` (attempted address: ${opts.hostname}:${opts.port},`
|
||||
}
|
||||
|
||||
message += ` timeout: ${opts.timeout}ms)`
|
||||
|
||||
destroy(socket, new ConnectTimeoutError(message))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} urlString
|
||||
* @returns {string}
|
||||
*/
|
||||
function getProtocolFromUrlString (urlString) {
|
||||
if (
|
||||
urlString[0] === 'h' &&
|
||||
urlString[1] === 't' &&
|
||||
urlString[2] === 't' &&
|
||||
urlString[3] === 'p'
|
||||
) {
|
||||
switch (urlString[4]) {
|
||||
case ':':
|
||||
return 'http:'
|
||||
case 's':
|
||||
if (urlString[5] === ':') {
|
||||
return 'https:'
|
||||
}
|
||||
}
|
||||
}
|
||||
// fallback if none of the usual suspects
|
||||
return urlString.slice(0, urlString.indexOf(':') + 1)
|
||||
}
|
||||
|
||||
const kEnumerableProperty = Object.create(null)
|
||||
kEnumerableProperty.enumerable = true
|
||||
|
||||
const normalizedMethodRecordsBase = {
|
||||
delete: 'DELETE',
|
||||
DELETE: 'DELETE',
|
||||
get: 'GET',
|
||||
GET: 'GET',
|
||||
head: 'HEAD',
|
||||
HEAD: 'HEAD',
|
||||
options: 'OPTIONS',
|
||||
OPTIONS: 'OPTIONS',
|
||||
post: 'POST',
|
||||
POST: 'POST',
|
||||
put: 'PUT',
|
||||
PUT: 'PUT'
|
||||
}
|
||||
|
||||
const normalizedMethodRecords = {
|
||||
...normalizedMethodRecordsBase,
|
||||
patch: 'patch',
|
||||
PATCH: 'PATCH'
|
||||
}
|
||||
|
||||
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
|
||||
Object.setPrototypeOf(normalizedMethodRecordsBase, null)
|
||||
Object.setPrototypeOf(normalizedMethodRecords, null)
|
||||
|
||||
module.exports = {
|
||||
kEnumerableProperty,
|
||||
isDisturbed,
|
||||
isBlobLike,
|
||||
parseOrigin,
|
||||
parseURL,
|
||||
getServerName,
|
||||
isStream,
|
||||
isIterable,
|
||||
isAsyncIterable,
|
||||
isDestroyed,
|
||||
headerNameToString,
|
||||
bufferToLowerCasedHeaderName,
|
||||
addListener,
|
||||
removeAllListeners,
|
||||
errorRequest,
|
||||
parseRawHeaders,
|
||||
encodeRawHeaders,
|
||||
parseHeaders,
|
||||
parseKeepAliveTimeout,
|
||||
destroy,
|
||||
bodyLength,
|
||||
deepClone,
|
||||
ReadableStreamFrom,
|
||||
isBuffer,
|
||||
assertRequestHandler,
|
||||
getSocketInfo,
|
||||
isFormDataLike,
|
||||
pathHasQueryOrFragment,
|
||||
serializePathWithQuery,
|
||||
addAbortListener,
|
||||
isValidHTTPToken,
|
||||
isValidHeaderValue,
|
||||
isTokenCharCode,
|
||||
parseRangeHeader,
|
||||
normalizedMethodRecordsBase,
|
||||
normalizedMethodRecords,
|
||||
isValidPort,
|
||||
isHttpOrHttpsPrefixed,
|
||||
nodeMajor,
|
||||
nodeMinor,
|
||||
safeHTTPMethods: Object.freeze(['GET', 'HEAD', 'OPTIONS', 'TRACE']),
|
||||
wrapRequestBody,
|
||||
setupConnectTimeout,
|
||||
getProtocolFromUrlString
|
||||
}
|
||||
156
backend/node_modules/undici/lib/dispatcher/agent.js
generated
vendored
Normal file
156
backend/node_modules/undici/lib/dispatcher/agent.js
generated
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
'use strict'
|
||||
|
||||
const { InvalidArgumentError, MaxOriginsReachedError } = require('../core/errors')
|
||||
const { kClients, kRunning, kClose, kDestroy, kDispatch, kUrl } = require('../core/symbols')
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const Pool = require('./pool')
|
||||
const Client = require('./client')
|
||||
const util = require('../core/util')
|
||||
|
||||
const kOnConnect = Symbol('onConnect')
|
||||
const kOnDisconnect = Symbol('onDisconnect')
|
||||
const kOnConnectionError = Symbol('onConnectionError')
|
||||
const kOnDrain = Symbol('onDrain')
|
||||
const kFactory = Symbol('factory')
|
||||
const kOptions = Symbol('options')
|
||||
const kOrigins = Symbol('origins')
|
||||
|
||||
function defaultFactory (origin, opts) {
|
||||
return opts && opts.connections === 1
|
||||
? new Client(origin, opts)
|
||||
: new Pool(origin, opts)
|
||||
}
|
||||
|
||||
class Agent extends DispatcherBase {
|
||||
constructor ({ factory = defaultFactory, maxOrigins = Infinity, connect, ...options } = {}) {
|
||||
if (typeof factory !== 'function') {
|
||||
throw new InvalidArgumentError('factory must be a function.')
|
||||
}
|
||||
|
||||
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
|
||||
throw new InvalidArgumentError('connect must be a function or an object')
|
||||
}
|
||||
|
||||
if (typeof maxOrigins !== 'number' || Number.isNaN(maxOrigins) || maxOrigins <= 0) {
|
||||
throw new InvalidArgumentError('maxOrigins must be a number greater than 0')
|
||||
}
|
||||
|
||||
super()
|
||||
|
||||
if (connect && typeof connect !== 'function') {
|
||||
connect = { ...connect }
|
||||
}
|
||||
|
||||
this[kOptions] = { ...util.deepClone(options), maxOrigins, connect }
|
||||
this[kFactory] = factory
|
||||
this[kClients] = new Map()
|
||||
this[kOrigins] = new Set()
|
||||
|
||||
this[kOnDrain] = (origin, targets) => {
|
||||
this.emit('drain', origin, [this, ...targets])
|
||||
}
|
||||
|
||||
this[kOnConnect] = (origin, targets) => {
|
||||
this.emit('connect', origin, [this, ...targets])
|
||||
}
|
||||
|
||||
this[kOnDisconnect] = (origin, targets, err) => {
|
||||
this.emit('disconnect', origin, [this, ...targets], err)
|
||||
}
|
||||
|
||||
this[kOnConnectionError] = (origin, targets, err) => {
|
||||
this.emit('connectionError', origin, [this, ...targets], err)
|
||||
}
|
||||
}
|
||||
|
||||
get [kRunning] () {
|
||||
let ret = 0
|
||||
for (const { dispatcher } of this[kClients].values()) {
|
||||
ret += dispatcher[kRunning]
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
[kDispatch] (opts, handler) {
|
||||
let key
|
||||
if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
|
||||
key = String(opts.origin)
|
||||
} else {
|
||||
throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
|
||||
}
|
||||
|
||||
if (this[kOrigins].size >= this[kOptions].maxOrigins && !this[kOrigins].has(key)) {
|
||||
throw new MaxOriginsReachedError()
|
||||
}
|
||||
|
||||
const result = this[kClients].get(key)
|
||||
let dispatcher = result && result.dispatcher
|
||||
if (!dispatcher) {
|
||||
const closeClientIfUnused = (connected) => {
|
||||
const result = this[kClients].get(key)
|
||||
if (result) {
|
||||
if (connected) result.count -= 1
|
||||
if (result.count <= 0) {
|
||||
this[kClients].delete(key)
|
||||
result.dispatcher.close()
|
||||
}
|
||||
this[kOrigins].delete(key)
|
||||
}
|
||||
}
|
||||
dispatcher = this[kFactory](opts.origin, this[kOptions])
|
||||
.on('drain', this[kOnDrain])
|
||||
.on('connect', (origin, targets) => {
|
||||
const result = this[kClients].get(key)
|
||||
if (result) {
|
||||
result.count += 1
|
||||
}
|
||||
this[kOnConnect](origin, targets)
|
||||
})
|
||||
.on('disconnect', (origin, targets, err) => {
|
||||
closeClientIfUnused(true)
|
||||
this[kOnDisconnect](origin, targets, err)
|
||||
})
|
||||
.on('connectionError', (origin, targets, err) => {
|
||||
closeClientIfUnused(false)
|
||||
this[kOnConnectionError](origin, targets, err)
|
||||
})
|
||||
|
||||
this[kClients].set(key, { count: 0, dispatcher })
|
||||
this[kOrigins].add(key)
|
||||
}
|
||||
|
||||
return dispatcher.dispatch(opts, handler)
|
||||
}
|
||||
|
||||
[kClose] () {
|
||||
const closePromises = []
|
||||
for (const { dispatcher } of this[kClients].values()) {
|
||||
closePromises.push(dispatcher.close())
|
||||
}
|
||||
this[kClients].clear()
|
||||
|
||||
return Promise.all(closePromises)
|
||||
}
|
||||
|
||||
[kDestroy] (err) {
|
||||
const destroyPromises = []
|
||||
for (const { dispatcher } of this[kClients].values()) {
|
||||
destroyPromises.push(dispatcher.destroy(err))
|
||||
}
|
||||
this[kClients].clear()
|
||||
|
||||
return Promise.all(destroyPromises)
|
||||
}
|
||||
|
||||
get stats () {
|
||||
const allClientStats = {}
|
||||
for (const { dispatcher } of this[kClients].values()) {
|
||||
if (dispatcher.stats) {
|
||||
allClientStats[dispatcher[kUrl].origin] = dispatcher.stats
|
||||
}
|
||||
}
|
||||
return allClientStats
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Agent
|
||||
216
backend/node_modules/undici/lib/dispatcher/balanced-pool.js
generated
vendored
Normal file
216
backend/node_modules/undici/lib/dispatcher/balanced-pool.js
generated
vendored
Normal file
@@ -0,0 +1,216 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
BalancedPoolMissingUpstreamError,
|
||||
InvalidArgumentError
|
||||
} = require('../core/errors')
|
||||
const {
|
||||
PoolBase,
|
||||
kClients,
|
||||
kNeedDrain,
|
||||
kAddClient,
|
||||
kRemoveClient,
|
||||
kGetDispatcher
|
||||
} = require('./pool-base')
|
||||
const Pool = require('./pool')
|
||||
const { kUrl } = require('../core/symbols')
|
||||
const { parseOrigin } = require('../core/util')
|
||||
const kFactory = Symbol('factory')
|
||||
|
||||
const kOptions = Symbol('options')
|
||||
const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
|
||||
const kCurrentWeight = Symbol('kCurrentWeight')
|
||||
const kIndex = Symbol('kIndex')
|
||||
const kWeight = Symbol('kWeight')
|
||||
const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
|
||||
const kErrorPenalty = Symbol('kErrorPenalty')
|
||||
|
||||
/**
|
||||
* Calculate the greatest common divisor of two numbers by
|
||||
* using the Euclidean algorithm.
|
||||
*
|
||||
* @param {number} a
|
||||
* @param {number} b
|
||||
* @returns {number}
|
||||
*/
|
||||
function getGreatestCommonDivisor (a, b) {
|
||||
if (a === 0) return b
|
||||
|
||||
while (b !== 0) {
|
||||
const t = b
|
||||
b = a % b
|
||||
a = t
|
||||
}
|
||||
return a
|
||||
}
|
||||
|
||||
function defaultFactory (origin, opts) {
|
||||
return new Pool(origin, opts)
|
||||
}
|
||||
|
||||
class BalancedPool extends PoolBase {
|
||||
constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
|
||||
if (typeof factory !== 'function') {
|
||||
throw new InvalidArgumentError('factory must be a function.')
|
||||
}
|
||||
|
||||
super()
|
||||
|
||||
this[kOptions] = opts
|
||||
this[kIndex] = -1
|
||||
this[kCurrentWeight] = 0
|
||||
|
||||
this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
|
||||
this[kErrorPenalty] = this[kOptions].errorPenalty || 15
|
||||
|
||||
if (!Array.isArray(upstreams)) {
|
||||
upstreams = [upstreams]
|
||||
}
|
||||
|
||||
this[kFactory] = factory
|
||||
|
||||
for (const upstream of upstreams) {
|
||||
this.addUpstream(upstream)
|
||||
}
|
||||
this._updateBalancedPoolStats()
|
||||
}
|
||||
|
||||
addUpstream (upstream) {
|
||||
const upstreamOrigin = parseOrigin(upstream).origin
|
||||
|
||||
if (this[kClients].find((pool) => (
|
||||
pool[kUrl].origin === upstreamOrigin &&
|
||||
pool.closed !== true &&
|
||||
pool.destroyed !== true
|
||||
))) {
|
||||
return this
|
||||
}
|
||||
const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))
|
||||
|
||||
this[kAddClient](pool)
|
||||
pool.on('connect', () => {
|
||||
pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
|
||||
})
|
||||
|
||||
pool.on('connectionError', () => {
|
||||
pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
|
||||
this._updateBalancedPoolStats()
|
||||
})
|
||||
|
||||
pool.on('disconnect', (...args) => {
|
||||
const err = args[2]
|
||||
if (err && err.code === 'UND_ERR_SOCKET') {
|
||||
// decrease the weight of the pool.
|
||||
pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
|
||||
this._updateBalancedPoolStats()
|
||||
}
|
||||
})
|
||||
|
||||
for (const client of this[kClients]) {
|
||||
client[kWeight] = this[kMaxWeightPerServer]
|
||||
}
|
||||
|
||||
this._updateBalancedPoolStats()
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
_updateBalancedPoolStats () {
|
||||
let result = 0
|
||||
for (let i = 0; i < this[kClients].length; i++) {
|
||||
result = getGreatestCommonDivisor(this[kClients][i][kWeight], result)
|
||||
}
|
||||
|
||||
this[kGreatestCommonDivisor] = result
|
||||
}
|
||||
|
||||
removeUpstream (upstream) {
|
||||
const upstreamOrigin = parseOrigin(upstream).origin
|
||||
|
||||
const pool = this[kClients].find((pool) => (
|
||||
pool[kUrl].origin === upstreamOrigin &&
|
||||
pool.closed !== true &&
|
||||
pool.destroyed !== true
|
||||
))
|
||||
|
||||
if (pool) {
|
||||
this[kRemoveClient](pool)
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getUpstream (upstream) {
|
||||
const upstreamOrigin = parseOrigin(upstream).origin
|
||||
|
||||
return this[kClients].find((pool) => (
|
||||
pool[kUrl].origin === upstreamOrigin &&
|
||||
pool.closed !== true &&
|
||||
pool.destroyed !== true
|
||||
))
|
||||
}
|
||||
|
||||
get upstreams () {
|
||||
return this[kClients]
|
||||
.filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
|
||||
.map((p) => p[kUrl].origin)
|
||||
}
|
||||
|
||||
[kGetDispatcher] () {
|
||||
// We validate that pools is greater than 0,
|
||||
// otherwise we would have to wait until an upstream
|
||||
// is added, which might never happen.
|
||||
if (this[kClients].length === 0) {
|
||||
throw new BalancedPoolMissingUpstreamError()
|
||||
}
|
||||
|
||||
const dispatcher = this[kClients].find(dispatcher => (
|
||||
!dispatcher[kNeedDrain] &&
|
||||
dispatcher.closed !== true &&
|
||||
dispatcher.destroyed !== true
|
||||
))
|
||||
|
||||
if (!dispatcher) {
|
||||
return
|
||||
}
|
||||
|
||||
const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)
|
||||
|
||||
if (allClientsBusy) {
|
||||
return
|
||||
}
|
||||
|
||||
let counter = 0
|
||||
|
||||
let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])
|
||||
|
||||
while (counter++ < this[kClients].length) {
|
||||
this[kIndex] = (this[kIndex] + 1) % this[kClients].length
|
||||
const pool = this[kClients][this[kIndex]]
|
||||
|
||||
// find pool index with the largest weight
|
||||
if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
|
||||
maxWeightIndex = this[kIndex]
|
||||
}
|
||||
|
||||
// decrease the current weight every `this[kClients].length`.
|
||||
if (this[kIndex] === 0) {
|
||||
// Set the current weight to the next lower weight.
|
||||
this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]
|
||||
|
||||
if (this[kCurrentWeight] <= 0) {
|
||||
this[kCurrentWeight] = this[kMaxWeightPerServer]
|
||||
}
|
||||
}
|
||||
if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
|
||||
return pool
|
||||
}
|
||||
}
|
||||
|
||||
this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
|
||||
this[kIndex] = maxWeightIndex
|
||||
return this[kClients][maxWeightIndex]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = BalancedPool
|
||||
1606
backend/node_modules/undici/lib/dispatcher/client-h1.js
generated
vendored
Normal file
1606
backend/node_modules/undici/lib/dispatcher/client-h1.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
953
backend/node_modules/undici/lib/dispatcher/client-h2.js
generated
vendored
Normal file
953
backend/node_modules/undici/lib/dispatcher/client-h2.js
generated
vendored
Normal file
@@ -0,0 +1,953 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { pipeline } = require('node:stream')
|
||||
const util = require('../core/util.js')
|
||||
const {
|
||||
RequestContentLengthMismatchError,
|
||||
RequestAbortedError,
|
||||
SocketError,
|
||||
InformationalError,
|
||||
InvalidArgumentError
|
||||
} = require('../core/errors.js')
|
||||
const {
|
||||
kUrl,
|
||||
kReset,
|
||||
kClient,
|
||||
kRunning,
|
||||
kPending,
|
||||
kQueue,
|
||||
kPendingIdx,
|
||||
kRunningIdx,
|
||||
kError,
|
||||
kSocket,
|
||||
kStrictContentLength,
|
||||
kOnError,
|
||||
kMaxConcurrentStreams,
|
||||
kHTTP2Session,
|
||||
kHTTP2InitialWindowSize,
|
||||
kHTTP2ConnectionWindowSize,
|
||||
kResume,
|
||||
kSize,
|
||||
kHTTPContext,
|
||||
kClosed,
|
||||
kBodyTimeout,
|
||||
kEnableConnectProtocol,
|
||||
kRemoteSettings,
|
||||
kHTTP2Stream
|
||||
} = require('../core/symbols.js')
|
||||
const { channels } = require('../core/diagnostics.js')
|
||||
|
||||
const kOpenStreams = Symbol('open streams')
|
||||
|
||||
let extractBody
|
||||
|
||||
/** @type {import('http2')} */
|
||||
let http2
|
||||
try {
|
||||
http2 = require('node:http2')
|
||||
} catch {
|
||||
// @ts-ignore
|
||||
http2 = { constants: {} }
|
||||
}
|
||||
|
||||
const {
|
||||
constants: {
|
||||
HTTP2_HEADER_AUTHORITY,
|
||||
HTTP2_HEADER_METHOD,
|
||||
HTTP2_HEADER_PATH,
|
||||
HTTP2_HEADER_SCHEME,
|
||||
HTTP2_HEADER_CONTENT_LENGTH,
|
||||
HTTP2_HEADER_EXPECT,
|
||||
HTTP2_HEADER_STATUS,
|
||||
HTTP2_HEADER_PROTOCOL,
|
||||
NGHTTP2_REFUSED_STREAM,
|
||||
NGHTTP2_CANCEL
|
||||
}
|
||||
} = http2
|
||||
|
||||
function parseH2Headers (headers) {
|
||||
const result = []
|
||||
|
||||
for (const [name, value] of Object.entries(headers)) {
|
||||
// h2 may concat the header value by array
|
||||
// e.g. Set-Cookie
|
||||
if (Array.isArray(value)) {
|
||||
for (const subvalue of value) {
|
||||
// we need to provide each header value of header name
|
||||
// because the headers handler expect name-value pair
|
||||
result.push(Buffer.from(name), Buffer.from(subvalue))
|
||||
}
|
||||
} else {
|
||||
result.push(Buffer.from(name), Buffer.from(value))
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
 * Wires an established socket into a new HTTP/2 session and returns the
 * protocol context object ({ version, write, resume, destroy, ... }) that the
 * Client dispatcher drives.
 *
 * @param {object} client - the owning Client (symbol-keyed state holder)
 * @param {import('node:net').Socket} socket - already-connected transport
 */
function connectH2 (client, socket) {
  client[kSocket] = socket

  const http2InitialWindowSize = client[kHTTP2InitialWindowSize]
  const http2ConnectionWindowSize = client[kHTTP2ConnectionWindowSize]

  const session = http2.connect(client[kUrl], {
    // Reuse the socket the connector already established instead of letting
    // http2.connect open its own.
    createConnection: () => socket,
    peerMaxConcurrentStreams: client[kMaxConcurrentStreams],
    settings: {
      // TODO(metcoder95): add support for PUSH
      enablePush: false,
      ...(http2InitialWindowSize != null ? { initialWindowSize: http2InitialWindowSize } : null)
    }
  })

  session[kOpenStreams] = 0
  session[kClient] = client
  session[kSocket] = socket
  session[kHTTP2Session] = null
  // Extended CONNECT (RFC 8441) starts out disabled; once connected to the H2
  // server we flip this according to the remoteSettings it advertises
  // (see onHttp2RemoteSettings).
  session[kEnableConnectProtocol] = false
  // States whether or not we have received the remote settings from the server
  session[kRemoteSettings] = false

  // Apply connection-level flow control once connected (if supported).
  if (http2ConnectionWindowSize) {
    util.addListener(session, 'connect', applyConnectionWindowSize.bind(session, http2ConnectionWindowSize))
  }

  util.addListener(session, 'error', onHttp2SessionError)
  util.addListener(session, 'frameError', onHttp2FrameError)
  util.addListener(session, 'end', onHttp2SessionEnd)
  util.addListener(session, 'goaway', onHttp2SessionGoAway)
  util.addListener(session, 'close', onHttp2SessionClose)
  util.addListener(session, 'remoteSettings', onHttp2RemoteSettings)
  // TODO (@metcoder95): implement SETTINGS support
  // util.addListener(session, 'localSettings', onHttp2RemoteSettings)

  // Idle sessions must not keep the process alive; writeH2/resumeH2 re-ref
  // while streams are outstanding.
  session.unref()

  client[kHTTP2Session] = session
  socket[kHTTP2Session] = session

  util.addListener(socket, 'error', onHttp2SocketError)
  util.addListener(socket, 'end', onHttp2SocketEnd)
  util.addListener(socket, 'close', onHttp2SocketClose)

  // Tracks full socket closure so destroy() below can short-circuit.
  socket[kClosed] = false
  socket.on('close', onSocketClose)

  return {
    version: 'h2',
    // h2 multiplexes; concurrency is limited by maxConcurrentStreams instead.
    defaultPipelining: Infinity,
    /**
     * @param {import('../core/request.js')} request
     * @returns {boolean}
     */
    write (request) {
      return writeH2(client, request)
    },
    /**
     * @returns {void}
     */
    resume () {
      resumeH2(client)
    },
    /**
     * @param {Error | null} err
     * @param {() => void} callback
     */
    destroy (err, callback) {
      if (socket[kClosed]) {
        queueMicrotask(callback)
      } else {
        socket.destroy(err).on('close', callback)
      }
    },
    /**
     * @type {boolean}
     */
    get destroyed () {
      return socket.destroyed
    },
    /**
     * Reports whether dispatching `request` now would be unsafe and it should
     * wait. Mirrors the h1 busy rules, adapted to h2 multiplexing.
     *
     * @param {import('../core/request.js')} request
     * @returns {boolean}
     */
    busy (request) {
      if (request != null) {
        if (client[kRunning] > 0) {
          // We are already processing requests

          // Non-idempotent request cannot be retried.
          // Ensure that no other requests are inflight and
          // could cause failure.
          if (request.idempotent === false) return true
          // Don't dispatch an upgrade until all preceding requests have completed.
          // Possibly, we do not have remote settings confirmed yet.
          if ((request.upgrade === 'websocket' || request.method === 'CONNECT') && session[kRemoteSettings] === false) return true
          // Request with stream or iterator body can error while other requests
          // are inflight and indirectly error those as well.
          // Ensure this doesn't happen by waiting for inflight
          // to complete before dispatching.

          // Request with stream or iterator body cannot be retried.
          // Ensure that no other requests are inflight and
          // could cause failure.
          if (util.bodyLength(request.body) !== 0 &&
            (util.isStream(request.body) || util.isAsyncIterable(request.body) || util.isFormDataLike(request.body))) return true
        } else {
          return (request.upgrade === 'websocket' || request.method === 'CONNECT') && session[kRemoteSettings] === false
        }
      }

      return false
    }
  }
}
|
||||
|
||||
/**
 * Ref/unref the socket and H2 session according to outstanding work: with no
 * queued requests (or a zero concurrency limit) both handles are unref'ed so
 * an idle client does not keep the event loop alive; otherwise they are
 * ref'ed again so in-flight work is not dropped.
 *
 * @param {object} client
 */
function resumeH2 (client) {
  const socket = client[kSocket]

  // Skip entirely if the socket is gone or already destroyed.
  if (socket?.destroyed === false) {
    if (client[kSize] === 0 || client[kMaxConcurrentStreams] === 0) {
      socket.unref()
      client[kHTTP2Session].unref()
    } else {
      socket.ref()
      client[kHTTP2Session].ref()
    }
  }
}
|
||||
|
||||
/**
 * Best-effort: raise the connection-level flow-control window once the
 * session is connected. Bound with the session as `this` (see connectH2).
 *
 * @this {import('http2').ClientHttp2Session}
 * @param {number} connectionWindowSize
 */
function applyConnectionWindowSize (connectionWindowSize) {
  // setLocalWindowSize may be missing on older Node versions — bail quietly.
  if (typeof this.setLocalWindowSize !== 'function') {
    return
  }

  try {
    this.setLocalWindowSize(connectionWindowSize)
  } catch {
    // Flow-control tuning is advisory only; ignore failures.
  }
}
|
||||
|
||||
/**
 * 'remoteSettings' handler (bound with the session as `this`). Adopts the
 * server's concurrency limit and extended-CONNECT capability, then resumes
 * dispatching (requests gated by busy() on kRemoteSettings can now proceed).
 *
 * @this {import('http2').ClientHttp2Session}
 * @param {import('http2').Settings} settings
 */
function onHttp2RemoteSettings (settings) {
  // Fallbacks are a safe bet, remote setting will always override
  this[kClient][kMaxConcurrentStreams] = settings.maxConcurrentStreams ?? this[kClient][kMaxConcurrentStreams]
  /**
   * From RFC-8441
   * A sender MUST NOT send a SETTINGS_ENABLE_CONNECT_PROTOCOL parameter
   * with the value of 0 after previously sending a value of 1.
   */
  // Note: Cannot be tested in Node, it does not supports disabling the extended CONNECT protocol once enabled
  if (this[kRemoteSettings] === true && this[kEnableConnectProtocol] === true && settings.enableConnectProtocol === false) {
    const err = new InformationalError('HTTP/2: Server disabled extended CONNECT protocol against RFC-8441')
    this[kSocket][kError] = err
    this[kClient][kOnError](err)
    return
  }

  this[kEnableConnectProtocol] = settings.enableConnectProtocol ?? this[kEnableConnectProtocol]
  this[kRemoteSettings] = true
  this[kClient][kResume]()
}
|
||||
|
||||
/**
 * 'error' handler for the H2 session (bound with the session as `this`).
 * Records the error on the socket and forwards it to the client's error path.
 *
 * @this {import('http2').ClientHttp2Session}
 * @param {Error} err
 */
function onHttp2SessionError (err) {
  // TLS altname failures must have been rejected earlier, at connect time.
  assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')

  this[kSocket][kError] = err
  this[kClient][kOnError](err)
}
|
||||
|
||||
/**
 * 'frameError' handler for the H2 session. Stream id 0 means a
 * connection-level frame error, which is fatal for the whole session;
 * stream-level frame errors are handled per-stream in writeH2.
 *
 * @this {import('http2').ClientHttp2Session}
 * @param {number} type - frame type
 * @param {number} code - error code
 * @param {number} id - stream id (0 = connection)
 */
function onHttp2FrameError (type, code, id) {
  if (id === 0) {
    const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
    this[kSocket][kError] = err
    this[kClient][kOnError](err)
  }
}
|
||||
|
||||
/**
 * 'end' handler for the H2 session: the peer closed its side, so destroy both
 * the session and the underlying socket with a descriptive SocketError.
 *
 * @this {import('http2').ClientHttp2Session}
 */
function onHttp2SessionEnd () {
  const err = new SocketError('other side closed', util.getSocketInfo(this[kSocket]))
  this.destroy(err)
  util.destroy(this[kSocket], err)
}
|
||||
|
||||
/**
|
||||
* This is the root cause of #3011
|
||||
* We need to handle GOAWAY frames properly, and trigger the session close
|
||||
* along with the socket right away
|
||||
*
|
||||
* @this {import('http2').ClientHttp2Session}
|
||||
* @param {number} errorCode
|
||||
*/
|
||||
/**
 * 'goaway' handler (session as `this`): immediately closes the session and
 * socket, fails the head of the request pipeline, and signals disconnect so
 * remaining requests can be retried on a fresh connection. See #3011.
 *
 * @this {import('http2').ClientHttp2Session}
 * @param {number} errorCode
 */
function onHttp2SessionGoAway (errorCode) {
  // TODO(mcollina): Verify if GOAWAY implements the spec correctly:
  // https://datatracker.ietf.org/doc/html/rfc7540#section-6.8
  // Specifically, we do not verify the "valid" stream id.

  // Prefer an error already recorded on the session over a generic GOAWAY one.
  const err = this[kError] || new SocketError(`HTTP/2: "GOAWAY" frame received with code ${errorCode}`, util.getSocketInfo(this[kSocket]))
  const client = this[kClient]

  // Detach the dead connection from the client before notifying anyone.
  client[kSocket] = null
  client[kHTTPContext] = null

  // this is an HTTP2 session
  this.close()
  this[kHTTP2Session] = null

  util.destroy(this[kSocket], err)

  // Fail head of pipeline.
  if (client[kRunningIdx] < client[kQueue].length) {
    const request = client[kQueue][client[kRunningIdx]]
    client[kQueue][client[kRunningIdx]++] = null
    util.errorRequest(client, request, err)
    // Requeue everything behind the failed head for redispatch.
    client[kPendingIdx] = client[kRunningIdx]
  }

  assert(client[kRunning] === 0)

  client.emit('disconnect', client[kUrl], [client], err)
  client.emit('connectionError', client[kUrl], [client], err)

  client[kResume]()
}
|
||||
|
||||
/**
 * 'close' handler for the H2 session (session as `this`). Detaches the
 * connection from the client; if the client itself was destroyed, fails every
 * request still queued with the most specific error available.
 *
 * @this {import('http2').ClientHttp2Session}
 */
function onHttp2SessionClose () {
  const { [kClient]: client } = this
  const { [kSocket]: socket } = client

  // Prefer the socket's recorded error, then the session's, then a generic one.
  const err = this[kSocket][kError] || this[kError] || new SocketError('closed', util.getSocketInfo(socket))

  client[kSocket] = null
  client[kHTTPContext] = null

  if (client.destroyed) {
    assert(client[kPending] === 0)

    // Fail entire queue.
    const requests = client[kQueue].splice(client[kRunningIdx])
    for (let i = 0; i < requests.length; i++) {
      const request = requests[i]
      util.errorRequest(client, request, err)
    }
  }
}
|
||||
|
||||
/**
 * 'close' handler for the raw socket (socket as `this`). Tears down the
 * attached H2 session, rewinds the pending index so queued requests are
 * redispatched, and emits 'disconnect' before resuming.
 *
 * @this {import('node:net').Socket}
 */
function onHttp2SocketClose () {
  const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))

  const client = this[kHTTP2Session][kClient]

  client[kSocket] = null
  client[kHTTPContext] = null

  if (this[kHTTP2Session] !== null) {
    this[kHTTP2Session].destroy(err)
  }

  // Anything that was running is lost with the socket; redispatch from there.
  client[kPendingIdx] = client[kRunningIdx]

  assert(client[kRunning] === 0)

  client.emit('disconnect', client[kUrl], [client], err)

  client[kResume]()
}
|
||||
|
||||
/**
 * 'error' handler for the raw socket (socket as `this`). Records the error on
 * the socket (so later close handlers can report it) and notifies the client.
 *
 * @this {import('node:net').Socket}
 * @param {Error} err
 */
function onHttp2SocketError (err) {
  // TLS altname failures must have been rejected earlier, at connect time.
  assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')

  this[kError] = err

  this[kClient][kOnError](err)
}
|
||||
|
||||
/**
 * 'end' handler for the raw socket: the peer half-closed the TCP connection,
 * so destroy the socket with a SocketError describing the remote close.
 *
 * @this {import('node:net').Socket}
 */
function onHttp2SocketEnd () {
  util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
}
|
||||
|
||||
// Marks the socket as fully closed so the context's destroy() (see connectH2)
// knows it can invoke its callback immediately instead of waiting for 'close'.
function onSocketClose () {
  this[kClosed] = true
}
|
||||
|
||||
// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
/**
 * Whether a Content-Length header may be sent for the given HTTP method.
 * Methods whose semantics do not anticipate a payload are excluded.
 *
 * @param {string} method
 * @returns {boolean}
 */
function shouldSendContentLength (method) {
  switch (method) {
    case 'GET':
    case 'HEAD':
    case 'OPTIONS':
    case 'TRACE':
    case 'CONNECT':
      return false
    default:
      return true
  }
}
|
||||
|
||||
/**
 * Dispatches a single request over the client's HTTP/2 session: builds the
 * header block, opens the stream (plain request, extended-CONNECT upgrade, or
 * CONNECT tunnel), wires response/data/error handlers, and kicks off body
 * writing. Returns false when the request could not be dispatched (caller
 * keeps it queued or it has been errored already).
 *
 * @param {object} client
 * @param {import('../core/request.js')} request
 * @returns {boolean}
 */
function writeH2 (client, request) {
  const requestTimeout = request.bodyTimeout ?? client[kBodyTimeout]
  const session = client[kHTTP2Session]
  const { method, path, host, upgrade, expectContinue, signal, protocol, headers: reqHeaders } = request
  let { body } = request

  // Only the RFC 8441 websocket upgrade is representable over h2.
  if (upgrade != null && upgrade !== 'websocket') {
    util.errorRequest(client, request, new InvalidArgumentError(`Custom upgrade "${upgrade}" not supported over HTTP/2`))
    return false
  }

  // Collapse the flat [name, value, ...] request-header list into an object;
  // repeated names are comma-joined, except cookie which h2 requires as an
  // array of separate values.
  const headers = {}
  for (let n = 0; n < reqHeaders.length; n += 2) {
    const key = reqHeaders[n + 0]
    const val = reqHeaders[n + 1]

    if (key === 'cookie') {
      if (headers[key] != null) {
        headers[key] = Array.isArray(headers[key]) ? (headers[key].push(val), headers[key]) : [headers[key], val]
      } else {
        headers[key] = val
      }

      continue
    }

    if (Array.isArray(val)) {
      for (let i = 0; i < val.length; i++) {
        if (headers[key]) {
          headers[key] += `, ${val[i]}`
        } else {
          headers[key] = val[i]
        }
      }
    } else if (headers[key]) {
      headers[key] += `, ${val}`
    } else {
      headers[key] = val
    }
  }

  /** @type {import('node:http2').ClientHttp2Stream} */
  let stream = null

  const { hostname, port } = client[kUrl]

  headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? `:${port}` : ''}`
  headers[HTTP2_HEADER_METHOD] = method

  // Shared abort path for this request: errors the request, RSTs the stream
  // if one is open, and destroys the (possibly partially-consumed) body.
  const abort = (err) => {
    if (request.aborted || request.completed) {
      return
    }

    err = err || new RequestAbortedError()

    util.errorRequest(client, request, err)

    if (stream != null) {
      // Some chunks might still come after abort,
      // let's ignore them
      stream.removeAllListeners('data')

      // On Abort, we close the stream to send RST_STREAM frame
      stream.close()

      // We move the running index to the next request
      client[kOnError](err)
      client[kResume]()
    }

    // We do not destroy the socket as we can continue using the session
    // the stream gets destroyed and the session remains to create new streams
    util.destroy(body, err)
  }

  try {
    // We are already connected, streams are pending.
    // We can call on connect, and wait for abort
    request.onConnect(abort)
  } catch (err) {
    util.errorRequest(client, request, err)
  }

  if (request.aborted) {
    return false
  }

  if (upgrade || method === 'CONNECT') {
    session.ref()

    if (upgrade === 'websocket') {
      // We cannot upgrade to websocket if extended CONNECT protocol is not supported
      if (session[kEnableConnectProtocol] === false) {
        util.errorRequest(client, request, new InformationalError('HTTP/2: Extended CONNECT protocol not supported by server'))
        session.unref()
        return false
      }

      // We force the method to CONNECT
      // as per RFC-8441
      // https://datatracker.ietf.org/doc/html/rfc8441#section-4
      headers[HTTP2_HEADER_METHOD] = 'CONNECT'
      headers[HTTP2_HEADER_PROTOCOL] = 'websocket'
      // :path and :scheme headers must be omitted when sending CONNECT but set if extended-CONNECT
      headers[HTTP2_HEADER_PATH] = path

      if (protocol === 'ws:' || protocol === 'wss:') {
        headers[HTTP2_HEADER_SCHEME] = protocol === 'ws:' ? 'http' : 'https'
      } else {
        headers[HTTP2_HEADER_SCHEME] = protocol === 'http:' ? 'http' : 'https'
      }

      stream = session.request(headers, { endStream: false, signal })
      stream[kHTTP2Stream] = true

      stream.once('response', (headers, _flags) => {
        const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers

        request.onUpgrade(statusCode, parseH2Headers(realHeaders), stream)

        ++session[kOpenStreams]
        client[kQueue][client[kRunningIdx]++] = null
      })

      stream.on('error', () => {
        if (stream.rstCode === NGHTTP2_REFUSED_STREAM || stream.rstCode === NGHTTP2_CANCEL) {
          // NGHTTP2_REFUSED_STREAM (7) or NGHTTP2_CANCEL (8)
          // We do not treat those as errors as the server might
          // not support websockets and refuse the stream
          abort(new InformationalError(`HTTP/2: "stream error" received - code ${stream.rstCode}`))
        }
      })

      stream.once('close', () => {
        session[kOpenStreams] -= 1
        if (session[kOpenStreams] === 0) session.unref()
      })

      stream.setTimeout(requestTimeout)
      return true
    }

    // TODO: consolidate once we support CONNECT properly
    // NOTE: We are already connected, streams are pending, first request
    // will create a new stream. We trigger a request to create the stream and wait until
    // `ready` event is triggered
    // We disabled endStream to allow the user to write to the stream
    stream = session.request(headers, { endStream: false, signal })
    stream[kHTTP2Stream] = true
    stream.on('response', headers => {
      const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers

      request.onUpgrade(statusCode, parseH2Headers(realHeaders), stream)
      ++session[kOpenStreams]
      client[kQueue][client[kRunningIdx]++] = null
    })
    stream.once('close', () => {
      session[kOpenStreams] -= 1
      if (session[kOpenStreams] === 0) session.unref()
    })
    stream.setTimeout(requestTimeout)

    return true
  }

  // https://tools.ietf.org/html/rfc7540#section-8.3
  // :path and :scheme headers must be omitted when sending CONNECT
  headers[HTTP2_HEADER_PATH] = path
  headers[HTTP2_HEADER_SCHEME] = protocol === 'http:' ? 'http' : 'https'

  // https://tools.ietf.org/html/rfc7231#section-4.3.1
  // https://tools.ietf.org/html/rfc7231#section-4.3.2
  // https://tools.ietf.org/html/rfc7231#section-4.3.5

  // Sending a payload body on a request that does not
  // expect it can cause undefined behavior on some
  // servers and corrupt connection state. Do not
  // re-use the connection for further requests.

  const expectsPayload = (
    method === 'PUT' ||
    method === 'POST' ||
    method === 'PATCH'
  )

  if (body && typeof body.read === 'function') {
    // Try to read EOF in order to get length.
    body.read(0)
  }

  let contentLength = util.bodyLength(body)

  if (util.isFormDataLike(body)) {
    extractBody ??= require('../web/fetch/body.js').extractBody

    const [bodyStream, contentType] = extractBody(body)
    headers['content-type'] = contentType

    body = bodyStream.stream
    contentLength = bodyStream.length
  }

  if (contentLength == null) {
    contentLength = request.contentLength
  }

  if (!expectsPayload) {
    // https://tools.ietf.org/html/rfc7230#section-3.3.2
    // A user agent SHOULD NOT send a Content-Length header field when
    // the request message does not contain a payload body and the method
    // semantics do not anticipate such a body.
    // And for methods that don't expect a payload, omit Content-Length.
    contentLength = null
  }

  // https://github.com/nodejs/undici/issues/2046
  // A user agent may send a Content-Length header with 0 value, this should be allowed.
  if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
    if (client[kStrictContentLength]) {
      util.errorRequest(client, request, new RequestContentLengthMismatchError())
      return false
    }

    process.emitWarning(new RequestContentLengthMismatchError())
  }

  if (contentLength != null) {
    assert(body || contentLength === 0, 'no body must not have content length')
    headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
  }

  session.ref()

  // Diagnostics-channel mirror of the header block actually sent.
  if (channels.sendHeaders.hasSubscribers) {
    let header = ''
    for (const key in headers) {
      header += `${key}: ${headers[key]}\r\n`
    }
    channels.sendHeaders.publish({ request, headers: header, socket: session[kSocket] })
  }

  // TODO(metcoder95): add support for sending trailers
  const shouldEndStream = method === 'GET' || method === 'HEAD' || body === null
  if (expectContinue) {
    // Defer body writing until the server's 100-continue arrives.
    headers[HTTP2_HEADER_EXPECT] = '100-continue'
    stream = session.request(headers, { endStream: shouldEndStream, signal })
    stream[kHTTP2Stream] = true

    stream.once('continue', writeBodyH2)
  } else {
    stream = session.request(headers, {
      endStream: shouldEndStream,
      signal
    })
    stream[kHTTP2Stream] = true

    writeBodyH2()
  }

  // Increment counter as we have new streams open
  ++session[kOpenStreams]
  stream.setTimeout(requestTimeout)

  // Track whether we received a response (headers)
  let responseReceived = false

  stream.once('response', headers => {
    const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
    request.onResponseStarted()
    responseReceived = true

    // Due to the stream nature, it is possible we face a race condition
    // where the stream has been assigned, but the request has been aborted
    // the request remains in-flight and headers hasn't been received yet
    // for those scenarios, best effort is to destroy the stream immediately
    // as there's no value to keep it open.
    if (request.aborted) {
      stream.removeAllListeners('data')
      return
    }

    if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), '') === false) {
      stream.pause()
    }
  })

  stream.on('data', (chunk) => {
    // Honor consumer backpressure: pause until the handler resumes us.
    if (request.onData(chunk) === false) {
      stream.pause()
    }
  })

  stream.once('end', () => {
    stream.removeAllListeners('data')
    // If we received a response, this is a normal completion
    if (responseReceived) {
      if (!request.aborted && !request.completed) {
        request.onComplete({})
      }

      client[kQueue][client[kRunningIdx]++] = null
      client[kResume]()
    } else {
      // Stream ended without receiving a response - this is an error
      // (e.g., server destroyed the stream before sending headers)
      abort(new InformationalError('HTTP/2: stream half-closed (remote)'))
      client[kQueue][client[kRunningIdx]++] = null
      client[kPendingIdx] = client[kRunningIdx]
      client[kResume]()
    }
  })

  stream.once('close', () => {
    stream.removeAllListeners('data')
    session[kOpenStreams] -= 1
    if (session[kOpenStreams] === 0) {
      session.unref()
    }
  })

  stream.once('error', function (err) {
    stream.removeAllListeners('data')
    abort(err)
  })

  stream.once('frameError', (type, code) => {
    stream.removeAllListeners('data')
    abort(new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`))
  })

  stream.on('aborted', () => {
    stream.removeAllListeners('data')
  })

  stream.on('timeout', () => {
    const err = new InformationalError(`HTTP/2: "stream timeout after ${requestTimeout}"`)
    stream.removeAllListeners('data')
    session[kOpenStreams] -= 1

    if (session[kOpenStreams] === 0) {
      session.unref()
    }

    abort(err)
  })

  stream.once('trailers', trailers => {
    if (request.aborted || request.completed) {
      return
    }

    request.onComplete(trailers)
  })

  return true

  // Picks the writer matching the body's type. Note the async writers
  // (writeBlob, writeIterable) are intentionally fire-and-forget; they report
  // failure through `abort`.
  function writeBodyH2 () {
    if (!body || contentLength === 0) {
      writeBuffer(
        abort,
        stream,
        null,
        client,
        request,
        client[kSocket],
        contentLength,
        expectsPayload
      )
    } else if (util.isBuffer(body)) {
      writeBuffer(
        abort,
        stream,
        body,
        client,
        request,
        client[kSocket],
        contentLength,
        expectsPayload
      )
    } else if (util.isBlobLike(body)) {
      if (typeof body.stream === 'function') {
        writeIterable(
          abort,
          stream,
          body.stream(),
          client,
          request,
          client[kSocket],
          contentLength,
          expectsPayload
        )
      } else {
        writeBlob(
          abort,
          stream,
          body,
          client,
          request,
          client[kSocket],
          contentLength,
          expectsPayload
        )
      }
    } else if (util.isStream(body)) {
      writeStream(
        abort,
        client[kSocket],
        expectsPayload,
        stream,
        body,
        client,
        request,
        contentLength
      )
    } else if (util.isIterable(body)) {
      writeIterable(
        abort,
        stream,
        body,
        client,
        request,
        client[kSocket],
        contentLength,
        expectsPayload
      )
    } else {
      assert(false)
    }
  }
}
|
||||
|
||||
/**
 * Writes a Buffer body (or no body at all, when `body` is null) to the h2
 * stream in a single corked write, then marks the request as sent.
 *
 * @param {(err?: Error) => void} abort - request abort path
 * @param {import('http2').ClientHttp2Stream} h2stream
 * @param {Buffer | null} body
 * @param {object} client
 * @param {import('../core/request.js')} request
 * @param {import('node:net').Socket} socket
 * @param {number | null} contentLength
 * @param {boolean} expectsPayload - method semantics expect a payload
 */
function writeBuffer (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
  try {
    if (body != null && util.isBuffer(body)) {
      assert(contentLength === body.byteLength, 'buffer body must have content length')
      // cork/uncork coalesces the write and the trailing end() frame.
      h2stream.cork()
      h2stream.write(body)
      h2stream.uncork()
      h2stream.end()

      request.onBodySent(body)
    }

    if (!expectsPayload) {
      // Payload on a non-payload method: do not reuse this connection.
      socket[kReset] = true
    }

    request.onRequestSent()
    client[kResume]()
  } catch (error) {
    abort(error)
  }
}
|
||||
|
||||
/**
 * Pipes a Node stream body into the h2 stream. Completion and failure are
 * both reported through the pipeline callback; each chunk is mirrored to
 * request.onBodySent for progress reporting.
 *
 * @param {(err?: Error) => void} abort
 * @param {import('node:net').Socket} socket
 * @param {boolean} expectsPayload
 * @param {import('http2').ClientHttp2Stream} h2stream
 * @param {import('node:stream').Readable} body
 * @param {object} client
 * @param {import('../core/request.js')} request
 * @param {number | null} contentLength
 */
function writeStream (abort, socket, expectsPayload, h2stream, body, client, request, contentLength) {
  assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')

  // For HTTP/2, is enough to pipe the stream
  const pipe = pipeline(
    body,
    h2stream,
    (err) => {
      if (err) {
        util.destroy(pipe, err)
        abort(err)
      } else {
        util.removeAllListeners(pipe)
        request.onRequestSent()

        if (!expectsPayload) {
          // Payload on a non-payload method: do not reuse this connection.
          socket[kReset] = true
        }

        client[kResume]()
      }
    }
  )

  util.addListener(pipe, 'data', onPipeData)

  // Progress hook: report each chunk as it flows into the h2 stream.
  function onPipeData (chunk) {
    request.onBodySent(chunk)
  }
}
|
||||
|
||||
/**
 * Buffers a Blob-like body fully in memory and writes it to the h2 stream in
 * a single corked write.
 *
 * Fix: the previous leading `assert(contentLength === body.size)` made the
 * mismatch check inside the `try` unreachable, and — because this async
 * function is invoked fire-and-forget from writeBodyH2 — a length mismatch
 * surfaced as an unhandled promise rejection instead of aborting the request.
 * The mismatch now flows through the catch block and aborts the request with
 * RequestContentLengthMismatchError, matching the evident intent.
 *
 * @param {(err?: Error) => void} abort - request abort path
 * @param {import('http2').ClientHttp2Stream} h2stream
 * @param {Blob} body - Blob-like with `.size` and `.arrayBuffer()`
 * @param {object} client
 * @param {import('../core/request.js')} request
 * @param {import('node:net').Socket} socket
 * @param {number | null} contentLength
 * @param {boolean} expectsPayload - method semantics expect a payload
 */
async function writeBlob (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
  try {
    if (contentLength != null && contentLength !== body.size) {
      throw new RequestContentLengthMismatchError()
    }

    const buffer = Buffer.from(await body.arrayBuffer())

    // cork/uncork coalesces the write and the trailing end() frame.
    h2stream.cork()
    h2stream.write(buffer)
    h2stream.uncork()
    h2stream.end()

    request.onBodySent(buffer)
    request.onRequestSent()

    if (!expectsPayload) {
      // Payload on a non-payload method: do not reuse this connection.
      socket[kReset] = true
    }

    client[kResume]()
  } catch (err) {
    abort(err)
  }
}
|
||||
|
||||
/**
 * Writes an (async) iterable body chunk by chunk, honoring h2 stream
 * backpressure: when write() returns false, iteration pauses until the
 * stream emits 'drain' (or 'close', which also releases the waiter).
 * Errors recorded on the socket short-circuit the loop.
 *
 * @param {(err?: Error) => void} abort
 * @param {import('http2').ClientHttp2Stream} h2stream
 * @param {Iterable | AsyncIterable} body
 * @param {object} client
 * @param {import('../core/request.js')} request
 * @param {import('node:net').Socket} socket
 * @param {number | null} contentLength
 * @param {boolean} expectsPayload
 */
async function writeIterable (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
  assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')

  // Single-waiter drain latch: waitForDrain parks `resolve` here, onDrain
  // releases it exactly once.
  let callback = null
  function onDrain () {
    if (callback) {
      const cb = callback
      callback = null
      cb()
    }
  }

  const waitForDrain = () => new Promise((resolve, reject) => {
    // Only one writer may be waiting at a time.
    assert(callback === null)

    if (socket[kError]) {
      reject(socket[kError])
    } else {
      callback = resolve
    }
  })

  h2stream
    .on('close', onDrain)
    .on('drain', onDrain)

  try {
    // It's up to the user to somehow abort the async iterable.
    for await (const chunk of body) {
      if (socket[kError]) {
        throw socket[kError]
      }

      const res = h2stream.write(chunk)
      request.onBodySent(chunk)
      if (!res) {
        await waitForDrain()
      }
    }

    h2stream.end()

    request.onRequestSent()

    if (!expectsPayload) {
      // Payload on a non-payload method: do not reuse this connection.
      socket[kReset] = true
    }

    client[kResume]()
  } catch (err) {
    abort(err)
  } finally {
    // Always detach the drain listeners, success or failure.
    h2stream
      .off('close', onDrain)
      .off('drain', onDrain)
  }
}
|
||||
|
||||
// Export the HTTP/2 connection factory consumed by the Client dispatcher.
module.exports = connectH2
|
||||
639
backend/node_modules/undici/lib/dispatcher/client.js
generated
vendored
Normal file
639
backend/node_modules/undici/lib/dispatcher/client.js
generated
vendored
Normal file
@@ -0,0 +1,639 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const net = require('node:net')
|
||||
const http = require('node:http')
|
||||
const util = require('../core/util.js')
|
||||
const { ClientStats } = require('../util/stats.js')
|
||||
const { channels } = require('../core/diagnostics.js')
|
||||
const Request = require('../core/request.js')
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const {
|
||||
InvalidArgumentError,
|
||||
InformationalError,
|
||||
ClientDestroyedError
|
||||
} = require('../core/errors.js')
|
||||
const buildConnector = require('../core/connect.js')
|
||||
const {
|
||||
kUrl,
|
||||
kServerName,
|
||||
kClient,
|
||||
kBusy,
|
||||
kConnect,
|
||||
kResuming,
|
||||
kRunning,
|
||||
kPending,
|
||||
kSize,
|
||||
kQueue,
|
||||
kConnected,
|
||||
kConnecting,
|
||||
kNeedDrain,
|
||||
kKeepAliveDefaultTimeout,
|
||||
kHostHeader,
|
||||
kPendingIdx,
|
||||
kRunningIdx,
|
||||
kError,
|
||||
kPipelining,
|
||||
kKeepAliveTimeoutValue,
|
||||
kMaxHeadersSize,
|
||||
kKeepAliveMaxTimeout,
|
||||
kKeepAliveTimeoutThreshold,
|
||||
kHeadersTimeout,
|
||||
kBodyTimeout,
|
||||
kStrictContentLength,
|
||||
kConnector,
|
||||
kMaxRequests,
|
||||
kCounter,
|
||||
kClose,
|
||||
kDestroy,
|
||||
kDispatch,
|
||||
kLocalAddress,
|
||||
kMaxResponseSize,
|
||||
kOnError,
|
||||
kHTTPContext,
|
||||
kMaxConcurrentStreams,
|
||||
kHTTP2InitialWindowSize,
|
||||
kHTTP2ConnectionWindowSize,
|
||||
kResume
|
||||
} = require('../core/symbols.js')
|
||||
const connectH1 = require('./client-h1.js')
|
||||
const connectH2 = require('./client-h2.js')
|
||||
|
||||
// Resolver stashed by close() and fulfilled once the client has drained.
const kClosedResolve = Symbol('kClosedResolve')

// Default maxHeaderSize: taken from Node's http module when it exposes a
// valid positive integer; otherwise constructing a Client without an explicit
// maxHeaderSize throws.
const getDefaultNodeMaxHeaderSize = http &&
  http.maxHeaderSize &&
  Number.isInteger(http.maxHeaderSize) &&
  http.maxHeaderSize > 0
  ? () => http.maxHeaderSize
  : () => { throw new InvalidArgumentError('http module not available or http.maxHeaderSize invalid') }

// Shared no-op callback.
const noop = () => {}
|
||||
|
||||
// Effective pipelining factor: the explicit client option wins, then the
// connected protocol context's default (Infinity for h2 — see connectH2),
// then 1 while no connection exists.
function getPipelining (client) {
  return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1
}
|
||||
|
||||
/**
|
||||
* @type {import('../../types/client.js').default}
|
||||
*/
|
||||
class Client extends DispatcherBase {
|
||||
/**
|
||||
*
|
||||
* @param {string|URL} url
|
||||
* @param {import('../../types/client.js').Client.Options} options
|
||||
*/
|
||||
constructor (url, {
|
||||
maxHeaderSize,
|
||||
headersTimeout,
|
||||
socketTimeout,
|
||||
requestTimeout,
|
||||
connectTimeout,
|
||||
bodyTimeout,
|
||||
idleTimeout,
|
||||
keepAlive,
|
||||
keepAliveTimeout,
|
||||
maxKeepAliveTimeout,
|
||||
keepAliveMaxTimeout,
|
||||
keepAliveTimeoutThreshold,
|
||||
socketPath,
|
||||
pipelining,
|
||||
tls,
|
||||
strictContentLength,
|
||||
maxCachedSessions,
|
||||
connect,
|
||||
maxRequestsPerClient,
|
||||
localAddress,
|
||||
maxResponseSize,
|
||||
autoSelectFamily,
|
||||
autoSelectFamilyAttemptTimeout,
|
||||
// h2
|
||||
maxConcurrentStreams,
|
||||
allowH2,
|
||||
useH2c,
|
||||
initialWindowSize,
|
||||
connectionWindowSize
|
||||
} = {}) {
|
||||
if (keepAlive !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
|
||||
}
|
||||
|
||||
if (socketTimeout !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
|
||||
}
|
||||
|
||||
if (requestTimeout !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
|
||||
}
|
||||
|
||||
if (idleTimeout !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
|
||||
}
|
||||
|
||||
if (maxKeepAliveTimeout !== undefined) {
|
||||
throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
|
||||
}
|
||||
|
||||
if (maxHeaderSize != null) {
|
||||
if (!Number.isInteger(maxHeaderSize) || maxHeaderSize < 1) {
|
||||
throw new InvalidArgumentError('invalid maxHeaderSize')
|
||||
}
|
||||
} else {
|
||||
// If maxHeaderSize is not provided, use the default value from the http module
|
||||
// or if that is not available, throw an error.
|
||||
maxHeaderSize = getDefaultNodeMaxHeaderSize()
|
||||
}
|
||||
|
||||
if (socketPath != null && typeof socketPath !== 'string') {
|
||||
throw new InvalidArgumentError('invalid socketPath')
|
||||
}
|
||||
|
||||
if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
|
||||
throw new InvalidArgumentError('invalid connectTimeout')
|
||||
}
|
||||
|
||||
if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
|
||||
throw new InvalidArgumentError('invalid keepAliveTimeout')
|
||||
}
|
||||
|
||||
if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
|
||||
throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
|
||||
}
|
||||
|
||||
if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
|
||||
throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
|
||||
}
|
||||
|
||||
if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
|
||||
throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
|
||||
}
|
||||
|
||||
if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
|
||||
throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
|
||||
}
|
||||
|
||||
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
|
||||
throw new InvalidArgumentError('connect must be a function or an object')
|
||||
}
|
||||
|
||||
if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
|
||||
throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
|
||||
}
|
||||
|
||||
if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
|
||||
throw new InvalidArgumentError('localAddress must be valid string IP address')
|
||||
}
|
||||
|
||||
if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
|
||||
throw new InvalidArgumentError('maxResponseSize must be a positive number')
|
||||
}
|
||||
|
||||
if (
|
||||
autoSelectFamilyAttemptTimeout != null &&
|
||||
(!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
|
||||
) {
|
||||
throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
|
||||
}
|
||||
|
||||
// h2
|
||||
if (allowH2 != null && typeof allowH2 !== 'boolean') {
|
||||
throw new InvalidArgumentError('allowH2 must be a valid boolean value')
|
||||
}
|
||||
|
||||
if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
|
||||
throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
|
||||
}
|
||||
|
||||
if (useH2c != null && typeof useH2c !== 'boolean') {
|
||||
throw new InvalidArgumentError('useH2c must be a valid boolean value')
|
||||
}
|
||||
|
||||
if (initialWindowSize != null && (!Number.isInteger(initialWindowSize) || initialWindowSize < 1)) {
|
||||
throw new InvalidArgumentError('initialWindowSize must be a positive integer, greater than 0')
|
||||
}
|
||||
|
||||
if (connectionWindowSize != null && (!Number.isInteger(connectionWindowSize) || connectionWindowSize < 1)) {
|
||||
throw new InvalidArgumentError('connectionWindowSize must be a positive integer, greater than 0')
|
||||
}
|
||||
|
||||
super()
|
||||
|
||||
if (typeof connect !== 'function') {
|
||||
connect = buildConnector({
|
||||
...tls,
|
||||
maxCachedSessions,
|
||||
allowH2,
|
||||
useH2c,
|
||||
socketPath,
|
||||
timeout: connectTimeout,
|
||||
...(typeof autoSelectFamily === 'boolean' ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
|
||||
...connect
|
||||
})
|
||||
}
|
||||
|
||||
this[kUrl] = util.parseOrigin(url)
|
||||
this[kConnector] = connect
|
||||
this[kPipelining] = pipelining != null ? pipelining : 1
|
||||
this[kMaxHeadersSize] = maxHeaderSize
|
||||
this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
|
||||
this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
|
||||
this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 2e3 : keepAliveTimeoutThreshold
|
||||
this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
|
||||
this[kServerName] = null
|
||||
this[kLocalAddress] = localAddress != null ? localAddress : null
|
||||
this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming
|
||||
this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming
|
||||
this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
|
||||
this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
|
||||
this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
|
||||
this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
|
||||
this[kMaxRequests] = maxRequestsPerClient
|
||||
this[kClosedResolve] = null
|
||||
this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
|
||||
this[kMaxConcurrentStreams] = maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
|
||||
// HTTP/2 window sizes are set to higher defaults than Node.js core for better performance:
|
||||
// - initialWindowSize: 262144 (256KB) vs Node.js default 65535 (64KB - 1)
|
||||
// Allows more data to be sent before requiring acknowledgment, improving throughput
|
||||
// especially on high-latency networks. This matches common production HTTP/2 servers.
|
||||
// - connectionWindowSize: 524288 (512KB) vs Node.js default (none set)
|
||||
// Provides better flow control for the entire connection across multiple streams.
|
||||
this[kHTTP2InitialWindowSize] = initialWindowSize != null ? initialWindowSize : 262144
|
||||
this[kHTTP2ConnectionWindowSize] = connectionWindowSize != null ? connectionWindowSize : 524288
|
||||
this[kHTTPContext] = null
|
||||
|
||||
// kQueue is built up of 3 sections separated by
|
||||
// the kRunningIdx and kPendingIdx indices.
|
||||
// | complete | running | pending |
|
||||
// ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
|
||||
// kRunningIdx points to the first running element.
|
||||
// kPendingIdx points to the first pending element.
|
||||
// This implements a fast queue with an amortized
|
||||
// time of O(1).
|
||||
|
||||
this[kQueue] = []
|
||||
this[kRunningIdx] = 0
|
||||
this[kPendingIdx] = 0
|
||||
|
||||
this[kResume] = (sync) => resume(this, sync)
|
||||
this[kOnError] = (err) => onError(this, err)
|
||||
}
|
||||
|
||||
get pipelining () {
|
||||
return this[kPipelining]
|
||||
}
|
||||
|
||||
set pipelining (value) {
|
||||
this[kPipelining] = value
|
||||
this[kResume](true)
|
||||
}
|
||||
|
||||
get stats () {
|
||||
return new ClientStats(this)
|
||||
}
|
||||
|
||||
get [kPending] () {
|
||||
return this[kQueue].length - this[kPendingIdx]
|
||||
}
|
||||
|
||||
get [kRunning] () {
|
||||
return this[kPendingIdx] - this[kRunningIdx]
|
||||
}
|
||||
|
||||
get [kSize] () {
|
||||
return this[kQueue].length - this[kRunningIdx]
|
||||
}
|
||||
|
||||
get [kConnected] () {
|
||||
return !!this[kHTTPContext] && !this[kConnecting] && !this[kHTTPContext].destroyed
|
||||
}
|
||||
|
||||
get [kBusy] () {
|
||||
return Boolean(
|
||||
this[kHTTPContext]?.busy(null) ||
|
||||
(this[kSize] >= (getPipelining(this) || 1)) ||
|
||||
this[kPending] > 0
|
||||
)
|
||||
}
|
||||
|
||||
[kConnect] (cb) {
|
||||
connect(this)
|
||||
this.once('connect', cb)
|
||||
}
|
||||
|
||||
[kDispatch] (opts, handler) {
|
||||
const request = new Request(this[kUrl].origin, opts, handler)
|
||||
|
||||
this[kQueue].push(request)
|
||||
if (this[kResuming]) {
|
||||
// Do nothing.
|
||||
} else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
|
||||
// Wait a tick in case stream/iterator is ended in the same tick.
|
||||
this[kResuming] = 1
|
||||
queueMicrotask(() => resume(this))
|
||||
} else {
|
||||
this[kResume](true)
|
||||
}
|
||||
|
||||
if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
|
||||
this[kNeedDrain] = 2
|
||||
}
|
||||
|
||||
return this[kNeedDrain] < 2
|
||||
}
|
||||
|
||||
[kClose] () {
|
||||
// TODO: for H2 we need to gracefully flush the remaining enqueued
|
||||
// request and close each stream.
|
||||
return new Promise((resolve) => {
|
||||
if (this[kSize]) {
|
||||
this[kClosedResolve] = resolve
|
||||
} else {
|
||||
resolve(null)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
[kDestroy] (err) {
|
||||
return new Promise((resolve) => {
|
||||
const requests = this[kQueue].splice(this[kPendingIdx])
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
const request = requests[i]
|
||||
util.errorRequest(this, request, err)
|
||||
}
|
||||
|
||||
const callback = () => {
|
||||
if (this[kClosedResolve]) {
|
||||
// TODO (fix): Should we error here with ClientDestroyedError?
|
||||
this[kClosedResolve]()
|
||||
this[kClosedResolve] = null
|
||||
}
|
||||
resolve(null)
|
||||
}
|
||||
|
||||
if (this[kHTTPContext]) {
|
||||
this[kHTTPContext].destroy(err, callback)
|
||||
this[kHTTPContext] = null
|
||||
} else {
|
||||
queueMicrotask(callback)
|
||||
}
|
||||
|
||||
this[kResume]()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function onError (client, err) {
|
||||
if (
|
||||
client[kRunning] === 0 &&
|
||||
err.code !== 'UND_ERR_INFO' &&
|
||||
err.code !== 'UND_ERR_SOCKET'
|
||||
) {
|
||||
// Error is not caused by running request and not a recoverable
|
||||
// socket error.
|
||||
|
||||
assert(client[kPendingIdx] === client[kRunningIdx])
|
||||
|
||||
const requests = client[kQueue].splice(client[kRunningIdx])
|
||||
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
const request = requests[i]
|
||||
util.errorRequest(client, request, err)
|
||||
}
|
||||
assert(client[kSize] === 0)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Client} client
|
||||
* @returns {void}
|
||||
*/
|
||||
function connect (client) {
|
||||
assert(!client[kConnecting])
|
||||
assert(!client[kHTTPContext])
|
||||
|
||||
let { host, hostname, protocol, port } = client[kUrl]
|
||||
|
||||
// Resolve ipv6
|
||||
if (hostname[0] === '[') {
|
||||
const idx = hostname.indexOf(']')
|
||||
|
||||
assert(idx !== -1)
|
||||
const ip = hostname.substring(1, idx)
|
||||
|
||||
assert(net.isIPv6(ip))
|
||||
hostname = ip
|
||||
}
|
||||
|
||||
client[kConnecting] = true
|
||||
|
||||
if (channels.beforeConnect.hasSubscribers) {
|
||||
channels.beforeConnect.publish({
|
||||
connectParams: {
|
||||
host,
|
||||
hostname,
|
||||
protocol,
|
||||
port,
|
||||
version: client[kHTTPContext]?.version,
|
||||
servername: client[kServerName],
|
||||
localAddress: client[kLocalAddress]
|
||||
},
|
||||
connector: client[kConnector]
|
||||
})
|
||||
}
|
||||
|
||||
client[kConnector]({
|
||||
host,
|
||||
hostname,
|
||||
protocol,
|
||||
port,
|
||||
servername: client[kServerName],
|
||||
localAddress: client[kLocalAddress]
|
||||
}, (err, socket) => {
|
||||
if (err) {
|
||||
handleConnectError(client, err, { host, hostname, protocol, port })
|
||||
client[kResume]()
|
||||
return
|
||||
}
|
||||
|
||||
if (client.destroyed) {
|
||||
util.destroy(socket.on('error', noop), new ClientDestroyedError())
|
||||
client[kResume]()
|
||||
return
|
||||
}
|
||||
|
||||
assert(socket)
|
||||
|
||||
try {
|
||||
client[kHTTPContext] = socket.alpnProtocol === 'h2'
|
||||
? connectH2(client, socket)
|
||||
: connectH1(client, socket)
|
||||
} catch (err) {
|
||||
socket.destroy().on('error', noop)
|
||||
handleConnectError(client, err, { host, hostname, protocol, port })
|
||||
client[kResume]()
|
||||
return
|
||||
}
|
||||
|
||||
client[kConnecting] = false
|
||||
|
||||
socket[kCounter] = 0
|
||||
socket[kMaxRequests] = client[kMaxRequests]
|
||||
socket[kClient] = client
|
||||
socket[kError] = null
|
||||
|
||||
if (channels.connected.hasSubscribers) {
|
||||
channels.connected.publish({
|
||||
connectParams: {
|
||||
host,
|
||||
hostname,
|
||||
protocol,
|
||||
port,
|
||||
version: client[kHTTPContext]?.version,
|
||||
servername: client[kServerName],
|
||||
localAddress: client[kLocalAddress]
|
||||
},
|
||||
connector: client[kConnector],
|
||||
socket
|
||||
})
|
||||
}
|
||||
|
||||
client.emit('connect', client[kUrl], [client])
|
||||
client[kResume]()
|
||||
})
|
||||
}
|
||||
|
||||
function handleConnectError (client, err, { host, hostname, protocol, port }) {
|
||||
if (client.destroyed) {
|
||||
return
|
||||
}
|
||||
|
||||
client[kConnecting] = false
|
||||
|
||||
if (channels.connectError.hasSubscribers) {
|
||||
channels.connectError.publish({
|
||||
connectParams: {
|
||||
host,
|
||||
hostname,
|
||||
protocol,
|
||||
port,
|
||||
version: client[kHTTPContext]?.version,
|
||||
servername: client[kServerName],
|
||||
localAddress: client[kLocalAddress]
|
||||
},
|
||||
connector: client[kConnector],
|
||||
error: err
|
||||
})
|
||||
}
|
||||
|
||||
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
|
||||
assert(client[kRunning] === 0)
|
||||
while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
|
||||
const request = client[kQueue][client[kPendingIdx]++]
|
||||
util.errorRequest(client, request, err)
|
||||
}
|
||||
} else {
|
||||
onError(client, err)
|
||||
}
|
||||
|
||||
client.emit('connectionError', client[kUrl], [client], err)
|
||||
}
|
||||
|
||||
function emitDrain (client) {
|
||||
client[kNeedDrain] = 0
|
||||
client.emit('drain', client[kUrl], [client])
|
||||
}
|
||||
|
||||
function resume (client, sync) {
|
||||
if (client[kResuming] === 2) {
|
||||
return
|
||||
}
|
||||
|
||||
client[kResuming] = 2
|
||||
|
||||
_resume(client, sync)
|
||||
client[kResuming] = 0
|
||||
|
||||
if (client[kRunningIdx] > 256) {
|
||||
client[kQueue].splice(0, client[kRunningIdx])
|
||||
client[kPendingIdx] -= client[kRunningIdx]
|
||||
client[kRunningIdx] = 0
|
||||
}
|
||||
}
|
||||
|
||||
function _resume (client, sync) {
|
||||
while (true) {
|
||||
if (client.destroyed) {
|
||||
assert(client[kPending] === 0)
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kClosedResolve] && !client[kSize]) {
|
||||
client[kClosedResolve]()
|
||||
client[kClosedResolve] = null
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kHTTPContext]) {
|
||||
client[kHTTPContext].resume()
|
||||
}
|
||||
|
||||
if (client[kBusy]) {
|
||||
client[kNeedDrain] = 2
|
||||
} else if (client[kNeedDrain] === 2) {
|
||||
if (sync) {
|
||||
client[kNeedDrain] = 1
|
||||
queueMicrotask(() => emitDrain(client))
|
||||
} else {
|
||||
emitDrain(client)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (client[kPending] === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kRunning] >= (getPipelining(client) || 1)) {
|
||||
return
|
||||
}
|
||||
|
||||
const request = client[kQueue][client[kPendingIdx]]
|
||||
|
||||
if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
|
||||
if (client[kRunning] > 0) {
|
||||
return
|
||||
}
|
||||
|
||||
client[kServerName] = request.servername
|
||||
client[kHTTPContext]?.destroy(new InformationalError('servername changed'), () => {
|
||||
client[kHTTPContext] = null
|
||||
resume(client)
|
||||
})
|
||||
}
|
||||
|
||||
if (client[kConnecting]) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!client[kHTTPContext]) {
|
||||
connect(client)
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kHTTPContext].destroyed) {
|
||||
return
|
||||
}
|
||||
|
||||
if (client[kHTTPContext].busy(request)) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!request.aborted && client[kHTTPContext].write(request)) {
|
||||
client[kPendingIdx]++
|
||||
} else {
|
||||
client[kQueue].splice(client[kPendingIdx], 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Client
|
||||
165
backend/node_modules/undici/lib/dispatcher/dispatcher-base.js
generated
vendored
Normal file
165
backend/node_modules/undici/lib/dispatcher/dispatcher-base.js
generated
vendored
Normal file
@@ -0,0 +1,165 @@
|
||||
'use strict'
|
||||
|
||||
const Dispatcher = require('./dispatcher')
|
||||
const UnwrapHandler = require('../handler/unwrap-handler')
|
||||
const {
|
||||
ClientDestroyedError,
|
||||
ClientClosedError,
|
||||
InvalidArgumentError
|
||||
} = require('../core/errors')
|
||||
const { kDestroy, kClose, kClosed, kDestroyed, kDispatch } = require('../core/symbols')
|
||||
|
||||
const kOnDestroyed = Symbol('onDestroyed')
|
||||
const kOnClosed = Symbol('onClosed')
|
||||
|
||||
class DispatcherBase extends Dispatcher {
|
||||
/** @type {boolean} */
|
||||
[kDestroyed] = false;
|
||||
|
||||
/** @type {Array<Function|null} */
|
||||
[kOnDestroyed] = null;
|
||||
|
||||
/** @type {boolean} */
|
||||
[kClosed] = false;
|
||||
|
||||
/** @type {Array<Function>|null} */
|
||||
[kOnClosed] = null
|
||||
|
||||
/** @returns {boolean} */
|
||||
get destroyed () {
|
||||
return this[kDestroyed]
|
||||
}
|
||||
|
||||
/** @returns {boolean} */
|
||||
get closed () {
|
||||
return this[kClosed]
|
||||
}
|
||||
|
||||
close (callback) {
|
||||
if (callback === undefined) {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.close((err, data) => {
|
||||
return err ? reject(err) : resolve(data)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
if (typeof callback !== 'function') {
|
||||
throw new InvalidArgumentError('invalid callback')
|
||||
}
|
||||
|
||||
if (this[kDestroyed]) {
|
||||
const err = new ClientDestroyedError()
|
||||
queueMicrotask(() => callback(err, null))
|
||||
return
|
||||
}
|
||||
|
||||
if (this[kClosed]) {
|
||||
if (this[kOnClosed]) {
|
||||
this[kOnClosed].push(callback)
|
||||
} else {
|
||||
queueMicrotask(() => callback(null, null))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
this[kClosed] = true
|
||||
this[kOnClosed] ??= []
|
||||
this[kOnClosed].push(callback)
|
||||
|
||||
const onClosed = () => {
|
||||
const callbacks = this[kOnClosed]
|
||||
this[kOnClosed] = null
|
||||
for (let i = 0; i < callbacks.length; i++) {
|
||||
callbacks[i](null, null)
|
||||
}
|
||||
}
|
||||
|
||||
// Should not error.
|
||||
this[kClose]()
|
||||
.then(() => this.destroy())
|
||||
.then(() => queueMicrotask(onClosed))
|
||||
}
|
||||
|
||||
destroy (err, callback) {
|
||||
if (typeof err === 'function') {
|
||||
callback = err
|
||||
err = null
|
||||
}
|
||||
|
||||
if (callback === undefined) {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.destroy(err, (err, data) => {
|
||||
return err ? reject(err) : resolve(data)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
if (typeof callback !== 'function') {
|
||||
throw new InvalidArgumentError('invalid callback')
|
||||
}
|
||||
|
||||
if (this[kDestroyed]) {
|
||||
if (this[kOnDestroyed]) {
|
||||
this[kOnDestroyed].push(callback)
|
||||
} else {
|
||||
queueMicrotask(() => callback(null, null))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (!err) {
|
||||
err = new ClientDestroyedError()
|
||||
}
|
||||
|
||||
this[kDestroyed] = true
|
||||
this[kOnDestroyed] ??= []
|
||||
this[kOnDestroyed].push(callback)
|
||||
|
||||
const onDestroyed = () => {
|
||||
const callbacks = this[kOnDestroyed]
|
||||
this[kOnDestroyed] = null
|
||||
for (let i = 0; i < callbacks.length; i++) {
|
||||
callbacks[i](null, null)
|
||||
}
|
||||
}
|
||||
|
||||
// Should not error.
|
||||
this[kDestroy](err)
|
||||
.then(() => queueMicrotask(onDestroyed))
|
||||
}
|
||||
|
||||
dispatch (opts, handler) {
|
||||
if (!handler || typeof handler !== 'object') {
|
||||
throw new InvalidArgumentError('handler must be an object')
|
||||
}
|
||||
|
||||
handler = UnwrapHandler.unwrap(handler)
|
||||
|
||||
try {
|
||||
if (!opts || typeof opts !== 'object') {
|
||||
throw new InvalidArgumentError('opts must be an object.')
|
||||
}
|
||||
|
||||
if (this[kDestroyed] || this[kOnDestroyed]) {
|
||||
throw new ClientDestroyedError()
|
||||
}
|
||||
|
||||
if (this[kClosed]) {
|
||||
throw new ClientClosedError()
|
||||
}
|
||||
|
||||
return this[kDispatch](opts, handler)
|
||||
} catch (err) {
|
||||
if (typeof handler.onError !== 'function') {
|
||||
throw err
|
||||
}
|
||||
|
||||
handler.onError(err)
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = DispatcherBase
|
||||
48
backend/node_modules/undici/lib/dispatcher/dispatcher.js
generated
vendored
Normal file
48
backend/node_modules/undici/lib/dispatcher/dispatcher.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
'use strict'
|
||||
const EventEmitter = require('node:events')
|
||||
const WrapHandler = require('../handler/wrap-handler')
|
||||
|
||||
const wrapInterceptor = (dispatch) => (opts, handler) => dispatch(opts, WrapHandler.wrap(handler))
|
||||
|
||||
class Dispatcher extends EventEmitter {
|
||||
dispatch () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
close () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
destroy () {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
compose (...args) {
|
||||
// So we handle [interceptor1, interceptor2] or interceptor1, interceptor2, ...
|
||||
const interceptors = Array.isArray(args[0]) ? args[0] : args
|
||||
let dispatch = this.dispatch.bind(this)
|
||||
|
||||
for (const interceptor of interceptors) {
|
||||
if (interceptor == null) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (typeof interceptor !== 'function') {
|
||||
throw new TypeError(`invalid interceptor, expected function received ${typeof interceptor}`)
|
||||
}
|
||||
|
||||
dispatch = interceptor(dispatch)
|
||||
dispatch = wrapInterceptor(dispatch)
|
||||
|
||||
if (dispatch == null || typeof dispatch !== 'function' || dispatch.length !== 2) {
|
||||
throw new TypeError('invalid interceptor')
|
||||
}
|
||||
}
|
||||
|
||||
return new Proxy(this, {
|
||||
get: (target, key) => key === 'dispatch' ? dispatch : target[key]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Dispatcher
|
||||
147
backend/node_modules/undici/lib/dispatcher/env-http-proxy-agent.js
generated
vendored
Normal file
147
backend/node_modules/undici/lib/dispatcher/env-http-proxy-agent.js
generated
vendored
Normal file
@@ -0,0 +1,147 @@
|
||||
'use strict'
|
||||
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const { kClose, kDestroy, kClosed, kDestroyed, kDispatch, kNoProxyAgent, kHttpProxyAgent, kHttpsProxyAgent } = require('../core/symbols')
|
||||
const ProxyAgent = require('./proxy-agent')
|
||||
const Agent = require('./agent')
|
||||
|
||||
const DEFAULT_PORTS = {
|
||||
'http:': 80,
|
||||
'https:': 443
|
||||
}
|
||||
|
||||
class EnvHttpProxyAgent extends DispatcherBase {
|
||||
#noProxyValue = null
|
||||
#noProxyEntries = null
|
||||
#opts = null
|
||||
|
||||
constructor (opts = {}) {
|
||||
super()
|
||||
this.#opts = opts
|
||||
|
||||
const { httpProxy, httpsProxy, noProxy, ...agentOpts } = opts
|
||||
|
||||
this[kNoProxyAgent] = new Agent(agentOpts)
|
||||
|
||||
const HTTP_PROXY = httpProxy ?? process.env.http_proxy ?? process.env.HTTP_PROXY
|
||||
if (HTTP_PROXY) {
|
||||
this[kHttpProxyAgent] = new ProxyAgent({ ...agentOpts, uri: HTTP_PROXY })
|
||||
} else {
|
||||
this[kHttpProxyAgent] = this[kNoProxyAgent]
|
||||
}
|
||||
|
||||
const HTTPS_PROXY = httpsProxy ?? process.env.https_proxy ?? process.env.HTTPS_PROXY
|
||||
if (HTTPS_PROXY) {
|
||||
this[kHttpsProxyAgent] = new ProxyAgent({ ...agentOpts, uri: HTTPS_PROXY })
|
||||
} else {
|
||||
this[kHttpsProxyAgent] = this[kHttpProxyAgent]
|
||||
}
|
||||
|
||||
this.#parseNoProxy()
|
||||
}
|
||||
|
||||
[kDispatch] (opts, handler) {
|
||||
const url = new URL(opts.origin)
|
||||
const agent = this.#getProxyAgentForUrl(url)
|
||||
return agent.dispatch(opts, handler)
|
||||
}
|
||||
|
||||
[kClose] () {
|
||||
return Promise.all([
|
||||
this[kNoProxyAgent].close(),
|
||||
!this[kHttpProxyAgent][kClosed] && this[kHttpProxyAgent].close(),
|
||||
!this[kHttpsProxyAgent][kClosed] && this[kHttpsProxyAgent].close()
|
||||
])
|
||||
}
|
||||
|
||||
[kDestroy] (err) {
|
||||
return Promise.all([
|
||||
this[kNoProxyAgent].destroy(err),
|
||||
!this[kHttpProxyAgent][kDestroyed] && this[kHttpProxyAgent].destroy(err),
|
||||
!this[kHttpsProxyAgent][kDestroyed] && this[kHttpsProxyAgent].destroy(err)
|
||||
])
|
||||
}
|
||||
|
||||
#getProxyAgentForUrl (url) {
|
||||
let { protocol, host: hostname, port } = url
|
||||
|
||||
// Stripping ports in this way instead of using parsedUrl.hostname to make
|
||||
// sure that the brackets around IPv6 addresses are kept.
|
||||
hostname = hostname.replace(/:\d*$/, '').toLowerCase()
|
||||
port = Number.parseInt(port, 10) || DEFAULT_PORTS[protocol] || 0
|
||||
if (!this.#shouldProxy(hostname, port)) {
|
||||
return this[kNoProxyAgent]
|
||||
}
|
||||
if (protocol === 'https:') {
|
||||
return this[kHttpsProxyAgent]
|
||||
}
|
||||
return this[kHttpProxyAgent]
|
||||
}
|
||||
|
||||
#shouldProxy (hostname, port) {
|
||||
if (this.#noProxyChanged) {
|
||||
this.#parseNoProxy()
|
||||
}
|
||||
|
||||
if (this.#noProxyEntries.length === 0) {
|
||||
return true // Always proxy if NO_PROXY is not set or empty.
|
||||
}
|
||||
if (this.#noProxyValue === '*') {
|
||||
return false // Never proxy if wildcard is set.
|
||||
}
|
||||
|
||||
for (let i = 0; i < this.#noProxyEntries.length; i++) {
|
||||
const entry = this.#noProxyEntries[i]
|
||||
if (entry.port && entry.port !== port) {
|
||||
continue // Skip if ports don't match.
|
||||
}
|
||||
if (!/^[.*]/.test(entry.hostname)) {
|
||||
// No wildcards, so don't proxy only if there is not an exact match.
|
||||
if (hostname === entry.hostname) {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
// Don't proxy if the hostname ends with the no_proxy host.
|
||||
if (hostname.endsWith(entry.hostname.replace(/^\*/, ''))) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
#parseNoProxy () {
|
||||
const noProxyValue = this.#opts.noProxy ?? this.#noProxyEnv
|
||||
const noProxySplit = noProxyValue.split(/[,\s]/)
|
||||
const noProxyEntries = []
|
||||
|
||||
for (let i = 0; i < noProxySplit.length; i++) {
|
||||
const entry = noProxySplit[i]
|
||||
if (!entry) {
|
||||
continue
|
||||
}
|
||||
const parsed = entry.match(/^(.+):(\d+)$/)
|
||||
noProxyEntries.push({
|
||||
hostname: (parsed ? parsed[1] : entry).toLowerCase(),
|
||||
port: parsed ? Number.parseInt(parsed[2], 10) : 0
|
||||
})
|
||||
}
|
||||
|
||||
this.#noProxyValue = noProxyValue
|
||||
this.#noProxyEntries = noProxyEntries
|
||||
}
|
||||
|
||||
get #noProxyChanged () {
|
||||
if (this.#opts.noProxy !== undefined) {
|
||||
return false
|
||||
}
|
||||
return this.#noProxyValue !== this.#noProxyEnv
|
||||
}
|
||||
|
||||
get #noProxyEnv () {
|
||||
return process.env.no_proxy ?? process.env.NO_PROXY ?? ''
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = EnvHttpProxyAgent
|
||||
135
backend/node_modules/undici/lib/dispatcher/fixed-queue.js
generated
vendored
Normal file
135
backend/node_modules/undici/lib/dispatcher/fixed-queue.js
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
'use strict'
|
||||
|
||||
// Extracted from node/lib/internal/fixed_queue.js
|
||||
|
||||
// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.
|
||||
const kSize = 2048
|
||||
const kMask = kSize - 1
|
||||
|
||||
// The FixedQueue is implemented as a singly-linked list of fixed-size
|
||||
// circular buffers. It looks something like this:
|
||||
//
|
||||
// head tail
|
||||
// | |
|
||||
// v v
|
||||
// +-----------+ <-----\ +-----------+ <------\ +-----------+
|
||||
// | [null] | \----- | next | \------- | next |
|
||||
// +-----------+ +-----------+ +-----------+
|
||||
// | item | <-- bottom | item | <-- bottom | undefined |
|
||||
// | item | | item | | undefined |
|
||||
// | item | | item | | undefined |
|
||||
// | item | | item | | undefined |
|
||||
// | item | | item | bottom --> | item |
|
||||
// | item | | item | | item |
|
||||
// | ... | | ... | | ... |
|
||||
// | item | | item | | item |
|
||||
// | item | | item | | item |
|
||||
// | undefined | <-- top | item | | item |
|
||||
// | undefined | | item | | item |
|
||||
// | undefined | | undefined | <-- top top --> | undefined |
|
||||
// +-----------+ +-----------+ +-----------+
|
||||
//
|
||||
// Or, if there is only one circular buffer, it looks something
|
||||
// like either of these:
|
||||
//
|
||||
// head tail head tail
|
||||
// | | | |
|
||||
// v v v v
|
||||
// +-----------+ +-----------+
|
||||
// | [null] | | [null] |
|
||||
// +-----------+ +-----------+
|
||||
// | undefined | | item |
|
||||
// | undefined | | item |
|
||||
// | item | <-- bottom top --> | undefined |
|
||||
// | item | | undefined |
|
||||
// | undefined | <-- top bottom --> | item |
|
||||
// | undefined | | item |
|
||||
// +-----------+ +-----------+
|
||||
//
|
||||
// Adding a value means moving `top` forward by one, removing means
|
||||
// moving `bottom` forward by one. After reaching the end, the queue
|
||||
// wraps around.
|
||||
//
|
||||
// When `top === bottom` the current queue is empty and when
|
||||
// `top + 1 === bottom` it's full. This wastes a single space of storage
|
||||
// but allows much quicker checks.
|
||||
|
||||
/**
|
||||
* @type {FixedCircularBuffer}
|
||||
* @template T
|
||||
*/
|
||||
class FixedCircularBuffer {
|
||||
/** @type {number} */
|
||||
bottom = 0
|
||||
/** @type {number} */
|
||||
top = 0
|
||||
/** @type {Array<T|undefined>} */
|
||||
list = new Array(kSize).fill(undefined)
|
||||
/** @type {T|null} */
|
||||
next = null
|
||||
|
||||
/** @returns {boolean} */
|
||||
isEmpty () {
|
||||
return this.top === this.bottom
|
||||
}
|
||||
|
||||
/** @returns {boolean} */
|
||||
isFull () {
|
||||
return ((this.top + 1) & kMask) === this.bottom
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {T} data
|
||||
* @returns {void}
|
||||
*/
|
||||
push (data) {
|
||||
this.list[this.top] = data
|
||||
this.top = (this.top + 1) & kMask
|
||||
}
|
||||
|
||||
/** @returns {T|null} */
|
||||
shift () {
|
||||
const nextItem = this.list[this.bottom]
|
||||
if (nextItem === undefined) { return null }
|
||||
this.list[this.bottom] = undefined
|
||||
this.bottom = (this.bottom + 1) & kMask
|
||||
return nextItem
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @template T
|
||||
*/
|
||||
module.exports = class FixedQueue {
|
||||
constructor () {
|
||||
/** @type {FixedCircularBuffer<T>} */
|
||||
this.head = this.tail = new FixedCircularBuffer()
|
||||
}
|
||||
|
||||
/** @returns {boolean} */
|
||||
isEmpty () {
|
||||
return this.head.isEmpty()
|
||||
}
|
||||
|
||||
/** @param {T} data */
|
||||
push (data) {
|
||||
if (this.head.isFull()) {
|
||||
// Head is full: Creates a new queue, sets the old queue's `.next` to it,
|
||||
// and sets it as the new main queue.
|
||||
this.head = this.head.next = new FixedCircularBuffer()
|
||||
}
|
||||
this.head.push(data)
|
||||
}
|
||||
|
||||
/** @returns {T|null} */
|
||||
shift () {
|
||||
const tail = this.tail
|
||||
const next = tail.shift()
|
||||
if (tail.isEmpty() && tail.next !== null) {
|
||||
// If there is another queue, it forms the new tail.
|
||||
this.tail = tail.next
|
||||
tail.next = null
|
||||
}
|
||||
return next
|
||||
}
|
||||
}
|
||||
51
backend/node_modules/undici/lib/dispatcher/h2c-client.js
generated
vendored
Normal file
51
backend/node_modules/undici/lib/dispatcher/h2c-client.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
'use strict'
|
||||
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
const Client = require('./client')
|
||||
|
||||
/**
 * HTTP/2 cleartext (h2c) client: a thin wrapper around Client that forces
 * prior-knowledge HTTP/2 over a plain TCP connection.
 */
class H2CClient extends Client {
  /**
   * @param {string | URL} origin - must use the `http:` protocol
   * @param {object} [clientOpts] - Client options; `connect` is ignored
   */
  constructor (origin, clientOpts) {
    const url = typeof origin === 'string' ? new URL(origin) : origin

    // h2c is HTTP/2 without TLS, so only plain `http:` origins are valid.
    if (url.protocol !== 'http:') {
      throw new InvalidArgumentError(
        'h2c-client: Only h2c protocol is supported'
      )
    }

    const { connect, maxConcurrentStreams, pipelining, ...rest } =
      clientOpts ?? {}

    // Absent or invalid values silently fall back to 100 (upstream behavior).
    const streams =
      maxConcurrentStreams != null &&
      Number.isInteger(maxConcurrentStreams) &&
      maxConcurrentStreams > 0
        ? maxConcurrentStreams
        : 100
    const pipe =
      pipelining != null && Number.isInteger(pipelining) && pipelining > 0
        ? pipelining
        : 100

    if (pipe > streams) {
      throw new InvalidArgumentError(
        'h2c-client: pipelining cannot be greater than maxConcurrentStreams'
      )
    }

    super(url, {
      ...rest,
      maxConcurrentStreams: streams,
      pipelining: pipe,
      allowH2: true,
      useH2c: true
    })
  }
}

module.exports = H2CClient
|
||||
208
backend/node_modules/undici/lib/dispatcher/pool-base.js
generated
vendored
Normal file
208
backend/node_modules/undici/lib/dispatcher/pool-base.js
generated
vendored
Normal file
@@ -0,0 +1,208 @@
|
||||
'use strict'
|
||||
|
||||
const { PoolStats } = require('../util/stats.js')
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const FixedQueue = require('./fixed-queue')
|
||||
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('../core/symbols')
|
||||
|
||||
// Symbol keys that keep PoolBase internals off the public API surface.
const kClients = Symbol('clients') // array of pooled client dispatchers
const kNeedDrain = Symbol('needDrain') // pool-level "cannot accept more work" flag
const kQueue = Symbol('queue') // FIFO of pending { opts, handler } dispatches
const kClosedResolve = Symbol('closed resolve') // resolver for close() while the queue drains
const kOnDrain = Symbol('onDrain')
const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kGetDispatcher = Symbol('get dispatcher') // subclass hook: pick/create a free client
const kAddClient = Symbol('add client')
const kRemoveClient = Symbol('remove client')
||||
|
||||
/**
 * Shared base for dispatcher pools. Owns a set of client dispatchers plus a
 * FIFO of requests that could not be dispatched immediately, and aggregates
 * per-client counters for the stats getters.
 *
 * Subclasses must implement [kGetDispatcher] to select (or lazily create) a
 * client able to accept a request, returning a falsy value when saturated.
 */
class PoolBase extends DispatcherBase {
  // Requests waiting for a free client, as { opts, handler } pairs.
  [kQueue] = new FixedQueue();

  // Count of entries in [kQueue] (FixedQueue exposes no size).
  [kQueued] = 0;

  // All client dispatchers currently owned by this pool.
  [kClients] = [];

  // True while every client is saturated and new dispatches must queue.
  [kNeedDrain] = false;

  /**
   * Per-client 'drain' listener (bound with the client in [kAddClient]):
   * a client freed up, so flush queued requests into it until it saturates
   * again or the queue empties.
   */
  [kOnDrain] (client, origin, targets) {
    const queue = this[kQueue]

    let needDrain = false

    while (!needDrain) {
      const item = queue.shift()
      if (!item) {
        break
      }
      this[kQueued]--
      // dispatch() returning false means the client is busy again.
      needDrain = !client.dispatch(item.opts, item.handler)
    }

    client[kNeedDrain] = needDrain

    if (!needDrain && this[kNeedDrain]) {
      this[kNeedDrain] = false
      this.emit('drain', origin, [this, ...targets])
    }

    // close() was called while requests were still queued; now that the
    // queue is empty, close every client and resolve the pending close().
    if (this[kClosedResolve] && queue.isEmpty()) {
      const closeAll = new Array(this[kClients].length)
      for (let i = 0; i < this[kClients].length; i++) {
        closeAll[i] = this[kClients][i].close()
      }
      return Promise.all(closeAll)
        .then(this[kClosedResolve])
    }
  }

  // Re-emit client lifecycle events with this pool prepended to the
  // target chain so listeners can see the full dispatcher path.
  [kOnConnect] = (origin, targets) => {
    this.emit('connect', origin, [this, ...targets])
  };

  [kOnDisconnect] = (origin, targets, err) => {
    this.emit('disconnect', origin, [this, ...targets], err)
  };

  [kOnConnectionError] = (origin, targets, err) => {
    this.emit('connectionError', origin, [this, ...targets], err)
  }

  get [kBusy] () {
    return this[kNeedDrain]
  }

  // The getters below aggregate the corresponding per-client counters.
  get [kConnected] () {
    let ret = 0
    for (const { [kConnected]: connected } of this[kClients]) {
      ret += connected
    }
    return ret
  }

  get [kFree] () {
    let ret = 0
    for (const { [kConnected]: connected, [kNeedDrain]: needDrain } of this[kClients]) {
      // NOTE(review): relies on numeric/boolean coercion of the per-client
      // values; counts clients that are connected and not draining.
      ret += connected && !needDrain
    }
    return ret
  }

  get [kPending] () {
    // Requests queued at the pool level plus those pending inside clients.
    let ret = this[kQueued]
    for (const { [kPending]: pending } of this[kClients]) {
      ret += pending
    }
    return ret
  }

  get [kRunning] () {
    let ret = 0
    for (const { [kRunning]: running } of this[kClients]) {
      ret += running
    }
    return ret
  }

  get [kSize] () {
    let ret = this[kQueued]
    for (const { [kSize]: size } of this[kClients]) {
      ret += size
    }
    return ret
  }

  // Snapshot object computed lazily from the getters above.
  get stats () {
    return new PoolStats(this)
  }

  [kClose] () {
    if (this[kQueue].isEmpty()) {
      const closeAll = new Array(this[kClients].length)
      for (let i = 0; i < this[kClients].length; i++) {
        closeAll[i] = this[kClients][i].close()
      }
      return Promise.all(closeAll)
    } else {
      // Defer closing until [kOnDrain] observes an empty queue.
      return new Promise((resolve) => {
        this[kClosedResolve] = resolve
      })
    }
  }

  [kDestroy] (err) {
    // Fail all queued requests before tearing down the clients.
    while (true) {
      const item = this[kQueue].shift()
      if (!item) {
        break
      }
      item.handler.onError(err)
    }

    const destroyAll = new Array(this[kClients].length)
    for (let i = 0; i < this[kClients].length; i++) {
      destroyAll[i] = this[kClients][i].destroy(err)
    }
    return Promise.all(destroyAll)
  }

  /**
   * Core dispatch: hand the request to a free client, or queue it when the
   * pool is saturated. Returns false when callers should back off (drain
   * semantics, mirroring Client#dispatch).
   */
  [kDispatch] (opts, handler) {
    const dispatcher = this[kGetDispatcher]()

    if (!dispatcher) {
      this[kNeedDrain] = true
      this[kQueue].push({ opts, handler })
      this[kQueued]++
    } else if (!dispatcher.dispatch(opts, handler)) {
      // Client accepted the request but is now saturated; the pool is busy
      // only if no other client can take work either.
      dispatcher[kNeedDrain] = true
      this[kNeedDrain] = !this[kGetDispatcher]()
    }

    return !this[kNeedDrain]
  }

  [kAddClient] (client) {
    client
      .on('drain', this[kOnDrain].bind(this, client))
      .on('connect', this[kOnConnect])
      .on('disconnect', this[kOnDisconnect])
      .on('connectionError', this[kOnConnectionError])

    this[kClients].push(client)

    if (this[kNeedDrain]) {
      // A fresh client may be able to absorb queued work; schedule a drain
      // pass after the current tick in case state changes meanwhile.
      queueMicrotask(() => {
        if (this[kNeedDrain]) {
          this[kOnDrain](client, client[kUrl], [client, this])
        }
      })
    }

    return this
  }

  [kRemoveClient] (client) {
    // NOTE: removal from [kClients] is deferred to close()'s callback, so
    // the client stays visible in the array until it has closed.
    client.close(() => {
      const idx = this[kClients].indexOf(client)
      if (idx !== -1) {
        this[kClients].splice(idx, 1)
      }
    })

    this[kNeedDrain] = this[kClients].some(dispatcher => (
      !dispatcher[kNeedDrain] &&
      dispatcher.closed !== true &&
      dispatcher.destroyed !== true
    ))
  }
}

// Symbols are exported so concrete pools (Pool, RoundRobinPool, …) can
// implement the subclass hooks and reach shared internals.
module.exports = {
  PoolBase,
  kClients,
  kNeedDrain,
  kAddClient,
  kRemoveClient,
  kGetDispatcher
}
|
||||
118
backend/node_modules/undici/lib/dispatcher/pool.js
generated
vendored
Normal file
118
backend/node_modules/undici/lib/dispatcher/pool.js
generated
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
PoolBase,
|
||||
kClients,
|
||||
kNeedDrain,
|
||||
kAddClient,
|
||||
kGetDispatcher,
|
||||
kRemoveClient
|
||||
} = require('./pool-base')
|
||||
const Client = require('./client')
|
||||
const {
|
||||
InvalidArgumentError
|
||||
} = require('../core/errors')
|
||||
const util = require('../core/util')
|
||||
const { kUrl } = require('../core/symbols')
|
||||
const buildConnector = require('../core/connect')
|
||||
|
||||
// Private state keys for Pool.
const kOptions = Symbol('options') // frozen per-client options object
const kConnections = Symbol('connections') // max client count, or null for unbounded
const kFactory = Symbol('factory') // creates a client dispatcher per connection slot

// Default client factory: one Client per connection slot.
function defaultFactory (origin, opts) {
  return new Client(origin, opts)
}
|
||||
|
||||
/**
 * A pool of clients for a single origin. Creates up to `connections` clients
 * on demand (unbounded when falsy) and dispatches to the first non-draining
 * client. Optionally expires clients after `clientTtl` milliseconds.
 */
class Pool extends PoolBase {
  constructor (origin, {
    connections,
    factory = defaultFactory,
    connect,
    connectTimeout,
    tls,
    maxCachedSessions,
    socketPath,
    autoSelectFamily,
    autoSelectFamilyAttemptTimeout,
    allowH2,
    clientTtl,
    ...options
  } = {}) {
    if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
      throw new InvalidArgumentError('invalid connections')
    }

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    // `connect` given as an options object (or omitted) is turned into a
    // connector function; a user-supplied function is used as-is.
    if (typeof connect !== 'function') {
      connect = buildConnector({
        ...tls,
        maxCachedSessions,
        allowH2,
        socketPath,
        timeout: connectTimeout,
        ...(typeof autoSelectFamily === 'boolean' ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
        ...connect
      })
    }

    super()

    // Note: `connections === 0` behaves like unset (no upper bound).
    this[kConnections] = connections || null
    this[kUrl] = util.parseOrigin(origin)
    this[kOptions] = { ...util.deepClone(options), connect, allowH2, clientTtl }
    this[kOptions].interceptors = options.interceptors
      ? { ...options.interceptors }
      : undefined
    this[kFactory] = factory

    // Stamp each connected target with its connect time so [kGetDispatcher]
    // can expire it once `clientTtl` has elapsed.
    this.on('connect', (origin, targets) => {
      if (clientTtl != null && clientTtl > 0) {
        for (const target of targets) {
          Object.assign(target, { ttl: Date.now() })
        }
      }
    })

    this.on('connectionError', (origin, targets, error) => {
      // If a connection error occurs, we remove the client from the pool,
      // and emit a connectionError event. They will not be re-used.
      // Fixes https://github.com/nodejs/undici/issues/3895
      for (const target of targets) {
        // Do not use kRemoveClient here, as it will close the client,
        // but the client cannot be closed in this state.
        const idx = this[kClients].indexOf(target)
        if (idx !== -1) {
          this[kClients].splice(idx, 1)
        }
      }
    })
  }

  /**
   * Returns a client able to accept a dispatch: the first live, non-draining
   * client, or a newly created one while under the connection limit.
   * Returns undefined when the pool is saturated (PoolBase then queues).
   */
  [kGetDispatcher] () {
    const clientTtlOption = this[kOptions].clientTtl
    for (const client of this[kClients]) {
      // check ttl of client and if it's stale, remove it from the pool
      if (clientTtlOption != null && clientTtlOption > 0 && client.ttl && ((Date.now() - client.ttl) > clientTtlOption)) {
        this[kRemoveClient](client)
      } else if (!client[kNeedDrain]) {
        return client
      }
    }

    if (!this[kConnections] || this[kClients].length < this[kConnections]) {
      const dispatcher = this[kFactory](this[kUrl], this[kOptions])
      this[kAddClient](dispatcher)
      return dispatcher
    }
  }
}

module.exports = Pool
|
||||
287
backend/node_modules/undici/lib/dispatcher/proxy-agent.js
generated
vendored
Normal file
287
backend/node_modules/undici/lib/dispatcher/proxy-agent.js
generated
vendored
Normal file
@@ -0,0 +1,287 @@
|
||||
'use strict'
|
||||
|
||||
const { kProxy, kClose, kDestroy, kDispatch } = require('../core/symbols')
|
||||
const Agent = require('./agent')
|
||||
const Pool = require('./pool')
|
||||
const DispatcherBase = require('./dispatcher-base')
|
||||
const { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require('../core/errors')
|
||||
const buildConnector = require('../core/connect')
|
||||
const Client = require('./client')
|
||||
const { channels } = require('../core/diagnostics')
|
||||
|
||||
// Private state keys for ProxyAgent.
const kAgent = Symbol('proxy agent') // Agent that dispatches via the proxy tunnel
const kClient = Symbol('proxy client') // dispatcher for CONNECT requests to the proxy itself
const kProxyHeaders = Symbol('proxy headers') // headers sent on every CONNECT/proxied request
const kRequestTls = Symbol('request tls settings')
const kProxyTls = Symbol('proxy tls settings')
const kConnectEndpoint = Symbol('connect endpoint function')
const kTunnelProxy = Symbol('tunnel proxy') // false → plain HTTP/1 forward-proxying, no CONNECT
|
||||
|
||||
/**
 * Maps a URL protocol to its default TCP port.
 * @param {string} protocol - e.g. 'https:' or 'http:'
 * @returns {number} 443 for 'https:', 80 for anything else
 */
function defaultProtocolPort (protocol) {
  if (protocol === 'https:') {
    return 443
  }
  return 80
}
|
||||
|
||||
// Factory used for the internal proxy client when none is supplied.
function defaultFactory (origin, opts) {
  return new Pool(origin, opts)
}

// Shared no-op, used e.g. as a throwaway 'error' listener before destroying
// a socket so the destroy does not raise an unhandled 'error' event.
const noop = () => {}

// Chooses the dispatcher type for upstream origins: a single Client when the
// caller asked for exactly one connection, otherwise a Pool.
function defaultAgentFactory (origin, opts) {
  if (opts.connections === 1) {
    return new Client(origin, opts)
  }
  return new Pool(origin, opts)
}
|
||||
|
||||
/**
 * Dispatcher that forwards plain-HTTP requests through an HTTP/1 proxy
 * WITHOUT tunneling (no CONNECT): the request line carries the absolute URL
 * and the proxy headers are merged into every request.
 */
class Http1ProxyWrapper extends DispatcherBase {
  // Underlying dispatcher connected to the proxy itself.
  #client

  /**
   * @param {string | URL} proxyUrl - address of the proxy server
   * @param {object} opts
   * @param {Record<string, string>} [opts.headers] - extra headers for every proxied request
   * @param {Function | object} [opts.connect] - connector passed to the proxy client
   * @param {Function} [opts.factory] - custom factory for the proxy client
   */
  constructor (proxyUrl, { headers = {}, connect, factory }) {
    if (!proxyUrl) {
      throw new InvalidArgumentError('Proxy URL is mandatory')
    }

    super()

    this[kProxyHeaders] = headers
    if (factory) {
      this.#client = factory(proxyUrl, { connect })
    } else {
      this.#client = new Client(proxyUrl, { connect })
    }
  }

  [kDispatch] (opts, handler) {
    // Intercept response headers so a 407 from the proxy surfaces as an
    // error instead of reaching the caller's onHeaders.
    const onHeaders = handler.onHeaders
    handler.onHeaders = function (statusCode, data, resume) {
      if (statusCode === 407) {
        if (typeof handler.onError === 'function') {
          handler.onError(new InvalidArgumentError('Proxy Authentication Required (407)'))
        }
        return
      }
      if (onHeaders) onHeaders.call(this, statusCode, data, resume)
    }

    // Rewrite request as an HTTP1 Proxy request, without tunneling.
    const {
      origin,
      path = '/',
      headers = {}
    } = opts

    // Absolute-form request target, as required for proxied HTTP/1 requests.
    opts.path = origin + path

    if (!('host' in headers) && !('Host' in headers)) {
      const { host } = new URL(origin)
      headers.host = host
    }
    // Proxy headers first so request-specific headers win on conflict.
    opts.headers = { ...this[kProxyHeaders], ...headers }

    return this.#client[kDispatch](opts, handler)
  }

  [kClose] () {
    return this.#client.close()
  }

  [kDestroy] (err) {
    return this.#client.destroy(err)
  }
}
|
||||
|
||||
/**
 * Dispatcher that routes all requests through an HTTP proxy. By default it
 * establishes a CONNECT tunnel per upstream origin; with `proxyTunnel: false`
 * and an http→http hop it falls back to Http1ProxyWrapper (no tunneling).
 */
class ProxyAgent extends DispatcherBase {
  /**
   * @param {import('../../types/proxy-agent').ProxyAgent.Options | string | URL} opts
   */
  constructor (opts) {
    if (!opts || (typeof opts === 'object' && !(opts instanceof URL) && !opts.uri)) {
      throw new InvalidArgumentError('Proxy uri is mandatory')
    }

    const { clientFactory = defaultFactory } = opts
    if (typeof clientFactory !== 'function') {
      throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
    }

    const { proxyTunnel = true } = opts

    super()

    const url = this.#getUrl(opts)
    const { href, origin, port, protocol, username, password, hostname: proxyHostname } = url

    this[kProxy] = { uri: href, protocol }
    this[kRequestTls] = opts.requestTls
    this[kProxyTls] = opts.proxyTls
    this[kProxyHeaders] = opts.headers || {}
    this[kTunnelProxy] = proxyTunnel

    // Proxy credentials, in priority order: explicit auth/token options,
    // then userinfo embedded in the proxy URL.
    if (opts.auth && opts.token) {
      throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
    } else if (opts.auth) {
      /* @deprecated in favour of opts.token */
      this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
    } else if (opts.token) {
      this[kProxyHeaders]['proxy-authorization'] = opts.token
    } else if (username && password) {
      this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
    }

    // Two connectors: one for reaching the proxy, one for the TLS handshake
    // with the final endpoint over the established tunnel.
    const connect = buildConnector({ ...opts.proxyTls })
    this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })

    const agentFactory = opts.factory || defaultAgentFactory
    const factory = (origin, options) => {
      const { protocol } = new URL(origin)
      // Tunnel-less mode only applies when both hops are plain HTTP.
      if (!this[kTunnelProxy] && protocol === 'http:' && this[kProxy].protocol === 'http:') {
        return new Http1ProxyWrapper(this[kProxy].uri, {
          headers: this[kProxyHeaders],
          connect,
          factory: agentFactory
        })
      }
      return agentFactory(origin, options)
    }
    this[kClient] = clientFactory(url, { connect })
    this[kAgent] = new Agent({
      ...opts,
      factory,
      // Custom connect: issue a CONNECT through the proxy client, then hand
      // the tunneled socket (optionally TLS-wrapped) back to the agent.
      connect: async (opts, callback) => {
        let requestedPath = opts.host
        if (!opts.port) {
          requestedPath += `:${defaultProtocolPort(opts.protocol)}`
        }
        try {
          const connectParams = {
            origin,
            port,
            path: requestedPath,
            signal: opts.signal,
            headers: {
              ...this[kProxyHeaders],
              host: opts.host,
              ...(opts.connections == null || opts.connections > 0 ? { 'proxy-connection': 'keep-alive' } : {})
            },
            servername: this[kProxyTls]?.servername || proxyHostname
          }
          const { socket, statusCode } = await this[kClient].connect(connectParams)
          if (statusCode !== 200) {
            socket.on('error', noop).destroy()
            callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
            return
          }

          if (channels.proxyConnected.hasSubscribers) {
            channels.proxyConnected.publish({
              socket,
              connectParams
            })
          }

          // Plain HTTP through the tunnel: the raw socket is enough.
          if (opts.protocol !== 'https:') {
            callback(null, socket)
            return
          }
          let servername
          if (this[kRequestTls]) {
            servername = this[kRequestTls].servername
          } else {
            servername = opts.servername
          }
          // HTTPS endpoint: run the TLS handshake over the tunneled socket.
          this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
        } catch (err) {
          if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
            // Throw a custom error to avoid loop in client.js#connect
            callback(new SecureProxyConnectionError(err))
          } else {
            callback(err)
          }
        }
      }
    })
  }

  dispatch (opts, handler) {
    const headers = buildHeaders(opts.headers)
    // Credentials must come via constructor options, not per-request headers.
    throwIfProxyAuthIsSent(headers)

    if (headers && !('host' in headers) && !('Host' in headers)) {
      const { host } = new URL(opts.origin)
      headers.host = host
    }

    return this[kAgent].dispatch(
      {
        ...opts,
        headers
      },
      handler
    )
  }

  /**
   * @param {import('../../types/proxy-agent').ProxyAgent.Options | string | URL} opts
   * @returns {URL}
   */
  #getUrl (opts) {
    if (typeof opts === 'string') {
      return new URL(opts)
    } else if (opts instanceof URL) {
      return opts
    } else {
      return new URL(opts.uri)
    }
  }

  [kClose] () {
    return Promise.all([
      this[kAgent].close(),
      this[kClient].close()
    ])
  }

  [kDestroy] () {
    return Promise.all([
      this[kAgent].destroy(),
      this[kClient].destroy()
    ])
  }
}
|
||||
|
||||
/**
 * Normalizes a header list into a plain name→value record.
 * undici.fetch stores headers as a flat [name, value, name, value, …]
 * array; anything else is assumed to already be a record and is returned
 * untouched.
 * @param {string[] | Record<string, string>} headers
 * @returns {Record<string, string>}
 */
function buildHeaders (headers) {
  if (!Array.isArray(headers)) {
    return headers
  }

  /** @type {Record<string, string>} */
  const record = {}
  for (let idx = 0; idx < headers.length; idx += 2) {
    record[headers[idx]] = headers[idx + 1]
  }
  return record
}
|
||||
|
||||
/**
 * @param {Record<string, string>} headers
 *
 * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers
 * Nevertheless, it was changed and to avoid a security vulnerability by end users
 * this check was created.
 * It should be removed in the next major version for performance reasons
 */
function throwIfProxyAuthIsSent (headers) {
  if (!headers) {
    return
  }
  // Header names are case-insensitive, so compare lowercased.
  for (const name of Object.keys(headers)) {
    if (name.toLowerCase() === 'proxy-authorization') {
      throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor')
    }
  }
}
|
||||
|
||||
// ProxyAgent is the module's sole export.
module.exports = ProxyAgent
|
||||
35
backend/node_modules/undici/lib/dispatcher/retry-agent.js
generated
vendored
Normal file
35
backend/node_modules/undici/lib/dispatcher/retry-agent.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict'
|
||||
|
||||
const Dispatcher = require('./dispatcher')
|
||||
const RetryHandler = require('../handler/retry-handler')
|
||||
|
||||
/**
 * Dispatcher decorator that retries failed dispatches through an inner
 * agent, delegating retry policy to RetryHandler.
 */
class RetryAgent extends Dispatcher {
  #agent = null
  #options = null

  /**
   * @param {object} agent - dispatcher to wrap
   * @param {object} [options] - retry options forwarded to RetryHandler
   */
  constructor (agent, options = {}) {
    super(options)
    this.#agent = agent
    this.#options = options
  }

  dispatch (opts, handler) {
    const inner = this.#agent
    // Wrap the caller's handler so failed attempts are re-dispatched
    // according to the configured retry options.
    const retryingHandler = new RetryHandler(
      { ...opts, retryOptions: this.#options },
      { dispatch: inner.dispatch.bind(inner), handler }
    )
    return inner.dispatch(opts, retryingHandler)
  }

  close () {
    return this.#agent.close()
  }

  destroy () {
    return this.#agent.destroy()
  }
}

module.exports = RetryAgent
|
||||
137
backend/node_modules/undici/lib/dispatcher/round-robin-pool.js
generated
vendored
Normal file
137
backend/node_modules/undici/lib/dispatcher/round-robin-pool.js
generated
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
PoolBase,
|
||||
kClients,
|
||||
kNeedDrain,
|
||||
kAddClient,
|
||||
kGetDispatcher,
|
||||
kRemoveClient
|
||||
} = require('./pool-base')
|
||||
const Client = require('./client')
|
||||
const {
|
||||
InvalidArgumentError
|
||||
} = require('../core/errors')
|
||||
const util = require('../core/util')
|
||||
const { kUrl } = require('../core/symbols')
|
||||
const buildConnector = require('../core/connect')
|
||||
|
||||
// Private state keys for RoundRobinPool.
const kOptions = Symbol('options')
const kConnections = Symbol('connections') // max client count, or null for unbounded
const kFactory = Symbol('factory')
const kIndex = Symbol('index') // cursor of the last client handed out

// Default client factory: one Client per connection slot.
function defaultFactory (origin, opts) {
  return new Client(origin, opts)
}

/**
 * Pool variant that distributes dispatches across its clients in round-robin
 * order instead of always preferring the first free client.
 */
class RoundRobinPool extends PoolBase {
  constructor (origin, {
    connections,
    factory = defaultFactory,
    connect,
    connectTimeout,
    tls,
    maxCachedSessions,
    socketPath,
    autoSelectFamily,
    autoSelectFamilyAttemptTimeout,
    allowH2,
    clientTtl,
    ...options
  } = {}) {
    if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
      throw new InvalidArgumentError('invalid connections')
    }

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    // `connect` given as an options object (or omitted) is turned into a
    // connector function; a user-supplied function is used as-is.
    if (typeof connect !== 'function') {
      connect = buildConnector({
        ...tls,
        maxCachedSessions,
        allowH2,
        socketPath,
        timeout: connectTimeout,
        ...(typeof autoSelectFamily === 'boolean' ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
        ...connect
      })
    }

    super()

    // Note: `connections === 0` behaves like unset (no upper bound).
    this[kConnections] = connections || null
    this[kUrl] = util.parseOrigin(origin)
    this[kOptions] = { ...util.deepClone(options), connect, allowH2, clientTtl }
    this[kOptions].interceptors = options.interceptors
      ? { ...options.interceptors }
      : undefined
    this[kFactory] = factory
    this[kIndex] = -1

    // Stamp each connected target with its connect time so [kGetDispatcher]
    // can expire it once `clientTtl` has elapsed.
    this.on('connect', (origin, targets) => {
      if (clientTtl != null && clientTtl > 0) {
        for (const target of targets) {
          Object.assign(target, { ttl: Date.now() })
        }
      }
    })

    // Errored clients are dropped from the rotation and never re-used.
    this.on('connectionError', (origin, targets, error) => {
      for (const target of targets) {
        const idx = this[kClients].indexOf(target)
        if (idx !== -1) {
          this[kClients].splice(idx, 1)
        }
      }
    })
  }

  /**
   * Returns the next client in rotation able to accept a dispatch, creating
   * one when none exists or all are busy and the limit allows. Returns
   * undefined when saturated (PoolBase then queues the request).
   */
  [kGetDispatcher] () {
    const clientTtlOption = this[kOptions].clientTtl
    const clientsLength = this[kClients].length

    // If we have no clients yet, create one
    if (clientsLength === 0) {
      const dispatcher = this[kFactory](this[kUrl], this[kOptions])
      this[kAddClient](dispatcher)
      return dispatcher
    }

    // Round-robin through existing clients
    let checked = 0
    while (checked < clientsLength) {
      this[kIndex] = (this[kIndex] + 1) % clientsLength
      const client = this[kClients][this[kIndex]]

      // Check if client is stale (TTL expired)
      // NOTE(review): kRemoveClient splices the array only in close()'s
      // callback, so clientsLength captured above stays valid for this pass.
      if (clientTtlOption != null && clientTtlOption > 0 && client.ttl && ((Date.now() - client.ttl) > clientTtlOption)) {
        this[kRemoveClient](client)
        checked++
        continue
      }

      // Return client if it's not draining
      if (!client[kNeedDrain]) {
        return client
      }

      checked++
    }

    // All clients are busy, create a new one if we haven't reached the limit
    if (!this[kConnections] || clientsLength < this[kConnections]) {
      const dispatcher = this[kFactory](this[kUrl], this[kOptions])
      this[kAddClient](dispatcher)
      return dispatcher
    }
  }
}

module.exports = RoundRobinPool
|
||||
33
backend/node_modules/undici/lib/encoding/index.js
generated
vendored
Normal file
33
backend/node_modules/undici/lib/encoding/index.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
'use strict'
|
||||
|
||||
// Shared UTF-8 decoder (TextDecoder defaults to 'utf-8').
const textDecoder = new TextDecoder()

/**
 * Decodes a byte buffer as UTF-8, stripping a leading byte-order mark.
 * @see https://encoding.spec.whatwg.org/#utf-8-decode
 * @param {Uint8Array} buffer
 * @returns {string}
 */
function utf8DecodeBytes (buffer) {
  if (buffer.length === 0) {
    return ''
  }

  // Per the spec, a leading 0xEF 0xBB 0xBF (UTF-8 BOM) is consumed and
  // discarded before decoding; any later BOM bytes are decoded normally.
  const hasBom = buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF
  const bytes = hasBom ? buffer.subarray(3) : buffer

  return textDecoder.decode(bytes)
}
|
||||
|
||||
// Only the spec-aligned decode helper is exported.
module.exports = {
  utf8DecodeBytes
}
|
||||
50
backend/node_modules/undici/lib/global.js
generated
vendored
Normal file
50
backend/node_modules/undici/lib/global.js
generated
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
'use strict'
|
||||
|
||||
// We include a version number for the Dispatcher API. In case of breaking changes,
// this version number must be increased to avoid conflicts.
const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
const { InvalidArgumentError } = require('./core/errors')
const Agent = require('./dispatcher/agent')

// Install a default Agent on first load. The functions used here are
// declared below; function declarations are hoisted, so this is safe.
if (getGlobalDispatcher() === undefined) {
  setGlobalDispatcher(new Agent())
}
|
||||
|
||||
/**
 * Installs `agent` as the process-wide default dispatcher.
 * @param {object} agent - must expose a callable dispatch() method
 * @throws {InvalidArgumentError} when `agent` does not implement dispatch()
 */
function setGlobalDispatcher (agent) {
  // Anything with a callable dispatch() qualifies; null/undefined do not.
  if (typeof agent?.dispatch !== 'function') {
    throw new InvalidArgumentError('Argument agent must implement Agent')
  }
  // Stored under a registered Symbol on globalThis so every copy of undici
  // in the process sees the same dispatcher; hidden from enumeration.
  Object.defineProperty(globalThis, globalDispatcher, {
    value: agent,
    writable: true,
    enumerable: false,
    configurable: false
  })
}
|
||||
|
||||
/**
 * Returns the process-wide dispatcher stored on globalThis, or undefined
 * when none has been installed yet.
 */
function getGlobalDispatcher () {
  return globalThis[globalDispatcher]
}

// These are the globals that can be installed by undici.install().
// Not exported by index.js to avoid use outside of this module.
const installedExports = /** @type {const} */ (
  [
    'fetch',
    'Headers',
    'Response',
    'Request',
    'FormData',
    'WebSocket',
    'CloseEvent',
    'ErrorEvent',
    'MessageEvent',
    'EventSource'
  ]
)

module.exports = {
  setGlobalDispatcher,
  getGlobalDispatcher,
  installedExports
}
|
||||
526
backend/node_modules/undici/lib/handler/cache-handler.js
generated
vendored
Normal file
526
backend/node_modules/undici/lib/handler/cache-handler.js
generated
vendored
Normal file
@@ -0,0 +1,526 @@
|
||||
'use strict'
|
||||
|
||||
const util = require('../core/util')
|
||||
const {
|
||||
parseCacheControlHeader,
|
||||
parseVaryHeader,
|
||||
isEtagUsable
|
||||
} = require('../util/cache')
|
||||
const { parseHttpDate } = require('../util/date.js')
|
||||
|
||||
// No-op used to swallow rejections from best-effort cache-store operations.
function noop () {}

// Status codes that we can use some heuristics on to cache
const HEURISTICALLY_CACHEABLE_STATUS_CODES = [
  200, 203, 204, 206, 300, 301, 308, 404, 405, 410, 414, 501
]

// Status codes which semantic is not handled by the cache
// https://datatracker.ietf.org/doc/html/rfc9111#section-3
// This list should not grow beyond 206 unless the RFC is updated
// by a newer one including more. Please introduce another list if
// implementing caching of responses with the 'must-understand' directive.
const NOT_UNDERSTOOD_STATUS_CODES = [
  206
]

// Upper bound on response age: 2^31 - 1 seconds expressed in milliseconds.
const MAX_RESPONSE_AGE = 2147483647000
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/dispatcher.d.ts').default.DispatchHandler} DispatchHandler
|
||||
*
|
||||
* @implements {DispatchHandler}
|
||||
*/
|
||||
class CacheHandler {
|
||||
/**
|
||||
* @type {import('../../types/cache-interceptor.d.ts').default.CacheKey}
|
||||
*/
|
||||
#cacheKey
|
||||
|
||||
/**
|
||||
* @type {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions['type']}
|
||||
*/
|
||||
#cacheType
|
||||
|
||||
/**
|
||||
* @type {number | undefined}
|
||||
*/
|
||||
#cacheByDefault
|
||||
|
||||
/**
|
||||
* @type {import('../../types/cache-interceptor.d.ts').default.CacheStore}
|
||||
*/
|
||||
#store
|
||||
|
||||
/**
|
||||
* @type {import('../../types/dispatcher.d.ts').default.DispatchHandler}
|
||||
*/
|
||||
#handler
|
||||
|
||||
/**
|
||||
* @type {import('node:stream').Writable | undefined}
|
||||
*/
|
||||
#writeStream
|
||||
|
||||
/**
 * Stores the cache configuration and the downstream handler; no I/O happens
 * here — cache reads/writes occur in the dispatch lifecycle callbacks.
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} opts
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
 * @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
 */
constructor ({ store, type, cacheByDefault }, cacheKey, handler) {
  this.#store = store
  this.#cacheType = type
  this.#cacheByDefault = cacheByDefault
  this.#cacheKey = cacheKey
  this.#handler = handler
}
|
||||
|
||||
onRequestStart (controller, context) {
  // A (re)started request invalidates any partially-written cache entry
  // from a previous attempt, so tear down the in-flight write stream.
  this.#writeStream?.destroy()
  this.#writeStream = undefined
  this.#handler.onRequestStart?.(controller, context)
}

// Upgraded connections are never cached; forward straight downstream.
onRequestUpgrade (controller, statusCode, headers, socket) {
  this.#handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
}
|
||||
|
||||
/**
 * Decides whether the incoming response may be stored, and if so wires the
 * response body into a cache-store write stream while still forwarding
 * every event to the downstream handler.
 *
 * @param {import('../../types/dispatcher.d.ts').default.DispatchController} controller
 * @param {number} statusCode
 * @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
 * @param {string} statusMessage
 */
onResponseStart (
  controller,
  statusCode,
  resHeaders,
  statusMessage
) {
  // Deferred forward of the response head; every "can't cache" early exit
  // below funnels through this so the downstream handler always sees it.
  const downstreamOnHeaders = () =>
    this.#handler.onResponseStart?.(
      controller,
      statusCode,
      resHeaders,
      statusMessage
    )
  // Captured for the `function () {}` stream listeners below, which need
  // their own `this` (the stream) while still reaching the handler's fields.
  const handler = this

  if (
    !util.safeHTTPMethods.includes(this.#cacheKey.method) &&
    statusCode >= 200 &&
    statusCode <= 399
  ) {
    // Successful response to an unsafe method, delete it from cache
    // https://www.rfc-editor.org/rfc/rfc9111.html#name-invalidating-stored-response
    try {
      // delete() may be sync or promise-returning depending on the store.
      this.#store.delete(this.#cacheKey)?.catch?.(noop)
    } catch {
      // Fail silently
    }
    return downstreamOnHeaders()
  }

  const cacheControlHeader = resHeaders['cache-control']
  const heuristicallyCacheable = resHeaders['last-modified'] && HEURISTICALLY_CACHEABLE_STATUS_CODES.includes(statusCode)
  if (
    !cacheControlHeader &&
    !resHeaders['expires'] &&
    !heuristicallyCacheable &&
    !this.#cacheByDefault
  ) {
    // Don't have anything to tell us this response is cachable and we're not
    // caching by default
    return downstreamOnHeaders()
  }

  const cacheControlDirectives = cacheControlHeader ? parseCacheControlHeader(cacheControlHeader) : {}
  if (!canCacheResponse(this.#cacheType, statusCode, resHeaders, cacheControlDirectives)) {
    return downstreamOnHeaders()
  }

  const now = Date.now()
  // Age header is converted to milliseconds by getAge().
  const resAge = resHeaders.age ? getAge(resHeaders.age) : undefined
  if (resAge && resAge >= MAX_RESPONSE_AGE) {
    // Response considered stale
    return downstreamOnHeaders()
  }

  const resDate = typeof resHeaders.date === 'string'
    ? parseHttpDate(resHeaders.date)
    : undefined

  // Freshness lifetime from headers; fall back to the configured default.
  const staleAt =
    determineStaleAt(this.#cacheType, now, resAge, resHeaders, resDate, cacheControlDirectives) ??
    this.#cacheByDefault
  if (staleAt === undefined || (resAge && resAge > staleAt)) {
    return downstreamOnHeaders()
  }

  // Anchor freshness on the origin's Date header when available.
  const baseTime = resDate ? resDate.getTime() : now
  const absoluteStaleAt = staleAt + baseTime
  if (now >= absoluteStaleAt) {
    // Response is already stale
    return downstreamOnHeaders()
  }

  let varyDirectives
  if (this.#cacheKey.headers && resHeaders.vary) {
    varyDirectives = parseVaryHeader(resHeaders.vary, this.#cacheKey.headers)
    if (!varyDirectives) {
      // Parse error
      return downstreamOnHeaders()
    }
  }

  const deleteAt = determineDeleteAt(baseTime, cacheControlDirectives, absoluteStaleAt)
  // Hop-by-hop and no-cache/private-listed headers must not be stored.
  const strippedHeaders = stripNecessaryHeaders(resHeaders, cacheControlDirectives)

  /**
   * Metadata persisted alongside the body in the cache store.
   * @type {import('../../types/cache-interceptor.d.ts').default.CacheValue}
   */
  const value = {
    statusCode,
    statusMessage,
    headers: strippedHeaders,
    vary: varyDirectives,
    cacheControlDirectives,
    cachedAt: resAge ? now - resAge : now,
    staleAt: absoluteStaleAt,
    deleteAt
  }

  // Not modified, re-use the cached value
  // https://www.rfc-editor.org/rfc/rfc9111.html#name-handling-304-not-modified
  if (statusCode === 304) {
    /**
     * @type {import('../../types/cache-interceptor.d.ts').default.CacheValue}
     */
    const cachedValue = this.#store.get(this.#cacheKey)
    if (!cachedValue) {
      // Do not create a new cache entry, as a 304 won't have a body - so cannot be cached.
      return downstreamOnHeaders()
    }

    // Re-use the cached value: statuscode, statusmessage, headers and body.
    // Fresh headers from the 304 override the stored ones.
    value.statusCode = cachedValue.statusCode
    value.statusMessage = cachedValue.statusMessage
    value.etag = cachedValue.etag
    value.headers = { ...cachedValue.headers, ...strippedHeaders }

    downstreamOnHeaders()

    // Rewrite the (refreshed) entry, replaying the stored body into both the
    // new cache entry and the downstream handler.
    this.#writeStream = this.#store.createWriteStream(this.#cacheKey, value)

    if (!this.#writeStream || !cachedValue?.body) {
      return
    }

    // Iterator survives across 'drain' callbacks so writing resumes where
    // it stopped.
    const bodyIterator = cachedValue.body.values()

    const streamCachedBody = () => {
      for (const chunk of bodyIterator) {
        const full = this.#writeStream.write(chunk) === false
        this.#handler.onResponseData?.(controller, chunk)
        // when stream is full stop writing until we get a 'drain' event
        if (full) {
          break
        }
      }
    }

    this.#writeStream
      .on('error', function () {
        // A failed write leaves a partial entry; drop it entirely.
        handler.#writeStream = undefined
        handler.#store.delete(handler.#cacheKey)
      })
      .on('drain', () => {
        streamCachedBody()
      })
      .on('close', function () {
        // Only clear if a restart hasn't already swapped in a new stream.
        if (handler.#writeStream === this) {
          handler.#writeStream = undefined
        }
      })

    streamCachedBody()
  } else {
    if (typeof resHeaders.etag === 'string' && isEtagUsable(resHeaders.etag)) {
      value.etag = resHeaders.etag
    }

    this.#writeStream = this.#store.createWriteStream(this.#cacheKey, value)

    if (!this.#writeStream) {
      // Store declined to cache this entry; just forward the response.
      return downstreamOnHeaders()
    }

    // Backpressure: onResponseData pauses the controller when the store's
    // stream is full; 'drain' resumes it.
    this.#writeStream
      .on('drain', () => controller.resume())
      .on('error', function () {
        // TODO (fix): Make error somehow observable?
        handler.#writeStream = undefined

        // Delete the value in case the cache store is holding onto state from
        // the call to createWriteStream
        handler.#store.delete(handler.#cacheKey)
      })
      .on('close', function () {
        if (handler.#writeStream === this) {
          handler.#writeStream = undefined
        }

        // TODO (fix): Should we resume even if was paused downstream?
        controller.resume()
      })

    downstreamOnHeaders()
  }
}
|
||||
|
||||
onResponseData (controller, chunk) {
|
||||
if (this.#writeStream?.write(chunk) === false) {
|
||||
controller.pause()
|
||||
}
|
||||
|
||||
this.#handler.onResponseData?.(controller, chunk)
|
||||
}
|
||||
|
||||
onResponseEnd (controller, trailers) {
|
||||
this.#writeStream?.end()
|
||||
this.#handler.onResponseEnd?.(controller, trailers)
|
||||
}
|
||||
|
||||
onResponseError (controller, err) {
|
||||
this.#writeStream?.destroy(err)
|
||||
this.#writeStream = undefined
|
||||
this.#handler.onResponseError?.(controller, err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Decides whether a response is eligible for storage at all, per the
 * RFC 9111 storage rules (status understood, explicit-enough directives,
 * no-store, shared-cache privacy, Vary: *, Authorization constraints).
 *
 * @see https://www.rfc-editor.org/rfc/rfc9111.html#name-storing-responses-to-authen
 *
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions['type']} cacheType
 * @param {number} statusCode
 * @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
 * @returns {boolean} true when the response may be stored
 */
function canCacheResponse (cacheType, statusCode, resHeaders, cacheControlDirectives) {
  // Status code must be final and understood.
  if (statusCode < 200 || NOT_UNDERSTOOD_STATUS_CODES.includes(statusCode)) {
    return false
  }
  // Responses with neither status codes that are heuristically cacheable, nor "explicit enough" caching
  // directives, are not cacheable. "Explicit enough": see https://www.rfc-editor.org/rfc/rfc9111.html#section-3
  if (!HEURISTICALLY_CACHEABLE_STATUS_CODES.includes(statusCode) && !resHeaders['expires'] &&
    !cacheControlDirectives.public &&
    cacheControlDirectives['max-age'] === undefined &&
    // RFC 9111: a private response directive, if the cache is not shared
    !(cacheControlDirectives.private && cacheType === 'private') &&
    !(cacheControlDirectives['s-maxage'] !== undefined && cacheType === 'shared')
  ) {
    return false
  }

  // no-store forbids storage outright.
  if (cacheControlDirectives['no-store']) {
    return false
  }

  // A bare `private` (no field list) bars shared caches entirely.
  if (cacheType === 'shared' && cacheControlDirectives.private === true) {
    return false
  }

  // https://www.rfc-editor.org/rfc/rfc9111.html#section-4.1-5
  if (resHeaders.vary?.includes('*')) {
    return false
  }

  // https://www.rfc-editor.org/rfc/rfc9111.html#name-storing-responses-to-authen
  if (resHeaders.authorization) {
    // Authorized responses are only storable when explicitly marked public
    // (and the header value is a plain string, not repeated).
    if (!cacheControlDirectives.public || typeof resHeaders.authorization !== 'string') {
      return false
    }

    // no-cache="authorization" excludes the credential-bearing response.
    if (
      Array.isArray(cacheControlDirectives['no-cache']) &&
      cacheControlDirectives['no-cache'].includes('authorization')
    ) {
      return false
    }

    // Same for private="authorization".
    if (
      Array.isArray(cacheControlDirectives['private']) &&
      cacheControlDirectives['private'].includes('authorization')
    ) {
      return false
    }
  }

  return true
}
|
||||
|
||||
/**
 * Parses an `Age` response header into milliseconds.
 *
 * @param {string | string[]} ageHeader - raw header value; when the header
 *   was repeated (array form), only the first value is honored.
 * @returns {number | undefined} age in milliseconds, or undefined when the
 *   value is not a parsable number
 */
function getAge (ageHeader) {
  // Explicit radix: parseInt without one is a lint hazard and historically
  // treated leading-zero strings inconsistently.
  const age = parseInt(Array.isArray(ageHeader) ? ageHeader[0] : ageHeader, 10)

  // Number.isNaN avoids the coercing global isNaN.
  return Number.isNaN(age) ? undefined : age * 1000
}
|
||||
|
||||
/**
 * Computes the freshness lifetime of a response.
 *
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions['type']} cacheType
 * @param {number} now - current epoch time in milliseconds
 * @param {number | undefined} age - response age in milliseconds (from getAge)
 * @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
 * @param {Date | undefined} responseDate - parsed Date header, if any
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
 *
 * @returns {number | undefined} freshness lifetime in MILLISECONDS
 *   (relative, added to the Date/now base by the caller), or undefined if
 *   the response shouldn't be cached
 */
function determineStaleAt (cacheType, now, age, resHeaders, responseDate, cacheControlDirectives) {
  if (cacheType === 'shared') {
    // Prioritize s-maxage since we're a shared cache
    // s-maxage > max-age > Expires
    // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.2.10-3
    const sMaxAge = cacheControlDirectives['s-maxage']
    if (sMaxAge !== undefined) {
      return sMaxAge > 0 ? sMaxAge * 1000 : undefined
    }
  }

  const maxAge = cacheControlDirectives['max-age']
  if (maxAge !== undefined) {
    return maxAge > 0 ? maxAge * 1000 : undefined
  }

  if (typeof resHeaders.expires === 'string') {
    // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.3
    const expiresDate = parseHttpDate(resHeaders.expires)
    if (expiresDate) {
      if (now >= expiresDate.getTime()) {
        return undefined
      }

      if (responseDate) {
        if (responseDate >= expiresDate) {
          return undefined
        }

        // Already older than its allowed lifetime.
        if (age !== undefined && age > (expiresDate - responseDate)) {
          return undefined
        }
      }

      return expiresDate.getTime() - now
    }
  }

  if (typeof resHeaders['last-modified'] === 'string') {
    // https://www.rfc-editor.org/rfc/rfc9111.html#name-calculating-heuristic-fresh
    const lastModified = new Date(resHeaders['last-modified'])
    if (isValidDate(lastModified)) {
      if (lastModified.getTime() >= now) {
        return undefined
      }

      const responseAge = now - lastModified.getTime()

      // Heuristic freshness: 10% of the time since last modification.
      return responseAge * 0.1
    }
  }

  if (cacheControlDirectives.immutable) {
    // https://www.rfc-editor.org/rfc/rfc8246.html#section-2.2
    // One year, in milliseconds. The previous `31536000` was one year in
    // SECONDS, while every other branch (and determineDeleteAt's
    // `31536000000`) works in ms — it undershot freshness ~1000x.
    return 31536000 * 1000
  }

  return undefined
}
|
||||
|
||||
/**
 * Computes the absolute timestamp at which a cached entry may be evicted:
 * the latest of the stale time and any stale-while-revalidate /
 * stale-if-error grace windows; when neither serve-stale directive is
 * present, entries are retained for up to one year past `now`.
 *
 * @param {number} now
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
 * @param {number} staleAt - absolute staleness timestamp (ms epoch)
 * @returns {number} absolute deletion timestamp (ms epoch)
 */
function determineDeleteAt (now, cacheControlDirectives, staleAt) {
  const swr = cacheControlDirectives['stale-while-revalidate']
  const sie = cacheControlDirectives['stale-if-error']

  const candidates = [staleAt]

  if (swr) {
    candidates.push(staleAt + swr * 1000)
  }

  if (sie) {
    candidates.push(staleAt + sie * 1000)
  }

  if (!swr && !sie) {
    // No serve-stale directives at all: keep the entry around for a year.
    candidates.push(now + 31536000000)
  }

  return Math.max(...candidates)
}
|
||||
|
||||
/**
 * Strips headers that must not be stored with a cached response:
 * hop-by-hop headers, anything named by the Connection header, and fields
 * listed in no-cache="..."/private="..." directives. Returns the original
 * object untouched (same reference) when nothing needed stripping.
 *
 * @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
 * @returns {Record<string, string | string []>}
 */
function stripNecessaryHeaders (resHeaders, cacheControlDirectives) {
  const headersToRemove = [
    'connection',
    'proxy-authenticate',
    'proxy-authentication-info',
    'proxy-authorization',
    'proxy-connection',
    'te',
    'transfer-encoding',
    'upgrade',
    // We'll add age back when serving it
    'age'
  ]

  const connection = resHeaders['connection']
  if (connection) {
    // Connection can name further hop-by-hop headers, either as repeated
    // header lines (array) or one comma-separated value.
    const listed = Array.isArray(connection) ? connection : connection.split(',')
    for (const name of listed) {
      headersToRemove.push(name.trim())
    }
  }

  for (const directive of ['no-cache', 'private']) {
    const fields = cacheControlDirectives[directive]
    if (Array.isArray(fields)) {
      headersToRemove.push(...fields)
    }
  }

  let stripped
  for (const name of headersToRemove) {
    if (resHeaders[name]) {
      // Copy lazily so the common nothing-to-strip case allocates nothing.
      stripped ??= { ...resHeaders }
      delete stripped[name]
    }
  }

  return stripped ?? resHeaders
}
|
||||
|
||||
/**
 * @param {Date} date
 * @returns {boolean} true when `date` is a Date holding a real timestamp
 *   (i.e. not an "Invalid Date")
 */
function isValidDate (date) {
  if (!(date instanceof Date)) {
    return false
  }
  // Invalid Date instances have a NaN timestamp.
  return Number.isFinite(date.getTime())
}
|
||||
|
||||
module.exports = CacheHandler
|
||||
124
backend/node_modules/undici/lib/handler/cache-revalidation-handler.js
generated
vendored
Normal file
124
backend/node_modules/undici/lib/handler/cache-revalidation-handler.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
|
||||
/**
|
||||
* This takes care of revalidation requests we send to the origin. If we get
|
||||
* a response indicating that what we have is cached (via a HTTP 304), we can
|
||||
* continue using the cached value. Otherwise, we'll receive the new response
|
||||
* here, which we then just pass on to the next handler (most likely a
|
||||
* CacheHandler). Note that this assumes the proper headers were already
|
||||
* included in the request to tell the origin that we want to revalidate the
|
||||
* response (i.e. if-modified-since or if-none-match).
|
||||
*
|
||||
* @see https://www.rfc-editor.org/rfc/rfc9111.html#name-validation
|
||||
*
|
||||
* @implements {import('../../types/dispatcher.d.ts').default.DispatchHandler}
|
||||
*/
|
||||
class CacheRevalidationHandler {
|
||||
#successful = false
|
||||
|
||||
/**
|
||||
* @type {((boolean, any) => void) | null}
|
||||
*/
|
||||
#callback
|
||||
|
||||
/**
|
||||
* @type {(import('../../types/dispatcher.d.ts').default.DispatchHandler)}
|
||||
*/
|
||||
#handler
|
||||
|
||||
#context
|
||||
|
||||
/**
|
||||
* @type {boolean}
|
||||
*/
|
||||
#allowErrorStatusCodes
|
||||
|
||||
/**
|
||||
* @param {(boolean) => void} callback Function to call if the cached value is valid
|
||||
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandlers} handler
|
||||
* @param {boolean} allowErrorStatusCodes
|
||||
*/
|
||||
constructor (callback, handler, allowErrorStatusCodes) {
|
||||
if (typeof callback !== 'function') {
|
||||
throw new TypeError('callback must be a function')
|
||||
}
|
||||
|
||||
this.#callback = callback
|
||||
this.#handler = handler
|
||||
this.#allowErrorStatusCodes = allowErrorStatusCodes
|
||||
}
|
||||
|
||||
onRequestStart (_, context) {
|
||||
this.#successful = false
|
||||
this.#context = context
|
||||
}
|
||||
|
||||
onRequestUpgrade (controller, statusCode, headers, socket) {
|
||||
this.#handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
|
||||
}
|
||||
|
||||
onResponseStart (
|
||||
controller,
|
||||
statusCode,
|
||||
headers,
|
||||
statusMessage
|
||||
) {
|
||||
assert(this.#callback != null)
|
||||
|
||||
// https://www.rfc-editor.org/rfc/rfc9111.html#name-handling-a-validation-respo
|
||||
// https://datatracker.ietf.org/doc/html/rfc5861#section-4
|
||||
this.#successful = statusCode === 304 ||
|
||||
(this.#allowErrorStatusCodes && statusCode >= 500 && statusCode <= 504)
|
||||
this.#callback(this.#successful, this.#context)
|
||||
this.#callback = null
|
||||
|
||||
if (this.#successful) {
|
||||
return true
|
||||
}
|
||||
|
||||
this.#handler.onRequestStart?.(controller, this.#context)
|
||||
this.#handler.onResponseStart?.(
|
||||
controller,
|
||||
statusCode,
|
||||
headers,
|
||||
statusMessage
|
||||
)
|
||||
}
|
||||
|
||||
onResponseData (controller, chunk) {
|
||||
if (this.#successful) {
|
||||
return
|
||||
}
|
||||
|
||||
return this.#handler.onResponseData?.(controller, chunk)
|
||||
}
|
||||
|
||||
onResponseEnd (controller, trailers) {
|
||||
if (this.#successful) {
|
||||
return
|
||||
}
|
||||
|
||||
this.#handler.onResponseEnd?.(controller, trailers)
|
||||
}
|
||||
|
||||
onResponseError (controller, err) {
|
||||
if (this.#successful) {
|
||||
return
|
||||
}
|
||||
|
||||
if (this.#callback) {
|
||||
this.#callback(false)
|
||||
this.#callback = null
|
||||
}
|
||||
|
||||
if (typeof this.#handler.onResponseError === 'function') {
|
||||
this.#handler.onResponseError(controller, err)
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = CacheRevalidationHandler
|
||||
67
backend/node_modules/undici/lib/handler/decorator-handler.js
generated
vendored
Normal file
67
backend/node_modules/undici/lib/handler/decorator-handler.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const WrapHandler = require('./wrap-handler')
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
module.exports = class DecoratorHandler {
|
||||
#handler
|
||||
#onCompleteCalled = false
|
||||
#onErrorCalled = false
|
||||
#onResponseStartCalled = false
|
||||
|
||||
constructor (handler) {
|
||||
if (typeof handler !== 'object' || handler === null) {
|
||||
throw new TypeError('handler must be an object')
|
||||
}
|
||||
this.#handler = WrapHandler.wrap(handler)
|
||||
}
|
||||
|
||||
onRequestStart (...args) {
|
||||
this.#handler.onRequestStart?.(...args)
|
||||
}
|
||||
|
||||
onRequestUpgrade (...args) {
|
||||
assert(!this.#onCompleteCalled)
|
||||
assert(!this.#onErrorCalled)
|
||||
|
||||
return this.#handler.onRequestUpgrade?.(...args)
|
||||
}
|
||||
|
||||
onResponseStart (...args) {
|
||||
assert(!this.#onCompleteCalled)
|
||||
assert(!this.#onErrorCalled)
|
||||
assert(!this.#onResponseStartCalled)
|
||||
|
||||
this.#onResponseStartCalled = true
|
||||
|
||||
return this.#handler.onResponseStart?.(...args)
|
||||
}
|
||||
|
||||
onResponseData (...args) {
|
||||
assert(!this.#onCompleteCalled)
|
||||
assert(!this.#onErrorCalled)
|
||||
|
||||
return this.#handler.onResponseData?.(...args)
|
||||
}
|
||||
|
||||
onResponseEnd (...args) {
|
||||
assert(!this.#onCompleteCalled)
|
||||
assert(!this.#onErrorCalled)
|
||||
|
||||
this.#onCompleteCalled = true
|
||||
return this.#handler.onResponseEnd?.(...args)
|
||||
}
|
||||
|
||||
onResponseError (...args) {
|
||||
this.#onErrorCalled = true
|
||||
return this.#handler.onResponseError?.(...args)
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
onBodySent () {}
|
||||
}
|
||||
216
backend/node_modules/undici/lib/handler/deduplication-handler.js
generated
vendored
Normal file
216
backend/node_modules/undici/lib/handler/deduplication-handler.js
generated
vendored
Normal file
@@ -0,0 +1,216 @@
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/dispatcher.d.ts').default.DispatchHandler} DispatchHandler
|
||||
*/
|
||||
|
||||
/**
 * Handler that buffers response data and notifies multiple waiting handlers.
 * Used for request deduplication: one "primary" dispatch streams the live
 * response, and every deduplicated caller is replayed the buffered copy
 * after the response finishes (or is told about the error).
 *
 * @implements {DispatchHandler}
 */
class DeduplicationHandler {
  /** @type {DispatchHandler} live handler for the primary dispatch */
  #primaryHandler

  /** @type {DispatchHandler[]} handlers replayed once the request settles */
  #waitingHandlers = []

  /** @type {Buffer[]} buffered response body chunks */
  #chunks = []

  /** @type {number} buffered response status code */
  #statusCode = 0

  /** @type {Record<string, string | string[]>} buffered response headers */
  #headers = {}

  /** @type {string} buffered response status message */
  #statusMessage = ''

  /** @type {boolean} set once the primary response errored */
  #aborted = false

  /** @type {import('../../types/dispatcher.d.ts').default.DispatchController | null} */
  #controller = null

  /** @type {(() => void) | null} invoked once the request settles */
  #onComplete = null

  /**
   * @param {DispatchHandler} primaryHandler The primary handler
   * @param {() => void} onComplete Callback when request completes
   */
  constructor (primaryHandler, onComplete) {
    this.#primaryHandler = primaryHandler
    this.#onComplete = onComplete
  }

  /**
   * Add a waiting handler that will receive the buffered response.
   * @param {DispatchHandler} handler
   */
  addWaitingHandler (handler) {
    this.#waitingHandlers.push(handler)
  }

  onRequestStart (controller, context) {
    this.#controller = controller
    this.#primaryHandler.onRequestStart?.(controller, context)
  }

  onRequestUpgrade (controller, statusCode, headers, socket) {
    this.#primaryHandler.onRequestUpgrade?.(controller, statusCode, headers, socket)
  }

  onResponseStart (controller, statusCode, headers, statusMessage) {
    // Snapshot the response head for later replay to waiting handlers.
    this.#statusCode = statusCode
    this.#headers = headers
    this.#statusMessage = statusMessage
    this.#primaryHandler.onResponseStart?.(controller, statusCode, headers, statusMessage)
  }

  onResponseData (controller, chunk) {
    // Keep a copy of every chunk so waiters can receive the full body.
    this.#chunks.push(Buffer.from(chunk))
    this.#primaryHandler.onResponseData?.(controller, chunk)
  }

  onResponseEnd (controller, trailers) {
    this.#primaryHandler.onResponseEnd?.(controller, trailers)
    this.#notifyWaitingHandlers()
    this.#onComplete?.()
  }

  onResponseError (controller, err) {
    this.#aborted = true
    this.#primaryHandler.onResponseError?.(controller, err)
    this.#notifyWaitingHandlersError(err)
    this.#onComplete?.()
  }

  /**
   * Builds the inert controller handed to replayed handlers: pause/resume
   * and abort are no-ops since the real response has already settled.
   * @param {boolean} aborted
   * @param {Error | null} reason
   */
  static #makeController (aborted, reason) {
    return {
      resume () {},
      pause () {},
      get paused () { return false },
      get aborted () { return aborted },
      get reason () { return reason },
      abort () {}
    }
  }

  /**
   * Replay the buffered response into every waiting handler.
   */
  #notifyWaitingHandlers () {
    const body = Buffer.concat(this.#chunks)

    for (const handler of this.#waitingHandlers) {
      const controller = DeduplicationHandler.#makeController(false, null)

      try {
        handler.onRequestStart?.(controller, null)
        if (controller.aborted) {
          continue
        }

        handler.onResponseStart?.(
          controller,
          this.#statusCode,
          this.#headers,
          this.#statusMessage
        )
        if (controller.aborted) {
          continue
        }

        if (body.length > 0) {
          handler.onResponseData?.(controller, body)
        }

        handler.onResponseEnd?.(controller, {})
      } catch {
        // A broken waiting handler must not affect its siblings.
      }
    }

    this.#waitingHandlers = []
    this.#chunks = []
  }

  /**
   * Notify every waiting handler that the primary request failed.
   * @param {Error} err
   */
  #notifyWaitingHandlersError (err) {
    for (const handler of this.#waitingHandlers) {
      const controller = DeduplicationHandler.#makeController(true, err)

      try {
        handler.onRequestStart?.(controller, null)
        handler.onResponseError?.(controller, err)
      } catch {
        // Ignore errors from waiting handlers.
      }
    }

    this.#waitingHandlers = []
    this.#chunks = []
  }
}
|
||||
|
||||
module.exports = DeduplicationHandler
|
||||
237
backend/node_modules/undici/lib/handler/redirect-handler.js
generated
vendored
Normal file
237
backend/node_modules/undici/lib/handler/redirect-handler.js
generated
vendored
Normal file
@@ -0,0 +1,237 @@
|
||||
'use strict'
|
||||
|
||||
const util = require('../core/util')
|
||||
const { kBodyUsed } = require('../core/symbols')
|
||||
const assert = require('node:assert')
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
const EE = require('node:events')
|
||||
|
||||
// Status codes whose Location header a redirect-following dispatch honors.
const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]

// Private key holding the wrapped body on BodyAsyncIterable instances.
const kBody = Symbol('body')

// Shared no-op, used to swallow 'error' events on bodies being destroyed.
const noop = () => {}
|
||||
|
||||
// Wraps a one-shot body (e.g. a ReadableStream or other iterable) so that
// redirect handling can detect and forbid a second consumption.
class BodyAsyncIterable {
  constructor (body) {
    this[kBody] = body
    this[kBodyUsed] = false // flipped once iteration begins
  }

  async * [Symbol.asyncIterator] () {
    // A request body may only be consumed once; re-iteration is a bug.
    assert(!this[kBodyUsed], 'disturbed')
    this[kBodyUsed] = true
    yield * this[kBody]
  }
}
|
||||
|
||||
class RedirectHandler {
|
||||
static buildDispatch (dispatcher, maxRedirections) {
|
||||
if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
|
||||
throw new InvalidArgumentError('maxRedirections must be a positive number')
|
||||
}
|
||||
|
||||
const dispatch = dispatcher.dispatch.bind(dispatcher)
|
||||
return (opts, originalHandler) => dispatch(opts, new RedirectHandler(dispatch, maxRedirections, opts, originalHandler))
|
||||
}
|
||||
|
||||
constructor (dispatch, maxRedirections, opts, handler) {
|
||||
if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
|
||||
throw new InvalidArgumentError('maxRedirections must be a positive number')
|
||||
}
|
||||
|
||||
this.dispatch = dispatch
|
||||
this.location = null
|
||||
const { maxRedirections: _, ...cleanOpts } = opts
|
||||
this.opts = cleanOpts // opts must be a copy, exclude maxRedirections
|
||||
this.maxRedirections = maxRedirections
|
||||
this.handler = handler
|
||||
this.history = []
|
||||
|
||||
if (util.isStream(this.opts.body)) {
|
||||
// TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
|
||||
// so that it can be dispatched again?
|
||||
// TODO (fix): Do we need 100-expect support to provide a way to do this properly?
|
||||
if (util.bodyLength(this.opts.body) === 0) {
|
||||
this.opts.body
|
||||
.on('data', function () {
|
||||
assert(false)
|
||||
})
|
||||
}
|
||||
|
||||
if (typeof this.opts.body.readableDidRead !== 'boolean') {
|
||||
this.opts.body[kBodyUsed] = false
|
||||
EE.prototype.on.call(this.opts.body, 'data', function () {
|
||||
this[kBodyUsed] = true
|
||||
})
|
||||
}
|
||||
} else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {
|
||||
// TODO (fix): We can't access ReadableStream internal state
|
||||
// to determine whether or not it has been disturbed. This is just
|
||||
// a workaround.
|
||||
this.opts.body = new BodyAsyncIterable(this.opts.body)
|
||||
} else if (
|
||||
this.opts.body &&
|
||||
typeof this.opts.body !== 'string' &&
|
||||
!ArrayBuffer.isView(this.opts.body) &&
|
||||
util.isIterable(this.opts.body) &&
|
||||
!util.isFormDataLike(this.opts.body)
|
||||
) {
|
||||
// TODO: Should we allow re-using iterable if !this.opts.idempotent
|
||||
// or through some other flag?
|
||||
this.opts.body = new BodyAsyncIterable(this.opts.body)
|
||||
}
|
||||
}
|
||||
|
||||
onRequestStart (controller, context) {
|
||||
this.handler.onRequestStart?.(controller, { ...context, history: this.history })
|
||||
}
|
||||
|
||||
onRequestUpgrade (controller, statusCode, headers, socket) {
|
||||
this.handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
|
||||
}
|
||||
|
||||
// Decides whether this response is a redirect to follow. Mutates `this.opts`
// in place (method, body, headers, path, origin, query) so that the same opts
// object can be re-dispatched by onResponseEnd. Sets `this.location` to the
// redirect target, or null when the response must be delivered downstream.
onResponseStart (controller, statusCode, headers, statusMessage) {
  if (this.opts.throwOnMaxRedirect && this.history.length >= this.maxRedirections) {
    throw new Error('max redirects')
  }

  // https://tools.ietf.org/html/rfc7231#section-6.4.2
  // https://fetch.spec.whatwg.org/#http-redirect-fetch
  // In case of HTTP 301 or 302 with POST, change the method to GET
  if ((statusCode === 301 || statusCode === 302) && this.opts.method === 'POST') {
    this.opts.method = 'GET'
    if (util.isStream(this.opts.body)) {
      // The body will not be re-sent; destroy it, swallowing destroy errors.
      util.destroy(this.opts.body.on('error', noop))
    }
    this.opts.body = null
  }

  // https://tools.ietf.org/html/rfc7231#section-6.4.4
  // In case of HTTP 303, always replace method to be either HEAD or GET
  if (statusCode === 303 && this.opts.method !== 'HEAD') {
    this.opts.method = 'GET'
    if (util.isStream(this.opts.body)) {
      util.destroy(this.opts.body.on('error', noop))
    }
    this.opts.body = null
  }

  // Only follow when: redirect budget remains, the request body is still
  // re-sendable (not disturbed), and the status code is redirectable.
  this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) || redirectableStatusCodes.indexOf(statusCode) === -1
    ? null
    : headers.location

  // Record the URL we just requested before (possibly) moving on.
  if (this.opts.origin) {
    this.history.push(new URL(this.opts.path, this.opts.origin))
  }

  if (!this.location) {
    // Not a redirect (or not followable): deliver the response downstream.
    this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
    return
  }

  // Resolve Location relative to the current request URL (may be relative).
  const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
  const path = search ? `${pathname}${search}` : pathname

  // Check for redirect loops by seeing if we've already visited this URL in our history
  // This catches the case where Client/Pool try to handle cross-origin redirects but fail
  // and keep redirecting to the same URL in an infinite loop
  const redirectUrlString = `${origin}${path}`
  for (const historyUrl of this.history) {
    if (historyUrl.toString() === redirectUrlString) {
      throw new InvalidArgumentError(`Redirect loop detected. Cannot redirect to ${origin}. This typically happens when using a Client or Pool with cross-origin redirects. Use an Agent for cross-origin redirects.`)
    }
  }

  // Remove headers referring to the original URL.
  // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers.
  // https://tools.ietf.org/html/rfc7231#section-6.4
  this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)
  this.opts.path = path
  this.opts.origin = origin
  this.opts.query = null
}
|
||||
|
||||
onResponseData (controller, chunk) {
|
||||
if (this.location) {
|
||||
/*
|
||||
https://tools.ietf.org/html/rfc7231#section-6.4
|
||||
|
||||
TLDR: undici always ignores 3xx response bodies.
|
||||
|
||||
Redirection is used to serve the requested resource from another URL, so it assumes that
|
||||
no body is generated (and thus can be ignored). Even though generating a body is not prohibited.
|
||||
|
||||
For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually
|
||||
(which means it's optional and not mandated) contain just an hyperlink to the value of
|
||||
the Location response header, so the body can be ignored safely.
|
||||
|
||||
For status 300, which is "Multiple Choices", the spec mentions both generating a Location
|
||||
response header AND a response body with the other possible location to follow.
|
||||
Since the spec explicitly chooses not to specify a format for such body and leave it to
|
||||
servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it.
|
||||
*/
|
||||
} else {
|
||||
this.handler.onResponseData?.(controller, chunk)
|
||||
}
|
||||
}
|
||||
|
||||
onResponseEnd (controller, trailers) {
|
||||
if (this.location) {
|
||||
/*
|
||||
https://tools.ietf.org/html/rfc7231#section-6.4
|
||||
|
||||
TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections
|
||||
and neither are useful if present.
|
||||
|
||||
See comment on onData method above for more detailed information.
|
||||
*/
|
||||
this.dispatch(this.opts, this)
|
||||
} else {
|
||||
this.handler.onResponseEnd(controller, trailers)
|
||||
}
|
||||
}
|
||||
|
||||
onResponseError (controller, error) {
  // Errors are not subject to redirect handling; forward them unchanged.
  this.handler.onResponseError?.(controller, error)
}
|
||||
}
|
||||
|
||||
// https://tools.ietf.org/html/rfc7231#section-6.4.4
|
||||
// https://tools.ietf.org/html/rfc7231#section-6.4.4
// Returns true when `header` must be dropped before following a redirect.
// Length checks are a fast pre-filter before the lowercase name comparison.
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
  const nameLength = header.length

  // Host always refers to the pre-redirect origin and must be dropped.
  if (nameLength === 4 && util.headerNameToString(header) === 'host') {
    return true
  }

  // On a 303 every Content-* header is dropped as well (method became GET).
  if (removeContent && util.headerNameToString(header).startsWith('content-')) {
    return true
  }

  // Credentials must not leak when the redirect crosses origins.
  if (unknownOrigin && (nameLength === 6 || nameLength === 13 || nameLength === 19)) {
    const lowered = util.headerNameToString(header)
    return lowered === 'authorization' || lowered === 'cookie' || lowered === 'proxy-authorization'
  }

  return false
}
|
||||
|
||||
// https://tools.ietf.org/html/rfc7231#section-6.4
|
||||
// https://tools.ietf.org/html/rfc7231#section-6.4
// Filters request headers before re-dispatching a redirected request,
// normalizing every accepted input shape into the flat
// [name, value, name, value, ...] array representation.
function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
  const kept = []

  if (Array.isArray(headers)) {
    // Already flat [name, value, ...] pairs.
    for (let idx = 0; idx < headers.length; idx += 2) {
      const name = headers[idx]
      if (!shouldRemoveHeader(name, removeContent, unknownOrigin)) {
        kept.push(name, headers[idx + 1])
      }
    }
    return kept
  }

  if (headers && typeof headers === 'object') {
    // Either an iterable of [name, value] entries (e.g. Headers) or a plain
    // object whose entries we enumerate.
    const pairs = typeof headers[Symbol.iterator] === 'function' ? headers : Object.entries(headers)
    for (const [name, value] of pairs) {
      if (!shouldRemoveHeader(name, removeContent, unknownOrigin)) {
        kept.push(name, value)
      }
    }
    return kept
  }

  assert(headers == null, 'headers must be an object or an array')
  return kept
}
|
||||
|
||||
module.exports = RedirectHandler
|
||||
394
backend/node_modules/undici/lib/handler/retry-handler.js
generated
vendored
Normal file
394
backend/node_modules/undici/lib/handler/retry-handler.js
generated
vendored
Normal file
@@ -0,0 +1,394 @@
|
||||
'use strict'
|
||||
const assert = require('node:assert')
|
||||
|
||||
const { kRetryHandlerDefaultRetry } = require('../core/symbols')
|
||||
const { RequestRetryError } = require('../core/errors')
|
||||
const WrapHandler = require('./wrap-handler')
|
||||
const {
|
||||
isDisturbed,
|
||||
parseRangeHeader,
|
||||
wrapRequestBody
|
||||
} = require('../core/util')
|
||||
|
||||
// Converts an HTTP-date Retry-After header value into a delay in
// milliseconds relative to now. Unparseable dates yield 0 (retry
// immediately); dates in the past yield a negative delay.
function calculateRetryAfterHeader (retryAfter) {
  const targetTime = new Date(retryAfter).getTime()
  if (Number.isNaN(targetTime)) {
    return 0
  }
  return targetTime - Date.now()
}
|
||||
|
||||
/**
 * Dispatch handler that transparently retries failed requests and, when the
 * server supports range requests, resumes a partially-consumed response body
 * instead of restarting it from scratch (tracked via `start`/`end`/`etag`).
 *
 * State notes:
 * - `headersSent`  — headers were already forwarded downstream; further
 *   attempts must seamlessly continue the body (206 + matching Content-Range).
 * - `error`        — the pending retryable error for the current attempt.
 * - `retryCount` / `retryCountCheckpoint` — attempts so far and the count at
 *   the last re-dispatch, used to reconcile mixed network/status failures.
 */
class RetryHandler {
  constructor (opts, { dispatch, handler }) {
    const { retryOptions, ...dispatchOpts } = opts
    const {
      // Retry scoped
      retry: retryFn,
      maxRetries,
      maxTimeout,
      minTimeout,
      timeoutFactor,
      // Response scoped
      methods,
      errorCodes,
      retryAfter,
      statusCodes,
      throwOnError
    } = retryOptions ?? {}

    this.error = null
    this.dispatch = dispatch
    this.handler = WrapHandler.wrap(handler)
    // Body is wrapped so it can be inspected/replayed across attempts.
    this.opts = { ...dispatchOpts, body: wrapRequestBody(opts.body) }
    this.retryOpts = {
      throwOnError: throwOnError ?? true,
      retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
      retryAfter: retryAfter ?? true,
      maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
      minTimeout: minTimeout ?? 500, // .5s
      timeoutFactor: timeoutFactor ?? 2,
      maxRetries: maxRetries ?? 5,
      // Which HTTP methods are eligible for retry (idempotent by default)
      methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
      // Which response status codes to retry
      statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
      // Which error codes to retry
      errorCodes: errorCodes ?? [
        'ECONNRESET',
        'ECONNREFUSED',
        'ENOTFOUND',
        'ENETDOWN',
        'ENETUNREACH',
        'EHOSTDOWN',
        'EHOSTUNREACH',
        'EPIPE',
        'UND_ERR_SOCKET'
      ]
    }

    this.retryCount = 0
    this.retryCountCheckpoint = 0
    this.headersSent = false
    this.start = 0
    this.end = null
    this.etag = null
  }

  // Handles a >=300 status: either surface it downstream immediately, record
  // it as the pending error (to be thrown/retried at end-of-response), or —
  // when throwOnError is off — consult the retry policy asynchronously while
  // the response stream is paused.
  onResponseStartWithRetry (controller, statusCode, headers, statusMessage, err) {
    if (this.retryOpts.throwOnError) {
      // Preserve old behavior for status codes that are not eligible for retry
      if (this.retryOpts.statusCodes.includes(statusCode) === false) {
        this.headersSent = true
        this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
      } else {
        this.error = err
      }

      return
    }

    // The body was already (partially) consumed; we cannot replay it, so
    // just deliver the error response downstream.
    if (isDisturbed(this.opts.body)) {
      this.headersSent = true
      this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
      return
    }

    // Called by the retry policy: a passed error means "do not retry" →
    // surface the response; no error means "retry" → keep the original
    // `err` (from the enclosing scope, not `passedErr`) as pending.
    function shouldRetry (passedErr) {
      if (passedErr) {
        this.headersSent = true
        this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
        controller.resume()
        return
      }

      this.error = err
      controller.resume()
    }

    controller.pause()
    this.retryOpts.retry(
      err,
      {
        state: { counter: this.retryCount },
        opts: { retryOptions: this.retryOpts, ...this.opts }
      },
      shouldRetry.bind(this)
    )
  }

  // Only forward the first attempt's start; on resumed attempts the
  // downstream handler already saw it.
  onRequestStart (controller, context) {
    if (!this.headersSent) {
      this.handler.onRequestStart?.(controller, context)
    }
  }

  onRequestUpgrade (controller, statusCode, headers, socket) {
    this.handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
  }

  // Default retry policy: decides, per error, whether and when to retry,
  // honoring Retry-After and exponential backoff. Invokes cb(err) to give up
  // or cb(null) (after the computed delay) to retry.
  static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
    const { statusCode, code, headers } = err
    const { method, retryOptions } = opts
    const {
      maxRetries,
      minTimeout,
      maxTimeout,
      timeoutFactor,
      statusCodes,
      errorCodes,
      methods
    } = retryOptions
    const { counter } = state

    // Any code that is not a Undici's originated and allowed to retry
    if (code && code !== 'UND_ERR_REQ_RETRY' && !errorCodes.includes(code)) {
      cb(err)
      return
    }

    // If a set of method are provided and the current method is not in the list
    if (Array.isArray(methods) && !methods.includes(method)) {
      cb(err)
      return
    }

    // If a set of status code are provided and the current status code is not in the list
    if (
      statusCode != null &&
      Array.isArray(statusCodes) &&
      !statusCodes.includes(statusCode)
    ) {
      cb(err)
      return
    }

    // If we reached the max number of retries
    if (counter > maxRetries) {
      cb(err)
      return
    }

    // Retry-After may be delta-seconds or an HTTP-date.
    let retryAfterHeader = headers?.['retry-after']
    if (retryAfterHeader) {
      retryAfterHeader = Number(retryAfterHeader)
      retryAfterHeader = Number.isNaN(retryAfterHeader)
        ? calculateRetryAfterHeader(headers['retry-after'])
        : retryAfterHeader * 1e3 // Retry-After is in seconds
    }

    // Server hint wins (capped); otherwise exponential backoff.
    const retryTimeout =
      retryAfterHeader > 0
        ? Math.min(retryAfterHeader, maxTimeout)
        : Math.min(minTimeout * timeoutFactor ** (counter - 1), maxTimeout)

    setTimeout(() => cb(null), retryTimeout)
  }

  // Per-attempt response entry point. Validates that a resumed attempt
  // continues exactly where the previous one stopped (206 + Content-Range +
  // ETag), or initializes resume bookkeeping on the first successful attempt.
  onResponseStart (controller, statusCode, headers, statusMessage) {
    this.error = null
    this.retryCount += 1

    if (statusCode >= 300) {
      const err = new RequestRetryError('Request failed', statusCode, {
        headers,
        data: {
          count: this.retryCount
        }
      })

      this.onResponseStartWithRetry(controller, statusCode, headers, statusMessage, err)
      return
    }

    // Checkpoint for resume from where we left it
    if (this.headersSent) {
      // Only Partial Content 206 supposed to provide Content-Range,
      // any other status code that partially consumed the payload
      // should not be retried because it would result in downstream
      // wrongly concatenate multiple responses.
      if (statusCode !== 206 && (this.start > 0 || statusCode !== 200)) {
        throw new RequestRetryError('server does not support the range header and the payload was partially consumed', statusCode, {
          headers,
          data: { count: this.retryCount }
        })
      }

      const contentRange = parseRangeHeader(headers['content-range'])
      // If no content range
      if (!contentRange) {
        // We always throw here as we want to indicate that we entered an unexpected path
        throw new RequestRetryError('Content-Range mismatch', statusCode, {
          headers,
          data: { count: this.retryCount }
        })
      }

      // Let's start with a weak etag check
      if (this.etag != null && this.etag !== headers.etag) {
        // We always throw here as we want to indicate that we entered an unexpected path
        throw new RequestRetryError('ETag mismatch', statusCode, {
          headers,
          data: { count: this.retryCount }
        })
      }

      const { start, size, end = size ? size - 1 : null } = contentRange

      assert(this.start === start, 'content-range mismatch')
      assert(this.end == null || this.end === end, 'content-range mismatch')

      return
    }

    if (this.end == null) {
      if (statusCode === 206) {
        // First time we receive 206
        const range = parseRangeHeader(headers['content-range'])

        if (range == null) {
          // No usable range info: deliver as-is, resume will not be possible.
          this.headersSent = true
          this.handler.onResponseStart?.(
            controller,
            statusCode,
            headers,
            statusMessage
          )
          return
        }

        const { start, size, end = size ? size - 1 : null } = range
        assert(
          start != null && Number.isFinite(start),
          'content-range mismatch'
        )
        assert(end != null && Number.isFinite(end), 'invalid content-length')

        this.start = start
        this.end = end
      }

      // We make our best to checkpoint the body for further range headers
      if (this.end == null) {
        const contentLength = headers['content-length']
        this.end = contentLength != null ? Number(contentLength) - 1 : null
      }

      assert(Number.isFinite(this.start))
      assert(
        this.end == null || Number.isFinite(this.end),
        'invalid content-length'
      )

      // NOTE(review): `this.resume` is assigned here but never read anywhere
      // in this class — possibly vestigial; confirm against the dispatcher
      // contract before removing.
      this.resume = true
      this.etag = headers.etag != null ? headers.etag : null

      // Weak etags are not useful for comparison nor cache
      // for instance not safe to assume if the response is byte-per-byte
      // equal
      if (
        this.etag != null &&
        this.etag[0] === 'W' &&
        this.etag[1] === '/'
      ) {
        this.etag = null
      }

      this.headersSent = true
      this.handler.onResponseStart?.(
        controller,
        statusCode,
        headers,
        statusMessage
      )
    } else {
      throw new RequestRetryError('Request failed', statusCode, {
        headers,
        data: { count: this.retryCount }
      })
    }
  }

  // Track how many body bytes were delivered so a resumed attempt can
  // request `bytes=start-…`. Chunks are dropped while an error is pending.
  onResponseData (controller, chunk) {
    if (this.error) {
      return
    }

    this.start += chunk.length

    this.handler.onResponseData?.(controller, chunk)
  }

  onResponseEnd (controller, trailers) {
    if (this.error && this.retryOpts.throwOnError) {
      throw this.error
    }

    if (!this.error) {
      // Attempt succeeded: reset the counter and complete downstream.
      this.retryCount = 0
      return this.handler.onResponseEnd?.(controller, trailers)
    }

    this.retry(controller)
  }

  // Re-dispatches the request. When bytes were already delivered, adds a
  // Range header (and If-Match when a strong ETag is known) to resume.
  retry (controller) {
    if (this.start !== 0) {
      const headers = { range: `bytes=${this.start}-${this.end ?? ''}` }

      // Weak etag check - weak etags will make comparison algorithms never match
      if (this.etag != null) {
        headers['if-match'] = this.etag
      }

      this.opts = {
        ...this.opts,
        headers: {
          ...this.opts.headers,
          ...headers
        }
      }
    }

    try {
      this.retryCountCheckpoint = this.retryCount
      this.dispatch(this.opts, this)
    } catch (err) {
      this.handler.onResponseError?.(controller, err)
    }
  }

  // Network-level (or dispatch-level) error path: consult the retry policy
  // unless the request was aborted or the body can no longer be replayed.
  onResponseError (controller, err) {
    if (controller?.aborted || isDisturbed(this.opts.body)) {
      this.handler.onResponseError?.(controller, err)
      return
    }

    function shouldRetry (returnedErr) {
      if (!returnedErr) {
        this.retry(controller)
        return
      }

      this.handler?.onResponseError?.(controller, returnedErr)
    }

    // We reconcile in case of a mix between network errors
    // and server error response
    if (this.retryCount - this.retryCountCheckpoint > 0) {
      // We count the difference between the last checkpoint and the current retry count
      this.retryCount =
        this.retryCountCheckpoint +
        (this.retryCount - this.retryCountCheckpoint)
    } else {
      this.retryCount += 1
    }

    this.retryOpts.retry(
      err,
      {
        state: { counter: this.retryCount },
        opts: { retryOptions: this.retryOpts, ...this.opts }
      },
      shouldRetry.bind(this)
    )
  }
}
|
||||
|
||||
module.exports = RetryHandler
|
||||
96
backend/node_modules/undici/lib/handler/unwrap-handler.js
generated
vendored
Normal file
96
backend/node_modules/undici/lib/handler/unwrap-handler.js
generated
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
'use strict'
|
||||
|
||||
const { parseHeaders } = require('../core/util')
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
|
||||
const kResume = Symbol('resume')
|
||||
|
||||
// Minimal controller implementation handed to controller-style handlers by
// UnwrapHandler: tracks pause/abort state and exposes it through getters.
class UnwrapController {
  // Internal state backing the getters below.
  #paused = false
  #reason = null
  #aborted = false
  #abort

  // Populated later (by onHeaders) with the dispatcher's resume callback.
  [kResume] = null

  constructor (abort) {
    this.#abort = abort
  }

  pause () {
    this.#paused = true
  }

  resume () {
    if (!this.#paused) {
      return
    }
    this.#paused = false
    this[kResume]?.()
  }

  // Aborting is idempotent; only the first reason is recorded and forwarded.
  abort (reason) {
    if (this.#aborted) {
      return
    }
    this.#aborted = true
    this.#reason = reason
    this.#abort(reason)
  }

  get paused () {
    return this.#paused
  }

  get aborted () {
    return this.#aborted
  }

  get reason () {
    return this.#reason
  }
}
|
||||
|
||||
module.exports = class UnwrapHandler {
|
||||
#handler
|
||||
#controller
|
||||
|
||||
constructor (handler) {
|
||||
this.#handler = handler
|
||||
}
|
||||
|
||||
static unwrap (handler) {
|
||||
// TODO (fix): More checks...
|
||||
return !handler.onRequestStart ? handler : new UnwrapHandler(handler)
|
||||
}
|
||||
|
||||
onConnect (abort, context) {
|
||||
this.#controller = new UnwrapController(abort)
|
||||
this.#handler.onRequestStart?.(this.#controller, context)
|
||||
}
|
||||
|
||||
onUpgrade (statusCode, rawHeaders, socket) {
|
||||
this.#handler.onRequestUpgrade?.(this.#controller, statusCode, parseHeaders(rawHeaders), socket)
|
||||
}
|
||||
|
||||
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
|
||||
this.#controller[kResume] = resume
|
||||
this.#handler.onResponseStart?.(this.#controller, statusCode, parseHeaders(rawHeaders), statusMessage)
|
||||
return !this.#controller.paused
|
||||
}
|
||||
|
||||
onData (data) {
|
||||
this.#handler.onResponseData?.(this.#controller, data)
|
||||
return !this.#controller.paused
|
||||
}
|
||||
|
||||
onComplete (rawTrailers) {
|
||||
this.#handler.onResponseEnd?.(this.#controller, parseHeaders(rawTrailers))
|
||||
}
|
||||
|
||||
onError (err) {
|
||||
if (!this.#handler.onResponseError) {
|
||||
throw new InvalidArgumentError('invalid onError method')
|
||||
}
|
||||
|
||||
this.#handler.onResponseError?.(this.#controller, err)
|
||||
}
|
||||
}
|
||||
95
backend/node_modules/undici/lib/handler/wrap-handler.js
generated
vendored
Normal file
95
backend/node_modules/undici/lib/handler/wrap-handler.js
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
'use strict'
|
||||
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
|
||||
module.exports = class WrapHandler {
|
||||
#handler
|
||||
|
||||
constructor (handler) {
|
||||
this.#handler = handler
|
||||
}
|
||||
|
||||
static wrap (handler) {
|
||||
// TODO (fix): More checks...
|
||||
return handler.onRequestStart ? handler : new WrapHandler(handler)
|
||||
}
|
||||
|
||||
// Unwrap Interface
|
||||
|
||||
onConnect (abort, context) {
|
||||
return this.#handler.onConnect?.(abort, context)
|
||||
}
|
||||
|
||||
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
|
||||
return this.#handler.onHeaders?.(statusCode, rawHeaders, resume, statusMessage)
|
||||
}
|
||||
|
||||
onUpgrade (statusCode, rawHeaders, socket) {
|
||||
return this.#handler.onUpgrade?.(statusCode, rawHeaders, socket)
|
||||
}
|
||||
|
||||
onData (data) {
|
||||
return this.#handler.onData?.(data)
|
||||
}
|
||||
|
||||
onComplete (trailers) {
|
||||
return this.#handler.onComplete?.(trailers)
|
||||
}
|
||||
|
||||
onError (err) {
|
||||
if (!this.#handler.onError) {
|
||||
throw err
|
||||
}
|
||||
|
||||
return this.#handler.onError?.(err)
|
||||
}
|
||||
|
||||
// Wrap Interface
|
||||
|
||||
onRequestStart (controller, context) {
|
||||
this.#handler.onConnect?.((reason) => controller.abort(reason), context)
|
||||
}
|
||||
|
||||
onRequestUpgrade (controller, statusCode, headers, socket) {
|
||||
const rawHeaders = []
|
||||
for (const [key, val] of Object.entries(headers)) {
|
||||
rawHeaders.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
|
||||
}
|
||||
|
||||
this.#handler.onUpgrade?.(statusCode, rawHeaders, socket)
|
||||
}
|
||||
|
||||
onResponseStart (controller, statusCode, headers, statusMessage) {
|
||||
const rawHeaders = []
|
||||
for (const [key, val] of Object.entries(headers)) {
|
||||
rawHeaders.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
|
||||
}
|
||||
|
||||
if (this.#handler.onHeaders?.(statusCode, rawHeaders, () => controller.resume(), statusMessage) === false) {
|
||||
controller.pause()
|
||||
}
|
||||
}
|
||||
|
||||
onResponseData (controller, data) {
|
||||
if (this.#handler.onData?.(data) === false) {
|
||||
controller.pause()
|
||||
}
|
||||
}
|
||||
|
||||
onResponseEnd (controller, trailers) {
|
||||
const rawTrailers = []
|
||||
for (const [key, val] of Object.entries(trailers)) {
|
||||
rawTrailers.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
|
||||
}
|
||||
|
||||
this.#handler.onComplete?.(rawTrailers)
|
||||
}
|
||||
|
||||
onResponseError (controller, err) {
|
||||
if (!this.#handler.onError) {
|
||||
throw new InvalidArgumentError('invalid onError method')
|
||||
}
|
||||
|
||||
this.#handler.onError?.(err)
|
||||
}
|
||||
}
|
||||
493
backend/node_modules/undici/lib/interceptor/cache.js
generated
vendored
Normal file
493
backend/node_modules/undici/lib/interceptor/cache.js
generated
vendored
Normal file
@@ -0,0 +1,493 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { Readable } = require('node:stream')
|
||||
const util = require('../core/util')
|
||||
const CacheHandler = require('../handler/cache-handler')
|
||||
const MemoryCacheStore = require('../cache/memory-cache-store')
|
||||
const CacheRevalidationHandler = require('../handler/cache-revalidation-handler')
|
||||
const { assertCacheStore, assertCacheMethods, makeCacheKey, normalizeHeaders, parseCacheControlHeader } = require('../util/cache.js')
|
||||
const { AbortError } = require('../core/errors.js')
|
||||
|
||||
/**
|
||||
* @param {(string | RegExp)[] | undefined} origins
|
||||
* @param {string} name
|
||||
*/
|
||||
/**
 * Validates a cache-origin allow/deny list: it must be undefined or an array
 * whose elements are strings or RegExps.
 * @param {(string | RegExp)[] | undefined} origins
 * @param {string} name - option name used in error messages
 * @throws {TypeError} when the value or any element has the wrong type
 */
function assertCacheOrigins (origins, name) {
  if (origins === undefined) return
  if (!Array.isArray(origins)) {
    throw new TypeError(`expected ${name} to be an array or undefined, got ${typeof origins}`)
  }
  for (const [i, origin] of origins.entries()) {
    if (typeof origin !== 'string' && !(origin instanceof RegExp)) {
      throw new TypeError(`expected ${name}[${i}] to be a string or RegExp, got ${typeof origin}`)
    }
  }
}
|
||||
|
||||
const nop = () => {}
|
||||
|
||||
/**
|
||||
* @typedef {(options: import('../../types/dispatcher.d.ts').default.DispatchOptions, handler: import('../../types/dispatcher.d.ts').default.DispatchHandler) => void} DispatchFn
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} cacheControlDirectives
|
||||
* @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
|
||||
* @returns {boolean}
|
||||
*/
|
||||
/**
 * Determines whether a cached response must be revalidated with the origin
 * before being served.
 * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} cacheControlDirectives
 * @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
 * @returns {boolean}
 */
function needsRevalidation (result, cacheControlDirectives, { headers = {} }) {
  // The request's own no-cache directive always forces revalidation.
  if (cacheControlDirectives?.['no-cache']) {
    return true
  }

  // An unqualified response no-cache (not scoped to a header list, i.e. not
  // an array) also forces revalidation.
  const responseNoCache = result.cacheControlDirectives?.['no-cache']
  if (responseNoCache && !Array.isArray(responseNoCache)) {
    return true
  }

  // Conditional requests are always forwarded to the origin.
  if (headers['if-modified-since'] || headers['if-none-match']) {
    return true
  }

  return false
}
|
||||
|
||||
/**
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} cacheControlDirectives
|
||||
* @returns {boolean}
|
||||
*/
|
||||
/**
 * Reports whether a cached response counts as stale for this request,
 * honoring the request's max-stale and min-fresh directives.
 * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} cacheControlDirectives
 * @returns {boolean}
 */
function isStale (result, cacheControlDirectives) {
  const currentTime = Date.now()

  if (currentTime > result.staleAt) {
    // Past its freshness lifetime. max-stale grants a grace period during
    // which the client still accepts the stale response.
    // https://www.rfc-editor.org/rfc/rfc9111.html#name-max-stale
    const maxStale = cacheControlDirectives?.['max-stale']
    if (maxStale) {
      return currentTime > result.staleAt + (maxStale * 1000)
    }

    return true
  }

  // Still fresh; min-fresh demands it stays fresh for at least that long.
  // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.3
  const minFresh = cacheControlDirectives?.['min-fresh']
  if (minFresh) {
    // staleAt is > currentTime on this path.
    return result.staleAt - currentTime <= minFresh * 1000
  }

  return false
}
|
||||
|
||||
/**
|
||||
* Check if we're within the stale-while-revalidate window for a stale response
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
|
||||
* @returns {boolean}
|
||||
*/
|
||||
/**
 * Check if we're within the stale-while-revalidate window for a stale
 * response (RFC 5861): the response may be served stale while a background
 * revalidation runs.
 * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
 * @returns {boolean}
 */
function withinStaleWhileRevalidateWindow (result) {
  const windowSeconds = result.cacheControlDirectives?.['stale-while-revalidate']
  if (!windowSeconds) {
    return false
  }

  return Date.now() <= result.staleAt + (windowSeconds * 1000)
}
|
||||
|
||||
/**
|
||||
* @param {DispatchFn} dispatch
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} globalOpts
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
|
||||
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
|
||||
* @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} reqCacheControl
|
||||
*/
|
||||
// Called when the cache has no usable entry. With only-if-cached the request
// must NOT hit the network: a synthetic 504 Gateway Timeout is delivered via
// the legacy callback interface instead (RFC 9111 §5.2.1.7). Otherwise the
// request is dispatched for real, with a CacheHandler recording the response.
function handleUncachedResponse (
  dispatch,
  globalOpts,
  cacheKey,
  handler,
  opts,
  reqCacheControl
) {
  if (reqCacheControl?.['only-if-cached']) {
    let aborted = false
    try {
      if (typeof handler.onConnect === 'function') {
        handler.onConnect(() => {
          aborted = true
        })

        // The handler may abort synchronously from onConnect; stop early.
        if (aborted) {
          return
        }
      }

      if (typeof handler.onHeaders === 'function') {
        handler.onHeaders(504, [], nop, 'Gateway Timeout')
        // Re-check: onHeaders may also have triggered the abort callback.
        if (aborted) {
          return
        }
      }

      if (typeof handler.onComplete === 'function') {
        handler.onComplete([])
      }
    } catch (err) {
      if (typeof handler.onError === 'function') {
        handler.onError(err)
      }
    }

    // NOTE(review): returns `true` here but `undefined` on the aborted early
    // returns above — callers presumably only rely on truthiness of the
    // dispatch result; confirm against the interceptor's dispatch contract.
    return true
  }

  return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
}
|
||||
|
||||
/**
|
||||
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
|
||||
* @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
|
||||
* @param {number} age
|
||||
* @param {any} context
|
||||
* @param {boolean} isStale
|
||||
*/
|
||||
// Replays a cached response to the handler without touching the network.
// The cached body (stream or iterable) is adapted behind an ad-hoc controller
// whose pause/resume/abort map onto the underlying Readable.
function sendCachedValue (handler, opts, result, age, context, isStale) {
  // TODO (perf): Readable.from path can be optimized...
  const stream = util.isStream(result.body)
    ? result.body
    : Readable.from(result.body ?? [])

  assert(!stream.destroyed, 'stream should not be destroyed')
  assert(!stream.readableDidRead, 'stream should not be readableDidRead')

  // Controller facade: all state is derived from the stream itself.
  const controller = {
    resume () {
      stream.resume()
    },
    pause () {
      stream.pause()
    },
    get paused () {
      return stream.isPaused()
    },
    get aborted () {
      return stream.destroyed
    },
    get reason () {
      return stream.errored
    },
    abort (reason) {
      stream.destroy(reason ?? new AbortError())
    }
  }

  stream
    .on('error', function (err) {
      // Errors after the body fully ended are ignored; otherwise forward,
      // or rethrow when the handler has no error callback.
      if (!this.readableEnded) {
        if (typeof handler.onResponseError === 'function') {
          handler.onResponseError(controller, err)
        } else {
          throw err
        }
      }
    })
    .on('close', function () {
      // A clean close (no error) completes the response downstream.
      if (!this.errored) {
        handler.onResponseEnd?.(controller, {})
      }
    })

  handler.onRequestStart?.(controller, context)

  // The handler may have aborted synchronously during onRequestStart.
  if (stream.destroyed) {
    return
  }

  // Add the age header
  // https://www.rfc-editor.org/rfc/rfc9111.html#name-age
  const headers = { ...result.headers, age: String(age) }

  if (isStale) {
    // Add warning header
    // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Warning
    headers.warning = '110 - "response is stale"'
  }

  handler.onResponseStart?.(controller, result.statusCode, headers, result.statusMessage)

  if (opts.method === 'HEAD') {
    // HEAD responses have no body: destroying triggers 'close' → onResponseEnd.
    stream.destroy()
  } else {
    stream.on('data', function (chunk) {
      handler.onResponseData?.(controller, chunk)
    })
  }
}
|
||||
|
||||
/**
|
||||
* @param {DispatchFn} dispatch
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} globalOpts
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
|
||||
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
|
||||
* @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} reqCacheControl
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult | undefined} result
|
||||
*/
|
||||
function handleResult (
|
||||
dispatch,
|
||||
globalOpts,
|
||||
cacheKey,
|
||||
handler,
|
||||
opts,
|
||||
reqCacheControl,
|
||||
result
|
||||
) {
|
||||
if (!result) {
|
||||
return handleUncachedResponse(dispatch, globalOpts, cacheKey, handler, opts, reqCacheControl)
|
||||
}
|
||||
|
||||
const now = Date.now()
|
||||
if (now > result.deleteAt) {
|
||||
// Response is expired, cache store shouldn't have given this to us
|
||||
return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
|
||||
}
|
||||
|
||||
const age = Math.round((now - result.cachedAt) / 1000)
|
||||
if (reqCacheControl?.['max-age'] && age >= reqCacheControl['max-age']) {
|
||||
// Response is considered expired for this specific request
|
||||
// https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.1
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
|
||||
const stale = isStale(result, reqCacheControl)
|
||||
const revalidate = needsRevalidation(result, reqCacheControl, opts)
|
||||
|
||||
// Check if the response is stale
|
||||
if (stale || revalidate) {
|
||||
if (util.isStream(opts.body) && util.bodyLength(opts.body) !== 0) {
|
||||
// If body is a stream we can't revalidate...
|
||||
// TODO (fix): This could be less strict...
|
||||
return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
|
||||
}
|
||||
|
||||
// RFC 5861: If we're within stale-while-revalidate window, serve stale immediately
|
||||
// and revalidate in background, unless immediate revalidation is necessary
|
||||
if (!revalidate && withinStaleWhileRevalidateWindow(result)) {
|
||||
// Serve stale response immediately
|
||||
sendCachedValue(handler, opts, result, age, null, true)
|
||||
|
||||
// Start background revalidation (fire-and-forget)
|
||||
queueMicrotask(() => {
|
||||
let headers = {
|
||||
...opts.headers,
|
||||
'if-modified-since': new Date(result.cachedAt).toUTCString()
|
||||
}
|
||||
|
||||
if (result.etag) {
|
||||
headers['if-none-match'] = result.etag
|
||||
}
|
||||
|
||||
if (result.vary) {
|
||||
headers = {
|
||||
...headers,
|
||||
...result.vary
|
||||
}
|
||||
}
|
||||
|
||||
// Background revalidation - update cache if we get new data
|
||||
dispatch(
|
||||
{
|
||||
...opts,
|
||||
headers
|
||||
},
|
||||
new CacheHandler(globalOpts, cacheKey, {
|
||||
// Silent handler that just updates the cache
|
||||
onRequestStart () {},
|
||||
onRequestUpgrade () {},
|
||||
onResponseStart () {},
|
||||
onResponseData () {},
|
||||
onResponseEnd () {},
|
||||
onResponseError () {}
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
let withinStaleIfErrorThreshold = false
|
||||
const staleIfErrorExpiry = result.cacheControlDirectives['stale-if-error'] ?? reqCacheControl?.['stale-if-error']
|
||||
if (staleIfErrorExpiry) {
|
||||
withinStaleIfErrorThreshold = now < (result.staleAt + (staleIfErrorExpiry * 1000))
|
||||
}
|
||||
|
||||
let headers = {
|
||||
...opts.headers,
|
||||
'if-modified-since': new Date(result.cachedAt).toUTCString()
|
||||
}
|
||||
|
||||
if (result.etag) {
|
||||
headers['if-none-match'] = result.etag
|
||||
}
|
||||
|
||||
if (result.vary) {
|
||||
headers = {
|
||||
...headers,
|
||||
...result.vary
|
||||
}
|
||||
}
|
||||
|
||||
// We need to revalidate the response
|
||||
return dispatch(
|
||||
{
|
||||
...opts,
|
||||
headers
|
||||
},
|
||||
new CacheRevalidationHandler(
|
||||
(success, context) => {
|
||||
if (success) {
|
||||
// TODO: successful revalidation should be considered fresh (not give stale warning).
|
||||
sendCachedValue(handler, opts, result, age, context, stale)
|
||||
} else if (util.isStream(result.body)) {
|
||||
result.body.on('error', nop).destroy()
|
||||
}
|
||||
},
|
||||
new CacheHandler(globalOpts, cacheKey, handler),
|
||||
withinStaleIfErrorThreshold
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Dump request body.
|
||||
if (util.isStream(opts.body)) {
|
||||
opts.body.on('error', nop).destroy()
|
||||
}
|
||||
|
||||
sendCachedValue(handler, opts, result, age, null, false)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions} [opts]
|
||||
* @returns {import('../../types/dispatcher.d.ts').default.DispatcherComposeInterceptor}
|
||||
*/
|
||||
module.exports = (opts = {}) => {
|
||||
const {
|
||||
store = new MemoryCacheStore(),
|
||||
methods = ['GET'],
|
||||
cacheByDefault = undefined,
|
||||
type = 'shared',
|
||||
origins = undefined
|
||||
} = opts
|
||||
|
||||
if (typeof opts !== 'object' || opts === null) {
|
||||
throw new TypeError(`expected type of opts to be an Object, got ${opts === null ? 'null' : typeof opts}`)
|
||||
}
|
||||
|
||||
assertCacheStore(store, 'opts.store')
|
||||
assertCacheMethods(methods, 'opts.methods')
|
||||
assertCacheOrigins(origins, 'opts.origins')
|
||||
|
||||
if (typeof cacheByDefault !== 'undefined' && typeof cacheByDefault !== 'number') {
|
||||
throw new TypeError(`expected opts.cacheByDefault to be number or undefined, got ${typeof cacheByDefault}`)
|
||||
}
|
||||
|
||||
if (typeof type !== 'undefined' && type !== 'shared' && type !== 'private') {
|
||||
throw new TypeError(`expected opts.type to be shared, private, or undefined, got ${typeof type}`)
|
||||
}
|
||||
|
||||
const globalOpts = {
|
||||
store,
|
||||
methods,
|
||||
cacheByDefault,
|
||||
type
|
||||
}
|
||||
|
||||
const safeMethodsToNotCache = util.safeHTTPMethods.filter(method => methods.includes(method) === false)
|
||||
|
||||
return dispatch => {
|
||||
return (opts, handler) => {
|
||||
if (!opts.origin || safeMethodsToNotCache.includes(opts.method)) {
|
||||
// Not a method we want to cache or we don't have the origin, skip
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
|
||||
// Check if origin is in whitelist
|
||||
if (origins !== undefined) {
|
||||
const requestOrigin = opts.origin.toString().toLowerCase()
|
||||
let isAllowed = false
|
||||
|
||||
for (let i = 0; i < origins.length; i++) {
|
||||
const allowed = origins[i]
|
||||
if (typeof allowed === 'string') {
|
||||
if (allowed.toLowerCase() === requestOrigin) {
|
||||
isAllowed = true
|
||||
break
|
||||
}
|
||||
} else if (allowed.test(requestOrigin)) {
|
||||
isAllowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (!isAllowed) {
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
}
|
||||
|
||||
opts = {
|
||||
...opts,
|
||||
headers: normalizeHeaders(opts)
|
||||
}
|
||||
|
||||
const reqCacheControl = opts.headers?.['cache-control']
|
||||
? parseCacheControlHeader(opts.headers['cache-control'])
|
||||
: undefined
|
||||
|
||||
if (reqCacheControl?.['no-store']) {
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {import('../../types/cache-interceptor.d.ts').default.CacheKey}
|
||||
*/
|
||||
const cacheKey = makeCacheKey(opts)
|
||||
const result = store.get(cacheKey)
|
||||
|
||||
if (result && typeof result.then === 'function') {
|
||||
return result
|
||||
.then(result => handleResult(dispatch,
|
||||
globalOpts,
|
||||
cacheKey,
|
||||
handler,
|
||||
opts,
|
||||
reqCacheControl,
|
||||
result
|
||||
))
|
||||
} else {
|
||||
return handleResult(
|
||||
dispatch,
|
||||
globalOpts,
|
||||
cacheKey,
|
||||
handler,
|
||||
opts,
|
||||
reqCacheControl,
|
||||
result
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
259
backend/node_modules/undici/lib/interceptor/decompress.js
generated
vendored
Normal file
259
backend/node_modules/undici/lib/interceptor/decompress.js
generated
vendored
Normal file
@@ -0,0 +1,259 @@
|
||||
'use strict'
|
||||
|
||||
const { createInflate, createGunzip, createBrotliDecompress, createZstdDecompress } = require('node:zlib')
|
||||
const { pipeline } = require('node:stream')
|
||||
const DecoratorHandler = require('../handler/decorator-handler')
|
||||
const { runtimeFeatures } = require('../util/runtime-features')
|
||||
|
||||
/** @typedef {import('node:stream').Transform} Transform */
|
||||
/** @typedef {import('node:stream').Transform} Controller */
|
||||
/** @typedef {Transform&import('node:zlib').Zlib} DecompressorStream */
|
||||
|
||||
/** @type {Record<string, () => DecompressorStream>} */
|
||||
const supportedEncodings = {
|
||||
gzip: createGunzip,
|
||||
'x-gzip': createGunzip,
|
||||
br: createBrotliDecompress,
|
||||
deflate: createInflate,
|
||||
compress: createInflate,
|
||||
'x-compress': createInflate,
|
||||
...(runtimeFeatures.has('zstd') ? { zstd: createZstdDecompress } : {})
|
||||
}
|
||||
|
||||
const defaultSkipStatusCodes = /** @type {const} */ ([204, 304])
|
||||
|
||||
let warningEmitted = /** @type {boolean} */ (false)
|
||||
|
||||
/**
|
||||
* @typedef {Object} DecompressHandlerOptions
|
||||
* @property {number[]|Readonly<number[]>} [skipStatusCodes=[204, 304]] - List of status codes to skip decompression for
|
||||
* @property {boolean} [skipErrorResponses] - Whether to skip decompression for error responses (status codes >= 400)
|
||||
*/
|
||||
|
||||
class DecompressHandler extends DecoratorHandler {
|
||||
/** @type {Transform[]} */
|
||||
#decompressors = []
|
||||
/** @type {Readonly<number[]>} */
|
||||
#skipStatusCodes
|
||||
/** @type {boolean} */
|
||||
#skipErrorResponses
|
||||
|
||||
constructor (handler, { skipStatusCodes = defaultSkipStatusCodes, skipErrorResponses = true } = {}) {
|
||||
super(handler)
|
||||
this.#skipStatusCodes = skipStatusCodes
|
||||
this.#skipErrorResponses = skipErrorResponses
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if decompression should be skipped based on encoding and status code
|
||||
* @param {string} contentEncoding - Content-Encoding header value
|
||||
* @param {number} statusCode - HTTP status code of the response
|
||||
* @returns {boolean} - True if decompression should be skipped
|
||||
*/
|
||||
#shouldSkipDecompression (contentEncoding, statusCode) {
|
||||
if (!contentEncoding || statusCode < 200) return true
|
||||
if (this.#skipStatusCodes.includes(statusCode)) return true
|
||||
if (this.#skipErrorResponses && statusCode >= 400) return true
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a chain of decompressors for multiple content encodings
|
||||
*
|
||||
* @param {string} encodings - Comma-separated list of content encodings
|
||||
* @returns {Array<DecompressorStream>} - Array of decompressor streams
|
||||
* @throws {Error} - If the number of content-encodings exceeds the maximum allowed
|
||||
*/
|
||||
#createDecompressionChain (encodings) {
|
||||
const parts = encodings.split(',')
|
||||
|
||||
// Limit the number of content-encodings to prevent resource exhaustion.
|
||||
// CVE fix similar to urllib3 (GHSA-gm62-xv2j-4w53) and curl (CVE-2022-32206).
|
||||
const maxContentEncodings = 5
|
||||
if (parts.length > maxContentEncodings) {
|
||||
throw new Error(`too many content-encodings in response: ${parts.length}, maximum allowed is ${maxContentEncodings}`)
|
||||
}
|
||||
|
||||
/** @type {DecompressorStream[]} */
|
||||
const decompressors = []
|
||||
|
||||
for (let i = parts.length - 1; i >= 0; i--) {
|
||||
const encoding = parts[i].trim()
|
||||
if (!encoding) continue
|
||||
|
||||
if (!supportedEncodings[encoding]) {
|
||||
decompressors.length = 0 // Clear if unsupported encoding
|
||||
return decompressors // Unsupported encoding
|
||||
}
|
||||
|
||||
decompressors.push(supportedEncodings[encoding]())
|
||||
}
|
||||
|
||||
return decompressors
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up event handlers for a decompressor stream using readable events
|
||||
* @param {DecompressorStream} decompressor - The decompressor stream
|
||||
* @param {Controller} controller - The controller to coordinate with
|
||||
* @returns {void}
|
||||
*/
|
||||
#setupDecompressorEvents (decompressor, controller) {
|
||||
decompressor.on('readable', () => {
|
||||
let chunk
|
||||
while ((chunk = decompressor.read()) !== null) {
|
||||
const result = super.onResponseData(controller, chunk)
|
||||
if (result === false) {
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
decompressor.on('error', (error) => {
|
||||
super.onResponseError(controller, error)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up event handling for a single decompressor
|
||||
* @param {Controller} controller - The controller to handle events
|
||||
* @returns {void}
|
||||
*/
|
||||
#setupSingleDecompressor (controller) {
|
||||
const decompressor = this.#decompressors[0]
|
||||
this.#setupDecompressorEvents(decompressor, controller)
|
||||
|
||||
decompressor.on('end', () => {
|
||||
super.onResponseEnd(controller, {})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up event handling for multiple chained decompressors using pipeline
|
||||
* @param {Controller} controller - The controller to handle events
|
||||
* @returns {void}
|
||||
*/
|
||||
#setupMultipleDecompressors (controller) {
|
||||
const lastDecompressor = this.#decompressors[this.#decompressors.length - 1]
|
||||
this.#setupDecompressorEvents(lastDecompressor, controller)
|
||||
|
||||
pipeline(this.#decompressors, (err) => {
|
||||
if (err) {
|
||||
super.onResponseError(controller, err)
|
||||
return
|
||||
}
|
||||
super.onResponseEnd(controller, {})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up decompressor references to prevent memory leaks
|
||||
* @returns {void}
|
||||
*/
|
||||
#cleanupDecompressors () {
|
||||
this.#decompressors.length = 0
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Controller} controller
|
||||
* @param {number} statusCode
|
||||
* @param {Record<string, string | string[] | undefined>} headers
|
||||
* @param {string} statusMessage
|
||||
* @returns {void}
|
||||
*/
|
||||
onResponseStart (controller, statusCode, headers, statusMessage) {
|
||||
const contentEncoding = headers['content-encoding']
|
||||
|
||||
// If content encoding is not supported or status code is in skip list
|
||||
if (this.#shouldSkipDecompression(contentEncoding, statusCode)) {
|
||||
return super.onResponseStart(controller, statusCode, headers, statusMessage)
|
||||
}
|
||||
|
||||
const decompressors = this.#createDecompressionChain(contentEncoding.toLowerCase())
|
||||
|
||||
if (decompressors.length === 0) {
|
||||
this.#cleanupDecompressors()
|
||||
return super.onResponseStart(controller, statusCode, headers, statusMessage)
|
||||
}
|
||||
|
||||
this.#decompressors = decompressors
|
||||
|
||||
// Remove compression headers since we're decompressing
|
||||
const { 'content-encoding': _, 'content-length': __, ...newHeaders } = headers
|
||||
|
||||
if (this.#decompressors.length === 1) {
|
||||
this.#setupSingleDecompressor(controller)
|
||||
} else {
|
||||
this.#setupMultipleDecompressors(controller)
|
||||
}
|
||||
|
||||
return super.onResponseStart(controller, statusCode, newHeaders, statusMessage)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Controller} controller
|
||||
* @param {Buffer} chunk
|
||||
* @returns {void}
|
||||
*/
|
||||
onResponseData (controller, chunk) {
|
||||
if (this.#decompressors.length > 0) {
|
||||
this.#decompressors[0].write(chunk)
|
||||
return
|
||||
}
|
||||
super.onResponseData(controller, chunk)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Controller} controller
|
||||
* @param {Record<string, string | string[]> | undefined} trailers
|
||||
* @returns {void}
|
||||
*/
|
||||
onResponseEnd (controller, trailers) {
|
||||
if (this.#decompressors.length > 0) {
|
||||
this.#decompressors[0].end()
|
||||
this.#cleanupDecompressors()
|
||||
return
|
||||
}
|
||||
super.onResponseEnd(controller, trailers)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Controller} controller
|
||||
* @param {Error} err
|
||||
* @returns {void}
|
||||
*/
|
||||
onResponseError (controller, err) {
|
||||
if (this.#decompressors.length > 0) {
|
||||
for (const decompressor of this.#decompressors) {
|
||||
decompressor.destroy(err)
|
||||
}
|
||||
this.#cleanupDecompressors()
|
||||
}
|
||||
super.onResponseError(controller, err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a decompression interceptor for HTTP responses
|
||||
* @param {DecompressHandlerOptions} [options] - Options for the interceptor
|
||||
* @returns {Function} - Interceptor function
|
||||
*/
|
||||
function createDecompressInterceptor (options = {}) {
|
||||
// Emit experimental warning only once
|
||||
if (!warningEmitted) {
|
||||
process.emitWarning(
|
||||
'DecompressInterceptor is experimental and subject to change',
|
||||
'ExperimentalWarning'
|
||||
)
|
||||
warningEmitted = true
|
||||
}
|
||||
|
||||
return (dispatch) => {
|
||||
return (opts, handler) => {
|
||||
const decompressHandler = new DecompressHandler(handler, options)
|
||||
return dispatch(opts, decompressHandler)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = createDecompressInterceptor
|
||||
109
backend/node_modules/undici/lib/interceptor/deduplicate.js
generated
vendored
Normal file
109
backend/node_modules/undici/lib/interceptor/deduplicate.js
generated
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
'use strict'
|
||||
|
||||
const diagnosticsChannel = require('node:diagnostics_channel')
|
||||
const util = require('../core/util')
|
||||
const DeduplicationHandler = require('../handler/deduplication-handler')
|
||||
const { normalizeHeaders, makeCacheKey, makeDeduplicationKey } = require('../util/cache.js')
|
||||
|
||||
const pendingRequestsChannel = diagnosticsChannel.channel('undici:request:pending-requests')
|
||||
|
||||
/**
|
||||
* @param {import('../../types/interceptors.d.ts').default.DeduplicateInterceptorOpts} [opts]
|
||||
* @returns {import('../../types/dispatcher.d.ts').default.DispatcherComposeInterceptor}
|
||||
*/
|
||||
module.exports = (opts = {}) => {
|
||||
const {
|
||||
methods = ['GET'],
|
||||
skipHeaderNames = [],
|
||||
excludeHeaderNames = []
|
||||
} = opts
|
||||
|
||||
if (typeof opts !== 'object' || opts === null) {
|
||||
throw new TypeError(`expected type of opts to be an Object, got ${opts === null ? 'null' : typeof opts}`)
|
||||
}
|
||||
|
||||
if (!Array.isArray(methods)) {
|
||||
throw new TypeError(`expected opts.methods to be an array, got ${typeof methods}`)
|
||||
}
|
||||
|
||||
for (const method of methods) {
|
||||
if (!util.safeHTTPMethods.includes(method)) {
|
||||
throw new TypeError(`expected opts.methods to only contain safe HTTP methods, got ${method}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (!Array.isArray(skipHeaderNames)) {
|
||||
throw new TypeError(`expected opts.skipHeaderNames to be an array, got ${typeof skipHeaderNames}`)
|
||||
}
|
||||
|
||||
if (!Array.isArray(excludeHeaderNames)) {
|
||||
throw new TypeError(`expected opts.excludeHeaderNames to be an array, got ${typeof excludeHeaderNames}`)
|
||||
}
|
||||
|
||||
// Convert to lowercase Set for case-insensitive header matching
|
||||
const skipHeaderNamesSet = new Set(skipHeaderNames.map(name => name.toLowerCase()))
|
||||
|
||||
// Convert to lowercase Set for case-insensitive header exclusion from deduplication key
|
||||
const excludeHeaderNamesSet = new Set(excludeHeaderNames.map(name => name.toLowerCase()))
|
||||
|
||||
const safeMethodsToNotDeduplicate = util.safeHTTPMethods.filter(method => methods.includes(method) === false)
|
||||
|
||||
/**
|
||||
* Map of pending requests for deduplication
|
||||
* @type {Map<string, DeduplicationHandler>}
|
||||
*/
|
||||
const pendingRequests = new Map()
|
||||
|
||||
return dispatch => {
|
||||
return (opts, handler) => {
|
||||
if (!opts.origin || safeMethodsToNotDeduplicate.includes(opts.method)) {
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
|
||||
opts = {
|
||||
...opts,
|
||||
headers: normalizeHeaders(opts)
|
||||
}
|
||||
|
||||
// Skip deduplication if request contains any of the specified headers
|
||||
if (skipHeaderNamesSet.size > 0) {
|
||||
for (const headerName of Object.keys(opts.headers)) {
|
||||
if (skipHeaderNamesSet.has(headerName.toLowerCase())) {
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const cacheKey = makeCacheKey(opts)
|
||||
const dedupeKey = makeDeduplicationKey(cacheKey, excludeHeaderNamesSet)
|
||||
|
||||
// Check if there's already a pending request for this key
|
||||
const pendingHandler = pendingRequests.get(dedupeKey)
|
||||
if (pendingHandler) {
|
||||
// Add this handler to the waiting list
|
||||
pendingHandler.addWaitingHandler(handler)
|
||||
return true
|
||||
}
|
||||
|
||||
// Create a new deduplication handler
|
||||
const deduplicationHandler = new DeduplicationHandler(
|
||||
handler,
|
||||
() => {
|
||||
// Clean up when request completes
|
||||
pendingRequests.delete(dedupeKey)
|
||||
if (pendingRequestsChannel.hasSubscribers) {
|
||||
pendingRequestsChannel.publish({ size: pendingRequests.size, key: dedupeKey, type: 'removed' })
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
// Register the pending request
|
||||
pendingRequests.set(dedupeKey, deduplicationHandler)
|
||||
if (pendingRequestsChannel.hasSubscribers) {
|
||||
pendingRequestsChannel.publish({ size: pendingRequests.size, key: dedupeKey, type: 'added' })
|
||||
}
|
||||
|
||||
return dispatch(opts, deduplicationHandler)
|
||||
}
|
||||
}
|
||||
}
|
||||
474
backend/node_modules/undici/lib/interceptor/dns.js
generated
vendored
Normal file
474
backend/node_modules/undici/lib/interceptor/dns.js
generated
vendored
Normal file
@@ -0,0 +1,474 @@
|
||||
'use strict'
|
||||
const { isIP } = require('node:net')
|
||||
const { lookup } = require('node:dns')
|
||||
const DecoratorHandler = require('../handler/decorator-handler')
|
||||
const { InvalidArgumentError, InformationalError } = require('../core/errors')
|
||||
const maxInt = Math.pow(2, 31) - 1
|
||||
|
||||
class DNSStorage {
|
||||
#maxItems = 0
|
||||
#records = new Map()
|
||||
|
||||
constructor (opts) {
|
||||
this.#maxItems = opts.maxItems
|
||||
}
|
||||
|
||||
get size () {
|
||||
return this.#records.size
|
||||
}
|
||||
|
||||
get (hostname) {
|
||||
return this.#records.get(hostname) ?? null
|
||||
}
|
||||
|
||||
set (hostname, records) {
|
||||
this.#records.set(hostname, records)
|
||||
}
|
||||
|
||||
delete (hostname) {
|
||||
this.#records.delete(hostname)
|
||||
}
|
||||
|
||||
// Delegate to storage decide can we do more lookups or not
|
||||
full () {
|
||||
return this.size >= this.#maxItems
|
||||
}
|
||||
}
|
||||
|
||||
class DNSInstance {
|
||||
#maxTTL = 0
|
||||
#maxItems = 0
|
||||
dualStack = true
|
||||
affinity = null
|
||||
lookup = null
|
||||
pick = null
|
||||
storage = null
|
||||
|
||||
constructor (opts) {
|
||||
this.#maxTTL = opts.maxTTL
|
||||
this.#maxItems = opts.maxItems
|
||||
this.dualStack = opts.dualStack
|
||||
this.affinity = opts.affinity
|
||||
this.lookup = opts.lookup ?? this.#defaultLookup
|
||||
this.pick = opts.pick ?? this.#defaultPick
|
||||
this.storage = opts.storage ?? new DNSStorage(opts)
|
||||
}
|
||||
|
||||
runLookup (origin, opts, cb) {
|
||||
const ips = this.storage.get(origin.hostname)
|
||||
|
||||
// If full, we just return the origin
|
||||
if (ips == null && this.storage.full()) {
|
||||
cb(null, origin)
|
||||
return
|
||||
}
|
||||
|
||||
const newOpts = {
|
||||
affinity: this.affinity,
|
||||
dualStack: this.dualStack,
|
||||
lookup: this.lookup,
|
||||
pick: this.pick,
|
||||
...opts.dns,
|
||||
maxTTL: this.#maxTTL,
|
||||
maxItems: this.#maxItems
|
||||
}
|
||||
|
||||
// If no IPs we lookup
|
||||
if (ips == null) {
|
||||
this.lookup(origin, newOpts, (err, addresses) => {
|
||||
if (err || addresses == null || addresses.length === 0) {
|
||||
cb(err ?? new InformationalError('No DNS entries found'))
|
||||
return
|
||||
}
|
||||
|
||||
this.setRecords(origin, addresses)
|
||||
const records = this.storage.get(origin.hostname)
|
||||
|
||||
const ip = this.pick(
|
||||
origin,
|
||||
records,
|
||||
newOpts.affinity
|
||||
)
|
||||
|
||||
let port
|
||||
if (typeof ip.port === 'number') {
|
||||
port = `:${ip.port}`
|
||||
} else if (origin.port !== '') {
|
||||
port = `:${origin.port}`
|
||||
} else {
|
||||
port = ''
|
||||
}
|
||||
|
||||
cb(
|
||||
null,
|
||||
new URL(`${origin.protocol}//${
|
||||
ip.family === 6 ? `[${ip.address}]` : ip.address
|
||||
}${port}`)
|
||||
)
|
||||
})
|
||||
} else {
|
||||
// If there's IPs we pick
|
||||
const ip = this.pick(
|
||||
origin,
|
||||
ips,
|
||||
newOpts.affinity
|
||||
)
|
||||
|
||||
// If no IPs we lookup - deleting old records
|
||||
if (ip == null) {
|
||||
this.storage.delete(origin.hostname)
|
||||
this.runLookup(origin, opts, cb)
|
||||
return
|
||||
}
|
||||
|
||||
let port
|
||||
if (typeof ip.port === 'number') {
|
||||
port = `:${ip.port}`
|
||||
} else if (origin.port !== '') {
|
||||
port = `:${origin.port}`
|
||||
} else {
|
||||
port = ''
|
||||
}
|
||||
|
||||
cb(
|
||||
null,
|
||||
new URL(`${origin.protocol}//${
|
||||
ip.family === 6 ? `[${ip.address}]` : ip.address
|
||||
}${port}`)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#defaultLookup (origin, opts, cb) {
|
||||
lookup(
|
||||
origin.hostname,
|
||||
{
|
||||
all: true,
|
||||
family: this.dualStack === false ? this.affinity : 0,
|
||||
order: 'ipv4first'
|
||||
},
|
||||
(err, addresses) => {
|
||||
if (err) {
|
||||
return cb(err)
|
||||
}
|
||||
|
||||
const results = new Map()
|
||||
|
||||
for (const addr of addresses) {
|
||||
// On linux we found duplicates, we attempt to remove them with
|
||||
// the latest record
|
||||
results.set(`${addr.address}:${addr.family}`, addr)
|
||||
}
|
||||
|
||||
cb(null, results.values())
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
#defaultPick (origin, hostnameRecords, affinity) {
|
||||
let ip = null
|
||||
const { records, offset } = hostnameRecords
|
||||
|
||||
let family
|
||||
if (this.dualStack) {
|
||||
if (affinity == null) {
|
||||
// Balance between ip families
|
||||
if (offset == null || offset === maxInt) {
|
||||
hostnameRecords.offset = 0
|
||||
affinity = 4
|
||||
} else {
|
||||
hostnameRecords.offset++
|
||||
affinity = (hostnameRecords.offset & 1) === 1 ? 6 : 4
|
||||
}
|
||||
}
|
||||
|
||||
if (records[affinity] != null && records[affinity].ips.length > 0) {
|
||||
family = records[affinity]
|
||||
} else {
|
||||
family = records[affinity === 4 ? 6 : 4]
|
||||
}
|
||||
} else {
|
||||
family = records[affinity]
|
||||
}
|
||||
|
||||
// If no IPs we return null
|
||||
if (family == null || family.ips.length === 0) {
|
||||
return ip
|
||||
}
|
||||
|
||||
if (family.offset == null || family.offset === maxInt) {
|
||||
family.offset = 0
|
||||
} else {
|
||||
family.offset++
|
||||
}
|
||||
|
||||
const position = family.offset % family.ips.length
|
||||
ip = family.ips[position] ?? null
|
||||
|
||||
if (ip == null) {
|
||||
return ip
|
||||
}
|
||||
|
||||
if (Date.now() - ip.timestamp > ip.ttl) { // record TTL is already in ms
|
||||
// We delete expired records
|
||||
// It is possible that they have different TTL, so we manage them individually
|
||||
family.ips.splice(position, 1)
|
||||
return this.pick(origin, hostnameRecords, affinity)
|
||||
}
|
||||
|
||||
return ip
|
||||
}
|
||||
|
||||
pickFamily (origin, ipFamily) {
|
||||
const records = this.storage.get(origin.hostname)?.records
|
||||
if (!records) {
|
||||
return null
|
||||
}
|
||||
|
||||
const family = records[ipFamily]
|
||||
if (!family) {
|
||||
return null
|
||||
}
|
||||
|
||||
if (family.offset == null || family.offset === maxInt) {
|
||||
family.offset = 0
|
||||
} else {
|
||||
family.offset++
|
||||
}
|
||||
|
||||
const position = family.offset % family.ips.length
|
||||
const ip = family.ips[position] ?? null
|
||||
if (ip == null) {
|
||||
return ip
|
||||
}
|
||||
|
||||
if (Date.now() - ip.timestamp > ip.ttl) { // record TTL is already in ms
|
||||
// We delete expired records
|
||||
// It is possible that they have different TTL, so we manage them individually
|
||||
family.ips.splice(position, 1)
|
||||
}
|
||||
|
||||
return ip
|
||||
}
|
||||
|
||||
setRecords (origin, addresses) {
|
||||
const timestamp = Date.now()
|
||||
const records = { records: { 4: null, 6: null } }
|
||||
let minTTL = this.#maxTTL
|
||||
for (const record of addresses) {
|
||||
record.timestamp = timestamp
|
||||
if (typeof record.ttl === 'number') {
|
||||
// The record TTL is expected to be in ms
|
||||
record.ttl = Math.min(record.ttl, this.#maxTTL)
|
||||
minTTL = Math.min(minTTL, record.ttl)
|
||||
} else {
|
||||
record.ttl = this.#maxTTL
|
||||
}
|
||||
|
||||
const familyRecords = records.records[record.family] ?? { ips: [] }
|
||||
|
||||
familyRecords.ips.push(record)
|
||||
records.records[record.family] = familyRecords
|
||||
}
|
||||
|
||||
// We provide a default TTL if external storage will be used without TTL per record-level support
|
||||
this.storage.set(origin.hostname, records, { ttl: minTTL })
|
||||
}
|
||||
|
||||
deleteRecords (origin) {
|
||||
this.storage.delete(origin.hostname)
|
||||
}
|
||||
|
||||
getHandler (meta, opts) {
|
||||
return new DNSDispatchHandler(this, meta, opts)
|
||||
}
|
||||
}
|
||||
|
||||
class DNSDispatchHandler extends DecoratorHandler {
|
||||
#state = null
|
||||
#opts = null
|
||||
#dispatch = null
|
||||
#origin = null
|
||||
#controller = null
|
||||
#newOrigin = null
|
||||
#firstTry = true
|
||||
|
||||
constructor (state, { origin, handler, dispatch, newOrigin }, opts) {
|
||||
super(handler)
|
||||
this.#origin = origin
|
||||
this.#newOrigin = newOrigin
|
||||
this.#opts = { ...opts }
|
||||
this.#state = state
|
||||
this.#dispatch = dispatch
|
||||
}
|
||||
|
||||
onResponseError (controller, err) {
|
||||
switch (err.code) {
|
||||
case 'ETIMEDOUT':
|
||||
case 'ECONNREFUSED': {
|
||||
if (this.#state.dualStack) {
|
||||
if (!this.#firstTry) {
|
||||
super.onResponseError(controller, err)
|
||||
return
|
||||
}
|
||||
this.#firstTry = false
|
||||
|
||||
// Pick an ip address from the other family
|
||||
const otherFamily = this.#newOrigin.hostname[0] === '[' ? 4 : 6
|
||||
const ip = this.#state.pickFamily(this.#origin, otherFamily)
|
||||
if (ip == null) {
|
||||
super.onResponseError(controller, err)
|
||||
return
|
||||
}
|
||||
|
||||
let port
|
||||
if (typeof ip.port === 'number') {
|
||||
port = `:${ip.port}`
|
||||
} else if (this.#origin.port !== '') {
|
||||
port = `:${this.#origin.port}`
|
||||
} else {
|
||||
port = ''
|
||||
}
|
||||
|
||||
const dispatchOpts = {
|
||||
...this.#opts,
|
||||
origin: `${this.#origin.protocol}//${
|
||||
ip.family === 6 ? `[${ip.address}]` : ip.address
|
||||
}${port}`
|
||||
}
|
||||
this.#dispatch(dispatchOpts, this)
|
||||
return
|
||||
}
|
||||
|
||||
// if dual-stack disabled, we error out
|
||||
super.onResponseError(controller, err)
|
||||
break
|
||||
}
|
||||
case 'ENOTFOUND':
|
||||
this.#state.deleteRecords(this.#origin)
|
||||
super.onResponseError(controller, err)
|
||||
break
|
||||
default:
|
||||
super.onResponseError(controller, err)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = interceptorOpts => {
|
||||
if (
|
||||
interceptorOpts?.maxTTL != null &&
|
||||
(typeof interceptorOpts?.maxTTL !== 'number' || interceptorOpts?.maxTTL < 0)
|
||||
) {
|
||||
throw new InvalidArgumentError('Invalid maxTTL. Must be a positive number')
|
||||
}
|
||||
|
||||
if (
|
||||
interceptorOpts?.maxItems != null &&
|
||||
(typeof interceptorOpts?.maxItems !== 'number' ||
|
||||
interceptorOpts?.maxItems < 1)
|
||||
) {
|
||||
throw new InvalidArgumentError(
|
||||
'Invalid maxItems. Must be a positive number and greater than zero'
|
||||
)
|
||||
}
|
||||
|
||||
if (
|
||||
interceptorOpts?.affinity != null &&
|
||||
interceptorOpts?.affinity !== 4 &&
|
||||
interceptorOpts?.affinity !== 6
|
||||
) {
|
||||
throw new InvalidArgumentError('Invalid affinity. Must be either 4 or 6')
|
||||
}
|
||||
|
||||
if (
|
||||
interceptorOpts?.dualStack != null &&
|
||||
typeof interceptorOpts?.dualStack !== 'boolean'
|
||||
) {
|
||||
throw new InvalidArgumentError('Invalid dualStack. Must be a boolean')
|
||||
}
|
||||
|
||||
if (
|
||||
interceptorOpts?.lookup != null &&
|
||||
typeof interceptorOpts?.lookup !== 'function'
|
||||
) {
|
||||
throw new InvalidArgumentError('Invalid lookup. Must be a function')
|
||||
}
|
||||
|
||||
if (
|
||||
interceptorOpts?.pick != null &&
|
||||
typeof interceptorOpts?.pick !== 'function'
|
||||
) {
|
||||
throw new InvalidArgumentError('Invalid pick. Must be a function')
|
||||
}
|
||||
|
||||
if (
|
||||
interceptorOpts?.storage != null &&
|
||||
(typeof interceptorOpts?.storage?.get !== 'function' ||
|
||||
typeof interceptorOpts?.storage?.set !== 'function' ||
|
||||
typeof interceptorOpts?.storage?.full !== 'function' ||
|
||||
typeof interceptorOpts?.storage?.delete !== 'function'
|
||||
)
|
||||
) {
|
||||
throw new InvalidArgumentError('Invalid storage. Must be a object with methods: { get, set, full, delete }')
|
||||
}
|
||||
|
||||
const dualStack = interceptorOpts?.dualStack ?? true
|
||||
let affinity
|
||||
if (dualStack) {
|
||||
affinity = interceptorOpts?.affinity ?? null
|
||||
} else {
|
||||
affinity = interceptorOpts?.affinity ?? 4
|
||||
}
|
||||
|
||||
const opts = {
|
||||
maxTTL: interceptorOpts?.maxTTL ?? 10e3, // Expressed in ms
|
||||
lookup: interceptorOpts?.lookup ?? null,
|
||||
pick: interceptorOpts?.pick ?? null,
|
||||
dualStack,
|
||||
affinity,
|
||||
maxItems: interceptorOpts?.maxItems ?? Infinity,
|
||||
storage: interceptorOpts?.storage
|
||||
}
|
||||
|
||||
const instance = new DNSInstance(opts)
|
||||
|
||||
return dispatch => {
|
||||
return function dnsInterceptor (origDispatchOpts, handler) {
|
||||
const origin =
|
||||
origDispatchOpts.origin.constructor === URL
|
||||
? origDispatchOpts.origin
|
||||
: new URL(origDispatchOpts.origin)
|
||||
|
||||
if (isIP(origin.hostname) !== 0) {
|
||||
return dispatch(origDispatchOpts, handler)
|
||||
}
|
||||
|
||||
instance.runLookup(origin, origDispatchOpts, (err, newOrigin) => {
|
||||
if (err) {
|
||||
return handler.onResponseError(null, err)
|
||||
}
|
||||
|
||||
const dispatchOpts = {
|
||||
...origDispatchOpts,
|
||||
servername: origin.hostname, // For SNI on TLS
|
||||
origin: newOrigin.origin,
|
||||
headers: {
|
||||
host: origin.host,
|
||||
...origDispatchOpts.headers
|
||||
}
|
||||
}
|
||||
|
||||
dispatch(
|
||||
dispatchOpts,
|
||||
instance.getHandler(
|
||||
{ origin, dispatch, handler, newOrigin },
|
||||
origDispatchOpts
|
||||
)
|
||||
)
|
||||
})
|
||||
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
112
backend/node_modules/undici/lib/interceptor/dump.js
generated
vendored
Normal file
112
backend/node_modules/undici/lib/interceptor/dump.js
generated
vendored
Normal file
@@ -0,0 +1,112 @@
|
||||
'use strict'
|
||||
|
||||
const { InvalidArgumentError, RequestAbortedError } = require('../core/errors')
|
||||
const DecoratorHandler = require('../handler/decorator-handler')
|
||||
|
||||
class DumpHandler extends DecoratorHandler {
|
||||
#maxSize = 1024 * 1024
|
||||
#dumped = false
|
||||
#size = 0
|
||||
#controller = null
|
||||
aborted = false
|
||||
reason = false
|
||||
|
||||
constructor ({ maxSize, signal }, handler) {
|
||||
if (maxSize != null && (!Number.isFinite(maxSize) || maxSize < 1)) {
|
||||
throw new InvalidArgumentError('maxSize must be a number greater than 0')
|
||||
}
|
||||
|
||||
super(handler)
|
||||
|
||||
this.#maxSize = maxSize ?? this.#maxSize
|
||||
// this.#handler = handler
|
||||
}
|
||||
|
||||
#abort (reason) {
|
||||
this.aborted = true
|
||||
this.reason = reason
|
||||
}
|
||||
|
||||
onRequestStart (controller, context) {
|
||||
controller.abort = this.#abort.bind(this)
|
||||
this.#controller = controller
|
||||
|
||||
return super.onRequestStart(controller, context)
|
||||
}
|
||||
|
||||
onResponseStart (controller, statusCode, headers, statusMessage) {
|
||||
const contentLength = headers['content-length']
|
||||
|
||||
if (contentLength != null && contentLength > this.#maxSize) {
|
||||
throw new RequestAbortedError(
|
||||
`Response size (${contentLength}) larger than maxSize (${
|
||||
this.#maxSize
|
||||
})`
|
||||
)
|
||||
}
|
||||
|
||||
if (this.aborted === true) {
|
||||
return true
|
||||
}
|
||||
|
||||
return super.onResponseStart(controller, statusCode, headers, statusMessage)
|
||||
}
|
||||
|
||||
onResponseError (controller, err) {
|
||||
if (this.#dumped) {
|
||||
return
|
||||
}
|
||||
|
||||
// On network errors before connect, controller will be null
|
||||
err = this.#controller?.reason ?? err
|
||||
|
||||
super.onResponseError(controller, err)
|
||||
}
|
||||
|
||||
onResponseData (controller, chunk) {
|
||||
this.#size = this.#size + chunk.length
|
||||
|
||||
if (this.#size >= this.#maxSize) {
|
||||
this.#dumped = true
|
||||
|
||||
if (this.aborted === true) {
|
||||
super.onResponseError(controller, this.reason)
|
||||
} else {
|
||||
super.onResponseEnd(controller, {})
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
onResponseEnd (controller, trailers) {
|
||||
if (this.#dumped) {
|
||||
return
|
||||
}
|
||||
|
||||
if (this.#controller.aborted === true) {
|
||||
super.onResponseError(controller, this.reason)
|
||||
return
|
||||
}
|
||||
|
||||
super.onResponseEnd(controller, trailers)
|
||||
}
|
||||
}
|
||||
|
||||
function createDumpInterceptor (
|
||||
{ maxSize: defaultMaxSize } = {
|
||||
maxSize: 1024 * 1024
|
||||
}
|
||||
) {
|
||||
return dispatch => {
|
||||
return function Intercept (opts, handler) {
|
||||
const { dumpMaxSize = defaultMaxSize } = opts
|
||||
|
||||
const dumpHandler = new DumpHandler({ maxSize: dumpMaxSize, signal: opts.signal }, handler)
|
||||
|
||||
return dispatch(opts, dumpHandler)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = createDumpInterceptor
|
||||
21
backend/node_modules/undici/lib/interceptor/redirect.js
generated
vendored
Normal file
21
backend/node_modules/undici/lib/interceptor/redirect.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
'use strict'
|
||||
|
||||
const RedirectHandler = require('../handler/redirect-handler')
|
||||
|
||||
function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections } = {}) {
|
||||
return (dispatch) => {
|
||||
return function Intercept (opts, handler) {
|
||||
const { maxRedirections = defaultMaxRedirections, ...rest } = opts
|
||||
|
||||
if (maxRedirections == null || maxRedirections === 0) {
|
||||
return dispatch(opts, handler)
|
||||
}
|
||||
|
||||
const dispatchOpts = { ...rest } // Stop sub dispatcher from also redirecting.
|
||||
const redirectHandler = new RedirectHandler(dispatch, maxRedirections, dispatchOpts, handler)
|
||||
return dispatch(dispatchOpts, redirectHandler)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = createRedirectInterceptor
|
||||
95
backend/node_modules/undici/lib/interceptor/response-error.js
generated
vendored
Normal file
95
backend/node_modules/undici/lib/interceptor/response-error.js
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
'use strict'
|
||||
|
||||
// const { parseHeaders } = require('../core/util')
|
||||
const DecoratorHandler = require('../handler/decorator-handler')
|
||||
const { ResponseError } = require('../core/errors')
|
||||
|
||||
class ResponseErrorHandler extends DecoratorHandler {
|
||||
#statusCode
|
||||
#contentType
|
||||
#decoder
|
||||
#headers
|
||||
#body
|
||||
|
||||
constructor (_opts, { handler }) {
|
||||
super(handler)
|
||||
}
|
||||
|
||||
#checkContentType (contentType) {
|
||||
return (this.#contentType ?? '').indexOf(contentType) === 0
|
||||
}
|
||||
|
||||
onRequestStart (controller, context) {
|
||||
this.#statusCode = 0
|
||||
this.#contentType = null
|
||||
this.#decoder = null
|
||||
this.#headers = null
|
||||
this.#body = ''
|
||||
|
||||
return super.onRequestStart(controller, context)
|
||||
}
|
||||
|
||||
onResponseStart (controller, statusCode, headers, statusMessage) {
|
||||
this.#statusCode = statusCode
|
||||
this.#headers = headers
|
||||
this.#contentType = headers['content-type']
|
||||
|
||||
if (this.#statusCode < 400) {
|
||||
return super.onResponseStart(controller, statusCode, headers, statusMessage)
|
||||
}
|
||||
|
||||
if (this.#checkContentType('application/json') || this.#checkContentType('text/plain')) {
|
||||
this.#decoder = new TextDecoder('utf-8')
|
||||
}
|
||||
}
|
||||
|
||||
onResponseData (controller, chunk) {
|
||||
if (this.#statusCode < 400) {
|
||||
return super.onResponseData(controller, chunk)
|
||||
}
|
||||
|
||||
this.#body += this.#decoder?.decode(chunk, { stream: true }) ?? ''
|
||||
}
|
||||
|
||||
onResponseEnd (controller, trailers) {
|
||||
if (this.#statusCode >= 400) {
|
||||
this.#body += this.#decoder?.decode(undefined, { stream: false }) ?? ''
|
||||
|
||||
if (this.#checkContentType('application/json')) {
|
||||
try {
|
||||
this.#body = JSON.parse(this.#body)
|
||||
} catch {
|
||||
// Do nothing...
|
||||
}
|
||||
}
|
||||
|
||||
let err
|
||||
const stackTraceLimit = Error.stackTraceLimit
|
||||
Error.stackTraceLimit = 0
|
||||
try {
|
||||
err = new ResponseError('Response Error', this.#statusCode, {
|
||||
body: this.#body,
|
||||
headers: this.#headers
|
||||
})
|
||||
} finally {
|
||||
Error.stackTraceLimit = stackTraceLimit
|
||||
}
|
||||
|
||||
super.onResponseError(controller, err)
|
||||
} else {
|
||||
super.onResponseEnd(controller, trailers)
|
||||
}
|
||||
}
|
||||
|
||||
onResponseError (controller, err) {
|
||||
super.onResponseError(controller, err)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = () => {
|
||||
return (dispatch) => {
|
||||
return function Intercept (opts, handler) {
|
||||
return dispatch(opts, new ResponseErrorHandler(opts, { handler }))
|
||||
}
|
||||
}
|
||||
}
|
||||
19
backend/node_modules/undici/lib/interceptor/retry.js
generated
vendored
Normal file
19
backend/node_modules/undici/lib/interceptor/retry.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
'use strict'
|
||||
const RetryHandler = require('../handler/retry-handler')
|
||||
|
||||
module.exports = globalOpts => {
|
||||
return dispatch => {
|
||||
return function retryInterceptor (opts, handler) {
|
||||
return dispatch(
|
||||
opts,
|
||||
new RetryHandler(
|
||||
{ ...opts, retryOptions: { ...globalOpts, ...opts.retryOptions } },
|
||||
{
|
||||
handler,
|
||||
dispatch
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
0
backend/node_modules/undici/lib/llhttp/.gitkeep
generated
vendored
Normal file
0
backend/node_modules/undici/lib/llhttp/.gitkeep
generated
vendored
Normal file
195
backend/node_modules/undici/lib/llhttp/constants.d.ts
generated
vendored
Normal file
195
backend/node_modules/undici/lib/llhttp/constants.d.ts
generated
vendored
Normal file
@@ -0,0 +1,195 @@
|
||||
export type IntDict = Record<string, number>;
|
||||
export declare const ERROR: IntDict;
|
||||
export declare const TYPE: IntDict;
|
||||
export declare const FLAGS: IntDict;
|
||||
export declare const LENIENT_FLAGS: IntDict;
|
||||
export declare const METHODS: IntDict;
|
||||
export declare const STATUSES: IntDict;
|
||||
export declare const FINISH: IntDict;
|
||||
export declare const HEADER_STATE: IntDict;
|
||||
export declare const METHODS_HTTP: number[];
|
||||
export declare const METHODS_ICE: number[];
|
||||
export declare const METHODS_RTSP: number[];
|
||||
export declare const METHOD_MAP: IntDict;
|
||||
export declare const H_METHOD_MAP: {
|
||||
[k: string]: number;
|
||||
};
|
||||
export declare const STATUSES_HTTP: number[];
|
||||
export type CharList = (string | number)[];
|
||||
export declare const ALPHA: CharList;
|
||||
export declare const NUM_MAP: {
|
||||
0: number;
|
||||
1: number;
|
||||
2: number;
|
||||
3: number;
|
||||
4: number;
|
||||
5: number;
|
||||
6: number;
|
||||
7: number;
|
||||
8: number;
|
||||
9: number;
|
||||
};
|
||||
export declare const HEX_MAP: {
|
||||
0: number;
|
||||
1: number;
|
||||
2: number;
|
||||
3: number;
|
||||
4: number;
|
||||
5: number;
|
||||
6: number;
|
||||
7: number;
|
||||
8: number;
|
||||
9: number;
|
||||
A: number;
|
||||
B: number;
|
||||
C: number;
|
||||
D: number;
|
||||
E: number;
|
||||
F: number;
|
||||
a: number;
|
||||
b: number;
|
||||
c: number;
|
||||
d: number;
|
||||
e: number;
|
||||
f: number;
|
||||
};
|
||||
export declare const NUM: CharList;
|
||||
export declare const ALPHANUM: CharList;
|
||||
export declare const MARK: CharList;
|
||||
export declare const USERINFO_CHARS: CharList;
|
||||
export declare const URL_CHAR: CharList;
|
||||
export declare const HEX: CharList;
|
||||
export declare const TOKEN: CharList;
|
||||
export declare const HEADER_CHARS: CharList;
|
||||
export declare const CONNECTION_TOKEN_CHARS: CharList;
|
||||
export declare const QUOTED_STRING: CharList;
|
||||
export declare const HTAB_SP_VCHAR_OBS_TEXT: CharList;
|
||||
export declare const MAJOR: {
|
||||
0: number;
|
||||
1: number;
|
||||
2: number;
|
||||
3: number;
|
||||
4: number;
|
||||
5: number;
|
||||
6: number;
|
||||
7: number;
|
||||
8: number;
|
||||
9: number;
|
||||
};
|
||||
export declare const MINOR: {
|
||||
0: number;
|
||||
1: number;
|
||||
2: number;
|
||||
3: number;
|
||||
4: number;
|
||||
5: number;
|
||||
6: number;
|
||||
7: number;
|
||||
8: number;
|
||||
9: number;
|
||||
};
|
||||
export declare const SPECIAL_HEADERS: {
|
||||
connection: number;
|
||||
'content-length': number;
|
||||
'proxy-connection': number;
|
||||
'transfer-encoding': number;
|
||||
upgrade: number;
|
||||
};
|
||||
declare const _default: {
|
||||
ERROR: IntDict;
|
||||
TYPE: IntDict;
|
||||
FLAGS: IntDict;
|
||||
LENIENT_FLAGS: IntDict;
|
||||
METHODS: IntDict;
|
||||
STATUSES: IntDict;
|
||||
FINISH: IntDict;
|
||||
HEADER_STATE: IntDict;
|
||||
ALPHA: CharList;
|
||||
NUM_MAP: {
|
||||
0: number;
|
||||
1: number;
|
||||
2: number;
|
||||
3: number;
|
||||
4: number;
|
||||
5: number;
|
||||
6: number;
|
||||
7: number;
|
||||
8: number;
|
||||
9: number;
|
||||
};
|
||||
HEX_MAP: {
|
||||
0: number;
|
||||
1: number;
|
||||
2: number;
|
||||
3: number;
|
||||
4: number;
|
||||
5: number;
|
||||
6: number;
|
||||
7: number;
|
||||
8: number;
|
||||
9: number;
|
||||
A: number;
|
||||
B: number;
|
||||
C: number;
|
||||
D: number;
|
||||
E: number;
|
||||
F: number;
|
||||
a: number;
|
||||
b: number;
|
||||
c: number;
|
||||
d: number;
|
||||
e: number;
|
||||
f: number;
|
||||
};
|
||||
NUM: CharList;
|
||||
ALPHANUM: CharList;
|
||||
MARK: CharList;
|
||||
USERINFO_CHARS: CharList;
|
||||
URL_CHAR: CharList;
|
||||
HEX: CharList;
|
||||
TOKEN: CharList;
|
||||
HEADER_CHARS: CharList;
|
||||
CONNECTION_TOKEN_CHARS: CharList;
|
||||
QUOTED_STRING: CharList;
|
||||
HTAB_SP_VCHAR_OBS_TEXT: CharList;
|
||||
MAJOR: {
|
||||
0: number;
|
||||
1: number;
|
||||
2: number;
|
||||
3: number;
|
||||
4: number;
|
||||
5: number;
|
||||
6: number;
|
||||
7: number;
|
||||
8: number;
|
||||
9: number;
|
||||
};
|
||||
MINOR: {
|
||||
0: number;
|
||||
1: number;
|
||||
2: number;
|
||||
3: number;
|
||||
4: number;
|
||||
5: number;
|
||||
6: number;
|
||||
7: number;
|
||||
8: number;
|
||||
9: number;
|
||||
};
|
||||
SPECIAL_HEADERS: {
|
||||
connection: number;
|
||||
'content-length': number;
|
||||
'proxy-connection': number;
|
||||
'transfer-encoding': number;
|
||||
upgrade: number;
|
||||
};
|
||||
METHODS_HTTP: number[];
|
||||
METHODS_ICE: number[];
|
||||
METHODS_RTSP: number[];
|
||||
METHOD_MAP: IntDict;
|
||||
H_METHOD_MAP: {
|
||||
[k: string]: number;
|
||||
};
|
||||
STATUSES_HTTP: number[];
|
||||
};
|
||||
export default _default;
|
||||
531
backend/node_modules/undici/lib/llhttp/constants.js
generated
vendored
Normal file
531
backend/node_modules/undici/lib/llhttp/constants.js
generated
vendored
Normal file
@@ -0,0 +1,531 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SPECIAL_HEADERS = exports.MINOR = exports.MAJOR = exports.HTAB_SP_VCHAR_OBS_TEXT = exports.QUOTED_STRING = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.HEX = exports.URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.STATUSES_HTTP = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.HEADER_STATE = exports.FINISH = exports.STATUSES = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
|
||||
const utils_1 = require("./utils");
|
||||
// Emums
|
||||
exports.ERROR = {
|
||||
OK: 0,
|
||||
INTERNAL: 1,
|
||||
STRICT: 2,
|
||||
CR_EXPECTED: 25,
|
||||
LF_EXPECTED: 3,
|
||||
UNEXPECTED_CONTENT_LENGTH: 4,
|
||||
UNEXPECTED_SPACE: 30,
|
||||
CLOSED_CONNECTION: 5,
|
||||
INVALID_METHOD: 6,
|
||||
INVALID_URL: 7,
|
||||
INVALID_CONSTANT: 8,
|
||||
INVALID_VERSION: 9,
|
||||
INVALID_HEADER_TOKEN: 10,
|
||||
INVALID_CONTENT_LENGTH: 11,
|
||||
INVALID_CHUNK_SIZE: 12,
|
||||
INVALID_STATUS: 13,
|
||||
INVALID_EOF_STATE: 14,
|
||||
INVALID_TRANSFER_ENCODING: 15,
|
||||
CB_MESSAGE_BEGIN: 16,
|
||||
CB_HEADERS_COMPLETE: 17,
|
||||
CB_MESSAGE_COMPLETE: 18,
|
||||
CB_CHUNK_HEADER: 19,
|
||||
CB_CHUNK_COMPLETE: 20,
|
||||
PAUSED: 21,
|
||||
PAUSED_UPGRADE: 22,
|
||||
PAUSED_H2_UPGRADE: 23,
|
||||
USER: 24,
|
||||
CB_URL_COMPLETE: 26,
|
||||
CB_STATUS_COMPLETE: 27,
|
||||
CB_METHOD_COMPLETE: 32,
|
||||
CB_VERSION_COMPLETE: 33,
|
||||
CB_HEADER_FIELD_COMPLETE: 28,
|
||||
CB_HEADER_VALUE_COMPLETE: 29,
|
||||
CB_CHUNK_EXTENSION_NAME_COMPLETE: 34,
|
||||
CB_CHUNK_EXTENSION_VALUE_COMPLETE: 35,
|
||||
CB_RESET: 31,
|
||||
CB_PROTOCOL_COMPLETE: 38,
|
||||
};
|
||||
exports.TYPE = {
|
||||
BOTH: 0, // default
|
||||
REQUEST: 1,
|
||||
RESPONSE: 2,
|
||||
};
|
||||
exports.FLAGS = {
|
||||
CONNECTION_KEEP_ALIVE: 1 << 0,
|
||||
CONNECTION_CLOSE: 1 << 1,
|
||||
CONNECTION_UPGRADE: 1 << 2,
|
||||
CHUNKED: 1 << 3,
|
||||
UPGRADE: 1 << 4,
|
||||
CONTENT_LENGTH: 1 << 5,
|
||||
SKIPBODY: 1 << 6,
|
||||
TRAILING: 1 << 7,
|
||||
// 1 << 8 is unused
|
||||
TRANSFER_ENCODING: 1 << 9,
|
||||
};
|
||||
exports.LENIENT_FLAGS = {
|
||||
HEADERS: 1 << 0,
|
||||
CHUNKED_LENGTH: 1 << 1,
|
||||
KEEP_ALIVE: 1 << 2,
|
||||
TRANSFER_ENCODING: 1 << 3,
|
||||
VERSION: 1 << 4,
|
||||
DATA_AFTER_CLOSE: 1 << 5,
|
||||
OPTIONAL_LF_AFTER_CR: 1 << 6,
|
||||
OPTIONAL_CRLF_AFTER_CHUNK: 1 << 7,
|
||||
OPTIONAL_CR_BEFORE_LF: 1 << 8,
|
||||
SPACES_AFTER_CHUNK_SIZE: 1 << 9,
|
||||
};
|
||||
exports.METHODS = {
|
||||
'DELETE': 0,
|
||||
'GET': 1,
|
||||
'HEAD': 2,
|
||||
'POST': 3,
|
||||
'PUT': 4,
|
||||
/* pathological */
|
||||
'CONNECT': 5,
|
||||
'OPTIONS': 6,
|
||||
'TRACE': 7,
|
||||
/* WebDAV */
|
||||
'COPY': 8,
|
||||
'LOCK': 9,
|
||||
'MKCOL': 10,
|
||||
'MOVE': 11,
|
||||
'PROPFIND': 12,
|
||||
'PROPPATCH': 13,
|
||||
'SEARCH': 14,
|
||||
'UNLOCK': 15,
|
||||
'BIND': 16,
|
||||
'REBIND': 17,
|
||||
'UNBIND': 18,
|
||||
'ACL': 19,
|
||||
/* subversion */
|
||||
'REPORT': 20,
|
||||
'MKACTIVITY': 21,
|
||||
'CHECKOUT': 22,
|
||||
'MERGE': 23,
|
||||
/* upnp */
|
||||
'M-SEARCH': 24,
|
||||
'NOTIFY': 25,
|
||||
'SUBSCRIBE': 26,
|
||||
'UNSUBSCRIBE': 27,
|
||||
/* RFC-5789 */
|
||||
'PATCH': 28,
|
||||
'PURGE': 29,
|
||||
/* CalDAV */
|
||||
'MKCALENDAR': 30,
|
||||
/* RFC-2068, section 19.6.1.2 */
|
||||
'LINK': 31,
|
||||
'UNLINK': 32,
|
||||
/* icecast */
|
||||
'SOURCE': 33,
|
||||
/* RFC-7540, section 11.6 */
|
||||
'PRI': 34,
|
||||
/* RFC-2326 RTSP */
|
||||
'DESCRIBE': 35,
|
||||
'ANNOUNCE': 36,
|
||||
'SETUP': 37,
|
||||
'PLAY': 38,
|
||||
'PAUSE': 39,
|
||||
'TEARDOWN': 40,
|
||||
'GET_PARAMETER': 41,
|
||||
'SET_PARAMETER': 42,
|
||||
'REDIRECT': 43,
|
||||
'RECORD': 44,
|
||||
/* RAOP */
|
||||
'FLUSH': 45,
|
||||
/* DRAFT https://www.ietf.org/archive/id/draft-ietf-httpbis-safe-method-w-body-02.html */
|
||||
'QUERY': 46,
|
||||
};
|
||||
exports.STATUSES = {
|
||||
CONTINUE: 100,
|
||||
SWITCHING_PROTOCOLS: 101,
|
||||
PROCESSING: 102,
|
||||
EARLY_HINTS: 103,
|
||||
RESPONSE_IS_STALE: 110, // Unofficial
|
||||
REVALIDATION_FAILED: 111, // Unofficial
|
||||
DISCONNECTED_OPERATION: 112, // Unofficial
|
||||
HEURISTIC_EXPIRATION: 113, // Unofficial
|
||||
MISCELLANEOUS_WARNING: 199, // Unofficial
|
||||
OK: 200,
|
||||
CREATED: 201,
|
||||
ACCEPTED: 202,
|
||||
NON_AUTHORITATIVE_INFORMATION: 203,
|
||||
NO_CONTENT: 204,
|
||||
RESET_CONTENT: 205,
|
||||
PARTIAL_CONTENT: 206,
|
||||
MULTI_STATUS: 207,
|
||||
ALREADY_REPORTED: 208,
|
||||
TRANSFORMATION_APPLIED: 214, // Unofficial
|
||||
IM_USED: 226,
|
||||
MISCELLANEOUS_PERSISTENT_WARNING: 299, // Unofficial
|
||||
MULTIPLE_CHOICES: 300,
|
||||
MOVED_PERMANENTLY: 301,
|
||||
FOUND: 302,
|
||||
SEE_OTHER: 303,
|
||||
NOT_MODIFIED: 304,
|
||||
USE_PROXY: 305,
|
||||
SWITCH_PROXY: 306, // No longer used
|
||||
TEMPORARY_REDIRECT: 307,
|
||||
PERMANENT_REDIRECT: 308,
|
||||
BAD_REQUEST: 400,
|
||||
UNAUTHORIZED: 401,
|
||||
PAYMENT_REQUIRED: 402,
|
||||
FORBIDDEN: 403,
|
||||
NOT_FOUND: 404,
|
||||
METHOD_NOT_ALLOWED: 405,
|
||||
NOT_ACCEPTABLE: 406,
|
||||
PROXY_AUTHENTICATION_REQUIRED: 407,
|
||||
REQUEST_TIMEOUT: 408,
|
||||
CONFLICT: 409,
|
||||
GONE: 410,
|
||||
LENGTH_REQUIRED: 411,
|
||||
PRECONDITION_FAILED: 412,
|
||||
PAYLOAD_TOO_LARGE: 413,
|
||||
URI_TOO_LONG: 414,
|
||||
UNSUPPORTED_MEDIA_TYPE: 415,
|
||||
RANGE_NOT_SATISFIABLE: 416,
|
||||
EXPECTATION_FAILED: 417,
|
||||
IM_A_TEAPOT: 418,
|
||||
PAGE_EXPIRED: 419, // Unofficial
|
||||
ENHANCE_YOUR_CALM: 420, // Unofficial
|
||||
MISDIRECTED_REQUEST: 421,
|
||||
UNPROCESSABLE_ENTITY: 422,
|
||||
LOCKED: 423,
|
||||
FAILED_DEPENDENCY: 424,
|
||||
TOO_EARLY: 425,
|
||||
UPGRADE_REQUIRED: 426,
|
||||
PRECONDITION_REQUIRED: 428,
|
||||
TOO_MANY_REQUESTS: 429,
|
||||
REQUEST_HEADER_FIELDS_TOO_LARGE_UNOFFICIAL: 430, // Unofficial
|
||||
REQUEST_HEADER_FIELDS_TOO_LARGE: 431,
|
||||
LOGIN_TIMEOUT: 440, // Unofficial
|
||||
NO_RESPONSE: 444, // Unofficial
|
||||
RETRY_WITH: 449, // Unofficial
|
||||
BLOCKED_BY_PARENTAL_CONTROL: 450, // Unofficial
|
||||
UNAVAILABLE_FOR_LEGAL_REASONS: 451,
|
||||
CLIENT_CLOSED_LOAD_BALANCED_REQUEST: 460, // Unofficial
|
||||
INVALID_X_FORWARDED_FOR: 463, // Unofficial
|
||||
REQUEST_HEADER_TOO_LARGE: 494, // Unofficial
|
||||
SSL_CERTIFICATE_ERROR: 495, // Unofficial
|
||||
SSL_CERTIFICATE_REQUIRED: 496, // Unofficial
|
||||
HTTP_REQUEST_SENT_TO_HTTPS_PORT: 497, // Unofficial
|
||||
INVALID_TOKEN: 498, // Unofficial
|
||||
CLIENT_CLOSED_REQUEST: 499, // Unofficial
|
||||
INTERNAL_SERVER_ERROR: 500,
|
||||
NOT_IMPLEMENTED: 501,
|
||||
BAD_GATEWAY: 502,
|
||||
SERVICE_UNAVAILABLE: 503,
|
||||
GATEWAY_TIMEOUT: 504,
|
||||
HTTP_VERSION_NOT_SUPPORTED: 505,
|
||||
VARIANT_ALSO_NEGOTIATES: 506,
|
||||
INSUFFICIENT_STORAGE: 507,
|
||||
LOOP_DETECTED: 508,
|
||||
BANDWIDTH_LIMIT_EXCEEDED: 509,
|
||||
NOT_EXTENDED: 510,
|
||||
NETWORK_AUTHENTICATION_REQUIRED: 511,
|
||||
WEB_SERVER_UNKNOWN_ERROR: 520, // Unofficial
|
||||
WEB_SERVER_IS_DOWN: 521, // Unofficial
|
||||
CONNECTION_TIMEOUT: 522, // Unofficial
|
||||
ORIGIN_IS_UNREACHABLE: 523, // Unofficial
|
||||
TIMEOUT_OCCURED: 524, // Unofficial
|
||||
SSL_HANDSHAKE_FAILED: 525, // Unofficial
|
||||
INVALID_SSL_CERTIFICATE: 526, // Unofficial
|
||||
RAILGUN_ERROR: 527, // Unofficial
|
||||
SITE_IS_OVERLOADED: 529, // Unofficial
|
||||
SITE_IS_FROZEN: 530, // Unofficial
|
||||
IDENTITY_PROVIDER_AUTHENTICATION_ERROR: 561, // Unofficial
|
||||
NETWORK_READ_TIMEOUT: 598, // Unofficial
|
||||
NETWORK_CONNECT_TIMEOUT: 599, // Unofficial
|
||||
};
|
||||
exports.FINISH = {
|
||||
SAFE: 0,
|
||||
SAFE_WITH_CB: 1,
|
||||
UNSAFE: 2,
|
||||
};
|
||||
exports.HEADER_STATE = {
|
||||
GENERAL: 0,
|
||||
CONNECTION: 1,
|
||||
CONTENT_LENGTH: 2,
|
||||
TRANSFER_ENCODING: 3,
|
||||
UPGRADE: 4,
|
||||
CONNECTION_KEEP_ALIVE: 5,
|
||||
CONNECTION_CLOSE: 6,
|
||||
CONNECTION_UPGRADE: 7,
|
||||
TRANSFER_ENCODING_CHUNKED: 8,
|
||||
};
|
||||
// C headers
|
||||
exports.METHODS_HTTP = [
|
||||
exports.METHODS.DELETE,
|
||||
exports.METHODS.GET,
|
||||
exports.METHODS.HEAD,
|
||||
exports.METHODS.POST,
|
||||
exports.METHODS.PUT,
|
||||
exports.METHODS.CONNECT,
|
||||
exports.METHODS.OPTIONS,
|
||||
exports.METHODS.TRACE,
|
||||
exports.METHODS.COPY,
|
||||
exports.METHODS.LOCK,
|
||||
exports.METHODS.MKCOL,
|
||||
exports.METHODS.MOVE,
|
||||
exports.METHODS.PROPFIND,
|
||||
exports.METHODS.PROPPATCH,
|
||||
exports.METHODS.SEARCH,
|
||||
exports.METHODS.UNLOCK,
|
||||
exports.METHODS.BIND,
|
||||
exports.METHODS.REBIND,
|
||||
exports.METHODS.UNBIND,
|
||||
exports.METHODS.ACL,
|
||||
exports.METHODS.REPORT,
|
||||
exports.METHODS.MKACTIVITY,
|
||||
exports.METHODS.CHECKOUT,
|
||||
exports.METHODS.MERGE,
|
||||
exports.METHODS['M-SEARCH'],
|
||||
exports.METHODS.NOTIFY,
|
||||
exports.METHODS.SUBSCRIBE,
|
||||
exports.METHODS.UNSUBSCRIBE,
|
||||
exports.METHODS.PATCH,
|
||||
exports.METHODS.PURGE,
|
||||
exports.METHODS.MKCALENDAR,
|
||||
exports.METHODS.LINK,
|
||||
exports.METHODS.UNLINK,
|
||||
exports.METHODS.PRI,
|
||||
// TODO(indutny): should we allow it with HTTP?
|
||||
exports.METHODS.SOURCE,
|
||||
exports.METHODS.QUERY,
|
||||
];
|
||||
exports.METHODS_ICE = [
|
||||
exports.METHODS.SOURCE,
|
||||
];
|
||||
exports.METHODS_RTSP = [
|
||||
exports.METHODS.OPTIONS,
|
||||
exports.METHODS.DESCRIBE,
|
||||
exports.METHODS.ANNOUNCE,
|
||||
exports.METHODS.SETUP,
|
||||
exports.METHODS.PLAY,
|
||||
exports.METHODS.PAUSE,
|
||||
exports.METHODS.TEARDOWN,
|
||||
exports.METHODS.GET_PARAMETER,
|
||||
exports.METHODS.SET_PARAMETER,
|
||||
exports.METHODS.REDIRECT,
|
||||
exports.METHODS.RECORD,
|
||||
exports.METHODS.FLUSH,
|
||||
// For AirPlay
|
||||
exports.METHODS.GET,
|
||||
exports.METHODS.POST,
|
||||
];
|
||||
exports.METHOD_MAP = (0, utils_1.enumToMap)(exports.METHODS);
|
||||
exports.H_METHOD_MAP = Object.fromEntries(Object.entries(exports.METHODS).filter(([k]) => k.startsWith('H')));
|
||||
exports.STATUSES_HTTP = [
|
||||
exports.STATUSES.CONTINUE,
|
||||
exports.STATUSES.SWITCHING_PROTOCOLS,
|
||||
exports.STATUSES.PROCESSING,
|
||||
exports.STATUSES.EARLY_HINTS,
|
||||
exports.STATUSES.RESPONSE_IS_STALE,
|
||||
exports.STATUSES.REVALIDATION_FAILED,
|
||||
exports.STATUSES.DISCONNECTED_OPERATION,
|
||||
exports.STATUSES.HEURISTIC_EXPIRATION,
|
||||
exports.STATUSES.MISCELLANEOUS_WARNING,
|
||||
exports.STATUSES.OK,
|
||||
exports.STATUSES.CREATED,
|
||||
exports.STATUSES.ACCEPTED,
|
||||
exports.STATUSES.NON_AUTHORITATIVE_INFORMATION,
|
||||
exports.STATUSES.NO_CONTENT,
|
||||
exports.STATUSES.RESET_CONTENT,
|
||||
exports.STATUSES.PARTIAL_CONTENT,
|
||||
exports.STATUSES.MULTI_STATUS,
|
||||
exports.STATUSES.ALREADY_REPORTED,
|
||||
exports.STATUSES.TRANSFORMATION_APPLIED,
|
||||
exports.STATUSES.IM_USED,
|
||||
exports.STATUSES.MISCELLANEOUS_PERSISTENT_WARNING,
|
||||
exports.STATUSES.MULTIPLE_CHOICES,
|
||||
exports.STATUSES.MOVED_PERMANENTLY,
|
||||
exports.STATUSES.FOUND,
|
||||
exports.STATUSES.SEE_OTHER,
|
||||
exports.STATUSES.NOT_MODIFIED,
|
||||
exports.STATUSES.USE_PROXY,
|
||||
exports.STATUSES.SWITCH_PROXY,
|
||||
exports.STATUSES.TEMPORARY_REDIRECT,
|
||||
exports.STATUSES.PERMANENT_REDIRECT,
|
||||
exports.STATUSES.BAD_REQUEST,
|
||||
exports.STATUSES.UNAUTHORIZED,
|
||||
exports.STATUSES.PAYMENT_REQUIRED,
|
||||
exports.STATUSES.FORBIDDEN,
|
||||
exports.STATUSES.NOT_FOUND,
|
||||
exports.STATUSES.METHOD_NOT_ALLOWED,
|
||||
exports.STATUSES.NOT_ACCEPTABLE,
|
||||
exports.STATUSES.PROXY_AUTHENTICATION_REQUIRED,
|
||||
exports.STATUSES.REQUEST_TIMEOUT,
|
||||
exports.STATUSES.CONFLICT,
|
||||
exports.STATUSES.GONE,
|
||||
exports.STATUSES.LENGTH_REQUIRED,
|
||||
exports.STATUSES.PRECONDITION_FAILED,
|
||||
exports.STATUSES.PAYLOAD_TOO_LARGE,
|
||||
exports.STATUSES.URI_TOO_LONG,
|
||||
exports.STATUSES.UNSUPPORTED_MEDIA_TYPE,
|
||||
exports.STATUSES.RANGE_NOT_SATISFIABLE,
|
||||
exports.STATUSES.EXPECTATION_FAILED,
|
||||
exports.STATUSES.IM_A_TEAPOT,
|
||||
exports.STATUSES.PAGE_EXPIRED,
|
||||
exports.STATUSES.ENHANCE_YOUR_CALM,
|
||||
exports.STATUSES.MISDIRECTED_REQUEST,
|
||||
exports.STATUSES.UNPROCESSABLE_ENTITY,
|
||||
exports.STATUSES.LOCKED,
|
||||
exports.STATUSES.FAILED_DEPENDENCY,
|
||||
exports.STATUSES.TOO_EARLY,
|
||||
exports.STATUSES.UPGRADE_REQUIRED,
|
||||
exports.STATUSES.PRECONDITION_REQUIRED,
|
||||
exports.STATUSES.TOO_MANY_REQUESTS,
|
||||
exports.STATUSES.REQUEST_HEADER_FIELDS_TOO_LARGE_UNOFFICIAL,
|
||||
exports.STATUSES.REQUEST_HEADER_FIELDS_TOO_LARGE,
|
||||
exports.STATUSES.LOGIN_TIMEOUT,
|
||||
exports.STATUSES.NO_RESPONSE,
|
||||
exports.STATUSES.RETRY_WITH,
|
||||
exports.STATUSES.BLOCKED_BY_PARENTAL_CONTROL,
|
||||
exports.STATUSES.UNAVAILABLE_FOR_LEGAL_REASONS,
|
||||
exports.STATUSES.CLIENT_CLOSED_LOAD_BALANCED_REQUEST,
|
||||
exports.STATUSES.INVALID_X_FORWARDED_FOR,
|
||||
exports.STATUSES.REQUEST_HEADER_TOO_LARGE,
|
||||
exports.STATUSES.SSL_CERTIFICATE_ERROR,
|
||||
exports.STATUSES.SSL_CERTIFICATE_REQUIRED,
|
||||
exports.STATUSES.HTTP_REQUEST_SENT_TO_HTTPS_PORT,
|
||||
exports.STATUSES.INVALID_TOKEN,
|
||||
exports.STATUSES.CLIENT_CLOSED_REQUEST,
|
||||
exports.STATUSES.INTERNAL_SERVER_ERROR,
|
||||
exports.STATUSES.NOT_IMPLEMENTED,
|
||||
exports.STATUSES.BAD_GATEWAY,
|
||||
exports.STATUSES.SERVICE_UNAVAILABLE,
|
||||
exports.STATUSES.GATEWAY_TIMEOUT,
|
||||
exports.STATUSES.HTTP_VERSION_NOT_SUPPORTED,
|
||||
exports.STATUSES.VARIANT_ALSO_NEGOTIATES,
|
||||
exports.STATUSES.INSUFFICIENT_STORAGE,
|
||||
exports.STATUSES.LOOP_DETECTED,
|
||||
exports.STATUSES.BANDWIDTH_LIMIT_EXCEEDED,
|
||||
exports.STATUSES.NOT_EXTENDED,
|
||||
exports.STATUSES.NETWORK_AUTHENTICATION_REQUIRED,
|
||||
exports.STATUSES.WEB_SERVER_UNKNOWN_ERROR,
|
||||
exports.STATUSES.WEB_SERVER_IS_DOWN,
|
||||
exports.STATUSES.CONNECTION_TIMEOUT,
|
||||
exports.STATUSES.ORIGIN_IS_UNREACHABLE,
|
||||
exports.STATUSES.TIMEOUT_OCCURED,
|
||||
exports.STATUSES.SSL_HANDSHAKE_FAILED,
|
||||
exports.STATUSES.INVALID_SSL_CERTIFICATE,
|
||||
exports.STATUSES.RAILGUN_ERROR,
|
||||
exports.STATUSES.SITE_IS_OVERLOADED,
|
||||
exports.STATUSES.SITE_IS_FROZEN,
|
||||
exports.STATUSES.IDENTITY_PROVIDER_AUTHENTICATION_ERROR,
|
||||
exports.STATUSES.NETWORK_READ_TIMEOUT,
|
||||
exports.STATUSES.NETWORK_CONNECT_TIMEOUT,
|
||||
];
|
||||
exports.ALPHA = [];
|
||||
for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
|
||||
// Upper case
|
||||
exports.ALPHA.push(String.fromCharCode(i));
|
||||
// Lower case
|
||||
exports.ALPHA.push(String.fromCharCode(i + 0x20));
|
||||
}
|
||||
exports.NUM_MAP = {
|
||||
0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
|
||||
5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
|
||||
};
|
||||
exports.HEX_MAP = {
|
||||
0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
|
||||
5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
|
||||
A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,
|
||||
a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,
|
||||
};
|
||||
exports.NUM = [
|
||||
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
|
||||
];
|
||||
exports.ALPHANUM = exports.ALPHA.concat(exports.NUM);
|
||||
exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')'];
|
||||
exports.USERINFO_CHARS = exports.ALPHANUM
|
||||
.concat(exports.MARK)
|
||||
.concat(['%', ';', ':', '&', '=', '+', '$', ',']);
|
||||
// TODO(indutny): use RFC
|
||||
exports.URL_CHAR = [
|
||||
'!', '"', '$', '%', '&', '\'',
|
||||
'(', ')', '*', '+', ',', '-', '.', '/',
|
||||
':', ';', '<', '=', '>',
|
||||
'@', '[', '\\', ']', '^', '_',
|
||||
'`',
|
||||
'{', '|', '}', '~',
|
||||
].concat(exports.ALPHANUM);
|
||||
exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
|
||||
/* Tokens as defined by rfc 2616. Also lowercases them.
|
||||
* token = 1*<any CHAR except CTLs or separators>
|
||||
* separators = "(" | ")" | "<" | ">" | "@"
|
||||
* | "," | ";" | ":" | "\" | <">
|
||||
* | "/" | "[" | "]" | "?" | "="
|
||||
* | "{" | "}" | SP | HT
|
||||
*/
|
||||
exports.TOKEN = [
|
||||
'!', '#', '$', '%', '&', '\'',
|
||||
'*', '+', '-', '.',
|
||||
'^', '_', '`',
|
||||
'|', '~',
|
||||
].concat(exports.ALPHANUM);
|
||||
/*
|
||||
* Verify that a char is a valid visible (printable) US-ASCII
|
||||
* character or %x80-FF
|
||||
*/
|
||||
exports.HEADER_CHARS = ['\t'];
|
||||
for (let i = 32; i <= 255; i++) {
|
||||
if (i !== 127) {
|
||||
exports.HEADER_CHARS.push(i);
|
||||
}
|
||||
}
|
||||
// ',' = \x44
|
||||
exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);
|
||||
exports.QUOTED_STRING = ['\t', ' '];
|
||||
for (let i = 0x21; i <= 0xff; i++) {
|
||||
if (i !== 0x22 && i !== 0x5c) { // All characters in ASCII except \ and "
|
||||
exports.QUOTED_STRING.push(i);
|
||||
}
|
||||
}
|
||||
exports.HTAB_SP_VCHAR_OBS_TEXT = ['\t', ' '];
|
||||
// VCHAR: https://tools.ietf.org/html/rfc5234#appendix-B.1
|
||||
for (let i = 0x21; i <= 0x7E; i++) {
|
||||
exports.HTAB_SP_VCHAR_OBS_TEXT.push(i);
|
||||
}
|
||||
// OBS_TEXT: https://datatracker.ietf.org/doc/html/rfc9110#name-collected-abnf
|
||||
for (let i = 0x80; i <= 0xff; i++) {
|
||||
exports.HTAB_SP_VCHAR_OBS_TEXT.push(i);
|
||||
}
|
||||
exports.MAJOR = exports.NUM_MAP;
|
||||
exports.MINOR = exports.MAJOR;
|
||||
exports.SPECIAL_HEADERS = {
|
||||
'connection': exports.HEADER_STATE.CONNECTION,
|
||||
'content-length': exports.HEADER_STATE.CONTENT_LENGTH,
|
||||
'proxy-connection': exports.HEADER_STATE.CONNECTION,
|
||||
'transfer-encoding': exports.HEADER_STATE.TRANSFER_ENCODING,
|
||||
'upgrade': exports.HEADER_STATE.UPGRADE,
|
||||
};
|
||||
exports.default = {
|
||||
ERROR: exports.ERROR,
|
||||
TYPE: exports.TYPE,
|
||||
FLAGS: exports.FLAGS,
|
||||
LENIENT_FLAGS: exports.LENIENT_FLAGS,
|
||||
METHODS: exports.METHODS,
|
||||
STATUSES: exports.STATUSES,
|
||||
FINISH: exports.FINISH,
|
||||
HEADER_STATE: exports.HEADER_STATE,
|
||||
ALPHA: exports.ALPHA,
|
||||
NUM_MAP: exports.NUM_MAP,
|
||||
HEX_MAP: exports.HEX_MAP,
|
||||
NUM: exports.NUM,
|
||||
ALPHANUM: exports.ALPHANUM,
|
||||
MARK: exports.MARK,
|
||||
USERINFO_CHARS: exports.USERINFO_CHARS,
|
||||
URL_CHAR: exports.URL_CHAR,
|
||||
HEX: exports.HEX,
|
||||
TOKEN: exports.TOKEN,
|
||||
HEADER_CHARS: exports.HEADER_CHARS,
|
||||
CONNECTION_TOKEN_CHARS: exports.CONNECTION_TOKEN_CHARS,
|
||||
QUOTED_STRING: exports.QUOTED_STRING,
|
||||
HTAB_SP_VCHAR_OBS_TEXT: exports.HTAB_SP_VCHAR_OBS_TEXT,
|
||||
MAJOR: exports.MAJOR,
|
||||
MINOR: exports.MINOR,
|
||||
SPECIAL_HEADERS: exports.SPECIAL_HEADERS,
|
||||
METHODS_HTTP: exports.METHODS_HTTP,
|
||||
METHODS_ICE: exports.METHODS_ICE,
|
||||
METHODS_RTSP: exports.METHODS_RTSP,
|
||||
METHOD_MAP: exports.METHOD_MAP,
|
||||
H_METHOD_MAP: exports.H_METHOD_MAP,
|
||||
STATUSES_HTTP: exports.STATUSES_HTTP,
|
||||
};
|
||||
15
backend/node_modules/undici/lib/llhttp/llhttp-wasm.js
generated
vendored
Normal file
15
backend/node_modules/undici/lib/llhttp/llhttp-wasm.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
15
backend/node_modules/undici/lib/llhttp/llhttp_simd-wasm.js
generated
vendored
Normal file
15
backend/node_modules/undici/lib/llhttp/llhttp_simd-wasm.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
backend/node_modules/undici/lib/llhttp/utils.d.ts
generated
vendored
Normal file
2
backend/node_modules/undici/lib/llhttp/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import type { IntDict } from './constants';
|
||||
export declare function enumToMap(obj: IntDict, filter?: readonly number[], exceptions?: readonly number[]): IntDict;
|
||||
12
backend/node_modules/undici/lib/llhttp/utils.js
generated
vendored
Normal file
12
backend/node_modules/undici/lib/llhttp/utils.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.enumToMap = enumToMap;
|
||||
function enumToMap(obj, filter = [], exceptions = []) {
|
||||
const emptyFilter = (filter?.length ?? 0) === 0;
|
||||
const emptyExceptions = (exceptions?.length ?? 0) === 0;
|
||||
return Object.fromEntries(Object.entries(obj).filter(([, value]) => {
|
||||
return (typeof value === 'number' &&
|
||||
(emptyFilter || filter.includes(value)) &&
|
||||
(emptyExceptions || !exceptions.includes(value)));
|
||||
}));
|
||||
}
|
||||
232
backend/node_modules/undici/lib/mock/mock-agent.js
generated
vendored
Normal file
232
backend/node_modules/undici/lib/mock/mock-agent.js
generated
vendored
Normal file
@@ -0,0 +1,232 @@
|
||||
'use strict'
|
||||
|
||||
const { kClients } = require('../core/symbols')
|
||||
const Agent = require('../dispatcher/agent')
|
||||
const {
|
||||
kAgent,
|
||||
kMockAgentSet,
|
||||
kMockAgentGet,
|
||||
kDispatches,
|
||||
kIsMockActive,
|
||||
kNetConnect,
|
||||
kGetNetConnect,
|
||||
kOptions,
|
||||
kFactory,
|
||||
kMockAgentRegisterCallHistory,
|
||||
kMockAgentIsCallHistoryEnabled,
|
||||
kMockAgentAddCallHistoryLog,
|
||||
kMockAgentMockCallHistoryInstance,
|
||||
kMockAgentAcceptsNonStandardSearchParameters,
|
||||
kMockCallHistoryAddLog,
|
||||
kIgnoreTrailingSlash
|
||||
} = require('./mock-symbols')
|
||||
const MockClient = require('./mock-client')
|
||||
const MockPool = require('./mock-pool')
|
||||
const { matchValue, normalizeSearchParams, buildAndValidateMockOptions, normalizeOrigin } = require('./mock-utils')
|
||||
const { InvalidArgumentError, UndiciError } = require('../core/errors')
|
||||
const Dispatcher = require('../dispatcher/dispatcher')
|
||||
const PendingInterceptorsFormatter = require('./pending-interceptors-formatter')
|
||||
const { MockCallHistory } = require('./mock-call-history')
|
||||
|
||||
class MockAgent extends Dispatcher {
|
||||
constructor (opts = {}) {
|
||||
super(opts)
|
||||
|
||||
const mockOptions = buildAndValidateMockOptions(opts)
|
||||
|
||||
this[kNetConnect] = true
|
||||
this[kIsMockActive] = true
|
||||
this[kMockAgentIsCallHistoryEnabled] = mockOptions.enableCallHistory ?? false
|
||||
this[kMockAgentAcceptsNonStandardSearchParameters] = mockOptions.acceptNonStandardSearchParameters ?? false
|
||||
this[kIgnoreTrailingSlash] = mockOptions.ignoreTrailingSlash ?? false
|
||||
|
||||
// Instantiate Agent and encapsulate
|
||||
if (opts?.agent && typeof opts.agent.dispatch !== 'function') {
|
||||
throw new InvalidArgumentError('Argument opts.agent must implement Agent')
|
||||
}
|
||||
const agent = opts?.agent ? opts.agent : new Agent(opts)
|
||||
this[kAgent] = agent
|
||||
|
||||
this[kClients] = agent[kClients]
|
||||
this[kOptions] = mockOptions
|
||||
|
||||
if (this[kMockAgentIsCallHistoryEnabled]) {
|
||||
this[kMockAgentRegisterCallHistory]()
|
||||
}
|
||||
}
|
||||
|
||||
get (origin) {
|
||||
// Normalize origin to handle URL objects and case-insensitive hostnames
|
||||
const normalizedOrigin = normalizeOrigin(origin)
|
||||
const originKey = this[kIgnoreTrailingSlash] ? normalizedOrigin.replace(/\/$/, '') : normalizedOrigin
|
||||
|
||||
let dispatcher = this[kMockAgentGet](originKey)
|
||||
|
||||
if (!dispatcher) {
|
||||
dispatcher = this[kFactory](originKey)
|
||||
this[kMockAgentSet](originKey, dispatcher)
|
||||
}
|
||||
return dispatcher
|
||||
}
|
||||
|
||||
dispatch (opts, handler) {
|
||||
opts.origin = normalizeOrigin(opts.origin)
|
||||
|
||||
// Call MockAgent.get to perform additional setup before dispatching as normal
|
||||
this.get(opts.origin)
|
||||
|
||||
this[kMockAgentAddCallHistoryLog](opts)
|
||||
|
||||
const acceptNonStandardSearchParameters = this[kMockAgentAcceptsNonStandardSearchParameters]
|
||||
|
||||
const dispatchOpts = { ...opts }
|
||||
|
||||
if (acceptNonStandardSearchParameters && dispatchOpts.path) {
|
||||
const [path, searchParams] = dispatchOpts.path.split('?')
|
||||
const normalizedSearchParams = normalizeSearchParams(searchParams, acceptNonStandardSearchParameters)
|
||||
dispatchOpts.path = `${path}?${normalizedSearchParams}`
|
||||
}
|
||||
|
||||
return this[kAgent].dispatch(dispatchOpts, handler)
|
||||
}
|
||||
|
||||
async close () {
|
||||
this.clearCallHistory()
|
||||
await this[kAgent].close()
|
||||
this[kClients].clear()
|
||||
}
|
||||
|
||||
deactivate () {
|
||||
this[kIsMockActive] = false
|
||||
}
|
||||
|
||||
activate () {
|
||||
this[kIsMockActive] = true
|
||||
}
|
||||
|
||||
enableNetConnect (matcher) {
|
||||
if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {
|
||||
if (Array.isArray(this[kNetConnect])) {
|
||||
this[kNetConnect].push(matcher)
|
||||
} else {
|
||||
this[kNetConnect] = [matcher]
|
||||
}
|
||||
} else if (typeof matcher === 'undefined') {
|
||||
this[kNetConnect] = true
|
||||
} else {
|
||||
throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')
|
||||
}
|
||||
}
|
||||
|
||||
disableNetConnect () {
|
||||
this[kNetConnect] = false
|
||||
}
|
||||
|
||||
enableCallHistory () {
|
||||
this[kMockAgentIsCallHistoryEnabled] = true
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
disableCallHistory () {
|
||||
this[kMockAgentIsCallHistoryEnabled] = false
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getCallHistory () {
|
||||
return this[kMockAgentMockCallHistoryInstance]
|
||||
}
|
||||
|
||||
clearCallHistory () {
|
||||
if (this[kMockAgentMockCallHistoryInstance] !== undefined) {
|
||||
this[kMockAgentMockCallHistoryInstance].clear()
|
||||
}
|
||||
}
|
||||
|
||||
// This is required to bypass issues caused by using global symbols - see:
|
||||
// https://github.com/nodejs/undici/issues/1447
|
||||
get isMockActive () {
|
||||
return this[kIsMockActive]
|
||||
}
|
||||
|
||||
[kMockAgentRegisterCallHistory] () {
|
||||
if (this[kMockAgentMockCallHistoryInstance] === undefined) {
|
||||
this[kMockAgentMockCallHistoryInstance] = new MockCallHistory()
|
||||
}
|
||||
}
|
||||
|
||||
[kMockAgentAddCallHistoryLog] (opts) {
|
||||
if (this[kMockAgentIsCallHistoryEnabled]) {
|
||||
// additional setup when enableCallHistory class method is used after mockAgent instantiation
|
||||
this[kMockAgentRegisterCallHistory]()
|
||||
|
||||
// add call history log on every call (intercepted or not)
|
||||
this[kMockAgentMockCallHistoryInstance][kMockCallHistoryAddLog](opts)
|
||||
}
|
||||
}
|
||||
|
||||
[kMockAgentSet] (origin, dispatcher) {
|
||||
this[kClients].set(origin, { count: 0, dispatcher })
|
||||
}
|
||||
|
||||
[kFactory] (origin) {
|
||||
const mockOptions = Object.assign({ agent: this }, this[kOptions])
|
||||
return this[kOptions] && this[kOptions].connections === 1
|
||||
? new MockClient(origin, mockOptions)
|
||||
: new MockPool(origin, mockOptions)
|
||||
}
|
||||
|
||||
[kMockAgentGet] (origin) {
|
||||
// First check if we can immediately find it
|
||||
const result = this[kClients].get(origin)
|
||||
if (result?.dispatcher) {
|
||||
return result.dispatcher
|
||||
}
|
||||
|
||||
// If the origin is not a string create a dummy parent pool and return to user
|
||||
if (typeof origin !== 'string') {
|
||||
const dispatcher = this[kFactory]('http://localhost:9999')
|
||||
this[kMockAgentSet](origin, dispatcher)
|
||||
return dispatcher
|
||||
}
|
||||
|
||||
// If we match, create a pool and assign the same dispatches
|
||||
for (const [keyMatcher, result] of Array.from(this[kClients])) {
|
||||
if (result && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
|
||||
const dispatcher = this[kFactory](origin)
|
||||
this[kMockAgentSet](origin, dispatcher)
|
||||
dispatcher[kDispatches] = result.dispatcher[kDispatches]
|
||||
return dispatcher
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[kGetNetConnect] () {
|
||||
return this[kNetConnect]
|
||||
}
|
||||
|
||||
pendingInterceptors () {
|
||||
const mockAgentClients = this[kClients]
|
||||
|
||||
return Array.from(mockAgentClients.entries())
|
||||
.flatMap(([origin, result]) => result.dispatcher[kDispatches].map(dispatch => ({ ...dispatch, origin })))
|
||||
.filter(({ pending }) => pending)
|
||||
}
|
||||
|
||||
assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {
|
||||
const pending = this.pendingInterceptors()
|
||||
|
||||
if (pending.length === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
throw new UndiciError(
|
||||
pending.length === 1
|
||||
? `1 interceptor is pending:\n\n${pendingInterceptorsFormatter.format(pending)}`.trim()
|
||||
: `${pending.length} interceptors are pending:\n\n${pendingInterceptorsFormatter.format(pending)}`.trim()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = MockAgent
|
||||
248
backend/node_modules/undici/lib/mock/mock-call-history.js
generated
vendored
Normal file
248
backend/node_modules/undici/lib/mock/mock-call-history.js
generated
vendored
Normal file
@@ -0,0 +1,248 @@
|
||||
'use strict'
|
||||
|
||||
const { kMockCallHistoryAddLog } = require('./mock-symbols')
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
|
||||
function handleFilterCallsWithOptions (criteria, options, handler, store) {
|
||||
switch (options.operator) {
|
||||
case 'OR':
|
||||
store.push(...handler(criteria))
|
||||
|
||||
return store
|
||||
case 'AND':
|
||||
return handler.call({ logs: store }, criteria)
|
||||
default:
|
||||
// guard -- should never happens because buildAndValidateFilterCallsOptions is called before
|
||||
throw new InvalidArgumentError('options.operator must to be a case insensitive string equal to \'OR\' or \'AND\'')
|
||||
}
|
||||
}
|
||||
|
||||
function buildAndValidateFilterCallsOptions (options = {}) {
|
||||
const finalOptions = {}
|
||||
|
||||
if ('operator' in options) {
|
||||
if (typeof options.operator !== 'string' || (options.operator.toUpperCase() !== 'OR' && options.operator.toUpperCase() !== 'AND')) {
|
||||
throw new InvalidArgumentError('options.operator must to be a case insensitive string equal to \'OR\' or \'AND\'')
|
||||
}
|
||||
|
||||
return {
|
||||
...finalOptions,
|
||||
operator: options.operator.toUpperCase()
|
||||
}
|
||||
}
|
||||
|
||||
return finalOptions
|
||||
}
|
||||
|
||||
function makeFilterCalls (parameterName) {
|
||||
return (parameterValue) => {
|
||||
if (typeof parameterValue === 'string' || parameterValue == null) {
|
||||
return this.logs.filter((log) => {
|
||||
return log[parameterName] === parameterValue
|
||||
})
|
||||
}
|
||||
if (parameterValue instanceof RegExp) {
|
||||
return this.logs.filter((log) => {
|
||||
return parameterValue.test(log[parameterName])
|
||||
})
|
||||
}
|
||||
|
||||
throw new InvalidArgumentError(`${parameterName} parameter should be one of string, regexp, undefined or null`)
|
||||
}
|
||||
}
|
||||
function computeUrlWithMaybeSearchParameters (requestInit) {
|
||||
// path can contains query url parameters
|
||||
// or query can contains query url parameters
|
||||
try {
|
||||
const url = new URL(requestInit.path, requestInit.origin)
|
||||
|
||||
// requestInit.path contains query url parameters
|
||||
// requestInit.query is then undefined
|
||||
if (url.search.length !== 0) {
|
||||
return url
|
||||
}
|
||||
|
||||
// requestInit.query can be populated here
|
||||
url.search = new URLSearchParams(requestInit.query).toString()
|
||||
|
||||
return url
|
||||
} catch (error) {
|
||||
throw new InvalidArgumentError('An error occurred when computing MockCallHistoryLog.url', { cause: error })
|
||||
}
|
||||
}
|
||||
|
||||
class MockCallHistoryLog {
|
||||
constructor (requestInit = {}) {
|
||||
this.body = requestInit.body
|
||||
this.headers = requestInit.headers
|
||||
this.method = requestInit.method
|
||||
|
||||
const url = computeUrlWithMaybeSearchParameters(requestInit)
|
||||
|
||||
this.fullUrl = url.toString()
|
||||
this.origin = url.origin
|
||||
this.path = url.pathname
|
||||
this.searchParams = Object.fromEntries(url.searchParams)
|
||||
this.protocol = url.protocol
|
||||
this.host = url.host
|
||||
this.port = url.port
|
||||
this.hash = url.hash
|
||||
}
|
||||
|
||||
toMap () {
|
||||
return new Map([
|
||||
['protocol', this.protocol],
|
||||
['host', this.host],
|
||||
['port', this.port],
|
||||
['origin', this.origin],
|
||||
['path', this.path],
|
||||
['hash', this.hash],
|
||||
['searchParams', this.searchParams],
|
||||
['fullUrl', this.fullUrl],
|
||||
['method', this.method],
|
||||
['body', this.body],
|
||||
['headers', this.headers]]
|
||||
)
|
||||
}
|
||||
|
||||
toString () {
|
||||
const options = { betweenKeyValueSeparator: '->', betweenPairSeparator: '|' }
|
||||
let result = ''
|
||||
|
||||
this.toMap().forEach((value, key) => {
|
||||
if (typeof value === 'string' || value === undefined || value === null) {
|
||||
result = `${result}${key}${options.betweenKeyValueSeparator}${value}${options.betweenPairSeparator}`
|
||||
}
|
||||
if ((typeof value === 'object' && value !== null) || Array.isArray(value)) {
|
||||
result = `${result}${key}${options.betweenKeyValueSeparator}${JSON.stringify(value)}${options.betweenPairSeparator}`
|
||||
}
|
||||
// maybe miss something for non Record / Array headers and searchParams here
|
||||
})
|
||||
|
||||
// delete last betweenPairSeparator
|
||||
return result.slice(0, -1)
|
||||
}
|
||||
}
|
||||
|
||||
class MockCallHistory {
|
||||
logs = []
|
||||
|
||||
calls () {
|
||||
return this.logs
|
||||
}
|
||||
|
||||
firstCall () {
|
||||
return this.logs.at(0)
|
||||
}
|
||||
|
||||
lastCall () {
|
||||
return this.logs.at(-1)
|
||||
}
|
||||
|
||||
nthCall (number) {
|
||||
if (typeof number !== 'number') {
|
||||
throw new InvalidArgumentError('nthCall must be called with a number')
|
||||
}
|
||||
if (!Number.isInteger(number)) {
|
||||
throw new InvalidArgumentError('nthCall must be called with an integer')
|
||||
}
|
||||
if (Math.sign(number) !== 1) {
|
||||
throw new InvalidArgumentError('nthCall must be called with a positive value. use firstCall or lastCall instead')
|
||||
}
|
||||
|
||||
// non zero based index. this is more human readable
|
||||
return this.logs.at(number - 1)
|
||||
}
|
||||
|
||||
filterCalls (criteria, options) {
|
||||
// perf
|
||||
if (this.logs.length === 0) {
|
||||
return this.logs
|
||||
}
|
||||
if (typeof criteria === 'function') {
|
||||
return this.logs.filter(criteria)
|
||||
}
|
||||
if (criteria instanceof RegExp) {
|
||||
return this.logs.filter((log) => {
|
||||
return criteria.test(log.toString())
|
||||
})
|
||||
}
|
||||
if (typeof criteria === 'object' && criteria !== null) {
|
||||
// no criteria - returning all logs
|
||||
if (Object.keys(criteria).length === 0) {
|
||||
return this.logs
|
||||
}
|
||||
|
||||
const finalOptions = { operator: 'OR', ...buildAndValidateFilterCallsOptions(options) }
|
||||
|
||||
let maybeDuplicatedLogsFiltered = []
|
||||
if ('protocol' in criteria) {
|
||||
maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.protocol, finalOptions, this.filterCallsByProtocol, maybeDuplicatedLogsFiltered)
|
||||
}
|
||||
if ('host' in criteria) {
|
||||
maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.host, finalOptions, this.filterCallsByHost, maybeDuplicatedLogsFiltered)
|
||||
}
|
||||
if ('port' in criteria) {
|
||||
maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.port, finalOptions, this.filterCallsByPort, maybeDuplicatedLogsFiltered)
|
||||
}
|
||||
if ('origin' in criteria) {
|
||||
maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.origin, finalOptions, this.filterCallsByOrigin, maybeDuplicatedLogsFiltered)
|
||||
}
|
||||
if ('path' in criteria) {
|
||||
maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.path, finalOptions, this.filterCallsByPath, maybeDuplicatedLogsFiltered)
|
||||
}
|
||||
if ('hash' in criteria) {
|
||||
maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.hash, finalOptions, this.filterCallsByHash, maybeDuplicatedLogsFiltered)
|
||||
}
|
||||
if ('fullUrl' in criteria) {
|
||||
maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.fullUrl, finalOptions, this.filterCallsByFullUrl, maybeDuplicatedLogsFiltered)
|
||||
}
|
||||
if ('method' in criteria) {
|
||||
maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.method, finalOptions, this.filterCallsByMethod, maybeDuplicatedLogsFiltered)
|
||||
}
|
||||
|
||||
const uniqLogsFiltered = [...new Set(maybeDuplicatedLogsFiltered)]
|
||||
|
||||
return uniqLogsFiltered
|
||||
}
|
||||
|
||||
throw new InvalidArgumentError('criteria parameter should be one of function, regexp, or object')
|
||||
}
|
||||
|
||||
filterCallsByProtocol = makeFilterCalls.call(this, 'protocol')
|
||||
|
||||
filterCallsByHost = makeFilterCalls.call(this, 'host')
|
||||
|
||||
filterCallsByPort = makeFilterCalls.call(this, 'port')
|
||||
|
||||
filterCallsByOrigin = makeFilterCalls.call(this, 'origin')
|
||||
|
||||
filterCallsByPath = makeFilterCalls.call(this, 'path')
|
||||
|
||||
filterCallsByHash = makeFilterCalls.call(this, 'hash')
|
||||
|
||||
filterCallsByFullUrl = makeFilterCalls.call(this, 'fullUrl')
|
||||
|
||||
filterCallsByMethod = makeFilterCalls.call(this, 'method')
|
||||
|
||||
clear () {
|
||||
this.logs = []
|
||||
}
|
||||
|
||||
[kMockCallHistoryAddLog] (requestInit) {
|
||||
const log = new MockCallHistoryLog(requestInit)
|
||||
|
||||
this.logs.push(log)
|
||||
|
||||
return log
|
||||
}
|
||||
|
||||
* [Symbol.iterator] () {
|
||||
for (const log of this.calls()) {
|
||||
yield log
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.MockCallHistory = MockCallHistory
|
||||
module.exports.MockCallHistoryLog = MockCallHistoryLog
|
||||
68
backend/node_modules/undici/lib/mock/mock-client.js
generated
vendored
Normal file
68
backend/node_modules/undici/lib/mock/mock-client.js
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
'use strict'
|
||||
|
||||
const { promisify } = require('node:util')
|
||||
const Client = require('../dispatcher/client')
|
||||
const { buildMockDispatch } = require('./mock-utils')
|
||||
const {
|
||||
kDispatches,
|
||||
kMockAgent,
|
||||
kClose,
|
||||
kOriginalClose,
|
||||
kOrigin,
|
||||
kOriginalDispatch,
|
||||
kConnected,
|
||||
kIgnoreTrailingSlash
|
||||
} = require('./mock-symbols')
|
||||
const { MockInterceptor } = require('./mock-interceptor')
|
||||
const Symbols = require('../core/symbols')
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
|
||||
/**
|
||||
* MockClient provides an API that extends the Client to influence the mockDispatches.
|
||||
*/
|
||||
class MockClient extends Client {
|
||||
constructor (origin, opts) {
|
||||
if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
|
||||
throw new InvalidArgumentError('Argument opts.agent must implement Agent')
|
||||
}
|
||||
|
||||
super(origin, opts)
|
||||
|
||||
this[kMockAgent] = opts.agent
|
||||
this[kOrigin] = origin
|
||||
this[kIgnoreTrailingSlash] = opts.ignoreTrailingSlash ?? false
|
||||
this[kDispatches] = []
|
||||
this[kConnected] = 1
|
||||
this[kOriginalDispatch] = this.dispatch
|
||||
this[kOriginalClose] = this.close.bind(this)
|
||||
|
||||
this.dispatch = buildMockDispatch.call(this)
|
||||
this.close = this[kClose]
|
||||
}
|
||||
|
||||
get [Symbols.kConnected] () {
|
||||
return this[kConnected]
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the base interceptor for mocking replies from undici.
|
||||
*/
|
||||
intercept (opts) {
|
||||
return new MockInterceptor(
|
||||
opts && { ignoreTrailingSlash: this[kIgnoreTrailingSlash], ...opts },
|
||||
this[kDispatches]
|
||||
)
|
||||
}
|
||||
|
||||
cleanMocks () {
|
||||
this[kDispatches] = []
|
||||
}
|
||||
|
||||
async [kClose] () {
|
||||
await promisify(this[kOriginalClose])()
|
||||
this[kConnected] = 0
|
||||
this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = MockClient
|
||||
29
backend/node_modules/undici/lib/mock/mock-errors.js
generated
vendored
Normal file
29
backend/node_modules/undici/lib/mock/mock-errors.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict'
|
||||
|
||||
const { UndiciError } = require('../core/errors')
|
||||
|
||||
const kMockNotMatchedError = Symbol.for('undici.error.UND_MOCK_ERR_MOCK_NOT_MATCHED')
|
||||
|
||||
/**
|
||||
* The request does not match any registered mock dispatches.
|
||||
*/
|
||||
class MockNotMatchedError extends UndiciError {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.name = 'MockNotMatchedError'
|
||||
this.message = message || 'The request does not match any registered mock dispatches'
|
||||
this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
|
||||
}
|
||||
|
||||
static [Symbol.hasInstance] (instance) {
|
||||
return instance && instance[kMockNotMatchedError] === true
|
||||
}
|
||||
|
||||
get [kMockNotMatchedError] () {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
MockNotMatchedError
|
||||
}
|
||||
209
backend/node_modules/undici/lib/mock/mock-interceptor.js
generated
vendored
Normal file
209
backend/node_modules/undici/lib/mock/mock-interceptor.js
generated
vendored
Normal file
@@ -0,0 +1,209 @@
|
||||
'use strict'
|
||||
|
||||
const { getResponseData, buildKey, addMockDispatch } = require('./mock-utils')
|
||||
const {
|
||||
kDispatches,
|
||||
kDispatchKey,
|
||||
kDefaultHeaders,
|
||||
kDefaultTrailers,
|
||||
kContentLength,
|
||||
kMockDispatch,
|
||||
kIgnoreTrailingSlash
|
||||
} = require('./mock-symbols')
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
const { serializePathWithQuery } = require('../core/util')
|
||||
|
||||
/**
|
||||
* Defines the scope API for an interceptor reply
|
||||
*/
|
||||
class MockScope {
|
||||
constructor (mockDispatch) {
|
||||
this[kMockDispatch] = mockDispatch
|
||||
}
|
||||
|
||||
/**
|
||||
* Delay a reply by a set amount in ms.
|
||||
*/
|
||||
delay (waitInMs) {
|
||||
if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) {
|
||||
throw new InvalidArgumentError('waitInMs must be a valid integer > 0')
|
||||
}
|
||||
|
||||
this[kMockDispatch].delay = waitInMs
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* For a defined reply, never mark as consumed.
|
||||
*/
|
||||
persist () {
|
||||
this[kMockDispatch].persist = true
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Allow one to define a reply for a set amount of matching requests.
|
||||
*/
|
||||
times (repeatTimes) {
|
||||
if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) {
|
||||
throw new InvalidArgumentError('repeatTimes must be a valid integer > 0')
|
||||
}
|
||||
|
||||
this[kMockDispatch].times = repeatTimes
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines an interceptor for a Mock
|
||||
*/
|
||||
class MockInterceptor {
|
||||
constructor (opts, mockDispatches) {
|
||||
if (typeof opts !== 'object') {
|
||||
throw new InvalidArgumentError('opts must be an object')
|
||||
}
|
||||
if (typeof opts.path === 'undefined') {
|
||||
throw new InvalidArgumentError('opts.path must be defined')
|
||||
}
|
||||
if (typeof opts.method === 'undefined') {
|
||||
opts.method = 'GET'
|
||||
}
|
||||
// See https://github.com/nodejs/undici/issues/1245
|
||||
// As per RFC 3986, clients are not supposed to send URI
|
||||
// fragments to servers when they retrieve a document,
|
||||
if (typeof opts.path === 'string') {
|
||||
if (opts.query) {
|
||||
opts.path = serializePathWithQuery(opts.path, opts.query)
|
||||
} else {
|
||||
// Matches https://github.com/nodejs/undici/blob/main/lib/web/fetch/index.js#L1811
|
||||
const parsedURL = new URL(opts.path, 'data://')
|
||||
opts.path = parsedURL.pathname + parsedURL.search
|
||||
}
|
||||
}
|
||||
if (typeof opts.method === 'string') {
|
||||
opts.method = opts.method.toUpperCase()
|
||||
}
|
||||
|
||||
this[kDispatchKey] = buildKey(opts)
|
||||
this[kDispatches] = mockDispatches
|
||||
this[kIgnoreTrailingSlash] = opts.ignoreTrailingSlash ?? false
|
||||
this[kDefaultHeaders] = {}
|
||||
this[kDefaultTrailers] = {}
|
||||
this[kContentLength] = false
|
||||
}
|
||||
|
||||
createMockScopeDispatchData ({ statusCode, data, responseOptions }) {
|
||||
const responseData = getResponseData(data)
|
||||
const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {}
|
||||
const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }
|
||||
const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }
|
||||
|
||||
return { statusCode, data, headers, trailers }
|
||||
}
|
||||
|
||||
validateReplyParameters (replyParameters) {
|
||||
if (typeof replyParameters.statusCode === 'undefined') {
|
||||
throw new InvalidArgumentError('statusCode must be defined')
|
||||
}
|
||||
if (typeof replyParameters.responseOptions !== 'object' || replyParameters.responseOptions === null) {
|
||||
throw new InvalidArgumentError('responseOptions must be an object')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock an undici request with a defined reply.
|
||||
*/
|
||||
reply (replyOptionsCallbackOrStatusCode) {
|
||||
// Values of reply aren't available right now as they
|
||||
// can only be available when the reply callback is invoked.
|
||||
if (typeof replyOptionsCallbackOrStatusCode === 'function') {
|
||||
// We'll first wrap the provided callback in another function,
|
||||
// this function will properly resolve the data from the callback
|
||||
// when invoked.
|
||||
const wrappedDefaultsCallback = (opts) => {
|
||||
// Our reply options callback contains the parameter for statusCode, data and options.
|
||||
const resolvedData = replyOptionsCallbackOrStatusCode(opts)
|
||||
|
||||
// Check if it is in the right format
|
||||
if (typeof resolvedData !== 'object' || resolvedData === null) {
|
||||
throw new InvalidArgumentError('reply options callback must return an object')
|
||||
}
|
||||
|
||||
const replyParameters = { data: '', responseOptions: {}, ...resolvedData }
|
||||
this.validateReplyParameters(replyParameters)
|
||||
// Since the values can be obtained immediately we return them
|
||||
// from this higher order function that will be resolved later.
|
||||
return {
|
||||
...this.createMockScopeDispatchData(replyParameters)
|
||||
}
|
||||
}
|
||||
|
||||
// Add usual dispatch data, but this time set the data parameter to function that will eventually provide data.
|
||||
const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback, { ignoreTrailingSlash: this[kIgnoreTrailingSlash] })
|
||||
return new MockScope(newMockDispatch)
|
||||
}
|
||||
|
||||
// We can have either one or three parameters, if we get here,
|
||||
// we should have 1-3 parameters. So we spread the arguments of
|
||||
// this function to obtain the parameters, since replyData will always
|
||||
// just be the statusCode.
|
||||
const replyParameters = {
|
||||
statusCode: replyOptionsCallbackOrStatusCode,
|
||||
data: arguments[1] === undefined ? '' : arguments[1],
|
||||
responseOptions: arguments[2] === undefined ? {} : arguments[2]
|
||||
}
|
||||
this.validateReplyParameters(replyParameters)
|
||||
|
||||
// Send in-already provided data like usual
|
||||
const dispatchData = this.createMockScopeDispatchData(replyParameters)
|
||||
const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData, { ignoreTrailingSlash: this[kIgnoreTrailingSlash] })
|
||||
return new MockScope(newMockDispatch)
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock an undici request with a defined error.
|
||||
*/
|
||||
replyWithError (error) {
|
||||
if (typeof error === 'undefined') {
|
||||
throw new InvalidArgumentError('error must be defined')
|
||||
}
|
||||
|
||||
const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }, { ignoreTrailingSlash: this[kIgnoreTrailingSlash] })
|
||||
return new MockScope(newMockDispatch)
|
||||
}
|
||||
|
||||
/**
|
||||
* Set default reply headers on the interceptor for subsequent replies
|
||||
*/
|
||||
defaultReplyHeaders (headers) {
|
||||
if (typeof headers === 'undefined') {
|
||||
throw new InvalidArgumentError('headers must be defined')
|
||||
}
|
||||
|
||||
this[kDefaultHeaders] = headers
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Set default reply trailers on the interceptor for subsequent replies
|
||||
*/
|
||||
defaultReplyTrailers (trailers) {
|
||||
if (typeof trailers === 'undefined') {
|
||||
throw new InvalidArgumentError('trailers must be defined')
|
||||
}
|
||||
|
||||
this[kDefaultTrailers] = trailers
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Set reply content length header for replies on the interceptor
|
||||
*/
|
||||
replyContentLength () {
|
||||
this[kContentLength] = true
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.MockInterceptor = MockInterceptor
|
||||
module.exports.MockScope = MockScope
|
||||
68
backend/node_modules/undici/lib/mock/mock-pool.js
generated
vendored
Normal file
68
backend/node_modules/undici/lib/mock/mock-pool.js
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
'use strict'
|
||||
|
||||
const { promisify } = require('node:util')
|
||||
const Pool = require('../dispatcher/pool')
|
||||
const { buildMockDispatch } = require('./mock-utils')
|
||||
const {
|
||||
kDispatches,
|
||||
kMockAgent,
|
||||
kClose,
|
||||
kOriginalClose,
|
||||
kOrigin,
|
||||
kOriginalDispatch,
|
||||
kConnected,
|
||||
kIgnoreTrailingSlash
|
||||
} = require('./mock-symbols')
|
||||
const { MockInterceptor } = require('./mock-interceptor')
|
||||
const Symbols = require('../core/symbols')
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
|
||||
/**
|
||||
* MockPool provides an API that extends the Pool to influence the mockDispatches.
|
||||
*/
|
||||
/**
 * MockPool provides an API that extends the Pool to influence the mockDispatches.
 */
class MockPool extends Pool {
  constructor (origin, opts) {
    if (typeof opts?.agent?.dispatch !== 'function') {
      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
    }

    super(origin, opts)

    this[kMockAgent] = opts.agent
    this[kOrigin] = origin
    this[kIgnoreTrailingSlash] = opts.ignoreTrailingSlash ?? false
    this[kDispatches] = []
    this[kConnected] = 1

    // Keep references to the real implementations before shadowing them
    // with the mock-aware counterparts below.
    this[kOriginalDispatch] = this.dispatch
    this[kOriginalClose] = this.close.bind(this)

    this.dispatch = buildMockDispatch.call(this)
    this.close = this[kClose]
  }

  get [Symbols.kConnected] () {
    return this[kConnected]
  }

  /**
   * Sets up the base interceptor for mocking replies from undici.
   * @returns {MockInterceptor} interceptor bound to this pool's dispatch list.
   */
  intercept (opts) {
    const interceptOpts = opts && { ignoreTrailingSlash: this[kIgnoreTrailingSlash], ...opts }
    return new MockInterceptor(interceptOpts, this[kDispatches])
  }

  /** Removes every registered mock dispatch from this pool. */
  cleanMocks () {
    this[kDispatches] = []
  }

  async [kClose] () {
    await promisify(this[kOriginalClose])()
    this[kConnected] = 0
    this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
  }
}

module.exports = MockPool
|
||||
31
backend/node_modules/undici/lib/mock/mock-symbols.js
generated
vendored
Normal file
31
backend/node_modules/undici/lib/mock/mock-symbols.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict'
|
||||
|
||||
// Private-state symbols shared across the mock subsystem
// (mock-agent, mock-pool, mock-client, mock-interceptor, mock-utils).
module.exports = {
  // Agent / factory wiring
  kAgent: Symbol('agent'),
  kOptions: Symbol('options'),
  kFactory: Symbol('factory'),
  // Interceptor bookkeeping
  kDispatches: Symbol('dispatches'),
  kDispatchKey: Symbol('dispatch key'),
  kDefaultHeaders: Symbol('default headers'),
  kDefaultTrailers: Symbol('default trailers'),
  kContentLength: Symbol('content length'),
  // MockAgent internals
  kMockAgent: Symbol('mock agent'),
  kMockAgentSet: Symbol('mock agent set'),
  kMockAgentGet: Symbol('mock agent get'),
  kMockDispatch: Symbol('mock dispatch'),
  // Originals preserved while dispatch/close are shadowed
  kClose: Symbol('close'),
  kOriginalClose: Symbol('original agent close'),
  kOriginalDispatch: Symbol('original dispatch'),
  kOrigin: Symbol('origin'),
  kIsMockActive: Symbol('is mock active'),
  // net.connect passthrough configuration
  kNetConnect: Symbol('net connect'),
  kGetNetConnect: Symbol('get net connect'),
  kConnected: Symbol('connected'),
  kIgnoreTrailingSlash: Symbol('ignore trailing slash'),
  // Call-history feature
  kMockAgentMockCallHistoryInstance: Symbol('mock agent mock call history name'),
  kMockAgentRegisterCallHistory: Symbol('mock agent register mock call history'),
  kMockAgentAddCallHistoryLog: Symbol('mock agent add call history log'),
  kMockAgentIsCallHistoryEnabled: Symbol('mock agent is call history enabled'),
  kMockAgentAcceptsNonStandardSearchParameters: Symbol('mock agent accepts non standard search parameters'),
  kMockCallHistoryAddLog: Symbol('mock call history add log')
}
|
||||
480
backend/node_modules/undici/lib/mock/mock-utils.js
generated
vendored
Normal file
480
backend/node_modules/undici/lib/mock/mock-utils.js
generated
vendored
Normal file
@@ -0,0 +1,480 @@
|
||||
'use strict'
|
||||
|
||||
const { MockNotMatchedError } = require('./mock-errors')
|
||||
const {
|
||||
kDispatches,
|
||||
kMockAgent,
|
||||
kOriginalDispatch,
|
||||
kOrigin,
|
||||
kGetNetConnect
|
||||
} = require('./mock-symbols')
|
||||
const { serializePathWithQuery } = require('../core/util')
|
||||
const { STATUS_CODES } = require('node:http')
|
||||
const {
|
||||
types: {
|
||||
isPromise
|
||||
}
|
||||
} = require('node:util')
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
|
||||
/**
 * Tests a value against a matcher, which may be an exact string,
 * a RegExp, or a predicate function.
 * @param {string|RegExp|Function} match - The matcher.
 * @param {*} value - The value under test.
 * @returns {boolean} true only when the matcher accepts the value.
 */
function matchValue (match, value) {
  if (typeof match === 'string') return match === value
  if (match instanceof RegExp) return match.test(value)
  // Predicate must return exactly `true`; truthy values are not enough.
  if (typeof match === 'function') return match(value) === true
  return false
}
|
||||
|
||||
/**
 * Returns a copy of a headers object with every header name lower-cased.
 * @param {Record<string, *>} headers
 * @returns {Record<string, *>}
 */
function lowerCaseEntries (headers) {
  const result = {}
  for (const [headerName, headerValue] of Object.entries(headers)) {
    result[headerName.toLocaleLowerCase()] = headerValue
  }
  return result
}
|
||||
|
||||
/**
 * Looks up a header value case-insensitively across the three header shapes
 * undici uses: a flat [name, value, ...] array, a Headers-like object with
 * a `get` method, or a plain record.
 * @param {import('../../index').Headers|string[]|Record<string, string>} headers
 * @param {string} key
 * @returns {string|undefined|null} the header value, if present.
 */
function getHeaderByName (headers, key) {
  if (Array.isArray(headers)) {
    // Flat pair array: even indexes are names, odd indexes are values.
    for (let index = 0; index < headers.length; index += 2) {
      if (headers[index].toLocaleLowerCase() === key.toLocaleLowerCase()) {
        return headers[index + 1]
      }
    }
    return undefined
  }

  if (typeof headers.get === 'function') {
    return headers.get(key)
  }

  return lowerCaseEntries(headers)[key.toLocaleLowerCase()]
}
|
||||
|
||||
/**
 * Converts fetch's flat HeadersList ([name, value, name, value, ...])
 * into a plain object. Later duplicates of a name overwrite earlier ones.
 * @param {string[]} headers
 * @returns {Record<string, string>}
 */
function buildHeadersFromArray (headers) { // fetch HeadersList
  const result = {}
  for (let i = 0; i < headers.length; i += 2) {
    result[headers[i]] = headers[i + 1]
  }
  return result
}
|
||||
|
||||
/**
 * Checks a request's headers against a mock dispatch's header expectation.
 * The expectation may be a predicate over the (lower-cased) headers object,
 * absent (matches anything), or a record of per-header matchers.
 * @param {Object} mockDispatch - Dispatch whose `headers` field is the expectation.
 * @param {Object|string[]|undefined} headers - Actual request headers.
 * @returns {boolean}
 */
function matchHeaders (mockDispatch, headers) {
  const expected = mockDispatch.headers

  if (typeof expected === 'function') {
    if (Array.isArray(headers)) { // fetch HeadersList
      headers = buildHeadersFromArray(headers)
    }
    return expected(headers ? lowerCaseEntries(headers) : {})
  }

  if (typeof expected === 'undefined') {
    return true
  }

  if (typeof headers !== 'object' || typeof expected !== 'object') {
    return false
  }

  // Every expected header must be present and accepted by its matcher.
  for (const [matchHeaderName, matchHeaderValue] of Object.entries(expected)) {
    const headerValue = getHeaderByName(headers, matchHeaderName)
    if (!matchValue(matchHeaderValue, headerValue)) {
      return false
    }
  }

  return true
}
|
||||
|
||||
/**
 * Normalizes non-standard query-string conventions into standard repeated
 * parameters: `ids[]=1` becomes `ids=1`, and `a=1,2` becomes `a=1&a=2`.
 * Values wrapped in matching quotes are kept verbatim, commas included.
 * Non-string input is returned untouched.
 * @param {string|*} query
 * @returns {URLSearchParams|*}
 */
function normalizeSearchParams (query) {
  if (typeof query !== 'string') {
    return query
  }

  const normalizedQp = new URLSearchParams()

  for (let [key, value] of new URLSearchParams(query)) {
    // Rails-style array suffix: treat `key[]` as `key`.
    key = key.replace('[]', '')

    // A quoted value is intentionally literal — do not split on commas.
    const valueRepresentsString = /^(['"]).*\1$/.test(value)
    if (valueRepresentsString) {
      normalizedQp.append(key, value)
      continue
    }

    if (value.includes(',')) {
      for (const v of value.split(',')) {
        normalizedQp.append(key, v)
      }
      continue
    }

    normalizedQp.append(key, value)
  }

  return normalizedQp
}
|
||||
|
||||
/**
 * Canonicalizes a path's query string by sorting its parameters, so that
 * `/p?b=2&a=1` and `/p?a=1&b=2` compare equal. Paths without exactly one
 * `?` (or non-string input) are returned unchanged.
 * @param {string|*} path
 * @returns {string|*}
 */
function safeUrl (path) {
  if (typeof path !== 'string') {
    return path
  }

  const segments = path.split('?', 3)
  if (segments.length !== 2) {
    return path
  }

  const [pathname, search] = segments
  const qp = new URLSearchParams(search)
  qp.sort()
  return `${pathname}?${qp.toString()}`
}
|
||||
|
||||
/**
 * Matches a mock dispatch against a request key on path, method, body and
 * headers. All matchers are evaluated (no short-circuit) to mirror the
 * behavior of user-supplied matcher functions being invoked consistently.
 * @param {Object} mockDispatch
 * @param {{path: string, method: string, body?: *, headers?: *}} key
 * @returns {boolean}
 */
function matchKey (mockDispatch, { path, method, body, headers }) {
  const results = [
    matchValue(mockDispatch.path, path),
    matchValue(mockDispatch.method, method),
    typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true,
    matchHeaders(mockDispatch, headers)
  ]
  return results.every(Boolean)
}
|
||||
|
||||
/**
 * Normalizes mock reply data into something writable to a response:
 * binary data passes through, objects (including null) are JSON-encoded,
 * other truthy values are stringified, and falsy values become ''.
 * @param {*} data
 * @returns {Buffer|Uint8Array|ArrayBuffer|string}
 */
function getResponseData (data) {
  if (Buffer.isBuffer(data) || data instanceof Uint8Array || data instanceof ArrayBuffer) {
    return data
  }
  // Note: typeof null === 'object', so null serializes to the string 'null'.
  if (typeof data === 'object') {
    return JSON.stringify(data)
  }
  return data ? data.toString() : ''
}
|
||||
|
||||
/**
 * Finds the first unconsumed mock dispatch matching a request key.
 * Candidates are narrowed in stages (path, then method, body, headers);
 * as soon as a stage leaves no matches a descriptive MockNotMatchedError
 * is thrown so the caller can report exactly which criterion failed.
 * @param {Object[]} mockDispatches
 * @param {{path: string, method: string, body?: *, headers?: *, query?: *}} key
 * @returns {Object} the matching mock dispatch.
 * @throws {MockNotMatchedError} when no dispatch matches.
 */
function getMockDispatch (mockDispatches, key) {
  const basePath = key.query ? serializePathWithQuery(key.path, key.query) : key.path
  const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath
  const resolvedPathWithoutTrailingSlash = removeTrailingSlash(resolvedPath)

  // Stage 1: path (optionally ignoring trailing slashes per dispatch).
  let candidates = mockDispatches
    .filter(({ consumed }) => !consumed)
    .filter(({ path, ignoreTrailingSlash }) => (
      ignoreTrailingSlash
        ? matchValue(removeTrailingSlash(safeUrl(path)), resolvedPathWithoutTrailingSlash)
        : matchValue(safeUrl(path), resolvedPath)
    ))
  if (candidates.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
  }

  // Stage 2: method.
  candidates = candidates.filter(({ method }) => matchValue(method, key.method))
  if (candidates.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}' on path '${resolvedPath}'`)
  }

  // Stage 3: body (only enforced when the dispatch specified one).
  candidates = candidates.filter(({ body }) => (typeof body !== 'undefined' ? matchValue(body, key.body) : true))
  if (candidates.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}' on path '${resolvedPath}'`)
  }

  // Stage 4: headers.
  candidates = candidates.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
  if (candidates.length === 0) {
    const headers = typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers
    throw new MockNotMatchedError(`Mock dispatch not matched for headers '${headers}' on path '${resolvedPath}'`)
  }

  return candidates[0]
}
|
||||
|
||||
/**
 * Registers a new mock dispatch, merging default bookkeeping fields with
 * the supplied options and request key. Function data is stored as a
 * lazily-invoked callback.
 * @param {Object[]} mockDispatches - List the dispatch is appended to.
 * @param {Object} key - Request-matching key (path/method/body/headers/query).
 * @param {Object|Function} data - Reply data, or a callback producing it.
 * @param {Object} [opts] - Overrides such as persist / ignoreTrailingSlash.
 * @returns {Object} the newly created dispatch entry.
 */
function addMockDispatch (mockDispatches, key, data, opts) {
  const replyData = typeof data === 'function' ? { callback: data } : { ...data }
  const newMockDispatch = {
    timesInvoked: 0,
    times: 1,
    persist: false,
    consumed: false,
    ...opts,
    ...key,
    pending: true,
    data: { error: null, ...replyData }
  }
  mockDispatches.push(newMockDispatch)
  return newMockDispatch
}
|
||||
|
||||
/**
 * Removes the first consumed dispatch that matches the given key.
 * Unconsumed dispatches are never removed here.
 * @param {Object[]} mockDispatches
 * @param {Object} key
 * @returns {void}
 */
function deleteMockDispatch (mockDispatches, key) {
  const index = mockDispatches.findIndex(
    (dispatch) => dispatch.consumed && matchKey(dispatch, key)
  )
  if (index !== -1) {
    mockDispatches.splice(index, 1)
  }
}
|
||||
|
||||
/**
 * Strips all trailing slashes from a path; a path consisting only of
 * slashes (or the empty string) collapses to '/'.
 * @param {string} path - Path to remove trailing slash from.
 * @returns {string}
 */
function removeTrailingSlash (path) {
  let end = path.length
  while (end > 0 && path[end - 1] === '/') {
    end -= 1
  }
  return end === 0 ? '/' : path.slice(0, end)
}
|
||||
|
||||
/**
 * Projects dispatch options down to the fields used for mock matching.
 * @param {Object} opts - Full dispatch options.
 * @returns {{path: *, method: *, body: *, headers: *, query: *}}
 */
function buildKey ({ path, method, body, headers, query }) {
  return { path, method, body, headers, query }
}
|
||||
|
||||
/**
 * Flattens a headers/trailers record into the [name, value, name, value, ...]
 * Buffer list format used by undici's raw-header handlers. Array values are
 * expanded into repeated name/value pairs (the name Buffer is shared).
 * @param {Record<string, string|string[]>} data
 * @returns {Buffer[]}
 */
function generateKeyValues (data) {
  const result = []
  for (const [key, value] of Object.entries(data)) {
    const name = Buffer.from(`${key}`)
    const values = Array.isArray(value) ? value : [value]
    for (const v of values) {
      result.push(name, Buffer.from(`${v}`))
    }
  }
  return result
}
|
||||
|
||||
/**
 * Resolves the standard reason phrase for an HTTP status code.
 * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
 * @param {number} statusCode
 * @returns {string} the reason phrase, or 'unknown' for unrecognized codes.
 */
function getStatusText (statusCode) {
  return STATUS_CODES[statusCode] ?? 'unknown'
}
|
||||
|
||||
/**
 * Drains an (async) iterable body and decodes it as UTF-8 text.
 * @param {AsyncIterable<Buffer>|Iterable<Buffer>} body
 * @returns {Promise<string>}
 */
async function getResponse (body) {
  const chunks = []
  for await (const chunk of body) {
    chunks.push(chunk)
  }
  return Buffer.concat(chunks).toString('utf8')
}
|
||||
|
||||
/**
 * Mock dispatch function used to simulate undici dispatches.
 *
 * Looks up the matching mock dispatch for `opts`, accounts the invocation
 * (times/persist/consumed), and drives the handler through the standard
 * onConnect/onHeaders/onData/onComplete lifecycle — or onError for an
 * error-configured or aborted dispatch. Supports delayed replies, callback
 * reply data, and promise-returning reply bodies.
 *
 * Must be invoked with `this` bound to an object carrying [kDispatches]
 * (a MockPool/MockClient).
 *
 * @param {Object} opts - Dispatch options (path, method, body, headers, query).
 * @param {Object} handler - undici dispatch handler receiving the mock reply.
 * @returns {boolean} always true (dispatch accepted).
 * @throws {MockNotMatchedError} via getMockDispatch when nothing matches.
 */
function mockDispatch (opts, handler) {
  // Get mock dispatch from built key
  const key = buildKey(opts)
  const mockDispatch = getMockDispatch(this[kDispatches], key)

  mockDispatch.timesInvoked++

  // Here's where we resolve a callback if a callback is present for the dispatch data.
  if (mockDispatch.data.callback) {
    mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) }
  }

  // Parse mockDispatch data
  const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch
  const { timesInvoked, times } = mockDispatch

  // If it's used up and not persistent, mark as consumed
  mockDispatch.consumed = !persist && timesInvoked >= times
  mockDispatch.pending = timesInvoked < times

  // If specified, trigger dispatch error
  if (error !== null) {
    deleteMockDispatch(this[kDispatches], key)
    handler.onError(error)
    return true
  }

  // Track whether the request has been aborted
  let aborted = false
  let timer = null

  // Abort callback handed to the handler via onConnect; idempotent.
  function abort (err) {
    if (aborted) {
      return
    }
    aborted = true

    // Clear the pending delayed response if any
    if (timer !== null) {
      clearTimeout(timer)
      timer = null
    }

    // Notify the handler of the abort
    handler.onError(err)
  }

  // Call onConnect to allow the handler to register the abort callback
  handler.onConnect?.(abort, null)

  // Handle the request with a delay if necessary
  if (typeof delay === 'number' && delay > 0) {
    timer = setTimeout(() => {
      timer = null
      handleReply(this[kDispatches])
    }, delay)
  } else {
    handleReply(this[kDispatches])
  }

  // Emits the mocked response through the handler callbacks.
  // `_data` defaults to the dispatch's reply data; a second invocation with
  // resolved promise data reuses the same path.
  function handleReply (mockDispatches, _data = data) {
    // Don't send response if the request was aborted
    if (aborted) {
      return
    }

    // fetch's HeadersList is a 1D string array
    const optsHeaders = Array.isArray(opts.headers)
      ? buildHeadersFromArray(opts.headers)
      : opts.headers
    const body = typeof _data === 'function'
      ? _data({ ...opts, headers: optsHeaders })
      : _data

    // util.types.isPromise is likely needed for jest.
    if (isPromise(body)) {
      // If handleReply is asynchronous, throwing an error
      // in the callback will reject the promise, rather than
      // synchronously throw the error, which breaks some tests.
      // Rather, we wait for the callback to resolve if it is a
      // promise, and then re-run handleReply with the new body.
      return body.then((newData) => handleReply(mockDispatches, newData))
    }

    // Check again if aborted after async body resolution
    if (aborted) {
      return
    }

    const responseData = getResponseData(body)
    const responseHeaders = generateKeyValues(headers)
    const responseTrailers = generateKeyValues(trailers)

    handler.onHeaders?.(statusCode, responseHeaders, resume, getStatusText(statusCode))
    handler.onData?.(Buffer.from(responseData))
    handler.onComplete?.(responseTrailers)
    deleteMockDispatch(mockDispatches, key)
  }

  // No-op resume callback passed to onHeaders (mock replies are never paused).
  function resume () {}

  return true
}
|
||||
|
||||
/**
 * Builds the dispatch function installed on a MockPool/MockClient.
 * When mocking is active, unmatched requests either fall through to the
 * original dispatch (if net.connect allows the origin) or fail with a
 * MockNotMatchedError. Must be called with `this` bound to the pool/client.
 * @returns {Function} the mock-aware dispatch function.
 */
function buildMockDispatch () {
  const agent = this[kMockAgent]
  const origin = this[kOrigin]
  const originalDispatch = this[kOriginalDispatch]

  return function dispatch (opts, handler) {
    if (!agent.isMockActive) {
      originalDispatch.call(this, opts, handler)
      return
    }

    try {
      mockDispatch.call(this, opts, handler)
    } catch (error) {
      // Only "no mock matched" errors may fall through to the network.
      if (error.code !== 'UND_MOCK_ERR_MOCK_NOT_MATCHED') {
        throw error
      }

      const netConnect = agent[kGetNetConnect]()
      if (netConnect === false) {
        throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)
      }
      if (checkNetConnect(netConnect, origin)) {
        originalDispatch.call(this, opts, handler)
      } else {
        throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)
      }
    }
  }
}
|
||||
|
||||
/**
 * Decides whether a real network connection to `origin` is permitted.
 * @param {boolean|Array<string|RegExp|Function>} netConnect - true allows all;
 *   an array allows origins whose host matches any entry.
 * @param {string} origin - Origin URL (must be parseable).
 * @returns {boolean}
 */
function checkNetConnect (netConnect, origin) {
  // Parse unconditionally so an invalid origin always throws, matching
  // the original behavior regardless of the netConnect value.
  const url = new URL(origin)
  if (netConnect === true) {
    return true
  }
  return Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))
}
|
||||
|
||||
/**
 * Normalizes an origin for comparison: URL instances collapse to their
 * `.origin`, strings are lower-cased, anything else passes through.
 * @param {string|URL|*} origin
 * @returns {string|*}
 */
function normalizeOrigin (origin) {
  if (origin instanceof URL) {
    return origin.origin
  }
  if (typeof origin === 'string') {
    return origin.toLowerCase()
  }
  return origin
}
|
||||
|
||||
/**
 * Validates mock-agent options and strips the `agent` key.
 * The boolean-typed options are validated in one pass instead of three
 * copy-pasted checks, and the error message grammar is fixed
 * ("must to be a boolean" -> "must be a boolean").
 * @param {Object} opts - Raw options, possibly including `agent`.
 * @returns {Object} the options without `agent`.
 * @throws {InvalidArgumentError} when a boolean option has a non-boolean value.
 */
function buildAndValidateMockOptions (opts) {
  const { agent, ...mockOptions } = opts

  // Options that, when present, must be booleans.
  const booleanOptions = ['enableCallHistory', 'acceptNonStandardSearchParameters', 'ignoreTrailingSlash']
  for (const option of booleanOptions) {
    if (option in mockOptions && typeof mockOptions[option] !== 'boolean') {
      throw new InvalidArgumentError(`options.${option} must be a boolean`)
    }
  }

  return mockOptions
}
|
||||
|
||||
// Public surface of the mock utilities; consumed by mock-agent, mock-pool,
// mock-client and the mock interceptors.
module.exports = {
  getResponseData,
  getMockDispatch,
  addMockDispatch,
  deleteMockDispatch,
  buildKey,
  generateKeyValues,
  matchValue,
  getResponse,
  getStatusText,
  mockDispatch,
  buildMockDispatch,
  checkNetConnect,
  buildAndValidateMockOptions,
  getHeaderByName,
  buildHeadersFromArray,
  normalizeSearchParams,
  normalizeOrigin
}
|
||||
43
backend/node_modules/undici/lib/mock/pending-interceptors-formatter.js
generated
vendored
Normal file
43
backend/node_modules/undici/lib/mock/pending-interceptors-formatter.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
'use strict'
|
||||
|
||||
const { Transform } = require('node:stream')
|
||||
const { Console } = require('node:console')
|
||||
|
||||
// Table markers for the "Persistent" column: emoji when the runtime has ICU
// (full Unicode support), plain ASCII fallbacks otherwise.
const PERSISTENT = process.versions.icu ? '✅' : 'Y '
const NOT_PERSISTENT = process.versions.icu ? '❌' : 'N '
|
||||
|
||||
/**
|
||||
* Gets the output of `console.table(…)` as a string.
|
||||
*/
|
||||
module.exports = class PendingInterceptorsFormatter {
|
||||
constructor ({ disableColors } = {}) {
|
||||
this.transform = new Transform({
|
||||
transform (chunk, _enc, cb) {
|
||||
cb(null, chunk)
|
||||
}
|
||||
})
|
||||
|
||||
this.logger = new Console({
|
||||
stdout: this.transform,
|
||||
inspectOptions: {
|
||||
colors: !disableColors && !process.env.CI
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
format (pendingInterceptors) {
|
||||
const withPrettyHeaders = pendingInterceptors.map(
|
||||
({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
||||
Method: method,
|
||||
Origin: origin,
|
||||
Path: path,
|
||||
'Status code': statusCode,
|
||||
Persistent: persist ? PERSISTENT : NOT_PERSISTENT,
|
||||
Invocations: timesInvoked,
|
||||
Remaining: persist ? Infinity : times - timesInvoked
|
||||
}))
|
||||
|
||||
this.logger.table(withPrettyHeaders)
|
||||
return this.transform.read().toString()
|
||||
}
|
||||
}
|
||||
353
backend/node_modules/undici/lib/mock/snapshot-agent.js
generated
vendored
Normal file
353
backend/node_modules/undici/lib/mock/snapshot-agent.js
generated
vendored
Normal file
@@ -0,0 +1,353 @@
|
||||
'use strict'
|
||||
|
||||
const Agent = require('../dispatcher/agent')
|
||||
const MockAgent = require('./mock-agent')
|
||||
const { SnapshotRecorder } = require('./snapshot-recorder')
|
||||
const WrapHandler = require('../handler/wrap-handler')
|
||||
const { InvalidArgumentError, UndiciError } = require('../core/errors')
|
||||
const { validateSnapshotMode } = require('./snapshot-utils')
|
||||
|
||||
// Private-state keys for SnapshotAgent instances.
const kSnapshotRecorder = Symbol('kSnapshotRecorder')
const kSnapshotMode = Symbol('kSnapshotMode')
const kSnapshotPath = Symbol('kSnapshotPath')
const kSnapshotLoaded = Symbol('kSnapshotLoaded')
const kRealAgent = Symbol('kRealAgent')

// Static flag to ensure warning is only emitted once per process
let warningEmitted = false
|
||||
|
||||
/**
 * SnapshotAgent records real HTTP interactions and replays them later.
 *
 * Modes:
 * - 'record':   dispatch through a real Agent and persist each response.
 * - 'playback': serve previously recorded responses; error when none match.
 * - 'update':   replay known snapshots, record any new interactions.
 *
 * Experimental: a process-wide ExperimentalWarning is emitted on first use.
 */
class SnapshotAgent extends MockAgent {
  constructor (opts = {}) {
    // Emit experimental warning only once
    if (!warningEmitted) {
      process.emitWarning(
        'SnapshotAgent is experimental and subject to change',
        'ExperimentalWarning'
      )
      warningEmitted = true
    }

    const {
      mode = 'record',
      snapshotPath = null,
      ...mockAgentOpts
    } = opts

    super(mockAgentOpts)

    validateSnapshotMode(mode)

    // Validate snapshotPath is provided when required
    if ((mode === 'playback' || mode === 'update') && !snapshotPath) {
      throw new InvalidArgumentError(`snapshotPath is required when mode is '${mode}'`)
    }

    this[kSnapshotMode] = mode
    this[kSnapshotPath] = snapshotPath

    // The recorder owns snapshot matching, persistence and filtering options.
    this[kSnapshotRecorder] = new SnapshotRecorder({
      snapshotPath: this[kSnapshotPath],
      mode: this[kSnapshotMode],
      maxSnapshots: opts.maxSnapshots,
      autoFlush: opts.autoFlush,
      flushInterval: opts.flushInterval,
      matchHeaders: opts.matchHeaders,
      ignoreHeaders: opts.ignoreHeaders,
      excludeHeaders: opts.excludeHeaders,
      matchBody: opts.matchBody,
      matchQuery: opts.matchQuery,
      caseSensitive: opts.caseSensitive,
      shouldRecord: opts.shouldRecord,
      shouldPlayback: opts.shouldPlayback,
      excludeUrls: opts.excludeUrls
    })
    this[kSnapshotLoaded] = false

    // For recording/update mode, we need a real agent to make actual requests
    // For playback mode, we need a real agent if there are excluded URLs
    if (this[kSnapshotMode] === 'record' || this[kSnapshotMode] === 'update' ||
        (this[kSnapshotMode] === 'playback' && opts.excludeUrls && opts.excludeUrls.length > 0)) {
      this[kRealAgent] = new Agent(opts)
    }

    // Auto-load snapshots in playback/update mode
    if ((this[kSnapshotMode] === 'playback' || this[kSnapshotMode] === 'update') && this[kSnapshotPath]) {
      this.loadSnapshots().catch(() => {
        // Ignore load errors - file might not exist yet
      })
    }
  }

  /**
   * Routes a dispatch according to the configured mode: replay a snapshot,
   * record a real request, or pass excluded URLs straight through.
   * @param {Object} opts - undici dispatch options.
   * @param {Object} handler - undici dispatch handler (wrapped if legacy).
   */
  dispatch (opts, handler) {
    handler = WrapHandler.wrap(handler)
    const mode = this[kSnapshotMode]

    // Check if URL should be excluded (pass through without mocking/recording)
    if (this[kSnapshotRecorder].isUrlExcluded(opts)) {
      // Real agent is guaranteed by constructor when excludeUrls is configured
      return this[kRealAgent].dispatch(opts, handler)
    }

    if (mode === 'playback' || mode === 'update') {
      // Ensure snapshots are loaded
      if (!this[kSnapshotLoaded]) {
        // Need to load asynchronously, delegate to async version
        return this.#asyncDispatch(opts, handler)
      }

      // Try to find existing snapshot (synchronous)
      const snapshot = this[kSnapshotRecorder].findSnapshot(opts)

      if (snapshot) {
        // Use recorded response (synchronous)
        return this.#replaySnapshot(snapshot, handler)
      } else if (mode === 'update') {
        // Make real request and record it (async required)
        return this.#recordAndReplay(opts, handler)
      } else {
        // Playback mode but no snapshot found
        const error = new UndiciError(`No snapshot found for ${opts.method || 'GET'} ${opts.path}`)
        if (handler.onError) {
          handler.onError(error)
          return
        }
        throw error
      }
    } else if (mode === 'record') {
      // Record mode - make real request and save response (async required)
      return this.#recordAndReplay(opts, handler)
    }
  }

  /**
   * Async version of dispatch for when we need to load snapshots first
   */
  async #asyncDispatch (opts, handler) {
    await this.loadSnapshots()
    return this.dispatch(opts, handler)
  }

  /**
   * Records a real request and replays the response
   */
  #recordAndReplay (opts, handler) {
    // Accumulates the live response so it can be persisted once complete.
    const responseData = {
      statusCode: null,
      headers: {},
      trailers: {},
      body: []
    }

    const self = this // Capture 'this' context for use within nested handler callbacks

    const recordingHandler = {
      onRequestStart (controller, context) {
        return handler.onRequestStart(controller, { ...context, history: this.history })
      },

      onRequestUpgrade (controller, statusCode, headers, socket) {
        return handler.onRequestUpgrade(controller, statusCode, headers, socket)
      },

      onResponseStart (controller, statusCode, headers, statusMessage) {
        responseData.statusCode = statusCode
        responseData.headers = headers
        return handler.onResponseStart(controller, statusCode, headers, statusMessage)
      },

      onResponseData (controller, chunk) {
        responseData.body.push(chunk)
        return handler.onResponseData(controller, chunk)
      },

      onResponseEnd (controller, trailers) {
        responseData.trailers = trailers

        // Record the interaction using captured 'self' context (fire and forget)
        const responseBody = Buffer.concat(responseData.body)
        self[kSnapshotRecorder].record(opts, {
          statusCode: responseData.statusCode,
          headers: responseData.headers,
          body: responseBody,
          trailers: responseData.trailers
        })
          .then(() => handler.onResponseEnd(controller, trailers))
          .catch((error) => handler.onResponseError(controller, error))
      }
    }

    // Use composed agent if available (includes interceptors), otherwise use real agent
    const agent = this[kRealAgent]
    return agent.dispatch(opts, recordingHandler)
  }

  /**
   * Replays a recorded response
   *
   * @param {Object} snapshot - The recorded snapshot to replay.
   * @param {Object} handler - The handler to call with the response data.
   * @returns {void}
   */
  #replaySnapshot (snapshot, handler) {
    try {
      const { response } = snapshot

      // Minimal controller implementation; mock replies never pause.
      const controller = {
        pause () { },
        resume () { },
        abort (reason) {
          this.aborted = true
          this.reason = reason
        },

        aborted: false,
        paused: false
      }

      handler.onRequestStart(controller)

      handler.onResponseStart(controller, response.statusCode, response.headers)

      // Body is always stored as base64 string
      const body = Buffer.from(response.body, 'base64')
      handler.onResponseData(controller, body)

      handler.onResponseEnd(controller, response.trailers)
    } catch (error) {
      handler.onError?.(error)
    }
  }

  /**
   * Loads snapshots from file
   *
   * @param {string} [filePath] - Optional file path to load snapshots from.
   * @returns {Promise<void>} - Resolves when snapshots are loaded.
   */
  async loadSnapshots (filePath) {
    await this[kSnapshotRecorder].loadSnapshots(filePath || this[kSnapshotPath])
    this[kSnapshotLoaded] = true

    // In playback mode, set up MockAgent interceptors for all snapshots
    if (this[kSnapshotMode] === 'playback') {
      this.#setupMockInterceptors()
    }
  }

  /**
   * Saves snapshots to file
   *
   * @param {string} [filePath] - Optional file path to save snapshots to.
   * @returns {Promise<void>} - Resolves when snapshots are saved.
   */
  async saveSnapshots (filePath) {
    return this[kSnapshotRecorder].saveSnapshots(filePath || this[kSnapshotPath])
  }

  /**
   * Sets up MockAgent interceptors based on recorded snapshots.
   *
   * This method creates MockAgent interceptors for each recorded snapshot,
   * allowing the SnapshotAgent to fall back to MockAgent's standard intercept
   * mechanism in playback mode. Each interceptor is configured to persist
   * (remain active for multiple requests) and responds with the recorded
   * response data.
   *
   * Called automatically when loading snapshots in playback mode.
   *
   * @returns {void}
   */
  #setupMockInterceptors () {
    for (const snapshot of this[kSnapshotRecorder].getSnapshots()) {
      const { request, responses, response } = snapshot
      const url = new URL(request.url)

      const mockPool = this.get(url.origin)

      // Handle both new format (responses array) and legacy format (response object)
      const responseData = responses ? responses[0] : response
      if (!responseData) continue

      mockPool.intercept({
        path: url.pathname + url.search,
        method: request.method,
        headers: request.headers,
        body: request.body
      }).reply(responseData.statusCode, responseData.body, {
        headers: responseData.headers,
        trailers: responseData.trailers
      }).persist()
    }
  }

  /**
   * Gets the snapshot recorder
   * @return {SnapshotRecorder} - The snapshot recorder instance
   */
  getRecorder () {
    return this[kSnapshotRecorder]
  }

  /**
   * Gets the current mode
   * @return {import('./snapshot-utils').SnapshotMode} - The current snapshot mode
   */
  getMode () {
    return this[kSnapshotMode]
  }

  /**
   * Clears all snapshots
   * @returns {void}
   */
  clearSnapshots () {
    this[kSnapshotRecorder].clear()
  }

  /**
   * Resets call counts for all snapshots (useful for test cleanup)
   * @returns {void}
   */
  resetCallCounts () {
    this[kSnapshotRecorder].resetCallCounts()
  }

  /**
   * Deletes a specific snapshot by request options
   * @param {import('./snapshot-recorder').SnapshotRequestOptions} requestOpts - Request options to identify the snapshot
   * @return {Promise<boolean>} - Returns true if the snapshot was deleted, false if not found
   */
  deleteSnapshot (requestOpts) {
    return this[kSnapshotRecorder].deleteSnapshot(requestOpts)
  }

  /**
   * Gets information about a specific snapshot
   * @returns {import('./snapshot-recorder').SnapshotInfo|null} - Snapshot information or null if not found
   */
  getSnapshotInfo (requestOpts) {
    return this[kSnapshotRecorder].getSnapshotInfo(requestOpts)
  }

  /**
   * Replaces all snapshots with new data (full replacement)
   * @param {Array<{hash: string; snapshot: import('./snapshot-recorder').SnapshotEntry}>|Record<string, import('./snapshot-recorder').SnapshotEntry>} snapshotData - New snapshot data to replace existing snapshots
   * @returns {void}
   */
  replaceSnapshots (snapshotData) {
    this[kSnapshotRecorder].replaceSnapshots(snapshotData)
  }

  /**
   * Closes the agent, saving snapshots and cleaning up resources.
   *
   * @returns {Promise<void>}
   */
  async close () {
    await this[kSnapshotRecorder].close()
    await this[kRealAgent]?.close()
    await super.close()
  }
}

module.exports = SnapshotAgent
|
||||
588
backend/node_modules/undici/lib/mock/snapshot-recorder.js
generated
vendored
Normal file
588
backend/node_modules/undici/lib/mock/snapshot-recorder.js
generated
vendored
Normal file
@@ -0,0 +1,588 @@
|
||||
'use strict'
|
||||
|
||||
const { writeFile, readFile, mkdir } = require('node:fs/promises')
|
||||
const { dirname, resolve } = require('node:path')
|
||||
const { setTimeout, clearTimeout } = require('node:timers')
|
||||
const { InvalidArgumentError, UndiciError } = require('../core/errors')
|
||||
const { hashId, isUrlExcludedFactory, normalizeHeaders, createHeaderFilters } = require('./snapshot-utils')
|
||||
|
||||
/**
|
||||
* @typedef {Object} SnapshotRequestOptions
|
||||
* @property {string} method - HTTP method (e.g. 'GET', 'POST', etc.)
|
||||
* @property {string} path - Request path
|
||||
* @property {string} origin - Request origin (base URL)
|
||||
* @property {import('./snapshot-utils').Headers|import('./snapshot-utils').UndiciHeaders} headers - Request headers
|
||||
* @property {import('./snapshot-utils').NormalizedHeaders} _normalizedHeaders - Request headers as a lowercase object
|
||||
* @property {string|Buffer} [body] - Request body (optional)
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} SnapshotEntryRequest
|
||||
* @property {string} method - HTTP method (e.g. 'GET', 'POST', etc.)
|
||||
* @property {string} url - Full URL of the request
|
||||
* @property {import('./snapshot-utils').NormalizedHeaders} headers - Normalized headers as a lowercase object
|
||||
* @property {string|Buffer} [body] - Request body (optional)
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} SnapshotEntryResponse
|
||||
* @property {number} statusCode - HTTP status code of the response
|
||||
* @property {import('./snapshot-utils').NormalizedHeaders} headers - Normalized response headers as a lowercase object
|
||||
* @property {string} body - Response body as a base64url encoded string
|
||||
* @property {Object} [trailers] - Optional response trailers
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} SnapshotEntry
|
||||
* @property {SnapshotEntryRequest} request - The request object
|
||||
* @property {Array<SnapshotEntryResponse>} responses - Array of response objects
|
||||
* @property {number} callCount - Number of times this snapshot has been called
|
||||
* @property {string} timestamp - ISO timestamp of when the snapshot was created
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} SnapshotRecorderMatchOptions
|
||||
* @property {Array<string>} [matchHeaders=[]] - Headers to match (empty array means match all headers)
|
||||
* @property {Array<string>} [ignoreHeaders=[]] - Headers to ignore for matching
|
||||
* @property {Array<string>} [excludeHeaders=[]] - Headers to exclude from matching
|
||||
* @property {boolean} [matchBody=true] - Whether to match request body
|
||||
* @property {boolean} [matchQuery=true] - Whether to match query parameters
|
||||
* @property {boolean} [caseSensitive=false] - Whether header matching is case-sensitive
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} SnapshotRecorderOptions
|
||||
* @property {string} [snapshotPath] - Path to save/load snapshots
|
||||
* @property {import('./snapshot-utils').SnapshotMode} [mode='record'] - Mode: 'record' or 'playback'
|
||||
* @property {number} [maxSnapshots=Infinity] - Maximum number of snapshots to keep
|
||||
* @property {boolean} [autoFlush=false] - Whether to automatically flush snapshots to disk
|
||||
* @property {number} [flushInterval=30000] - Auto-flush interval in milliseconds (default: 30 seconds)
|
||||
* @property {Array<string|RegExp>} [excludeUrls=[]] - URLs to exclude from recording
|
||||
* @property {function} [shouldRecord=null] - Function to filter requests for recording
|
||||
* @property {function} [shouldPlayback=null] - Function to filter requests
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} SnapshotFormattedRequest
|
||||
* @property {string} method - HTTP method (e.g. 'GET', 'POST', etc.)
|
||||
* @property {string} url - Full URL of the request (with query parameters if matchQuery is true)
|
||||
* @property {import('./snapshot-utils').NormalizedHeaders} headers - Normalized headers as a lowercase object
|
||||
* @property {string} body - Request body (optional, only if matchBody is true)
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} SnapshotInfo
|
||||
* @property {string} hash - Hash key for the snapshot
|
||||
* @property {SnapshotEntryRequest} request - The request object
|
||||
* @property {number} responseCount - Number of responses recorded for this request
|
||||
* @property {number} callCount - Number of times this snapshot has been called
|
||||
* @property {string} timestamp - ISO timestamp of when the snapshot was created
|
||||
*/
|
||||
|
||||
/**
 * Formats a request for consistent snapshot storage
 * Caches normalized headers to avoid repeated processing
 *
 * NOTE(review): memoizes the normalized headers onto the caller's opts object
 * (opts._normalizedHeaders) — an intentional but observable parameter mutation.
 *
 * @param {SnapshotRequestOptions} opts - Request options
 * @param {import('./snapshot-utils').HeaderFilters} headerFilters - Cached header sets for performance
 * @param {SnapshotRecorderMatchOptions} [matchOptions] - Matching options for headers and body
 * @returns {SnapshotFormattedRequest} - Formatted request object
 */
function formatRequestKey (opts, headerFilters, matchOptions = {}) {
  const url = new URL(opts.path, opts.origin)

  // Cache normalized headers if not already done
  const normalized = opts._normalizedHeaders || normalizeHeaders(opts.headers)
  if (!opts._normalizedHeaders) {
    opts._normalizedHeaders = normalized
  }

  return {
    method: opts.method || 'GET',
    // matchQuery=false strips the query string; matchBody=false drops the body
    // so requests differing only in those parts share one snapshot key.
    url: matchOptions.matchQuery !== false ? url.toString() : `${url.origin}${url.pathname}`,
    headers: filterHeadersForMatching(normalized, headerFilters, matchOptions),
    body: matchOptions.matchBody !== false && opts.body ? String(opts.body) : ''
  }
}
|
||||
|
||||
/**
 * Filters headers based on matching configuration.
 *
 * Headers in the exclude set (security) or ignore set (matching) are dropped;
 * when the match set is non-empty it acts as an allow-list. Header names are
 * lower-cased unless caseSensitive is set.
 *
 * @param {import('./snapshot-utils').Headers} headers - Headers to filter
 * @param {import('./snapshot-utils').HeaderFilters} headerFilters - Cached sets for ignore, exclude, and match headers
 * @param {SnapshotRecorderMatchOptions} [matchOptions] - Matching options for headers
 */
function filterHeadersForMatching (headers, headerFilters, matchOptions = {}) {
  if (!headers || typeof headers !== 'object') return {}

  const preserveCase = Boolean(matchOptions.caseSensitive)
  const { ignore, exclude, match } = headerFilters
  const result = {}

  for (const name of Object.keys(headers)) {
    const lookupKey = preserveCase ? name : name.toLowerCase()

    // Dropped outright: sensitive (exclude) and irrelevant (ignore) headers.
    if (exclude.has(lookupKey) || ignore.has(lookupKey)) {
      continue
    }

    // A non-empty match set restricts matching to the listed headers only.
    if (match.size !== 0 && !match.has(lookupKey)) {
      continue
    }

    result[lookupKey] = headers[name]
  }

  return result
}
|
||||
|
||||
/**
 * Filters headers for storage (only excludes sensitive headers).
 *
 * Unlike filterHeadersForMatching, the ignore and match sets play no role
 * here — everything except the exclude set is persisted.
 *
 * @param {import('./snapshot-utils').Headers} headers - Headers to filter
 * @param {import('./snapshot-utils').HeaderFilters} headerFilters - Cached sets for ignore, exclude, and match headers
 * @param {SnapshotRecorderMatchOptions} [matchOptions] - Matching options for headers
 */
function filterHeadersForStorage (headers, headerFilters, matchOptions = {}) {
  if (!headers || typeof headers !== 'object') return {}

  const preserveCase = Boolean(matchOptions.caseSensitive)
  const excludeSet = headerFilters.exclude
  const stored = {}

  for (const [name, value] of Object.entries(headers)) {
    const storageKey = preserveCase ? name : name.toLowerCase()
    if (!excludeSet.has(storageKey)) {
      stored[storageKey] = value
    }
  }

  return stored
}
|
||||
|
||||
/**
 * Creates a hash key for request matching
 * Properly orders headers to avoid conflicts and uses crypto hashing when available
 *
 * Fixed: header value arrays are copied before sorting. The original called
 * values.sort() directly on arrays owned by formattedRequest — the same object
 * that is stored as the snapshot's request — mutating caller-visible state.
 *
 * @param {SnapshotFormattedRequest} formattedRequest - Request object
 * @returns {string} - Base64url encoded hash of the request
 */
function createRequestHash (formattedRequest) {
  const parts = [
    formattedRequest.method,
    formattedRequest.url
  ]

  // Process headers in a deterministic way to avoid conflicts
  if (formattedRequest.headers && typeof formattedRequest.headers === 'object') {
    const headerKeys = Object.keys(formattedRequest.headers).sort()
    for (const key of headerKeys) {
      const rawValues = formattedRequest.headers[key]
      // Copy before sorting so the caller's array is not mutated in place
      const values = Array.isArray(rawValues) ? [...rawValues] : [rawValues]

      // Add header name
      parts.push(key)

      // Add all values for this header, sorted for consistency
      for (const value of values.sort()) {
        parts.push(String(value))
      }
    }
  }

  // Add body
  parts.push(formattedRequest.body)

  const content = parts.join('|')

  return hashId(content)
}
|
||||
|
||||
/**
 * Records and replays HTTP request/response snapshots keyed by a hash of the
 * formatted request. Supports sequential responses per request, optional
 * persistence to a JSON file, and a debounced auto-flush timer.
 */
class SnapshotRecorder {
  /** @type {NodeJS.Timeout | null} */
  #flushTimeout

  /** @type {import('./snapshot-utils').IsUrlExcluded} */
  #isUrlExcluded

  /** @type {Map<string, SnapshotEntry>} */
  #snapshots = new Map()

  /** @type {string|undefined} */
  #snapshotPath

  /** @type {number} */
  #maxSnapshots = Infinity

  /** @type {boolean} */
  #autoFlush = false

  /** @type {import('./snapshot-utils').HeaderFilters} */
  #headerFilters

  /**
   * Creates a new SnapshotRecorder instance
   * @param {SnapshotRecorderOptions&SnapshotRecorderMatchOptions} [options={}] - Configuration options for the recorder
   */
  constructor (options = {}) {
    this.#snapshotPath = options.snapshotPath
    this.#maxSnapshots = options.maxSnapshots || Infinity
    this.#autoFlush = options.autoFlush || false
    this.flushInterval = options.flushInterval || 30000 // 30 seconds default
    // NOTE(review): flushInterval and _flushTimer are not referenced by the
    // flush logic below — #scheduleFlush uses a fixed 1s debounce and
    // #flushTimeout. Confirm whether these two fields are dead.
    this._flushTimer = null

    // Matching configuration
    /** @type {Required<SnapshotRecorderMatchOptions>} */
    this.matchOptions = {
      matchHeaders: options.matchHeaders || [], // empty means match all headers
      ignoreHeaders: options.ignoreHeaders || [],
      excludeHeaders: options.excludeHeaders || [],
      matchBody: options.matchBody !== false, // default: true
      matchQuery: options.matchQuery !== false, // default: true
      caseSensitive: options.caseSensitive || false
    }

    // Cache processed header sets to avoid recreating them on every request
    this.#headerFilters = createHeaderFilters(this.matchOptions)

    // Request filtering callbacks
    this.shouldRecord = options.shouldRecord || (() => true) // function(requestOpts) -> boolean
    this.shouldPlayback = options.shouldPlayback || (() => true) // function(requestOpts) -> boolean

    // URL pattern filtering
    this.#isUrlExcluded = isUrlExcludedFactory(options.excludeUrls) // Array of regex patterns or strings

    // Start auto-flush timer if enabled
    if (this.#autoFlush && this.#snapshotPath) {
      this.#startAutoFlush()
    }
  }

  /**
   * Records a request-response interaction
   * @param {SnapshotRequestOptions} requestOpts - Request options
   * @param {SnapshotEntryResponse} response - Response data to record
   * @return {Promise<void>} - Resolves when the recording is complete
   */
  async record (requestOpts, response) {
    // Check if recording should be filtered out
    if (!this.shouldRecord(requestOpts)) {
      return // Skip recording
    }

    // Check URL exclusion patterns
    if (this.isUrlExcluded(requestOpts)) {
      return // Skip recording
    }

    const request = formatRequestKey(requestOpts, this.#headerFilters, this.matchOptions)
    const hash = createRequestHash(request)

    // Extract response data - always store body as base64
    const normalizedHeaders = normalizeHeaders(response.headers)

    /** @type {SnapshotEntryResponse} */
    const responseData = {
      statusCode: response.statusCode,
      headers: filterHeadersForStorage(normalizedHeaders, this.#headerFilters, this.matchOptions),
      // NOTE(review): stored with the standard 'base64' encoding, while the
      // SnapshotEntryResponse typedef describes the body as base64url — confirm.
      body: Buffer.isBuffer(response.body)
        ? response.body.toString('base64')
        : Buffer.from(String(response.body || '')).toString('base64'),
      trailers: response.trailers
    }

    // Remove oldest snapshot if we exceed maxSnapshots limit
    // (Map iteration order is insertion order, so the first key is the oldest.)
    if (this.#snapshots.size >= this.#maxSnapshots && !this.#snapshots.has(hash)) {
      const oldestKey = this.#snapshots.keys().next().value
      this.#snapshots.delete(oldestKey)
    }

    // Support sequential responses - if snapshot exists, add to responses array
    const existingSnapshot = this.#snapshots.get(hash)
    if (existingSnapshot && existingSnapshot.responses) {
      existingSnapshot.responses.push(responseData)
      existingSnapshot.timestamp = new Date().toISOString()
    } else {
      this.#snapshots.set(hash, {
        request,
        responses: [responseData], // Always store as array for consistency
        callCount: 0,
        timestamp: new Date().toISOString()
      })
    }

    // Auto-flush if enabled
    if (this.#autoFlush && this.#snapshotPath) {
      this.#scheduleFlush()
    }
  }

  /**
   * Checks if a URL should be excluded from recording/playback
   * @param {SnapshotRequestOptions} requestOpts - Request options to check
   * @returns {boolean} - True if URL is excluded
   */
  isUrlExcluded (requestOpts) {
    const url = new URL(requestOpts.path, requestOpts.origin).toString()
    return this.#isUrlExcluded(url)
  }

  /**
   * Finds a matching snapshot for the given request
   * Returns the appropriate response based on call count for sequential responses
   *
   * @param {SnapshotRequestOptions} requestOpts - Request options to match
   * @returns {SnapshotEntry&Record<'response', SnapshotEntryResponse>|undefined} - Matching snapshot response or undefined if not found
   */
  findSnapshot (requestOpts) {
    // Check if playback should be filtered out
    if (!this.shouldPlayback(requestOpts)) {
      return undefined // Skip playback
    }

    // Check URL exclusion patterns
    if (this.isUrlExcluded(requestOpts)) {
      return undefined // Skip playback
    }

    const request = formatRequestKey(requestOpts, this.#headerFilters, this.matchOptions)
    const hash = createRequestHash(request)
    const snapshot = this.#snapshots.get(hash)

    if (!snapshot) return undefined

    // Handle sequential responses
    // Calls beyond the recorded count keep replaying the last response.
    const currentCallCount = snapshot.callCount || 0
    const responseIndex = Math.min(currentCallCount, snapshot.responses.length - 1)
    snapshot.callCount = currentCallCount + 1

    return {
      ...snapshot,
      response: snapshot.responses[responseIndex]
    }
  }

  /**
   * Loads snapshots from file
   * @param {string} [filePath] - Optional file path to load snapshots from
   * @return {Promise<void>} - Resolves when snapshots are loaded
   */
  async loadSnapshots (filePath) {
    const path = filePath || this.#snapshotPath
    if (!path) {
      throw new InvalidArgumentError('Snapshot path is required')
    }

    try {
      const data = await readFile(resolve(path), 'utf8')
      const parsed = JSON.parse(data)

      // Convert array format back to Map
      if (Array.isArray(parsed)) {
        this.#snapshots.clear()
        for (const { hash, snapshot } of parsed) {
          this.#snapshots.set(hash, snapshot)
        }
      } else {
        // Legacy object format
        this.#snapshots = new Map(Object.entries(parsed))
      }
    } catch (error) {
      if (error.code === 'ENOENT') {
        // File doesn't exist yet - that's ok for recording mode
        this.#snapshots.clear()
      } else {
        throw new UndiciError(`Failed to load snapshots from ${path}`, { cause: error })
      }
    }
  }

  /**
   * Saves snapshots to file
   *
   * @param {string} [filePath] - Optional file path to save snapshots
   * @returns {Promise<void>} - Resolves when snapshots are saved
   */
  async saveSnapshots (filePath) {
    const path = filePath || this.#snapshotPath
    if (!path) {
      throw new InvalidArgumentError('Snapshot path is required')
    }

    const resolvedPath = resolve(path)

    // Ensure directory exists
    await mkdir(dirname(resolvedPath), { recursive: true })

    // Convert Map to serializable format
    const data = Array.from(this.#snapshots.entries()).map(([hash, snapshot]) => ({
      hash,
      snapshot
    }))

    // flush: true requests the data be flushed to storage before resolving
    await writeFile(resolvedPath, JSON.stringify(data, null, 2), { flush: true })
  }

  /**
   * Clears all recorded snapshots
   * @returns {void}
   */
  clear () {
    this.#snapshots.clear()
  }

  /**
   * Gets all recorded snapshots
   * @return {Array<SnapshotEntry>} - Array of all recorded snapshots
   */
  getSnapshots () {
    return Array.from(this.#snapshots.values())
  }

  /**
   * Gets snapshot count
   * @return {number} - Number of recorded snapshots
   */
  size () {
    return this.#snapshots.size
  }

  /**
   * Resets call counts for all snapshots (useful for test cleanup)
   * @returns {void}
   */
  resetCallCounts () {
    for (const snapshot of this.#snapshots.values()) {
      snapshot.callCount = 0
    }
  }

  /**
   * Deletes a specific snapshot by request options
   * @param {SnapshotRequestOptions} requestOpts - Request options to match
   * @returns {boolean} - True if snapshot was deleted, false if not found
   */
  deleteSnapshot (requestOpts) {
    const request = formatRequestKey(requestOpts, this.#headerFilters, this.matchOptions)
    const hash = createRequestHash(request)
    return this.#snapshots.delete(hash)
  }

  /**
   * Gets information about a specific snapshot
   * @param {SnapshotRequestOptions} requestOpts - Request options to match
   * @returns {SnapshotInfo|null} - Snapshot information or null if not found
   */
  getSnapshotInfo (requestOpts) {
    const request = formatRequestKey(requestOpts, this.#headerFilters, this.matchOptions)
    const hash = createRequestHash(request)
    const snapshot = this.#snapshots.get(hash)

    if (!snapshot) return null

    return {
      hash,
      request: snapshot.request,
      responseCount: snapshot.responses ? snapshot.responses.length : (snapshot.response ? 1 : 0), // .response for legacy snapshots
      callCount: snapshot.callCount || 0,
      timestamp: snapshot.timestamp
    }
  }

  /**
   * Replaces all snapshots with new data (full replacement)
   * @param {Array<{hash: string; snapshot: SnapshotEntry}>|Record<string, SnapshotEntry>} snapshotData - New snapshot data to replace existing ones
   * @returns {void}
   */
  replaceSnapshots (snapshotData) {
    this.#snapshots.clear()

    if (Array.isArray(snapshotData)) {
      for (const { hash, snapshot } of snapshotData) {
        this.#snapshots.set(hash, snapshot)
      }
    } else if (snapshotData && typeof snapshotData === 'object') {
      // Legacy object format
      this.#snapshots = new Map(Object.entries(snapshotData))
    }
  }

  /**
   * Starts the auto-flush timer
   * @returns {void}
   */
  #startAutoFlush () {
    return this.#scheduleFlush()
  }

  /**
   * Stops the auto-flush timer
   *
   * Also kicks off a final fire-and-forget save so a pending debounced
   * flush is not lost; save errors are deliberately swallowed here.
   *
   * @returns {void}
   */
  #stopAutoFlush () {
    if (this.#flushTimeout) {
      clearTimeout(this.#flushTimeout)
      // Ensure any pending flush is completed
      this.saveSnapshots().catch(() => {
        // Ignore flush errors
      })
      this.#flushTimeout = null
    }
  }

  /**
   * Schedules a flush (debounced to avoid excessive writes)
   *
   * While autoFlush is on, timer.refresh() re-arms the same timeout so
   * saving repeats roughly every second after the last write.
   */
  #scheduleFlush () {
    this.#flushTimeout = setTimeout(() => {
      this.saveSnapshots().catch(() => {
        // Ignore flush errors
      })
      if (this.#autoFlush) {
        this.#flushTimeout?.refresh()
      } else {
        this.#flushTimeout = null
      }
    }, 1000) // 1 second debounce
  }

  /**
   * Cleanup method to stop timers
   * @returns {void}
   */
  destroy () {
    this.#stopAutoFlush()
    // Defensive: #stopAutoFlush already clears and nulls the timer when set
    if (this.#flushTimeout) {
      clearTimeout(this.#flushTimeout)
      this.#flushTimeout = null
    }
  }

  /**
   * Async close method that saves all recordings and performs cleanup
   * @returns {Promise<void>}
   */
  async close () {
    // Save any pending recordings if we have a snapshot path
    if (this.#snapshotPath && this.#snapshots.size !== 0) {
      await this.saveSnapshots()
    }

    // Perform cleanup
    this.destroy()
  }
}
|
||||
|
||||
module.exports = { SnapshotRecorder, formatRequestKey, createRequestHash, filterHeadersForMatching, filterHeadersForStorage, createHeaderFilters }
|
||||
158
backend/node_modules/undici/lib/mock/snapshot-utils.js
generated
vendored
Normal file
158
backend/node_modules/undici/lib/mock/snapshot-utils.js
generated
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||
'use strict'
|
||||
|
||||
const { InvalidArgumentError } = require('../core/errors')
|
||||
const { runtimeFeatures } = require('../util/runtime-features.js')
|
||||
|
||||
/**
|
||||
* @typedef {Object} HeaderFilters
|
||||
* @property {Set<string>} ignore - Set of headers to ignore for matching
|
||||
* @property {Set<string>} exclude - Set of headers to exclude from matching
|
||||
* @property {Set<string>} match - Set of headers to match (empty means match all)
|
||||
*/
|
||||
|
||||
/**
 * Creates cached header sets for performance.
 *
 * Header names are lower-cased up front unless caseSensitive is set, so the
 * per-request filtering code can do plain Set lookups.
 *
 * @param {import('./snapshot-recorder').SnapshotRecorderMatchOptions} matchOptions - Matching options for headers
 * @returns {HeaderFilters} - Cached sets for ignore, exclude, and match headers
 */
function createHeaderFilters (matchOptions = {}) {
  const {
    ignoreHeaders = [],
    excludeHeaders = [],
    matchHeaders = [],
    caseSensitive = false
  } = matchOptions

  const normalizeName = caseSensitive
    ? (header) => header
    : (header) => header.toLowerCase()

  return {
    ignore: new Set(ignoreHeaders.map(normalizeName)),
    exclude: new Set(excludeHeaders.map(normalizeName)),
    match: new Set(matchHeaders.map(normalizeName))
  }
}
|
||||
|
||||
// Require node:crypto only when the runtime exposes the crypto feature.
const crypto = runtimeFeatures.has('crypto')
  ? require('node:crypto')
  : null

/**
 * @callback HashIdFunction
 * @param {string} value - The value to hash
 * @returns {string} - The base64url encoded hash of the value
 */

/**
 * Generates a hash for a given value
 * Uses the one-shot crypto.hash('sha256', ...) digest when available;
 * otherwise falls back to base64url-encoding the raw value (an encoding,
 * not a cryptographic hash).
 * @type {HashIdFunction}
 */
const hashId = crypto?.hash
  ? (value) => crypto.hash('sha256', value, 'base64url')
  : (value) => Buffer.from(value).toString('base64url')
|
||||
|
||||
/**
|
||||
* @typedef {(url: string) => boolean} IsUrlExcluded Checks if a URL matches any of the exclude patterns
|
||||
*/
|
||||
|
||||
/** @typedef {{[key: Lowercase<string>]: string}} NormalizedHeaders */
|
||||
/** @typedef {Array<string>} UndiciHeaders */
|
||||
/** @typedef {Record<string, string|string[]>} Headers */
|
||||
|
||||
/**
 * Type guard for undici's internal flat header representation:
 * an array of alternating names and values with even length.
 *
 * @param {*} headers
 * @returns {headers is UndiciHeaders}
 */
function isUndiciHeaders (headers) {
  return Array.isArray(headers) && headers.length % 2 === 0
}
|
||||
|
||||
/**
 * Factory function to create a URL exclusion checker.
 *
 * String patterns are matched case-insensitively as substrings; RegExp
 * patterns are tested against the original (unmodified) URL.
 *
 * @param {Array<string| RegExp>} [excludePatterns=[]] - Array of patterns to exclude
 * @returns {IsUrlExcluded} - A function that checks if a URL matches any of the exclude patterns
 */
function isUrlExcludedFactory (excludePatterns = []) {
  if (excludePatterns.length === 0) {
    return () => false
  }

  return function isUrlExcluded (url) {
    // Lower-cased lazily, at most once per call, for string comparisons.
    let lowered

    for (const pattern of excludePatterns) {
      if (pattern instanceof RegExp) {
        if (pattern.test(url)) {
          return true
        }
      } else if (typeof pattern === 'string') {
        lowered ??= url.toLowerCase()
        if (lowered.includes(pattern.toLowerCase())) {
          return true
        }
      }
    }

    return false
  }
}
|
||||
|
||||
/**
 * Normalizes headers for consistent comparison.
 *
 * Accepts either undici's internal flat-array format
 * ([name, value, name, value, ...], Buffers allowed) or a plain object
 * (array values joined with ', '). Names are lower-cased.
 *
 * @param {Object|UndiciHeaders} headers - Headers to normalize
 * @returns {NormalizedHeaders} - Normalized headers as a lowercase object
 */
function normalizeHeaders (headers) {
  /** @type {NormalizedHeaders} */
  const result = {}

  if (!headers) return result

  // Flat-array format: even-length array of alternating names and values.
  if (Array.isArray(headers) && headers.length % 2 === 0) {
    for (let i = 0; i < headers.length; i += 2) {
      const rawKey = headers[i]
      const rawValue = headers[i + 1]
      if (rawKey && rawValue !== undefined) {
        // Convert Buffers to strings if needed
        const keyStr = Buffer.isBuffer(rawKey) ? rawKey.toString() : rawKey
        const valueStr = Buffer.isBuffer(rawValue) ? rawValue.toString() : rawValue
        result[keyStr.toLowerCase()] = valueStr
      }
    }
    return result
  }

  // Object format (also reached by odd-length arrays, preserving the
  // original behavior of treating their indices as keys).
  if (typeof headers === 'object') {
    for (const [key, value] of Object.entries(headers)) {
      if (key && typeof key === 'string') {
        result[key.toLowerCase()] = Array.isArray(value) ? value.join(', ') : String(value)
      }
    }
  }

  return result
}
|
||||
|
||||
const validSnapshotModes = /** @type {const} */ (['record', 'playback', 'update'])

/** @typedef {typeof validSnapshotModes[number]} SnapshotMode */

/**
 * Validates that the given value is a supported snapshot mode.
 * @param {*} mode - The snapshot mode to validate
 * @returns {asserts mode is SnapshotMode}
 * @throws {InvalidArgumentError} When mode is not 'record', 'playback' or 'update'
 */
function validateSnapshotMode (mode) {
  if (validSnapshotModes.includes(mode)) {
    return
  }
  throw new InvalidArgumentError(`Invalid snapshot mode: ${mode}. Must be one of: ${validSnapshotModes.join(', ')}`)
}
|
||||
|
||||
module.exports = {
|
||||
createHeaderFilters,
|
||||
hashId,
|
||||
isUndiciHeaders,
|
||||
normalizeHeaders,
|
||||
isUrlExcludedFactory,
|
||||
validateSnapshotMode
|
||||
}
|
||||
405
backend/node_modules/undici/lib/util/cache.js
generated
vendored
Normal file
405
backend/node_modules/undici/lib/util/cache.js
generated
vendored
Normal file
@@ -0,0 +1,405 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
safeHTTPMethods,
|
||||
pathHasQueryOrFragment
|
||||
} = require('../core/util')
|
||||
|
||||
const { serializePathWithQuery } = require('../core/util')
|
||||
|
||||
/**
 * Builds the canonical cache key for a dispatch: origin, method, full path
 * (query serialized into the path when not already present) and headers.
 *
 * @param {import('../../types/dispatcher.d.ts').default.DispatchOptions} opts
 * @returns {{ origin: string, method: string, path: string, headers: unknown }}
 * @throws {Error} When opts.origin is missing
 */
function makeCacheKey (opts) {
  if (!opts.origin) {
    throw new Error('opts.origin is undefined')
  }

  let fullPath = opts.path || '/'

  // Merge opts.query into the path only when the path does not already
  // carry its own query string or fragment.
  if (opts.query && !pathHasQueryOrFragment(opts.path)) {
    fullPath = serializePathWithQuery(fullPath, opts.query)
  }

  return {
    origin: opts.origin.toString(),
    method: opts.method,
    path: fullPath,
    headers: opts.headers
  }
}
|
||||
|
||||
/**
 * Normalizes dispatch headers into a plain object with lower-cased names.
 *
 * Accepts options whose `headers` are absent/null, an iterable of
 * [name, value] string pairs, or a plain object.
 *
 * @param {{ headers?: unknown }} opts - Dispatch options carrying the headers
 * @returns {Record<string, string[] | string>} - Headers keyed by lower-cased name
 * @throws {Error} When headers are neither valid iterable pairs nor an object
 */
function normalizeHeaders (opts) {
  const source = opts.headers

  if (source == null) {
    return {}
  }

  if (typeof source[Symbol.iterator] === 'function') {
    const headers = {}
    for (const pair of source) {
      if (!Array.isArray(pair)) {
        throw new Error('opts.headers is not a valid header map')
      }
      const [key, val] = pair
      if (typeof key !== 'string' || typeof val !== 'string') {
        throw new Error('opts.headers is not a valid header map')
      }
      headers[key.toLowerCase()] = val
    }
    return headers
  }

  if (typeof source === 'object') {
    const headers = {}
    for (const [key, value] of Object.entries(source)) {
      headers[key.toLowerCase()] = value
    }
    return headers
  }

  throw new Error('opts.headers is not an object')
}
|
||||
|
||||
/**
 * Validates the shape of a cache key.
 *
 * Fixed: the headers error message previously interpolated `typeof key`
 * (always "object" at that point) instead of `typeof key.headers`, hiding
 * the actual offending type; it now reports key.headers.
 *
 * @param {any} key - Candidate cache key
 * @throws {TypeError} When the key or one of its properties has the wrong type
 */
function assertCacheKey (key) {
  if (typeof key !== 'object') {
    throw new TypeError(`expected key to be object, got ${typeof key}`)
  }

  // origin, method and path are the mandatory string components of the key
  for (const property of ['origin', 'method', 'path']) {
    if (typeof key[property] !== 'string') {
      throw new TypeError(`expected key.${property} to be string, got ${typeof key[property]}`)
    }
  }

  if (key.headers !== undefined && typeof key.headers !== 'object') {
    throw new TypeError(`expected key.headers to be object, got ${typeof key.headers}`)
  }
}
|
||||
|
||||
/**
 * Validates the shape of a cached response value.
 *
 * Fixed: the headers error message referred to `value.rawHeaders` while the
 * check actually reads `value.headers`; the message now names the property
 * being validated.
 *
 * @param {any} value - Candidate cache value
 * @throws {TypeError} When the value or one of its properties has the wrong type
 */
function assertCacheValue (value) {
  if (typeof value !== 'object') {
    throw new TypeError(`expected value to be object, got ${typeof value}`)
  }

  // Timestamps and status code are mandatory numeric fields
  for (const property of ['statusCode', 'cachedAt', 'staleAt', 'deleteAt']) {
    if (typeof value[property] !== 'number') {
      throw new TypeError(`expected value.${property} to be number, got ${typeof value[property]}`)
    }
  }

  if (typeof value.statusMessage !== 'string') {
    throw new TypeError(`expected value.statusMessage to be string, got ${typeof value.statusMessage}`)
  }

  if (value.headers != null && typeof value.headers !== 'object') {
    throw new TypeError(`expected value.headers to be object, got ${typeof value.headers}`)
  }

  if (value.vary !== undefined && typeof value.vary !== 'object') {
    throw new TypeError(`expected value.vary to be object, got ${typeof value.vary}`)
  }

  if (value.etag !== undefined && typeof value.etag !== 'string') {
    throw new TypeError(`expected value.etag to be string, got ${typeof value.etag}`)
  }
}
|
||||
|
||||
/**
 * Parses a Cache-Control header into a map of directives. Unknown directives
 * and malformed values are silently ignored, as permitted by RFC 9111.
 *
 * @see https://www.rfc-editor.org/rfc/rfc9111.html#name-cache-control
 * @see https://www.iana.org/assignments/http-cache-directives/http-cache-directives.xhtml
 *
 * @param {string | string[]} header
 * @returns {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives}
 */
function parseCacheControlHeader (header) {
  /**
   * @type {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives}
   */
  const output = {}

  // The header may arrive as multiple raw values; flatten them into a single
  // comma-separated list of directive fragments.
  let directives
  if (Array.isArray(header)) {
    directives = []

    for (const directive of header) {
      directives.push(...directive.split(','))
    }
  } else {
    directives = header.split(',')
  }

  for (let i = 0; i < directives.length; i++) {
    const directive = directives[i].toLowerCase()
    const keyValueDelimiter = directive.indexOf('=')

    let key
    let value
    if (keyValueDelimiter !== -1) {
      key = directive.substring(0, keyValueDelimiter).trimStart()
      value = directive.substring(keyValueDelimiter + 1)
    } else {
      key = directive.trim()
    }

    switch (key) {
      // Numeric directives: the value must parse as a base-10 integer.
      case 'min-fresh':
      case 'max-stale':
      case 'max-age':
      case 's-maxage':
      case 'stale-while-revalidate':
      case 'stale-if-error': {
        // Reject a missing value or one with a leading space (`max-age= 1`).
        if (value === undefined || value[0] === ' ') {
          continue
        }

        // Unwrap an optionally quoted value (`max-age="123"`).
        if (
          value.length >= 2 &&
          value[0] === '"' &&
          value[value.length - 1] === '"'
        ) {
          value = value.substring(1, value.length - 1)
        }

        const parsedValue = parseInt(value, 10)
        // Self-comparison is a NaN check.
        // eslint-disable-next-line no-self-compare
        if (parsedValue !== parsedValue) {
          continue
        }

        // For duplicate max-age directives, keep the largest value seen.
        if (key === 'max-age' && key in output && output[key] >= parsedValue) {
          continue
        }

        output[key] = parsedValue

        break
      }
      case 'private':
      case 'no-cache': {
        if (value) {
          // The private and no-cache directives can be unqualified (aka just
          // `private` or `no-cache`) or qualified (w/ a value). When they're
          // qualified, it's a list of headers like `no-cache=header1`,
          // `no-cache="header1"`, or `no-cache="header1, header2"`
          // If we're given multiple headers, the comma messes us up since
          // we split the full header by commas. So, let's loop through the
          // remaining parts in front of us until we find one that ends in a
          // quote, collecting the pieces between the starting quote and the
          // ending quote. The consumed fragments are still re-visited by the
          // outer loop, but they typically contain no known directive key and
          // so fall into the `default` case below and are ignored.
          // https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache-2
          if (value[0] === '"') {
            // Something like `no-cache="some-header"` OR `no-cache="some-header, another-header"`.

            // Add the first header on and cut off the leading quote
            const headers = [value.substring(1)]

            let foundEndingQuote = value[value.length - 1] === '"'
            if (!foundEndingQuote) {
              // Something like `no-cache="some-header, another-header"`
              // This can still be something invalid, e.g. `no-cache="some-header, ...`
              for (let j = i + 1; j < directives.length; j++) {
                const nextPart = directives[j]
                const nextPartLength = nextPart.length

                headers.push(nextPart.trim())

                if (nextPartLength !== 0 && nextPart[nextPartLength - 1] === '"') {
                  foundEndingQuote = true
                  break
                }
              }
            }

            // An unterminated quoted list is invalid; record nothing for it.
            if (foundEndingQuote) {
              // Strip the trailing quote from the last collected header.
              let lastHeader = headers[headers.length - 1]
              if (lastHeader[lastHeader.length - 1] === '"') {
                lastHeader = lastHeader.substring(0, lastHeader.length - 1)
                headers[headers.length - 1] = lastHeader
              }

              if (key in output) {
                output[key] = output[key].concat(headers)
              } else {
                output[key] = headers
              }
            }
          } else {
            // Unquoted form — a single header, e.g. `no-cache=some-header`.
            if (key in output) {
              output[key] = output[key].concat(value)
            } else {
              output[key] = [value]
            }
          }

          break
        }
      }
      // An unqualified `private` / `no-cache` (no value) intentionally falls
      // through here to be recorded as a plain boolean directive.
      // eslint-disable-next-line no-fallthrough
      case 'public':
      case 'no-store':
      case 'must-revalidate':
      case 'proxy-revalidate':
      case 'immutable':
      case 'no-transform':
      case 'must-understand':
      case 'only-if-cached':
        if (value) {
          // These are qualified (something like `public=...`) when they aren't
          // allowed to be, skip
          continue
        }

        output[key] = true
        break
      default:
        // Ignore unknown directives as per https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.3-1
        continue
    }
  }

  return output
}
|
||||
|
||||
/**
 * Resolves a Vary header against the request headers, producing the map of
 * varying header names (lowercased) to the request's values for them.
 * Headers absent from the request map to null.
 *
 * @param {string | string[]} varyHeader Vary header from the server
 * @param {Record<string, string | string[]>} headers Request headers
 * @returns {Record<string, string | string[]>}
 */
function parseVaryHeader (varyHeader, headers) {
  // `Vary: *` means every request header matters — echo the request headers.
  if (typeof varyHeader === 'string' && varyHeader.includes('*')) {
    return headers
  }

  const fieldNames = typeof varyHeader === 'string'
    ? varyHeader.split(',')
    : varyHeader

  return Object.fromEntries(
    fieldNames.map((fieldName) => {
      const normalized = fieldName.trim().toLowerCase()
      return [normalized, headers[normalized] ?? null]
    })
  )
}
|
||||
|
||||
/**
 * Note: this deviates from the spec a little. Empty etags ("", W/"") are
 * valid, however, including them in cached responses serves little to no
 * purpose.
 *
 * @see https://www.rfc-editor.org/rfc/rfc9110.html#name-etag
 *
 * @param {string} etag
 * @returns {boolean}
 */
function isEtagUsable (etag) {
  const len = etag.length

  // Anything two chars or shorter can at most be `""` — reject it. (This is
  // where we deviate from the spec's minimum of 3 chars.)
  if (len <= 2) {
    return false
  }

  const endsWithQuote = etag[len - 1] === '"'

  if (etag[0] === '"' && endsWithQuote) {
    // Strong etag, e.g. `"asd123"`. Doubled quotes (`""asd123""`) and
    // `"W/..."` forms are undefined behavior in the spec — reject them.
    return etag[1] !== '"' && !etag.startsWith('"W/')
  }

  if (endsWithQuote && etag.startsWith('W/"')) {
    // Weak etag, e.g. `W/"asd123"`. `W/""` (length 4) is rejected — again a
    // deviation from the spec's minimum length.
    return len !== 4
  }

  // Anything else is not a usable etag.
  return false
}
|
||||
|
||||
/**
 * Asserts that `store` looks like a usable CacheStore: a non-null object
 * exposing `get`, `createWriteStream`, and `delete` functions.
 *
 * @param {unknown} store
 * @param {string} [name] label used in error messages
 * @returns {asserts store is import('../../types/cache-interceptor.d.ts').default.CacheStore}
 */
function assertCacheStore (store, name = 'CacheStore') {
  if (store === null || typeof store !== 'object') {
    throw new TypeError(`expected type of ${name} to be a CacheStore, got ${store === null ? 'null' : typeof store}`)
  }

  for (const requiredMethod of ['get', 'createWriteStream', 'delete']) {
    if (typeof store[requiredMethod] !== 'function') {
      throw new TypeError(`${name} needs to have a \`${requiredMethod}()\` function`)
    }
  }
}
|
||||
/**
 * Asserts that `methods` is a non-empty array containing only safe HTTP
 * methods (per the module-level `safeHTTPMethods` list).
 *
 * @param {unknown} methods
 * @param {string} [name] label used in error messages
 * @returns {asserts methods is import('../../types/cache-interceptor.d.ts').default.CacheMethods[]}
 */
function assertCacheMethods (methods, name = 'CacheMethods') {
  if (!Array.isArray(methods)) {
    throw new TypeError(`expected type of ${name} needs to be an array, got ${methods === null ? 'null' : typeof methods}`)
  }

  if (methods.length === 0) {
    throw new TypeError(`${name} needs to have at least one method`)
  }

  // Report the first unsupported method, if any.
  const badIndex = methods.findIndex((method) => !safeHTTPMethods.includes(method))
  if (badIndex !== -1) {
    throw new TypeError(`element of ${name}-array needs to be one of following values: ${safeHTTPMethods.join(', ')}, got ${methods[badIndex]}`)
  }
}
|
||||
|
||||
/**
 * Creates a string key for request deduplication purposes.
 * This key is used to identify in-flight requests that can be shared.
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
 * @param {Set<string>} [excludeHeaders] Set of lowercase header names to exclude from the key
 * @returns {string}
 */
function makeDeduplicationKey (cacheKey, excludeHeaders) {
  // Deterministic key: origin/method/path plus headers in sorted-name order.
  const parts = [`${cacheKey.origin}:${cacheKey.method}:${cacheKey.path}`]

  if (cacheKey.headers) {
    for (const headerName of Object.keys(cacheKey.headers).sort()) {
      // Skip excluded headers (exclusion set holds lowercase names).
      if (excludeHeaders?.has(headerName.toLowerCase())) {
        continue
      }

      const headerValue = cacheKey.headers[headerName]
      parts.push(`${headerName}=${Array.isArray(headerValue) ? headerValue.join(',') : headerValue}`)
    }
  }

  return parts.join(':')
}
|
||||
|
||||
// Public surface of the cache utility helpers.
module.exports = {
  makeCacheKey,
  normalizeHeaders,
  assertCacheKey,
  assertCacheValue,
  parseCacheControlHeader,
  parseVaryHeader,
  isEtagUsable,
  assertCacheMethods,
  assertCacheStore,
  makeDeduplicationKey
}
|
||||
653
backend/node_modules/undici/lib/util/date.js
generated
vendored
Normal file
653
backend/node_modules/undici/lib/util/date.js
generated
vendored
Normal file
@@ -0,0 +1,653 @@
|
||||
'use strict'
|
||||
|
||||
/**
 * Parses an HTTP date in any of the three formats allowed by RFC 9110,
 * dispatching on the character at index 3:
 *   IMF-fixdate: 'Sun, 06 Nov 1994 08:49:37 GMT'  -> ','
 *   asctime():   'Sun Nov  6 08:49:37 1994'       -> ' '
 *   RFC 850:     'Sunday, 06-Nov-94 08:49:37 GMT' -> anything else
 *
 * @see https://www.rfc-editor.org/rfc/rfc9110.html#name-date-time-formats
 *
 * @param {string} date
 * @returns {Date | undefined}
 */
function parseHttpDate (date) {
  const discriminator = date[3]

  if (discriminator === ',') {
    return parseImfDate(date)
  }

  if (discriminator === ' ') {
    return parseAscTimeDate(date)
  }

  return parseRfc850Date(date)
}
|
||||
|
||||
/**
 * Parses an IMF-fixdate, e.g. 'Sun, 06 Nov 1994 08:49:37 GMT'.
 *
 * @see https://httpwg.org/specs/rfc9110.html#preferred.date.format
 *
 * @param {string} date
 * @returns {Date | undefined}
 */
function parseImfDate (date) {
  // Exactly 29 chars with fixed separator positions and a literal GMT suffix.
  if (
    date.length !== 29 ||
    date[4] !== ' ' ||
    date[7] !== ' ' ||
    date[11] !== ' ' ||
    date[16] !== ' ' ||
    date[19] !== ':' ||
    date[22] !== ':' ||
    date[25] !== ' ' ||
    date[26] !== 'G' ||
    date[27] !== 'M' ||
    date[28] !== 'T'
  ) {
    return undefined
  }

  const weekday = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'].indexOf(date.substring(0, 3))
  if (weekday === -1) {
    return undefined
  }

  const monthIdx = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    .indexOf(date.substring(8, 11))
  if (monthIdx === -1) {
    return undefined
  }

  // Day: '01'-'09' (zero-padded) or '10'-'39' (leading digit 1-3); '00' invalid.
  const dayTens = date.charCodeAt(5) - 48
  const dayOnes = date.charCodeAt(6) - 48
  if (dayOnes < 0 || dayOnes > 9) {
    return undefined
  }
  let day
  if (dayTens === 0) {
    if (dayOnes === 0) {
      return undefined
    }
    day = dayOnes
  } else {
    if (dayTens < 1 || dayTens > 3) {
      return undefined
    }
    day = dayTens * 10 + dayOnes
  }

  // Year: four digits.
  let year = 0
  for (let idx = 12; idx <= 15; idx++) {
    const digit = date.charCodeAt(idx) - 48
    if (digit < 0 || digit > 9) {
      return undefined
    }
    year = year * 10 + digit
  }

  // Hour: '00'-'23'.
  const hourTens = date.charCodeAt(17) - 48
  const hourOnes = date.charCodeAt(18) - 48
  if (hourTens < 0 || hourTens > 2 || hourOnes < 0 || hourOnes > 9 || (hourTens === 2 && hourOnes > 3)) {
    return undefined
  }
  const hour = hourTens * 10 + hourOnes

  // Minute: '00'-'59'.
  const minuteTens = date.charCodeAt(20) - 48
  const minuteOnes = date.charCodeAt(21) - 48
  if (minuteTens < 0 || minuteTens > 5 || minuteOnes < 0 || minuteOnes > 9) {
    return undefined
  }
  const minute = minuteTens * 10 + minuteOnes

  // Second: '00'-'59'.
  const secondTens = date.charCodeAt(23) - 48
  const secondOnes = date.charCodeAt(24) - 48
  if (secondTens < 0 || secondTens > 5 || secondOnes < 0 || secondOnes > 9) {
    return undefined
  }
  const second = secondTens * 10 + secondOnes

  // Cross-check: the named weekday must match the computed calendar date.
  const result = new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
  return result.getUTCDay() === weekday ? result : undefined
}
|
||||
|
||||
/**
 * Parses an ANSI C asctime() date, e.g. 'Sun Nov  6 08:49:37 1994'.
 * The timestamp is assumed to be in UTC.
 *
 * @see https://httpwg.org/specs/rfc9110.html#obsolete.date.formats
 *
 * @param {string} date
 * @returns {Date | undefined}
 */
function parseAscTimeDate (date) {
  // Exactly 24 chars with spaces after the weekday, day, and time fields.
  // NOTE(review): the ':' separators at offsets 13 and 16 are not validated —
  // this mirrors the original implementation's behavior.
  if (
    date.length !== 24 ||
    date[7] !== ' ' ||
    date[10] !== ' ' ||
    date[19] !== ' '
  ) {
    return undefined
  }

  const weekday = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'].indexOf(date.substring(0, 3))
  if (weekday === -1) {
    return undefined
  }

  const monthIdx = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    .indexOf(date.substring(4, 7))
  if (monthIdx === -1) {
    return undefined
  }

  // Day: space-padded single digit (' 6') or two digits '10'-'39'.
  let day
  if (date[8] === ' ') {
    const onlyDigit = date.charCodeAt(9) - 48
    if (onlyDigit < 1 || onlyDigit > 9) {
      return undefined
    }
    day = onlyDigit
  } else {
    const dayTens = date.charCodeAt(8) - 48
    const dayOnes = date.charCodeAt(9) - 48
    if (dayTens < 1 || dayTens > 3 || dayOnes < 0 || dayOnes > 9) {
      return undefined
    }
    day = dayTens * 10 + dayOnes
  }

  // Hour: '00'-'23'.
  const hourTens = date.charCodeAt(11) - 48
  const hourOnes = date.charCodeAt(12) - 48
  if (hourTens < 0 || hourTens > 2 || hourOnes < 0 || hourOnes > 9 || (hourTens === 2 && hourOnes > 3)) {
    return undefined
  }
  const hour = hourTens * 10 + hourOnes

  // Minute: '00'-'59'.
  const minuteTens = date.charCodeAt(14) - 48
  const minuteOnes = date.charCodeAt(15) - 48
  if (minuteTens < 0 || minuteTens > 5 || minuteOnes < 0 || minuteOnes > 9) {
    return undefined
  }
  const minute = minuteTens * 10 + minuteOnes

  // Second: '00'-'59'.
  const secondTens = date.charCodeAt(17) - 48
  const secondOnes = date.charCodeAt(18) - 48
  if (secondTens < 0 || secondTens > 5 || secondOnes < 0 || secondOnes > 9) {
    return undefined
  }
  const second = secondTens * 10 + secondOnes

  // Year: four trailing digits.
  let year = 0
  for (let idx = 20; idx <= 23; idx++) {
    const digit = date.charCodeAt(idx) - 48
    if (digit < 0 || digit > 9) {
      return undefined
    }
    year = year * 10 + digit
  }

  // Cross-check: the named weekday must match the computed calendar date.
  const result = new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
  return result.getUTCDay() === weekday ? result : undefined
}
|
||||
|
||||
/**
 * Parses an obsolete RFC 850 date, e.g. 'Sunday, 06-Nov-94 08:49:37 GMT'.
 *
 * @see https://httpwg.org/specs/rfc9110.html#obsolete.date.formats
 *
 * @param {string} date
 * @returns {Date | undefined}
 */
function parseRfc850Date (date) {
  // The weekday is the full day name; its length determines where the ','
  // (and thus every later field) sits.
  const dayNames = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']

  let weekday = -1
  let commaIndex = -1
  for (let nameIdx = 0; nameIdx < dayNames.length; nameIdx++) {
    if (date.startsWith(dayNames[nameIdx])) {
      weekday = nameIdx
      commaIndex = dayNames[nameIdx].length
      break
    }
  }
  if (weekday === -1) {
    return undefined
  }

  // After the day name: ', DD-Mon-YY HH:MM:SS GMT' — exactly 23 more chars
  // with fixed separators.
  if (
    date[commaIndex] !== ',' ||
    (date.length - commaIndex - 1) !== 23 ||
    date[commaIndex + 1] !== ' ' ||
    date[commaIndex + 4] !== '-' ||
    date[commaIndex + 8] !== '-' ||
    date[commaIndex + 11] !== ' ' ||
    date[commaIndex + 14] !== ':' ||
    date[commaIndex + 17] !== ':' ||
    date[commaIndex + 20] !== ' ' ||
    date[commaIndex + 21] !== 'G' ||
    date[commaIndex + 22] !== 'M' ||
    date[commaIndex + 23] !== 'T'
  ) {
    return undefined
  }

  // Day: '01'-'09' (zero-padded) or '10'-'39'; '00' invalid.
  const dayTens = date.charCodeAt(commaIndex + 2) - 48
  const dayOnes = date.charCodeAt(commaIndex + 3) - 48
  if (dayOnes < 0 || dayOnes > 9) {
    return undefined
  }
  let day
  if (dayTens === 0) {
    if (dayOnes === 0) {
      return undefined
    }
    day = dayOnes
  } else {
    if (dayTens < 1 || dayTens > 3) {
      return undefined
    }
    day = dayTens * 10 + dayOnes
  }

  const monthIdx = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    .indexOf(date.substring(commaIndex + 5, commaIndex + 8))
  if (monthIdx === -1) {
    return undefined
  }

  // Year: two digits.
  const yearTens = date.charCodeAt(commaIndex + 9) - 48
  const yearOnes = date.charCodeAt(commaIndex + 10) - 48
  if (yearTens < 0 || yearTens > 9 || yearOnes < 0 || yearOnes > 9) {
    return undefined
  }
  let year = yearTens * 10 + yearOnes

  // RFC 6265 states that the year is in the range 1970-2069.
  // @see https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.1
  //
  // 3. If the year-value is greater than or equal to 70 and less than or
  //    equal to 99, increment the year-value by 1900.
  // 4. If the year-value is greater than or equal to 0 and less than or
  //    equal to 69, increment the year-value by 2000.
  year += year < 70 ? 2000 : 1900

  // Hour: '00'-'23'.
  const hourTens = date.charCodeAt(commaIndex + 12) - 48
  const hourOnes = date.charCodeAt(commaIndex + 13) - 48
  if (hourTens < 0 || hourTens > 2 || hourOnes < 0 || hourOnes > 9 || (hourTens === 2 && hourOnes > 3)) {
    return undefined
  }
  const hour = hourTens * 10 + hourOnes

  // Minute: '00'-'59'.
  const minuteTens = date.charCodeAt(commaIndex + 15) - 48
  const minuteOnes = date.charCodeAt(commaIndex + 16) - 48
  if (minuteTens < 0 || minuteTens > 5 || minuteOnes < 0 || minuteOnes > 9) {
    return undefined
  }
  const minute = minuteTens * 10 + minuteOnes

  // Second: '00'-'59'.
  const secondTens = date.charCodeAt(commaIndex + 18) - 48
  const secondOnes = date.charCodeAt(commaIndex + 19) - 48
  if (secondTens < 0 || secondTens > 5 || secondOnes < 0 || secondOnes > 9) {
    return undefined
  }
  const second = secondTens * 10 + secondOnes

  // Cross-check: the named weekday must match the computed calendar date.
  const result = new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
  return result.getUTCDay() === weekday ? result : undefined
}
|
||||
|
||||
// Public surface of the HTTP date parsing helpers.
module.exports = {
  parseHttpDate
}
|
||||
28
backend/node_modules/undici/lib/util/promise.js
generated
vendored
Normal file
28
backend/node_modules/undici/lib/util/promise.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
'use strict'

/**
 * @template {*} T
 * @typedef {Object} DeferredPromise
 * @property {Promise<T>} promise
 * @property {(value?: T) => void} resolve
 * @property {(reason?: any) => void} reject
 */

/**
 * Builds a promise together with its externally reachable settle functions,
 * so callers can resolve or reject it from outside the executor.
 *
 * @template {*} T
 * @returns {DeferredPromise<T>} An object containing a promise and its resolve/reject methods.
 */
function createDeferredPromise () {
  const deferred = {}

  deferred.promise = new Promise((resolve, reject) => {
    // The executor runs synchronously, so both functions are captured
    // before the object is returned.
    deferred.resolve = resolve
    deferred.reject = reject
  })

  return deferred
}
|
||||
|
||||
// Public API of this helper module.
module.exports = { createDeferredPromise }
|
||||
124
backend/node_modules/undici/lib/util/runtime-features.js
generated
vendored
Normal file
124
backend/node_modules/undici/lib/util/runtime-features.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
'use strict'

/** @typedef {`node:${string}`} NodeModuleName */

/**
 * Lazy loaders for the built-in modules backing feature detection.
 * A null prototype keeps lookups free of Object.prototype interference.
 *
 * @type {Record<NodeModuleName, () => any>}
 */
const lazyLoaders = Object.assign(Object.create(null), {
  'node:crypto': () => require('node:crypto'),
  'node:sqlite': () => require('node:sqlite'),
  'node:worker_threads': () => require('node:worker_threads'),
  'node:zlib': () => require('node:zlib')
})
|
||||
|
||||
/**
 * Checks whether a Node.js built-in module can be loaded in this runtime.
 * A load failure caused by the module being unknown, or by crypto support
 * being compiled out, means "feature absent"; any other error propagates.
 *
 * @param {NodeModuleName} moduleName
 * @returns {boolean}
 */
function detectRuntimeFeatureByNodeModule (moduleName) {
  try {
    lazyLoaders[moduleName]()
  } catch (err) {
    if (err.code === 'ERR_UNKNOWN_BUILTIN_MODULE' || err.code === 'ERR_NO_CRYPTO') {
      return false
    }
    // Unexpected failure — do not mask it as a missing feature.
    throw err
  }
  return true
}
|
||||
|
||||
/**
 * Checks whether a built-in module exports a given property, used to detect
 * features that only newer Node.js releases expose.
 *
 * @param {NodeModuleName} moduleName
 * @param {string} property
 * @returns {boolean}
 */
function detectRuntimeFeatureByExportedProperty (moduleName, property) {
  const loaded = lazyLoaders[moduleName]()
  return typeof loaded[property] !== 'undefined'
}
|
||||
|
||||
/** Features detected by probing a property exported from a built-in module. */
const runtimeFeaturesByExportedProperty = /** @type {const} */ (['markAsUncloneable', 'zstd'])

/** @typedef {typeof runtimeFeaturesByExportedProperty[number]} RuntimeFeatureByExportedProperty */

/**
 * Maps each exported-property feature to the [module, property] pair probed
 * to detect it.
 *
 * @type {Record<RuntimeFeatureByExportedProperty, [NodeModuleName, string]>}
 */
const exportedPropertyLookup = {
  markAsUncloneable: ['node:worker_threads', 'markAsUncloneable'],
  zstd: ['node:zlib', 'createZstdDecompress']
}

/** Features detected by attempting to load the built-in module itself. */
const runtimeFeaturesAsNodeModule = /** @type {const} */ (['crypto', 'sqlite'])

/** @typedef {typeof runtimeFeaturesAsNodeModule[number]} RuntimeFeatureByNodeModule */

/** Every feature name this module knows how to detect. */
const features = /** @type {const} */ (
  runtimeFeaturesAsNodeModule.concat(runtimeFeaturesByExportedProperty)
)

/** @typedef {typeof features[number]} Feature */
|
||||
|
||||
/**
 * Detects support for a feature, dispatching to module-load or
 * exported-property probing depending on the feature kind.
 *
 * @param {Feature} feature
 * @returns {boolean}
 * @throws {TypeError} When the feature name is not recognised.
 */
function detectRuntimeFeature (feature) {
  if (runtimeFeaturesAsNodeModule.includes(/** @type {RuntimeFeatureByNodeModule} */ (feature))) {
    return detectRuntimeFeatureByNodeModule(`node:${feature}`)
  }

  if (runtimeFeaturesByExportedProperty.includes(/** @type {RuntimeFeatureByExportedProperty} */ (feature))) {
    const [moduleName, property] = exportedPropertyLookup[feature]
    return detectRuntimeFeatureByExportedProperty(moduleName, property)
  }

  throw new TypeError(`unknown feature: ${feature}`)
}
|
||||
|
||||
/**
 * Caching facade over runtime feature detection: each feature is probed at
 * most once, and results can be overridden or cleared (e.g. for tests).
 *
 * @class
 * @name RuntimeFeatures
 */
class RuntimeFeatures {
  /** @type {Map<Feature, boolean>} */
  #map = new Map()

  /**
   * Clears all cached feature detections.
   */
  clear () {
    this.#map.clear()
  }

  /**
   * Returns whether the runtime supports `feature`, detecting and caching
   * the result on first use.
   *
   * @param {Feature} feature
   * @returns {boolean}
   */
  has (feature) {
    const cached = this.#map.get(feature)
    return cached ?? this.#detectRuntimeFeature(feature)
  }

  /**
   * Overrides the cached detection result for a feature.
   *
   * @param {Feature} feature
   * @param {boolean} value
   * @throws {TypeError} When the feature name is not recognised.
   */
  set (feature, value) {
    if (!features.includes(feature)) {
      throw new TypeError(`unknown feature: ${feature}`)
    }
    this.#map.set(feature, value)
  }

  /**
   * Probes the runtime for a feature and caches the outcome.
   *
   * @param {Feature} feature
   * @returns {boolean}
   */
  #detectRuntimeFeature (feature) {
    const supported = detectRuntimeFeature(feature)
    this.#map.set(feature, supported)
    return supported
  }
}
|
||||
|
||||
/** Process-wide singleton for querying runtime feature support. */
const instance = new RuntimeFeatures()

module.exports.runtimeFeatures = instance
// Also exposed as `default` for default-style imports.
module.exports.default = instance
|
||||
32
backend/node_modules/undici/lib/util/stats.js
generated
vendored
Normal file
32
backend/node_modules/undici/lib/util/stats.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
kConnected,
|
||||
kPending,
|
||||
kRunning,
|
||||
kSize,
|
||||
kFree,
|
||||
kQueued
|
||||
} = require('../core/symbols')
|
||||
|
||||
/**
 * Plain snapshot of a Client's symbol-keyed internal state values, captured
 * once at construction time.
 */
class ClientStats {
  constructor (client) {
    // Copy the values out so the snapshot stays stable after the
    // client's state changes.
    Object.assign(this, {
      connected: client[kConnected],
      pending: client[kPending],
      running: client[kRunning],
      size: client[kSize]
    })
  }
}
|
||||
|
||||
/**
 * Plain snapshot of a Pool's symbol-keyed internal state values, captured
 * once at construction time.
 */
class PoolStats {
  constructor (pool) {
    // Copy the values out so the snapshot stays stable after the
    // pool's state changes.
    Object.assign(this, {
      connected: pool[kConnected],
      free: pool[kFree],
      pending: pool[kPending],
      queued: pool[kQueued],
      running: pool[kRunning],
      size: pool[kSize]
    })
  }
}

module.exports = { ClientStats, PoolStats }
|
||||
425
backend/node_modules/undici/lib/util/timers.js
generated
vendored
Normal file
425
backend/node_modules/undici/lib/util/timers.js
generated
vendored
Normal file
@@ -0,0 +1,425 @@
|
||||
'use strict'

/**
 * Optimized low-resolution timer implementation for scenarios where high
 * precision is not critical.
 *
 * Timers are batched onto a single driving native timer and checked with an
 * accuracy target of within 500ms, which makes this most useful for delays
 * of one second or more where exact timing is less crucial.
 *
 * Note that Node.js timers are inherently imprecise anyway: a blocked event
 * loop can delay any callback past its scheduled time.
 */

/**
 * Internal fast timer clock value, advanced by TICK_MS on every tick.
 *
 * @type {number}
 */
let fastNow = 0

/**
 * Target resolution of the fast timer clock, in milliseconds.
 *
 * @type {number}
 * @default 1000
 */
const RESOLUTION_MS = 1e3

/**
 * Interval between ticks, in milliseconds: half the resolution, minus 1ms
 * to account for potential event loop overhead.
 *
 * @type {number}
 * @default 499
 */
const TICK_MS = (RESOLUTION_MS >> 1) - 1

/**
 * The native Node.js timer driving the tick loop that processes the
 * FastTimers stored in `fastTimers`.
 *
 * @type {NodeJS.Timeout}
 */
let fastNowTimeout

/**
 * Brand symbol used to identify FastTimer instances.
 *
 * @type {Symbol}
 */
const kFastTimer = Symbol('kFastTimer')

/**
 * All currently registered FastTimers.
 *
 * @type {FastTimer[]}
 */
const fastTimers = []

/**
 * FastTimer lifecycle states.
 */

/**
 * NOT_IN_LIST: the timer is not present in `fastTimers` and will not be
 * processed on the next tick. It can be re-registered by calling `refresh()`
 * on the instance.
 *
 * @type {-2}
 */
const NOT_IN_LIST = -2

/**
 * TO_BE_CLEARED: the timer is scheduled for removal from `fastTimers` on the
 * next tick, after which it is no longer processed. Also set when `clear()`
 * is called on the instance.
 *
 * @type {-1}
 */
const TO_BE_CLEARED = -1

/**
 * PENDING: the timer awaits its first tick, which stamps `_idleStart` and
 * promotes it to ACTIVE.
 *
 * @type {0}
 */
const PENDING = 0

/**
 * ACTIVE: the timer is armed and waiting to expire; the tick handler fires
 * its callback once the deadline passes.
 *
 * @type {1}
 */
const ACTIVE = 1
|
||||
|
||||
/**
 * The onTick function processes the fastTimers array.
 *
 * It runs once per TICK_MS via `fastNowTimeout`: it advances the internal
 * low-resolution clock, activates PENDING timers, fires expired ACTIVE
 * timers, and compacts the array by swap-removing cleared entries.
 *
 * @returns {void}
 */
function onTick () {
  /**
   * Increment the fastNow value by the TICK_MS value, despite the actual time
   * that has passed since the last tick. This approach ensures independence
   * from the system clock and delays caused by a blocked event loop.
   *
   * @type {number}
   */
  fastNow += TICK_MS

  /**
   * The `idx` variable is used to iterate over the `fastTimers` array.
   * Expired timers are removed by replacing them with the last element in the array.
   * Consequently, `idx` is only incremented when the current element is not removed.
   *
   * @type {number}
   */
  let idx = 0

  /**
   * The len variable will contain the length of the fastTimers array
   * and will be decremented when a FastTimer should be removed from the
   * fastTimers array.
   *
   * @type {number}
   */
  let len = fastTimers.length

  while (idx < len) {
    /**
     * @type {FastTimer}
     */
    const timer = fastTimers[idx]

    // A PENDING timer is activated on its first tick: record its start time
    // (backdated by TICK_MS to account for the time already spent in the
    // PENDING state) and promote it to ACTIVE.
    if (timer._state === PENDING) {
      // Set the _idleStart value to the fastNow value minus the TICK_MS value
      // to account for the time the timer was in the PENDING state.
      timer._idleStart = fastNow - TICK_MS
      timer._state = ACTIVE
    } else if (
      timer._state === ACTIVE &&
      fastNow >= timer._idleStart + timer._idleTimeout
    ) {
      // An ACTIVE timer whose deadline has passed is marked cleared and its
      // callback invoked. Note the callback may re-arm the timer by calling
      // refresh(), which resets _state to PENDING.
      timer._state = TO_BE_CLEARED
      timer._idleStart = -1
      timer._onTimeout(timer._timerArg)
    }

    if (timer._state === TO_BE_CLEARED) {
      timer._state = NOT_IN_LIST

      // Move the last element to the current index and decrement len if it is
      // not the only element in the array.
      if (--len !== 0) {
        fastTimers[idx] = fastTimers[len]
      }
    } else {
      ++idx
    }
  }

  // Set the length of the fastTimers array to the new length and thus
  // removing the excess FastTimers elements from the array.
  fastTimers.length = len

  // If there are still active FastTimers in the array, refresh the Timer.
  // If there are no active FastTimers, the timer will be refreshed again
  // when a new FastTimer is instantiated.
  if (fastTimers.length !== 0) {
    refreshTimeout()
  }
}
|
||||
|
||||
/**
 * (Re)arms the native timer that drives the tick loop.
 *
 * Prefers Timeout#refresh() when available; otherwise replaces the timer
 * with a fresh, unref'd setTimeout. The feature checks exist because some
 * timer objects (e.g. mocked timers in tests) lack refresh()/unref().
 *
 * @returns {void}
 */
function refreshTimeout () {
  if (fastNowTimeout?.refresh) {
    // Reuse the existing native timer.
    fastNowTimeout.refresh()
    return
  }

  // No timer yet, or refresh() is unavailable: create a replacement.
  clearTimeout(fastNowTimeout)
  fastNowTimeout = setTimeout(onTick, TICK_MS)

  // unref() (when present) lets the process exit if this is the only
  // remaining active handle.
  fastNowTimeout?.unref()
}
|
||||
|
||||
/**
 * The `FastTimer` class is a data structure that stores and manages the
 * state of a single low-resolution timer on the shared tick loop.
 */
class FastTimer {
  [kFastTimer] = true

  /**
   * Lifecycle state of the timer: NOT_IN_LIST (-2), TO_BE_CLEARED (-1),
   * PENDING (0) or ACTIVE (1).
   *
   * @type {-2|-1|0|1}
   * @private
   */
  _state = NOT_IN_LIST

  /**
   * Delay in milliseconds before the callback fires.
   *
   * @type {number}
   * @private
   */
  _idleTimeout = -1

  /**
   * Internal clock value at which the timer was (re)started, used to compute
   * the expiry deadline; -1 while the timer is not running.
   *
   * @type {number}
   * @default -1
   * @private
   */
  _idleStart = -1

  /**
   * Callback invoked once the timer expires.
   * @type {Function}
   * @private
   */
  _onTimeout

  /**
   * Single argument forwarded to the callback on expiry.
   *
   * @type {*}
   * @private
   */
  _timerArg

  /**
   * @constructor
   * @param {Function} callback Function executed after the timer expires.
   * @param {number} delay Milliseconds to wait before running the callback.
   * @param {*} arg Value passed through to the callback.
   */
  constructor (callback, delay, arg) {
    this._onTimeout = callback
    this._idleTimeout = delay
    this._timerArg = arg
    this.refresh()
  }

  /**
   * (Re)starts the timer from the current internal clock value, rescheduling
   * the callback at the previously specified delay. Calling this on a timer
   * that has already fired reactivates it.
   *
   * @returns {void}
   */
  refresh () {
    // A timer that is not tracked must be re-registered so the next tick
    // picks it up.
    if (this._state === NOT_IN_LIST) {
      fastTimers.push(this)
    }

    // When this is the only live timer (or the driving native timer has
    // never been created), kick the tick loop for better resolution.
    if (!fastNowTimeout || fastTimers.length === 1) {
      refreshTimeout()
    }

    // PENDING tells the next tick to stamp _idleStart and activate.
    this._state = PENDING
  }

  /**
   * Cancels the timer so its callback never runs.
   *
   * @returns {void}
   * @private
   */
  clear () {
    // TO_BE_CLEARED marks the timer for removal during the next tick.
    this._state = TO_BE_CLEARED

    // The timer is no longer running, so drop its start timestamp.
    this._idleStart = -1
  }
}
|
||||
|
||||
/**
|
||||
* This module exports a setTimeout and clearTimeout function that can be
|
||||
* used as a drop-in replacement for the native functions.
|
||||
*/
|
||||
module.exports = {
|
||||
/**
|
||||
* The setTimeout() method sets a timer which executes a function once the
|
||||
* timer expires.
|
||||
* @param {Function} callback A function to be executed after the timer
|
||||
* expires.
|
||||
* @param {number} delay The time, in milliseconds that the timer should
|
||||
* wait before the specified function or code is executed.
|
||||
* @param {*} [arg] An optional argument to be passed to the callback function
|
||||
* when the timer expires.
|
||||
* @returns {NodeJS.Timeout|FastTimer}
|
||||
*/
|
||||
setTimeout (callback, delay, arg) {
|
||||
// If the delay is less than or equal to the RESOLUTION_MS value return a
|
||||
// native Node.js Timer instance.
|
||||
return delay <= RESOLUTION_MS
|
||||
? setTimeout(callback, delay, arg)
|
||||
: new FastTimer(callback, delay, arg)
|
||||
},
|
||||
/**
|
||||
* The clearTimeout method cancels an instantiated Timer previously created
|
||||
* by calling setTimeout.
|
||||
*
|
||||
* @param {NodeJS.Timeout|FastTimer} timeout
|
||||
*/
|
||||
clearTimeout (timeout) {
|
||||
// If the timeout is a FastTimer, call its own clear method.
|
||||
if (timeout[kFastTimer]) {
|
||||
/**
|
||||
* @type {FastTimer}
|
||||
*/
|
||||
timeout.clear()
|
||||
// Otherwise it is an instance of a native NodeJS.Timeout, so call the
|
||||
// Node.js native clearTimeout function.
|
||||
} else {
|
||||
clearTimeout(timeout)
|
||||
}
|
||||
},
|
||||
/**
|
||||
* The setFastTimeout() method sets a fastTimer which executes a function once
|
||||
* the timer expires.
|
||||
* @param {Function} callback A function to be executed after the timer
|
||||
* expires.
|
||||
* @param {number} delay The time, in milliseconds that the timer should
|
||||
* wait before the specified function or code is executed.
|
||||
* @param {*} [arg] An optional argument to be passed to the callback function
|
||||
* when the timer expires.
|
||||
* @returns {FastTimer}
|
||||
*/
|
||||
setFastTimeout (callback, delay, arg) {
|
||||
return new FastTimer(callback, delay, arg)
|
||||
},
|
||||
/**
|
||||
* The clearTimeout method cancels an instantiated FastTimer previously
|
||||
* created by calling setFastTimeout.
|
||||
*
|
||||
* @param {FastTimer} timeout
|
||||
*/
|
||||
clearFastTimeout (timeout) {
|
||||
timeout.clear()
|
||||
},
|
||||
/**
|
||||
* The now method returns the value of the internal fast timer clock.
|
||||
*
|
||||
* @returns {number}
|
||||
*/
|
||||
now () {
|
||||
return fastNow
|
||||
},
|
||||
/**
|
||||
* Trigger the onTick function to process the fastTimers array.
|
||||
* Exported for testing purposes only.
|
||||
* Marking as deprecated to discourage any use outside of testing.
|
||||
* @deprecated
|
||||
* @param {number} [delay=0] The delay in milliseconds to add to the now value.
|
||||
*/
|
||||
tick (delay = 0) {
|
||||
fastNow += delay - RESOLUTION_MS + 1
|
||||
onTick()
|
||||
onTick()
|
||||
},
|
||||
/**
|
||||
* Reset FastTimers.
|
||||
* Exported for testing purposes only.
|
||||
* Marking as deprecated to discourage any use outside of testing.
|
||||
* @deprecated
|
||||
*/
|
||||
reset () {
|
||||
fastNow = 0
|
||||
fastTimers.length = 0
|
||||
clearTimeout(fastNowTimeout)
|
||||
fastNowTimeout = null
|
||||
},
|
||||
/**
|
||||
* Exporting for testing purposes only.
|
||||
* Marking as deprecated to discourage any use outside of testing.
|
||||
* @deprecated
|
||||
*/
|
||||
kFastTimer
|
||||
}
|
||||
864
backend/node_modules/undici/lib/web/cache/cache.js
generated
vendored
Normal file
864
backend/node_modules/undici/lib/web/cache/cache.js
generated
vendored
Normal file
@@ -0,0 +1,864 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
|
||||
const { kConstruct } = require('../../core/symbols')
|
||||
const { urlEquals, getFieldValues } = require('./util')
|
||||
const { kEnumerableProperty, isDisturbed } = require('../../core/util')
|
||||
const { webidl } = require('../webidl')
|
||||
const { cloneResponse, fromInnerResponse, getResponseState } = require('../fetch/response')
|
||||
const { Request, fromInnerRequest, getRequestState } = require('../fetch/request')
|
||||
const { fetching } = require('../fetch/index')
|
||||
const { urlIsHttpHttpsScheme, readAllBytes } = require('../fetch/util')
|
||||
const { createDeferredPromise } = require('../../util/promise')
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
|
||||
* @typedef {Object} CacheBatchOperation
|
||||
* @property {'delete' | 'put'} type
|
||||
* @property {any} request
|
||||
* @property {any} response
|
||||
* @property {import('../../../types/cache').CacheQueryOptions} options
|
||||
*/
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list
|
||||
* @typedef {[any, any][]} requestResponseList
|
||||
*/
|
||||
|
||||
class Cache {
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
|
||||
* @type {requestResponseList}
|
||||
*/
|
||||
#relevantRequestResponseList
|
||||
|
||||
constructor () {
|
||||
if (arguments[0] !== kConstruct) {
|
||||
webidl.illegalConstructor()
|
||||
}
|
||||
|
||||
webidl.util.markAsUncloneable(this)
|
||||
this.#relevantRequestResponseList = arguments[1]
|
||||
}
|
||||
|
||||
async match (request, options = {}) {
|
||||
webidl.brandCheck(this, Cache)
|
||||
|
||||
const prefix = 'Cache.match'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
request = webidl.converters.RequestInfo(request)
|
||||
options = webidl.converters.CacheQueryOptions(options, prefix, 'options')
|
||||
|
||||
const p = this.#internalMatchAll(request, options, 1)
|
||||
|
||||
if (p.length === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
return p[0]
|
||||
}
|
||||
|
||||
async matchAll (request = undefined, options = {}) {
|
||||
webidl.brandCheck(this, Cache)
|
||||
|
||||
const prefix = 'Cache.matchAll'
|
||||
if (request !== undefined) request = webidl.converters.RequestInfo(request)
|
||||
options = webidl.converters.CacheQueryOptions(options, prefix, 'options')
|
||||
|
||||
return this.#internalMatchAll(request, options)
|
||||
}
|
||||
|
||||
async add (request) {
|
||||
webidl.brandCheck(this, Cache)
|
||||
|
||||
const prefix = 'Cache.add'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
request = webidl.converters.RequestInfo(request)
|
||||
|
||||
// 1.
|
||||
const requests = [request]
|
||||
|
||||
// 2.
|
||||
const responseArrayPromise = this.addAll(requests)
|
||||
|
||||
// 3.
|
||||
return await responseArrayPromise
|
||||
}
|
||||
|
||||
async addAll (requests) {
|
||||
webidl.brandCheck(this, Cache)
|
||||
|
||||
const prefix = 'Cache.addAll'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
// 1.
|
||||
const responsePromises = []
|
||||
|
||||
// 2.
|
||||
const requestList = []
|
||||
|
||||
// 3.
|
||||
for (let request of requests) {
|
||||
if (request === undefined) {
|
||||
throw webidl.errors.conversionFailed({
|
||||
prefix,
|
||||
argument: 'Argument 1',
|
||||
types: ['undefined is not allowed']
|
||||
})
|
||||
}
|
||||
|
||||
request = webidl.converters.RequestInfo(request)
|
||||
|
||||
if (typeof request === 'string') {
|
||||
continue
|
||||
}
|
||||
|
||||
// 3.1
|
||||
const r = getRequestState(request)
|
||||
|
||||
// 3.2
|
||||
if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
|
||||
throw webidl.errors.exception({
|
||||
header: prefix,
|
||||
message: 'Expected http/s scheme when method is not GET.'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// 4.
|
||||
/** @type {ReturnType<typeof fetching>[]} */
|
||||
const fetchControllers = []
|
||||
|
||||
// 5.
|
||||
for (const request of requests) {
|
||||
// 5.1
|
||||
const r = getRequestState(new Request(request))
|
||||
|
||||
// 5.2
|
||||
if (!urlIsHttpHttpsScheme(r.url)) {
|
||||
throw webidl.errors.exception({
|
||||
header: prefix,
|
||||
message: 'Expected http/s scheme.'
|
||||
})
|
||||
}
|
||||
|
||||
// 5.4
|
||||
r.initiator = 'fetch'
|
||||
r.destination = 'subresource'
|
||||
|
||||
// 5.5
|
||||
requestList.push(r)
|
||||
|
||||
// 5.6
|
||||
const responsePromise = createDeferredPromise()
|
||||
|
||||
// 5.7
|
||||
fetchControllers.push(fetching({
|
||||
request: r,
|
||||
processResponse (response) {
|
||||
// 1.
|
||||
if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
|
||||
responsePromise.reject(webidl.errors.exception({
|
||||
header: 'Cache.addAll',
|
||||
message: 'Received an invalid status code or the request failed.'
|
||||
}))
|
||||
} else if (response.headersList.contains('vary')) { // 2.
|
||||
// 2.1
|
||||
const fieldValues = getFieldValues(response.headersList.get('vary'))
|
||||
|
||||
// 2.2
|
||||
for (const fieldValue of fieldValues) {
|
||||
// 2.2.1
|
||||
if (fieldValue === '*') {
|
||||
responsePromise.reject(webidl.errors.exception({
|
||||
header: 'Cache.addAll',
|
||||
message: 'invalid vary field value'
|
||||
}))
|
||||
|
||||
for (const controller of fetchControllers) {
|
||||
controller.abort()
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
processResponseEndOfBody (response) {
|
||||
// 1.
|
||||
if (response.aborted) {
|
||||
responsePromise.reject(new DOMException('aborted', 'AbortError'))
|
||||
return
|
||||
}
|
||||
|
||||
// 2.
|
||||
responsePromise.resolve(response)
|
||||
}
|
||||
}))
|
||||
|
||||
// 5.8
|
||||
responsePromises.push(responsePromise.promise)
|
||||
}
|
||||
|
||||
// 6.
|
||||
const p = Promise.all(responsePromises)
|
||||
|
||||
// 7.
|
||||
const responses = await p
|
||||
|
||||
// 7.1
|
||||
const operations = []
|
||||
|
||||
// 7.2
|
||||
let index = 0
|
||||
|
||||
// 7.3
|
||||
for (const response of responses) {
|
||||
// 7.3.1
|
||||
/** @type {CacheBatchOperation} */
|
||||
const operation = {
|
||||
type: 'put', // 7.3.2
|
||||
request: requestList[index], // 7.3.3
|
||||
response // 7.3.4
|
||||
}
|
||||
|
||||
operations.push(operation) // 7.3.5
|
||||
|
||||
index++ // 7.3.6
|
||||
}
|
||||
|
||||
// 7.5
|
||||
const cacheJobPromise = createDeferredPromise()
|
||||
|
||||
// 7.6.1
|
||||
let errorData = null
|
||||
|
||||
// 7.6.2
|
||||
try {
|
||||
this.#batchCacheOperations(operations)
|
||||
} catch (e) {
|
||||
errorData = e
|
||||
}
|
||||
|
||||
// 7.6.3
|
||||
queueMicrotask(() => {
|
||||
// 7.6.3.1
|
||||
if (errorData === null) {
|
||||
cacheJobPromise.resolve(undefined)
|
||||
} else {
|
||||
// 7.6.3.2
|
||||
cacheJobPromise.reject(errorData)
|
||||
}
|
||||
})
|
||||
|
||||
// 7.7
|
||||
return cacheJobPromise.promise
|
||||
}
|
||||
|
||||
async put (request, response) {
|
||||
webidl.brandCheck(this, Cache)
|
||||
|
||||
const prefix = 'Cache.put'
|
||||
webidl.argumentLengthCheck(arguments, 2, prefix)
|
||||
|
||||
request = webidl.converters.RequestInfo(request)
|
||||
response = webidl.converters.Response(response, prefix, 'response')
|
||||
|
||||
// 1.
|
||||
let innerRequest = null
|
||||
|
||||
// 2.
|
||||
if (webidl.is.Request(request)) {
|
||||
innerRequest = getRequestState(request)
|
||||
} else { // 3.
|
||||
innerRequest = getRequestState(new Request(request))
|
||||
}
|
||||
|
||||
// 4.
|
||||
if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
|
||||
throw webidl.errors.exception({
|
||||
header: prefix,
|
||||
message: 'Expected an http/s scheme when method is not GET'
|
||||
})
|
||||
}
|
||||
|
||||
// 5.
|
||||
const innerResponse = getResponseState(response)
|
||||
|
||||
// 6.
|
||||
if (innerResponse.status === 206) {
|
||||
throw webidl.errors.exception({
|
||||
header: prefix,
|
||||
message: 'Got 206 status'
|
||||
})
|
||||
}
|
||||
|
||||
// 7.
|
||||
if (innerResponse.headersList.contains('vary')) {
|
||||
// 7.1.
|
||||
const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))
|
||||
|
||||
// 7.2.
|
||||
for (const fieldValue of fieldValues) {
|
||||
// 7.2.1
|
||||
if (fieldValue === '*') {
|
||||
throw webidl.errors.exception({
|
||||
header: prefix,
|
||||
message: 'Got * vary field value'
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 8.
|
||||
if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
|
||||
throw webidl.errors.exception({
|
||||
header: prefix,
|
||||
message: 'Response body is locked or disturbed'
|
||||
})
|
||||
}
|
||||
|
||||
// 9.
|
||||
const clonedResponse = cloneResponse(innerResponse)
|
||||
|
||||
// 10.
|
||||
const bodyReadPromise = createDeferredPromise()
|
||||
|
||||
// 11.
|
||||
if (innerResponse.body != null) {
|
||||
// 11.1
|
||||
const stream = innerResponse.body.stream
|
||||
|
||||
// 11.2
|
||||
const reader = stream.getReader()
|
||||
|
||||
// 11.3
|
||||
readAllBytes(reader, bodyReadPromise.resolve, bodyReadPromise.reject)
|
||||
} else {
|
||||
bodyReadPromise.resolve(undefined)
|
||||
}
|
||||
|
||||
// 12.
|
||||
/** @type {CacheBatchOperation[]} */
|
||||
const operations = []
|
||||
|
||||
// 13.
|
||||
/** @type {CacheBatchOperation} */
|
||||
const operation = {
|
||||
type: 'put', // 14.
|
||||
request: innerRequest, // 15.
|
||||
response: clonedResponse // 16.
|
||||
}
|
||||
|
||||
// 17.
|
||||
operations.push(operation)
|
||||
|
||||
// 19.
|
||||
const bytes = await bodyReadPromise.promise
|
||||
|
||||
if (clonedResponse.body != null) {
|
||||
clonedResponse.body.source = bytes
|
||||
}
|
||||
|
||||
// 19.1
|
||||
const cacheJobPromise = createDeferredPromise()
|
||||
|
||||
// 19.2.1
|
||||
let errorData = null
|
||||
|
||||
// 19.2.2
|
||||
try {
|
||||
this.#batchCacheOperations(operations)
|
||||
} catch (e) {
|
||||
errorData = e
|
||||
}
|
||||
|
||||
// 19.2.3
|
||||
queueMicrotask(() => {
|
||||
// 19.2.3.1
|
||||
if (errorData === null) {
|
||||
cacheJobPromise.resolve()
|
||||
} else { // 19.2.3.2
|
||||
cacheJobPromise.reject(errorData)
|
||||
}
|
||||
})
|
||||
|
||||
return cacheJobPromise.promise
|
||||
}
|
||||
|
||||
async delete (request, options = {}) {
|
||||
webidl.brandCheck(this, Cache)
|
||||
|
||||
const prefix = 'Cache.delete'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
request = webidl.converters.RequestInfo(request)
|
||||
options = webidl.converters.CacheQueryOptions(options, prefix, 'options')
|
||||
|
||||
/**
|
||||
* @type {Request}
|
||||
*/
|
||||
let r = null
|
||||
|
||||
if (webidl.is.Request(request)) {
|
||||
r = getRequestState(request)
|
||||
|
||||
if (r.method !== 'GET' && !options.ignoreMethod) {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
assert(typeof request === 'string')
|
||||
|
||||
r = getRequestState(new Request(request))
|
||||
}
|
||||
|
||||
/** @type {CacheBatchOperation[]} */
|
||||
const operations = []
|
||||
|
||||
/** @type {CacheBatchOperation} */
|
||||
const operation = {
|
||||
type: 'delete',
|
||||
request: r,
|
||||
options
|
||||
}
|
||||
|
||||
operations.push(operation)
|
||||
|
||||
const cacheJobPromise = createDeferredPromise()
|
||||
|
||||
let errorData = null
|
||||
let requestResponses
|
||||
|
||||
try {
|
||||
requestResponses = this.#batchCacheOperations(operations)
|
||||
} catch (e) {
|
||||
errorData = e
|
||||
}
|
||||
|
||||
queueMicrotask(() => {
|
||||
if (errorData === null) {
|
||||
cacheJobPromise.resolve(!!requestResponses?.length)
|
||||
} else {
|
||||
cacheJobPromise.reject(errorData)
|
||||
}
|
||||
})
|
||||
|
||||
return cacheJobPromise.promise
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
|
||||
* @param {any} request
|
||||
* @param {import('../../../types/cache').CacheQueryOptions} options
|
||||
* @returns {Promise<readonly Request[]>}
|
||||
*/
|
||||
async keys (request = undefined, options = {}) {
|
||||
webidl.brandCheck(this, Cache)
|
||||
|
||||
const prefix = 'Cache.keys'
|
||||
|
||||
if (request !== undefined) request = webidl.converters.RequestInfo(request)
|
||||
options = webidl.converters.CacheQueryOptions(options, prefix, 'options')
|
||||
|
||||
// 1.
|
||||
let r = null
|
||||
|
||||
// 2.
|
||||
if (request !== undefined) {
|
||||
// 2.1
|
||||
if (webidl.is.Request(request)) {
|
||||
// 2.1.1
|
||||
r = getRequestState(request)
|
||||
|
||||
// 2.1.2
|
||||
if (r.method !== 'GET' && !options.ignoreMethod) {
|
||||
return []
|
||||
}
|
||||
} else if (typeof request === 'string') { // 2.2
|
||||
r = getRequestState(new Request(request))
|
||||
}
|
||||
}
|
||||
|
||||
// 4.
|
||||
const promise = createDeferredPromise()
|
||||
|
||||
// 5.
|
||||
// 5.1
|
||||
const requests = []
|
||||
|
||||
// 5.2
|
||||
if (request === undefined) {
|
||||
// 5.2.1
|
||||
for (const requestResponse of this.#relevantRequestResponseList) {
|
||||
// 5.2.1.1
|
||||
requests.push(requestResponse[0])
|
||||
}
|
||||
} else { // 5.3
|
||||
// 5.3.1
|
||||
const requestResponses = this.#queryCache(r, options)
|
||||
|
||||
// 5.3.2
|
||||
for (const requestResponse of requestResponses) {
|
||||
// 5.3.2.1
|
||||
requests.push(requestResponse[0])
|
||||
}
|
||||
}
|
||||
|
||||
// 5.4
|
||||
queueMicrotask(() => {
|
||||
// 5.4.1
|
||||
const requestList = []
|
||||
|
||||
// 5.4.2
|
||||
for (const request of requests) {
|
||||
const requestObject = fromInnerRequest(
|
||||
request,
|
||||
undefined,
|
||||
new AbortController().signal,
|
||||
'immutable'
|
||||
)
|
||||
// 5.4.2.1
|
||||
requestList.push(requestObject)
|
||||
}
|
||||
|
||||
// 5.4.3
|
||||
promise.resolve(Object.freeze(requestList))
|
||||
})
|
||||
|
||||
return promise.promise
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
|
||||
* @param {CacheBatchOperation[]} operations
|
||||
* @returns {requestResponseList}
|
||||
*/
|
||||
#batchCacheOperations (operations) {
|
||||
// 1.
|
||||
const cache = this.#relevantRequestResponseList
|
||||
|
||||
// 2.
|
||||
const backupCache = [...cache]
|
||||
|
||||
// 3.
|
||||
const addedItems = []
|
||||
|
||||
// 4.1
|
||||
const resultList = []
|
||||
|
||||
try {
|
||||
// 4.2
|
||||
for (const operation of operations) {
|
||||
// 4.2.1
|
||||
if (operation.type !== 'delete' && operation.type !== 'put') {
|
||||
throw webidl.errors.exception({
|
||||
header: 'Cache.#batchCacheOperations',
|
||||
message: 'operation type does not match "delete" or "put"'
|
||||
})
|
||||
}
|
||||
|
||||
// 4.2.2
|
||||
if (operation.type === 'delete' && operation.response != null) {
|
||||
throw webidl.errors.exception({
|
||||
header: 'Cache.#batchCacheOperations',
|
||||
message: 'delete operation should not have an associated response'
|
||||
})
|
||||
}
|
||||
|
||||
// 4.2.3
|
||||
if (this.#queryCache(operation.request, operation.options, addedItems).length) {
|
||||
throw new DOMException('???', 'InvalidStateError')
|
||||
}
|
||||
|
||||
// 4.2.4
|
||||
let requestResponses
|
||||
|
||||
// 4.2.5
|
||||
if (operation.type === 'delete') {
|
||||
// 4.2.5.1
|
||||
requestResponses = this.#queryCache(operation.request, operation.options)
|
||||
|
||||
// TODO: the spec is wrong, this is needed to pass WPTs
|
||||
if (requestResponses.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
// 4.2.5.2
|
||||
for (const requestResponse of requestResponses) {
|
||||
const idx = cache.indexOf(requestResponse)
|
||||
assert(idx !== -1)
|
||||
|
||||
// 4.2.5.2.1
|
||||
cache.splice(idx, 1)
|
||||
}
|
||||
} else if (operation.type === 'put') { // 4.2.6
|
||||
// 4.2.6.1
|
||||
if (operation.response == null) {
|
||||
throw webidl.errors.exception({
|
||||
header: 'Cache.#batchCacheOperations',
|
||||
message: 'put operation should have an associated response'
|
||||
})
|
||||
}
|
||||
|
||||
// 4.2.6.2
|
||||
const r = operation.request
|
||||
|
||||
// 4.2.6.3
|
||||
if (!urlIsHttpHttpsScheme(r.url)) {
|
||||
throw webidl.errors.exception({
|
||||
header: 'Cache.#batchCacheOperations',
|
||||
message: 'expected http or https scheme'
|
||||
})
|
||||
}
|
||||
|
||||
// 4.2.6.4
|
||||
if (r.method !== 'GET') {
|
||||
throw webidl.errors.exception({
|
||||
header: 'Cache.#batchCacheOperations',
|
||||
message: 'not get method'
|
||||
})
|
||||
}
|
||||
|
||||
// 4.2.6.5
|
||||
if (operation.options != null) {
|
||||
throw webidl.errors.exception({
|
||||
header: 'Cache.#batchCacheOperations',
|
||||
message: 'options must not be defined'
|
||||
})
|
||||
}
|
||||
|
||||
// 4.2.6.6
|
||||
requestResponses = this.#queryCache(operation.request)
|
||||
|
||||
// 4.2.6.7
|
||||
for (const requestResponse of requestResponses) {
|
||||
const idx = cache.indexOf(requestResponse)
|
||||
assert(idx !== -1)
|
||||
|
||||
// 4.2.6.7.1
|
||||
cache.splice(idx, 1)
|
||||
}
|
||||
|
||||
// 4.2.6.8
|
||||
cache.push([operation.request, operation.response])
|
||||
|
||||
// 4.2.6.10
|
||||
addedItems.push([operation.request, operation.response])
|
||||
}
|
||||
|
||||
// 4.2.7
|
||||
resultList.push([operation.request, operation.response])
|
||||
}
|
||||
|
||||
// 4.3
|
||||
return resultList
|
||||
} catch (e) { // 5.
|
||||
// 5.1
|
||||
this.#relevantRequestResponseList.length = 0
|
||||
|
||||
// 5.2
|
||||
this.#relevantRequestResponseList = backupCache
|
||||
|
||||
// 5.3
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#query-cache
|
||||
* @param {any} requestQuery
|
||||
* @param {import('../../../types/cache').CacheQueryOptions} options
|
||||
* @param {requestResponseList} targetStorage
|
||||
* @returns {requestResponseList}
|
||||
*/
|
||||
#queryCache (requestQuery, options, targetStorage) {
|
||||
/** @type {requestResponseList} */
|
||||
const resultList = []
|
||||
|
||||
const storage = targetStorage ?? this.#relevantRequestResponseList
|
||||
|
||||
for (const requestResponse of storage) {
|
||||
const [cachedRequest, cachedResponse] = requestResponse
|
||||
if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
|
||||
resultList.push(requestResponse)
|
||||
}
|
||||
}
|
||||
|
||||
return resultList
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
|
||||
* @param {any} requestQuery
|
||||
* @param {any} request
|
||||
* @param {any | null} response
|
||||
* @param {import('../../../types/cache').CacheQueryOptions | undefined} options
|
||||
* @returns {boolean}
|
||||
*/
|
||||
#requestMatchesCachedItem (requestQuery, request, response = null, options) {
|
||||
// if (options?.ignoreMethod === false && request.method === 'GET') {
|
||||
// return false
|
||||
// }
|
||||
|
||||
const queryURL = new URL(requestQuery.url)
|
||||
|
||||
const cachedURL = new URL(request.url)
|
||||
|
||||
if (options?.ignoreSearch) {
|
||||
cachedURL.search = ''
|
||||
|
||||
queryURL.search = ''
|
||||
}
|
||||
|
||||
if (!urlEquals(queryURL, cachedURL, true)) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (
|
||||
response == null ||
|
||||
options?.ignoreVary ||
|
||||
!response.headersList.contains('vary')
|
||||
) {
|
||||
return true
|
||||
}
|
||||
|
||||
const fieldValues = getFieldValues(response.headersList.get('vary'))
|
||||
|
||||
for (const fieldValue of fieldValues) {
|
||||
if (fieldValue === '*') {
|
||||
return false
|
||||
}
|
||||
|
||||
const requestValue = request.headersList.get(fieldValue)
|
||||
const queryValue = requestQuery.headersList.get(fieldValue)
|
||||
|
||||
// If one has the header and the other doesn't, or one has
|
||||
// a different value than the other, return false
|
||||
if (requestValue !== queryValue) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
#internalMatchAll (request, options, maxResponses = Infinity) {
|
||||
// 1.
|
||||
let r = null
|
||||
|
||||
// 2.
|
||||
if (request !== undefined) {
|
||||
if (webidl.is.Request(request)) {
|
||||
// 2.1.1
|
||||
r = getRequestState(request)
|
||||
|
||||
// 2.1.2
|
||||
if (r.method !== 'GET' && !options.ignoreMethod) {
|
||||
return []
|
||||
}
|
||||
} else if (typeof request === 'string') {
|
||||
// 2.2.1
|
||||
r = getRequestState(new Request(request))
|
||||
}
|
||||
}
|
||||
|
||||
// 5.
|
||||
// 5.1
|
||||
const responses = []
|
||||
|
||||
// 5.2
|
||||
if (request === undefined) {
|
||||
// 5.2.1
|
||||
for (const requestResponse of this.#relevantRequestResponseList) {
|
||||
responses.push(requestResponse[1])
|
||||
}
|
||||
} else { // 5.3
|
||||
// 5.3.1
|
||||
const requestResponses = this.#queryCache(r, options)
|
||||
|
||||
// 5.3.2
|
||||
for (const requestResponse of requestResponses) {
|
||||
responses.push(requestResponse[1])
|
||||
}
|
||||
}
|
||||
|
||||
// 5.4
|
||||
// We don't implement CORs so we don't need to loop over the responses, yay!
|
||||
|
||||
// 5.5.1
|
||||
const responseList = []
|
||||
|
||||
// 5.5.2
|
||||
for (const response of responses) {
|
||||
// 5.5.2.1
|
||||
const responseObject = fromInnerResponse(cloneResponse(response), 'immutable')
|
||||
|
||||
responseList.push(responseObject)
|
||||
|
||||
if (responseList.length >= maxResponses) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// 6.
|
||||
return Object.freeze(responseList)
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperties(Cache.prototype, {
|
||||
[Symbol.toStringTag]: {
|
||||
value: 'Cache',
|
||||
configurable: true
|
||||
},
|
||||
match: kEnumerableProperty,
|
||||
matchAll: kEnumerableProperty,
|
||||
add: kEnumerableProperty,
|
||||
addAll: kEnumerableProperty,
|
||||
put: kEnumerableProperty,
|
||||
delete: kEnumerableProperty,
|
||||
keys: kEnumerableProperty
|
||||
})
|
||||
|
||||
const cacheQueryOptionConverters = [
|
||||
{
|
||||
key: 'ignoreSearch',
|
||||
converter: webidl.converters.boolean,
|
||||
defaultValue: () => false
|
||||
},
|
||||
{
|
||||
key: 'ignoreMethod',
|
||||
converter: webidl.converters.boolean,
|
||||
defaultValue: () => false
|
||||
},
|
||||
{
|
||||
key: 'ignoreVary',
|
||||
converter: webidl.converters.boolean,
|
||||
defaultValue: () => false
|
||||
}
|
||||
]
|
||||
|
||||
webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)
|
||||
|
||||
webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
|
||||
...cacheQueryOptionConverters,
|
||||
{
|
||||
key: 'cacheName',
|
||||
converter: webidl.converters.DOMString
|
||||
}
|
||||
])
|
||||
|
||||
webidl.converters.Response = webidl.interfaceConverter(
|
||||
webidl.is.Response,
|
||||
'Response'
|
||||
)
|
||||
|
||||
webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
|
||||
webidl.converters.RequestInfo
|
||||
)
|
||||
|
||||
module.exports = {
|
||||
Cache
|
||||
}
|
||||
152
backend/node_modules/undici/lib/web/cache/cachestorage.js
generated
vendored
Normal file
152
backend/node_modules/undici/lib/web/cache/cachestorage.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
'use strict'
|
||||
|
||||
const { Cache } = require('./cache')
|
||||
const { webidl } = require('../webidl')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
const { kConstruct } = require('../../core/symbols')
|
||||
|
||||
class CacheStorage {
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
|
||||
* @type {Map<string, import('./cache').requestResponseList}
|
||||
*/
|
||||
#caches = new Map()
|
||||
|
||||
constructor () {
|
||||
if (arguments[0] !== kConstruct) {
|
||||
webidl.illegalConstructor()
|
||||
}
|
||||
|
||||
webidl.util.markAsUncloneable(this)
|
||||
}
|
||||
|
||||
async match (request, options = {}) {
|
||||
webidl.brandCheck(this, CacheStorage)
|
||||
webidl.argumentLengthCheck(arguments, 1, 'CacheStorage.match')
|
||||
|
||||
request = webidl.converters.RequestInfo(request)
|
||||
options = webidl.converters.MultiCacheQueryOptions(options)
|
||||
|
||||
// 1.
|
||||
if (options.cacheName != null) {
|
||||
// 1.1.1.1
|
||||
if (this.#caches.has(options.cacheName)) {
|
||||
// 1.1.1.1.1
|
||||
const cacheList = this.#caches.get(options.cacheName)
|
||||
const cache = new Cache(kConstruct, cacheList)
|
||||
|
||||
return await cache.match(request, options)
|
||||
}
|
||||
} else { // 2.
|
||||
// 2.2
|
||||
for (const cacheList of this.#caches.values()) {
|
||||
const cache = new Cache(kConstruct, cacheList)
|
||||
|
||||
// 2.2.1.2
|
||||
const response = await cache.match(request, options)
|
||||
|
||||
if (response !== undefined) {
|
||||
return response
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#cache-storage-has
|
||||
* @param {string} cacheName
|
||||
* @returns {Promise<boolean>}
|
||||
*/
|
||||
async has (cacheName) {
|
||||
webidl.brandCheck(this, CacheStorage)
|
||||
|
||||
const prefix = 'CacheStorage.has'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
cacheName = webidl.converters.DOMString(cacheName, prefix, 'cacheName')
|
||||
|
||||
// 2.1.1
|
||||
// 2.2
|
||||
return this.#caches.has(cacheName)
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
|
||||
* @param {string} cacheName
|
||||
* @returns {Promise<Cache>}
|
||||
*/
|
||||
async open (cacheName) {
|
||||
webidl.brandCheck(this, CacheStorage)
|
||||
|
||||
const prefix = 'CacheStorage.open'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
cacheName = webidl.converters.DOMString(cacheName, prefix, 'cacheName')
|
||||
|
||||
// 2.1
|
||||
if (this.#caches.has(cacheName)) {
|
||||
// await caches.open('v1') !== await caches.open('v1')
|
||||
|
||||
// 2.1.1
|
||||
const cache = this.#caches.get(cacheName)
|
||||
|
||||
// 2.1.1.1
|
||||
return new Cache(kConstruct, cache)
|
||||
}
|
||||
|
||||
// 2.2
|
||||
const cache = []
|
||||
|
||||
// 2.3
|
||||
this.#caches.set(cacheName, cache)
|
||||
|
||||
// 2.4
|
||||
return new Cache(kConstruct, cache)
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
|
||||
* @param {string} cacheName
|
||||
* @returns {Promise<boolean>}
|
||||
*/
|
||||
async delete (cacheName) {
|
||||
webidl.brandCheck(this, CacheStorage)
|
||||
|
||||
const prefix = 'CacheStorage.delete'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
cacheName = webidl.converters.DOMString(cacheName, prefix, 'cacheName')
|
||||
|
||||
return this.#caches.delete(cacheName)
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
|
||||
* @returns {Promise<string[]>}
|
||||
*/
|
||||
async keys () {
|
||||
webidl.brandCheck(this, CacheStorage)
|
||||
|
||||
// 2.1
|
||||
const keys = this.#caches.keys()
|
||||
|
||||
// 2.2
|
||||
return [...keys]
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperties(CacheStorage.prototype, {
|
||||
[Symbol.toStringTag]: {
|
||||
value: 'CacheStorage',
|
||||
configurable: true
|
||||
},
|
||||
match: kEnumerableProperty,
|
||||
has: kEnumerableProperty,
|
||||
open: kEnumerableProperty,
|
||||
delete: kEnumerableProperty,
|
||||
keys: kEnumerableProperty
|
||||
})
|
||||
|
||||
module.exports = {
|
||||
CacheStorage
|
||||
}
|
||||
45
backend/node_modules/undici/lib/web/cache/util.js
generated
vendored
Normal file
45
backend/node_modules/undici/lib/web/cache/util.js
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { URLSerializer } = require('../fetch/data-url')
|
||||
const { isValidHeaderName } = require('../fetch/util')
|
||||
|
||||
/**
|
||||
* @see https://url.spec.whatwg.org/#concept-url-equals
|
||||
* @param {URL} A
|
||||
* @param {URL} B
|
||||
* @param {boolean | undefined} excludeFragment
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function urlEquals (A, B, excludeFragment = false) {
|
||||
const serializedA = URLSerializer(A, excludeFragment)
|
||||
|
||||
const serializedB = URLSerializer(B, excludeFragment)
|
||||
|
||||
return serializedA === serializedB
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
|
||||
* @param {string} header
|
||||
*/
|
||||
function getFieldValues (header) {
|
||||
assert(header !== null)
|
||||
|
||||
const values = []
|
||||
|
||||
for (let value of header.split(',')) {
|
||||
value = value.trim()
|
||||
|
||||
if (isValidHeaderName(value)) {
|
||||
values.push(value)
|
||||
}
|
||||
}
|
||||
|
||||
return values
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
urlEquals,
|
||||
getFieldValues
|
||||
}
|
||||
12
backend/node_modules/undici/lib/web/cookies/constants.js
generated
vendored
Normal file
12
backend/node_modules/undici/lib/web/cookies/constants.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
'use strict'
|
||||
|
||||
// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
|
||||
const maxAttributeValueSize = 1024
|
||||
|
||||
// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
|
||||
const maxNameValuePairSize = 4096
|
||||
|
||||
module.exports = {
|
||||
maxAttributeValueSize,
|
||||
maxNameValuePairSize
|
||||
}
|
||||
199
backend/node_modules/undici/lib/web/cookies/index.js
generated
vendored
Normal file
199
backend/node_modules/undici/lib/web/cookies/index.js
generated
vendored
Normal file
@@ -0,0 +1,199 @@
|
||||
'use strict'
|
||||
|
||||
const { parseSetCookie } = require('./parse')
|
||||
const { stringify } = require('./util')
|
||||
const { webidl } = require('../webidl')
|
||||
const { Headers } = require('../fetch/headers')
|
||||
|
||||
const brandChecks = webidl.brandCheckMultiple([Headers, globalThis.Headers].filter(Boolean))
|
||||
|
||||
/**
|
||||
* @typedef {Object} Cookie
|
||||
* @property {string} name
|
||||
* @property {string} value
|
||||
* @property {Date|number} [expires]
|
||||
* @property {number} [maxAge]
|
||||
* @property {string} [domain]
|
||||
* @property {string} [path]
|
||||
* @property {boolean} [secure]
|
||||
* @property {boolean} [httpOnly]
|
||||
* @property {'Strict'|'Lax'|'None'} [sameSite]
|
||||
* @property {string[]} [unparsed]
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {Headers} headers
|
||||
* @returns {Record<string, string>}
|
||||
*/
|
||||
function getCookies (headers) {
|
||||
webidl.argumentLengthCheck(arguments, 1, 'getCookies')
|
||||
|
||||
brandChecks(headers)
|
||||
|
||||
const cookie = headers.get('cookie')
|
||||
|
||||
/** @type {Record<string, string>} */
|
||||
const out = {}
|
||||
|
||||
if (!cookie) {
|
||||
return out
|
||||
}
|
||||
|
||||
for (const piece of cookie.split(';')) {
|
||||
const [name, ...value] = piece.split('=')
|
||||
|
||||
out[name.trim()] = value.join('=')
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Headers} headers
|
||||
* @param {string} name
|
||||
* @param {{ path?: string, domain?: string }|undefined} attributes
|
||||
* @returns {void}
|
||||
*/
|
||||
function deleteCookie (headers, name, attributes) {
|
||||
brandChecks(headers)
|
||||
|
||||
const prefix = 'deleteCookie'
|
||||
webidl.argumentLengthCheck(arguments, 2, prefix)
|
||||
|
||||
name = webidl.converters.DOMString(name, prefix, 'name')
|
||||
attributes = webidl.converters.DeleteCookieAttributes(attributes)
|
||||
|
||||
// Matches behavior of
|
||||
// https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
|
||||
setCookie(headers, {
|
||||
name,
|
||||
value: '',
|
||||
expires: new Date(0),
|
||||
...attributes
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Headers} headers
|
||||
* @returns {Cookie[]}
|
||||
*/
|
||||
function getSetCookies (headers) {
|
||||
webidl.argumentLengthCheck(arguments, 1, 'getSetCookies')
|
||||
|
||||
brandChecks(headers)
|
||||
|
||||
const cookies = headers.getSetCookie()
|
||||
|
||||
if (!cookies) {
|
||||
return []
|
||||
}
|
||||
|
||||
return cookies.map((pair) => parseSetCookie(pair))
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a cookie string
|
||||
* @param {string} cookie
|
||||
*/
|
||||
function parseCookie (cookie) {
|
||||
cookie = webidl.converters.DOMString(cookie)
|
||||
|
||||
return parseSetCookie(cookie)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Headers} headers
|
||||
* @param {Cookie} cookie
|
||||
* @returns {void}
|
||||
*/
|
||||
function setCookie (headers, cookie) {
|
||||
webidl.argumentLengthCheck(arguments, 2, 'setCookie')
|
||||
|
||||
brandChecks(headers)
|
||||
|
||||
cookie = webidl.converters.Cookie(cookie)
|
||||
|
||||
const str = stringify(cookie)
|
||||
|
||||
if (str) {
|
||||
headers.append('set-cookie', str, true)
|
||||
}
|
||||
}
|
||||
|
||||
webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([
|
||||
{
|
||||
converter: webidl.nullableConverter(webidl.converters.DOMString),
|
||||
key: 'path',
|
||||
defaultValue: () => null
|
||||
},
|
||||
{
|
||||
converter: webidl.nullableConverter(webidl.converters.DOMString),
|
||||
key: 'domain',
|
||||
defaultValue: () => null
|
||||
}
|
||||
])
|
||||
|
||||
webidl.converters.Cookie = webidl.dictionaryConverter([
|
||||
{
|
||||
converter: webidl.converters.DOMString,
|
||||
key: 'name'
|
||||
},
|
||||
{
|
||||
converter: webidl.converters.DOMString,
|
||||
key: 'value'
|
||||
},
|
||||
{
|
||||
converter: webidl.nullableConverter((value) => {
|
||||
if (typeof value === 'number') {
|
||||
return webidl.converters['unsigned long long'](value)
|
||||
}
|
||||
|
||||
return new Date(value)
|
||||
}),
|
||||
key: 'expires',
|
||||
defaultValue: () => null
|
||||
},
|
||||
{
|
||||
converter: webidl.nullableConverter(webidl.converters['long long']),
|
||||
key: 'maxAge',
|
||||
defaultValue: () => null
|
||||
},
|
||||
{
|
||||
converter: webidl.nullableConverter(webidl.converters.DOMString),
|
||||
key: 'domain',
|
||||
defaultValue: () => null
|
||||
},
|
||||
{
|
||||
converter: webidl.nullableConverter(webidl.converters.DOMString),
|
||||
key: 'path',
|
||||
defaultValue: () => null
|
||||
},
|
||||
{
|
||||
converter: webidl.nullableConverter(webidl.converters.boolean),
|
||||
key: 'secure',
|
||||
defaultValue: () => null
|
||||
},
|
||||
{
|
||||
converter: webidl.nullableConverter(webidl.converters.boolean),
|
||||
key: 'httpOnly',
|
||||
defaultValue: () => null
|
||||
},
|
||||
{
|
||||
converter: webidl.converters.USVString,
|
||||
key: 'sameSite',
|
||||
allowedValues: ['Strict', 'Lax', 'None']
|
||||
},
|
||||
{
|
||||
converter: webidl.sequenceConverter(webidl.converters.DOMString),
|
||||
key: 'unparsed',
|
||||
defaultValue: () => []
|
||||
}
|
||||
])
|
||||
|
||||
module.exports = {
|
||||
getCookies,
|
||||
deleteCookie,
|
||||
getSetCookies,
|
||||
setCookie,
|
||||
parseCookie
|
||||
}
|
||||
322
backend/node_modules/undici/lib/web/cookies/parse.js
generated
vendored
Normal file
322
backend/node_modules/undici/lib/web/cookies/parse.js
generated
vendored
Normal file
@@ -0,0 +1,322 @@
|
||||
'use strict'
|
||||
|
||||
const { collectASequenceOfCodePointsFast } = require('../infra')
|
||||
const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
|
||||
const { isCTLExcludingHtab } = require('./util')
|
||||
const assert = require('node:assert')
|
||||
const { unescape: qsUnescape } = require('node:querystring')
|
||||
|
||||
/**
|
||||
* @description Parses the field-value attributes of a set-cookie header string.
|
||||
* @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
|
||||
* @param {string} header
|
||||
* @returns {import('./index').Cookie|null} if the header is invalid, null will be returned
|
||||
*/
|
||||
function parseSetCookie (header) {
|
||||
// 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F
|
||||
// character (CTL characters excluding HTAB): Abort these steps and
|
||||
// ignore the set-cookie-string entirely.
|
||||
if (isCTLExcludingHtab(header)) {
|
||||
return null
|
||||
}
|
||||
|
||||
let nameValuePair = ''
|
||||
let unparsedAttributes = ''
|
||||
let name = ''
|
||||
let value = ''
|
||||
|
||||
// 2. If the set-cookie-string contains a %x3B (";") character:
|
||||
if (header.includes(';')) {
|
||||
// 1. The name-value-pair string consists of the characters up to,
|
||||
// but not including, the first %x3B (";"), and the unparsed-
|
||||
// attributes consist of the remainder of the set-cookie-string
|
||||
// (including the %x3B (";") in question).
|
||||
const position = { position: 0 }
|
||||
|
||||
nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
|
||||
unparsedAttributes = header.slice(position.position)
|
||||
} else {
|
||||
// Otherwise:
|
||||
|
||||
// 1. The name-value-pair string consists of all the characters
|
||||
// contained in the set-cookie-string, and the unparsed-
|
||||
// attributes is the empty string.
|
||||
nameValuePair = header
|
||||
}
|
||||
|
||||
// 3. If the name-value-pair string lacks a %x3D ("=") character, then
|
||||
// the name string is empty, and the value string is the value of
|
||||
// name-value-pair.
|
||||
if (!nameValuePair.includes('=')) {
|
||||
value = nameValuePair
|
||||
} else {
|
||||
// Otherwise, the name string consists of the characters up to, but
|
||||
// not including, the first %x3D ("=") character, and the (possibly
|
||||
// empty) value string consists of the characters after the first
|
||||
// %x3D ("=") character.
|
||||
const position = { position: 0 }
|
||||
name = collectASequenceOfCodePointsFast(
|
||||
'=',
|
||||
nameValuePair,
|
||||
position
|
||||
)
|
||||
value = nameValuePair.slice(position.position + 1)
|
||||
}
|
||||
|
||||
// 4. Remove any leading or trailing WSP characters from the name
|
||||
// string and the value string.
|
||||
name = name.trim()
|
||||
value = value.trim()
|
||||
|
||||
// 5. If the sum of the lengths of the name string and the value string
|
||||
// is more than 4096 octets, abort these steps and ignore the set-
|
||||
// cookie-string entirely.
|
||||
if (name.length + value.length > maxNameValuePairSize) {
|
||||
return null
|
||||
}
|
||||
|
||||
// 6. The cookie-name is the name string, and the cookie-value is the
|
||||
// value string.
|
||||
// https://datatracker.ietf.org/doc/html/rfc6265
|
||||
// To maximize compatibility with user agents, servers that wish to
|
||||
// store arbitrary data in a cookie-value SHOULD encode that data, for
|
||||
// example, using Base64 [RFC4648].
|
||||
return {
|
||||
name, value: qsUnescape(value), ...parseUnparsedAttributes(unparsedAttributes)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the remaining attributes of a set-cookie header
|
||||
* @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
|
||||
* @param {string} unparsedAttributes
|
||||
* @param {Object.<string, unknown>} [cookieAttributeList={}]
|
||||
*/
|
||||
function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
|
||||
// 1. If the unparsed-attributes string is empty, skip the rest of
|
||||
// these steps.
|
||||
if (unparsedAttributes.length === 0) {
|
||||
return cookieAttributeList
|
||||
}
|
||||
|
||||
// 2. Discard the first character of the unparsed-attributes (which
|
||||
// will be a %x3B (";") character).
|
||||
assert(unparsedAttributes[0] === ';')
|
||||
unparsedAttributes = unparsedAttributes.slice(1)
|
||||
|
||||
let cookieAv = ''
|
||||
|
||||
// 3. If the remaining unparsed-attributes contains a %x3B (";")
|
||||
// character:
|
||||
if (unparsedAttributes.includes(';')) {
|
||||
// 1. Consume the characters of the unparsed-attributes up to, but
|
||||
// not including, the first %x3B (";") character.
|
||||
cookieAv = collectASequenceOfCodePointsFast(
|
||||
';',
|
||||
unparsedAttributes,
|
||||
{ position: 0 }
|
||||
)
|
||||
unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
|
||||
} else {
|
||||
// Otherwise:
|
||||
|
||||
// 1. Consume the remainder of the unparsed-attributes.
|
||||
cookieAv = unparsedAttributes
|
||||
unparsedAttributes = ''
|
||||
}
|
||||
|
||||
// Let the cookie-av string be the characters consumed in this step.
|
||||
|
||||
let attributeName = ''
|
||||
let attributeValue = ''
|
||||
|
||||
// 4. If the cookie-av string contains a %x3D ("=") character:
|
||||
if (cookieAv.includes('=')) {
|
||||
// 1. The (possibly empty) attribute-name string consists of the
|
||||
// characters up to, but not including, the first %x3D ("=")
|
||||
// character, and the (possibly empty) attribute-value string
|
||||
// consists of the characters after the first %x3D ("=")
|
||||
// character.
|
||||
const position = { position: 0 }
|
||||
|
||||
attributeName = collectASequenceOfCodePointsFast(
|
||||
'=',
|
||||
cookieAv,
|
||||
position
|
||||
)
|
||||
attributeValue = cookieAv.slice(position.position + 1)
|
||||
} else {
|
||||
// Otherwise:
|
||||
|
||||
// 1. The attribute-name string consists of the entire cookie-av
|
||||
// string, and the attribute-value string is empty.
|
||||
attributeName = cookieAv
|
||||
}
|
||||
|
||||
// 5. Remove any leading or trailing WSP characters from the attribute-
|
||||
// name string and the attribute-value string.
|
||||
attributeName = attributeName.trim()
|
||||
attributeValue = attributeValue.trim()
|
||||
|
||||
// 6. If the attribute-value is longer than 1024 octets, ignore the
|
||||
// cookie-av string and return to Step 1 of this algorithm.
|
||||
if (attributeValue.length > maxAttributeValueSize) {
|
||||
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||
}
|
||||
|
||||
// 7. Process the attribute-name and attribute-value according to the
|
||||
// requirements in the following subsections. (Notice that
|
||||
// attributes with unrecognized attribute-names are ignored.)
|
||||
const attributeNameLowercase = attributeName.toLowerCase()
|
||||
|
||||
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
|
||||
// If the attribute-name case-insensitively matches the string
|
||||
// "Expires", the user agent MUST process the cookie-av as follows.
|
||||
if (attributeNameLowercase === 'expires') {
|
||||
// 1. Let the expiry-time be the result of parsing the attribute-value
|
||||
// as cookie-date (see Section 5.1.1).
|
||||
const expiryTime = new Date(attributeValue)
|
||||
|
||||
// 2. If the attribute-value failed to parse as a cookie date, ignore
|
||||
// the cookie-av.
|
||||
|
||||
cookieAttributeList.expires = expiryTime
|
||||
} else if (attributeNameLowercase === 'max-age') {
|
||||
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
|
||||
// If the attribute-name case-insensitively matches the string "Max-
|
||||
// Age", the user agent MUST process the cookie-av as follows.
|
||||
|
||||
// 1. If the first character of the attribute-value is not a DIGIT or a
|
||||
// "-" character, ignore the cookie-av.
|
||||
const charCode = attributeValue.charCodeAt(0)
|
||||
|
||||
if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
|
||||
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||
}
|
||||
|
||||
// 2. If the remainder of attribute-value contains a non-DIGIT
|
||||
// character, ignore the cookie-av.
|
||||
if (!/^\d+$/.test(attributeValue)) {
|
||||
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||
}
|
||||
|
||||
// 3. Let delta-seconds be the attribute-value converted to an integer.
|
||||
const deltaSeconds = Number(attributeValue)
|
||||
|
||||
// 4. Let cookie-age-limit be the maximum age of the cookie (which
|
||||
// SHOULD be 400 days or less, see Section 4.1.2.2).
|
||||
|
||||
// 5. Set delta-seconds to the smaller of its present value and cookie-
|
||||
// age-limit.
|
||||
// deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)
|
||||
|
||||
// 6. If delta-seconds is less than or equal to zero (0), let expiry-
|
||||
// time be the earliest representable date and time. Otherwise, let
|
||||
// the expiry-time be the current date and time plus delta-seconds
|
||||
// seconds.
|
||||
// const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds
|
||||
|
||||
// 7. Append an attribute to the cookie-attribute-list with an
|
||||
// attribute-name of Max-Age and an attribute-value of expiry-time.
|
||||
cookieAttributeList.maxAge = deltaSeconds
|
||||
} else if (attributeNameLowercase === 'domain') {
|
||||
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
|
||||
// If the attribute-name case-insensitively matches the string "Domain",
|
||||
// the user agent MUST process the cookie-av as follows.
|
||||
|
||||
// 1. Let cookie-domain be the attribute-value.
|
||||
let cookieDomain = attributeValue
|
||||
|
||||
// 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
|
||||
// cookie-domain without its leading %x2E (".").
|
||||
if (cookieDomain[0] === '.') {
|
||||
cookieDomain = cookieDomain.slice(1)
|
||||
}
|
||||
|
||||
// 3. Convert the cookie-domain to lower case.
|
||||
cookieDomain = cookieDomain.toLowerCase()
|
||||
|
||||
// 4. Append an attribute to the cookie-attribute-list with an
|
||||
// attribute-name of Domain and an attribute-value of cookie-domain.
|
||||
cookieAttributeList.domain = cookieDomain
|
||||
} else if (attributeNameLowercase === 'path') {
|
||||
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
|
||||
// If the attribute-name case-insensitively matches the string "Path",
|
||||
// the user agent MUST process the cookie-av as follows.
|
||||
|
||||
// 1. If the attribute-value is empty or if the first character of the
|
||||
// attribute-value is not %x2F ("/"):
|
||||
let cookiePath = ''
|
||||
if (attributeValue.length === 0 || attributeValue[0] !== '/') {
|
||||
// 1. Let cookie-path be the default-path.
|
||||
cookiePath = '/'
|
||||
} else {
|
||||
// Otherwise:
|
||||
|
||||
// 1. Let cookie-path be the attribute-value.
|
||||
cookiePath = attributeValue
|
||||
}
|
||||
|
||||
// 2. Append an attribute to the cookie-attribute-list with an
|
||||
// attribute-name of Path and an attribute-value of cookie-path.
|
||||
cookieAttributeList.path = cookiePath
|
||||
} else if (attributeNameLowercase === 'secure') {
|
||||
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
|
||||
// If the attribute-name case-insensitively matches the string "Secure",
|
||||
// the user agent MUST append an attribute to the cookie-attribute-list
|
||||
// with an attribute-name of Secure and an empty attribute-value.
|
||||
|
||||
cookieAttributeList.secure = true
|
||||
} else if (attributeNameLowercase === 'httponly') {
|
||||
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
|
||||
// If the attribute-name case-insensitively matches the string
|
||||
// "HttpOnly", the user agent MUST append an attribute to the cookie-
|
||||
// attribute-list with an attribute-name of HttpOnly and an empty
|
||||
// attribute-value.
|
||||
|
||||
cookieAttributeList.httpOnly = true
|
||||
} else if (attributeNameLowercase === 'samesite') {
|
||||
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
|
||||
// If the attribute-name case-insensitively matches the string
|
||||
// "SameSite", the user agent MUST process the cookie-av as follows:
|
||||
|
||||
// 1. Let enforcement be "Default".
|
||||
let enforcement = 'Default'
|
||||
|
||||
const attributeValueLowercase = attributeValue.toLowerCase()
|
||||
// 2. If cookie-av's attribute-value is a case-insensitive match for
|
||||
// "None", set enforcement to "None".
|
||||
if (attributeValueLowercase.includes('none')) {
|
||||
enforcement = 'None'
|
||||
}
|
||||
|
||||
// 3. If cookie-av's attribute-value is a case-insensitive match for
|
||||
// "Strict", set enforcement to "Strict".
|
||||
if (attributeValueLowercase.includes('strict')) {
|
||||
enforcement = 'Strict'
|
||||
}
|
||||
|
||||
// 4. If cookie-av's attribute-value is a case-insensitive match for
|
||||
// "Lax", set enforcement to "Lax".
|
||||
if (attributeValueLowercase.includes('lax')) {
|
||||
enforcement = 'Lax'
|
||||
}
|
||||
|
||||
// 5. Append an attribute to the cookie-attribute-list with an
|
||||
// attribute-name of "SameSite" and an attribute-value of
|
||||
// enforcement.
|
||||
cookieAttributeList.sameSite = enforcement
|
||||
} else {
|
||||
cookieAttributeList.unparsed ??= []
|
||||
|
||||
cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
|
||||
}
|
||||
|
||||
// 8. Return to Step 1 of this algorithm.
|
||||
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||
}
|
||||
|
||||
// Public API: parseSetCookie parses a full Set-Cookie header value;
// parseUnparsedAttributes recursively consumes the remaining cookie-av
// attribute strings (it calls itself until the input is exhausted).
module.exports = {
  parseSetCookie,
  parseUnparsedAttributes
}
|
||||
282
backend/node_modules/undici/lib/web/cookies/util.js
generated
vendored
Normal file
282
backend/node_modules/undici/lib/web/cookies/util.js
generated
vendored
Normal file
@@ -0,0 +1,282 @@
|
||||
'use strict'
|
||||
|
||||
/**
 * Reports whether a string contains a CTL character — octets 0x00-0x08,
 * 0x0A-0x1F, or DEL (0x7F). HTAB (0x09) is explicitly permitted.
 * @param {string} value
 * @returns {boolean} true when a forbidden control character is present
 */
function isCTLExcludingHtab (value) {
  // Single regex scan over the UTF-16 code units; the forbidden ranges are
  // all below the surrogate range, so this matches exactly the same inputs
  // as a per-code-unit comparison would.
  return /[\u0000-\u0008\u000A-\u001F\u007F]/.test(value)
}
|
||||
|
||||
/**
 CHAR = <any US-ASCII character (octets 0 - 127)>
 token = 1*<any CHAR except CTLs or separators>
 separators = "(" | ")" | "<" | ">" | "@"
 | "," | ";" | ":" | "\" | <">
 | "/" | "[" | "]" | "?" | "="
 | "{" | "}" | SP | HT
 * Throws unless every character of the name is a valid RFC 2616 token char.
 * @param {string} name
 * @throws {Error} on any CTL, SP, HT, non-ASCII, DEL, or separator character
 */
function validateCookieName (name) {
  // The RFC 2616 separator set, minus SP/HT which are already excluded by
  // the < 0x21 range check below.
  const separators = '"(),/:;<=>?@[\\]{}'

  for (let i = 0; i < name.length; ++i) {
    const code = name.charCodeAt(i)

    if (
      code < 0x21 || // CTLs (0-31), SP and HT
      code > 0x7E || // non-ascii and DEL
      separators.includes(name[i])
    ) {
      throw new Error('Invalid cookie name')
    }
  }
}
|
||||
|
||||
/**
 cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
 cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
 ; US-ASCII characters excluding CTLs,
 ; whitespace DQUOTE, comma, semicolon,
 ; and backslash
 * Throws unless the value (optionally DQUOTE-wrapped) consists solely of
 * valid cookie-octets.
 * @param {string} value
 * @throws {Error} on an unterminated quote or a forbidden octet
 */
function validateCookieValue (value) {
  let start = 0
  let end = value.length

  // A value that opens with DQUOTE must close with one; the quotes
  // themselves are excluded from the octet scan.
  if (value[0] === '"') {
    if (end === 1 || value[end - 1] !== '"') {
      throw new Error('Invalid cookie value')
    }
    ++start
    --end
  }

  for (let i = start; i < end; ++i) {
    const code = value.charCodeAt(i)

    const forbidden =
      code < 0x21 || // CTLs (0-31) and SP
      code > 0x7E || // non-ascii and DEL (127)
      code === 0x22 || // "
      code === 0x2C || // ,
      code === 0x3B || // ;
      code === 0x5C // \

    if (forbidden) {
      throw new Error('Invalid cookie value')
    }
  }
}
|
||||
|
||||
/**
 * path-value = <any CHAR except CTLs or ";">
 * Throws when the path contains a CTL, DEL, or the ";" separator.
 * @param {string} path
 * @throws {Error}
 */
function validateCookiePath (path) {
  for (const ch of path) {
    const code = ch.charCodeAt(0)

    // CTLs (0-31), DEL (127), and the cookie-av delimiter ";" are the only
    // forbidden characters; SP (0x20) and high code points are allowed.
    if (code < 0x20 || code === 0x7F || ch === ';') {
      throw new Error('Invalid cookie path')
    }
  }
}
|
||||
|
||||
/**
 * Rejects a domain that begins with "-" or ends with "." or "-".
 * (These exact cases are what Deno's test-suite expects to be invalid,
 * per the original author's note.)
 * @param {string} domain
 * @throws {Error}
 */
function validateCookieDomain (domain) {
  const first = domain[0]
  const last = domain[domain.length - 1]

  if (first === '-' || last === '.' || last === '-') {
    throw new Error('Invalid cookie domain')
  }
}
|
||||
|
||||
// Lookup tables for IMF-fixdate serialization (RFC 7231 section 7.1.1.1).
const IMFDays = [
  'Sun', 'Mon', 'Tue', 'Wed',
  'Thu', 'Fri', 'Sat'
]

const IMFMonths = [
  'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
  'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
]

// Zero-padded strings '00'..'60' (61 entries: seconds may reach 60 on a
// leap second per the IMF-fixdate grammar).
const IMFPaddedNumbers = Array.from({ length: 61 }, (_, i) => String(i).padStart(2, '0'))

/**
 * Serializes a date as an IMF-fixdate string in UTC, e.g.
 * "Sun, 06 Nov 1994 08:49:37 GMT".
 * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
 * @param {number|Date} date epoch milliseconds or a Date instance
 * @returns {string}
 */
function toIMFDate (date) {
  const d = typeof date === 'number' ? new Date(date) : date

  const dayName = IMFDays[d.getUTCDay()]
  const day = IMFPaddedNumbers[d.getUTCDate()]
  const month = IMFMonths[d.getUTCMonth()]
  const year = d.getUTCFullYear()
  const time = `${IMFPaddedNumbers[d.getUTCHours()]}:${IMFPaddedNumbers[d.getUTCMinutes()]}:${IMFPaddedNumbers[d.getUTCSeconds()]}`

  return `${dayName}, ${day} ${month} ${year} ${time} GMT`
}
|
||||
|
||||
/**
 max-age-av = "Max-Age=" non-zero-digit *DIGIT
 ; In practice, both expires-av and max-age-av
 ; are limited to dates representable by the
 ; user agent.
 * Validates a Max-Age attribute value before serialization.
 * @param {number} maxAge seconds until the cookie expires
 * @throws {Error} if maxAge is negative or NaN
 */
function validateCookieMaxAge (maxAge) {
  // `NaN < 0` is false, so a bare `maxAge < 0` check silently accepted NaN
  // and let callers emit "Max-Age=NaN"; reject it explicitly.
  if (maxAge < 0 || Number.isNaN(maxAge)) {
    throw new Error('Invalid cookie max-age')
  }
}
|
||||
|
||||
/**
 * Serializes a cookie into a Set-Cookie header value.
 * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
 * @param {import('./index').Cookie} cookie
 * @returns {string|null} the serialized header value, or null for an empty name
 * @throws {Error} if the name, value, max-age, domain, path, or an unparsed
 *   attribute fails validation
 */
function stringify (cookie) {
  if (cookie.name.length === 0) {
    return null
  }

  validateCookieName(cookie.name)
  validateCookieValue(cookie.value)

  const out = [`${cookie.name}=${cookie.value}`]

  // Apply the cookie-prefix rules to locals instead of writing them back
  // onto the caller's object (the previous implementation mutated
  // cookie.secure / cookie.domain / cookie.path as a side effect).
  let { secure, domain, path } = cookie

  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
  if (cookie.name.startsWith('__Secure-')) {
    secure = true
  }

  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
  if (cookie.name.startsWith('__Host-')) {
    secure = true
    domain = null
    path = '/'
  }

  if (secure) {
    out.push('Secure')
  }

  if (cookie.httpOnly) {
    out.push('HttpOnly')
  }

  if (typeof cookie.maxAge === 'number') {
    validateCookieMaxAge(cookie.maxAge)
    out.push(`Max-Age=${cookie.maxAge}`)
  }

  if (domain) {
    validateCookieDomain(domain)
    out.push(`Domain=${domain}`)
  }

  if (path) {
    validateCookiePath(path)
    out.push(`Path=${path}`)
  }

  // Skip rather than serialize an invalid Date.
  if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
    out.push(`Expires=${toIMFDate(cookie.expires)}`)
  }

  if (cookie.sameSite) {
    out.push(`SameSite=${cookie.sameSite}`)
  }

  // Guard against a missing unparsed list instead of throwing a TypeError
  // on the for...of.
  for (const part of cookie.unparsed ?? []) {
    if (!part.includes('=')) {
      throw new Error('Invalid unparsed')
    }

    // Split on the first "=" only; the value itself may contain "=".
    const [key, ...value] = part.split('=')

    out.push(`${key.trim()}=${value.join('=')}`)
  }

  return out.join('; ')
}
|
||||
|
||||
// Public surface of the cookie helpers.
// NOTE(review): validateCookieDomain and validateCookieMaxAge are used by
// stringify but are not exported — presumably intentional (internal-only);
// confirm before relying on them from other modules.
module.exports = {
  isCTLExcludingHtab,
  validateCookieName,
  validateCookiePath,
  validateCookieValue,
  toIMFDate,
  stringify
}
|
||||
399
backend/node_modules/undici/lib/web/eventsource/eventsource-stream.js
generated
vendored
Normal file
399
backend/node_modules/undici/lib/web/eventsource/eventsource-stream.js
generated
vendored
Normal file
@@ -0,0 +1,399 @@
|
||||
'use strict'
|
||||
const { Transform } = require('node:stream')
|
||||
const { isASCIINumber, isValidLastEventId } = require('./util')
|
||||
|
||||
/**
 * UTF-8 byte-order-mark, which may prefix the stream and must be stripped.
 * @type {number[]} BOM
 */
const BOM = [0xEF, 0xBB, 0xBF]
/**
 * @type {10} LF
 */
const LF = 0x0A
/**
 * @type {13} CR
 */
const CR = 0x0D
/**
 * @type {58} COLON
 */
const COLON = 0x3A
/**
 * @type {32} SPACE
 */
const SPACE = 0x20

/**
 * @typedef {object} EventSourceStreamEvent
 * @type {object}
 * @property {string} [event] The event type.
 * @property {string} [data] The data of the message.
 * @property {string} [id] A unique ID for the event.
 * @property {string} [retry] The reconnection time, in milliseconds.
 */

/**
 * @typedef eventSourceSettings
 * @type {object}
 * @property {string} [lastEventId] The last event ID received from the server.
 * @property {string} [origin] The origin of the event source.
 * @property {number} [reconnectionTime] The reconnection time, in milliseconds.
 */

/**
 * Incremental parser for the text/event-stream wire format: accepts raw
 * Buffer chunks via _transform and pushes EventSourceStreamEvent-shaped
 * objects (the stream runs in readable object mode).
 */
class EventSourceStream extends Transform {
  /**
   * Shared connection state; updated in place by processEvent.
   * @type {eventSourceSettings}
   */
  state

  /**
   * Leading byte-order-mark check.
   * @type {boolean}
   */
  checkBOM = true

  /**
   * True when the previous byte was a CR, so a directly following LF must
   * be swallowed (CRLF counts as a single line ending).
   * @type {boolean}
   */
  crlfCheck = false

  /**
   * True right after a line ended; a second consecutive end-of-line at that
   * point is a blank line and dispatches the buffered event.
   * @type {boolean}
   */
  eventEndCheck = false

  /**
   * Unprocessed bytes carried over between _transform calls.
   * @type {Buffer|null}
   */
  buffer = null

  // Scan position inside `buffer`; reset to 0 whenever processed bytes are
  // sliced off the front of the buffer.
  pos = 0

  // Accumulator for the event currently being parsed, field by field.
  event = {
    data: undefined,
    event: undefined,
    id: undefined,
    retry: undefined
  }

  /**
   * @param {object} options
   * @param {boolean} [options.readableObjectMode]
   * @param {eventSourceSettings} [options.eventSourceSettings]
   * @param {(chunk: any, encoding?: BufferEncoding | undefined) => boolean} [options.push]
   */
  constructor (options = {}) {
    // Enable object mode as EventSourceStream emits objects of shape
    // EventSourceStreamEvent
    options.readableObjectMode = true

    super(options)

    this.state = options.eventSourceSettings || {}
    if (options.push) {
      this.push = options.push
    }
  }

  /**
   * Buffers the chunk, strips a leading BOM once, then scans byte-by-byte
   * for line endings, feeding complete lines to parseLine and dispatching
   * the accumulated event on a blank line.
   * @param {Buffer} chunk
   * @param {string} _encoding
   * @param {Function} callback
   * @returns {void}
   */
  _transform (chunk, _encoding, callback) {
    if (chunk.length === 0) {
      callback()
      return
    }

    // Cache the chunk in the buffer, as the data might not be complete while
    // processing it
    // TODO: Investigate if there is a more performant way to handle
    // incoming chunks
    // see: https://github.com/nodejs/undici/issues/2630
    if (this.buffer) {
      this.buffer = Buffer.concat([this.buffer, chunk])
    } else {
      this.buffer = chunk
    }

    // Strip leading byte-order-mark if we opened the stream and started
    // the processing of the incoming data
    if (this.checkBOM) {
      switch (this.buffer.length) {
        case 1:
          // Check if the first byte is the same as the first byte of the BOM
          if (this.buffer[0] === BOM[0]) {
            // If it is, we need to wait for more data
            callback()
            return
          }
          // Set the checkBOM flag to false as we don't need to check for the
          // BOM anymore
          this.checkBOM = false

          // The buffer only contains one byte so we need to wait for more data
          callback()
          return
        case 2:
          // Check if the first two bytes are the same as the first two bytes
          // of the BOM
          if (
            this.buffer[0] === BOM[0] &&
            this.buffer[1] === BOM[1]
          ) {
            // If it is, we need to wait for more data, because the third byte
            // is needed to determine if it is the BOM or not
            callback()
            return
          }

          // Set the checkBOM flag to false as we don't need to check for the
          // BOM anymore
          this.checkBOM = false
          break
        case 3:
          // Check if the first three bytes are the same as the first three
          // bytes of the BOM
          if (
            this.buffer[0] === BOM[0] &&
            this.buffer[1] === BOM[1] &&
            this.buffer[2] === BOM[2]
          ) {
            // If it is, we can drop the buffered data, as it is only the BOM
            this.buffer = Buffer.alloc(0)
            // Set the checkBOM flag to false as we don't need to check for the
            // BOM anymore
            this.checkBOM = false

            // Await more data
            callback()
            return
          }
          // If it is not the BOM, we can start processing the data
          this.checkBOM = false
          break
        default:
          // The buffer is longer than 3 bytes, so we can drop the BOM if it is
          // present
          if (
            this.buffer[0] === BOM[0] &&
            this.buffer[1] === BOM[1] &&
            this.buffer[2] === BOM[2]
          ) {
            // Remove the BOM from the buffer
            this.buffer = this.buffer.subarray(3)
          }

          // Set the checkBOM flag to false as we don't need to check for the
          // BOM anymore
          this.checkBOM = false
          break
      }
    }

    while (this.pos < this.buffer.length) {
      // If the previous line ended with an end-of-line, we need to check
      // if the next character is also an end-of-line.
      if (this.eventEndCheck) {
        // If the current character is an end-of-line, then the event
        // is finished and we can process it

        // If the previous line ended with a carriage return, we need to
        // check if the current character is a line feed and remove it
        // from the buffer.
        if (this.crlfCheck) {
          // If the current character is a line feed, we can remove it
          // from the buffer and reset the crlfCheck flag
          if (this.buffer[this.pos] === LF) {
            this.buffer = this.buffer.subarray(this.pos + 1)
            this.pos = 0
            this.crlfCheck = false

            // It is possible that the line feed is not the end of the
            // event. We need to check if the next character is an
            // end-of-line character to determine if the event is
            // finished. We simply continue the loop to check the next
            // character.

            // As we removed the line feed from the buffer and set the
            // crlfCheck flag to false, we basically don't make any
            // distinction between a line feed and a carriage return.
            continue
          }
          this.crlfCheck = false
        }

        if (this.buffer[this.pos] === LF || this.buffer[this.pos] === CR) {
          // If the current character is a carriage return, we need to
          // set the crlfCheck flag to true, as we need to check if the
          // next character is a line feed so we can remove it from the
          // buffer
          if (this.buffer[this.pos] === CR) {
            this.crlfCheck = true
          }

          this.buffer = this.buffer.subarray(this.pos + 1)
          this.pos = 0
          // Only dispatch when at least one field was populated; a bare
          // blank line between events produces nothing.
          if (
            this.event.data !== undefined || this.event.event || this.event.id !== undefined || this.event.retry) {
            this.processEvent(this.event)
          }
          this.clearEvent()
          continue
        }
        // If the current character is not an end-of-line, then the event
        // is not finished and we have to reset the eventEndCheck flag
        this.eventEndCheck = false
        continue
      }

      // If the current character is an end-of-line, we can process the
      // line
      if (this.buffer[this.pos] === LF || this.buffer[this.pos] === CR) {
        // If the current character is a carriage return, we need to
        // set the crlfCheck flag to true, as we need to check if the
        // next character is a line feed
        if (this.buffer[this.pos] === CR) {
          this.crlfCheck = true
        }

        // In any case, we can process the line as we reached an
        // end-of-line character
        this.parseLine(this.buffer.subarray(0, this.pos), this.event)

        // Remove the processed line from the buffer
        this.buffer = this.buffer.subarray(this.pos + 1)
        // Reset the position as we removed the processed line from the buffer
        this.pos = 0
        // A line was processed and this could be the end of the event. We need
        // to check if the next line is empty to determine if the event is
        // finished.
        this.eventEndCheck = true
        continue
      }

      this.pos++
    }

    callback()
  }

  /**
   * Parses one "field: value" line of the event stream into the mutable
   * event accumulator.
   * @param {Buffer} line
   * @param {EventSourceStreamEvent} event
   */
  parseLine (line, event) {
    // If the line is empty (a blank line)
    // Dispatch the event, as defined below.
    // This will be handled in the _transform method
    if (line.length === 0) {
      return
    }

    // If the line starts with a U+003A COLON character (:)
    // Ignore the line.
    const colonPosition = line.indexOf(COLON)
    if (colonPosition === 0) {
      return
    }

    let field = ''
    let value = ''

    // If the line contains a U+003A COLON character (:)
    if (colonPosition !== -1) {
      // Collect the characters on the line before the first U+003A COLON
      // character (:), and let field be that string.
      // TODO: Investigate if there is a more performant way to extract the
      // field
      // see: https://github.com/nodejs/undici/issues/2630
      field = line.subarray(0, colonPosition).toString('utf8')

      // Collect the characters on the line after the first U+003A COLON
      // character (:), and let value be that string.
      // If value starts with a U+0020 SPACE character, remove it from value.
      let valueStart = colonPosition + 1
      if (line[valueStart] === SPACE) {
        ++valueStart
      }
      // TODO: Investigate if there is a more performant way to extract the
      // value
      // see: https://github.com/nodejs/undici/issues/2630
      value = line.subarray(valueStart).toString('utf8')

      // Otherwise, the string is not empty but does not contain a U+003A COLON
      // character (:)
    } else {
      // Process the field using the steps described below, using the whole
      // line as the field name, and the empty string as the field value.
      field = line.toString('utf8')
      value = ''
    }

    // Modify the event with the field name and value. The value is also
    // decoded as UTF-8
    switch (field) {
      case 'data':
        // Successive data lines are joined with a newline.
        if (event[field] === undefined) {
          event[field] = value
        } else {
          event[field] += `\n${value}`
        }
        break
      case 'retry':
        if (isASCIINumber(value)) {
          event[field] = value
        }
        break
      case 'id':
        if (isValidLastEventId(value)) {
          event[field] = value
        }
        break
      case 'event':
        if (value.length > 0) {
          event[field] = value
        }
        break
    }
  }

  /**
   * Applies a completed event to the connection state and, when it carries
   * data, pushes it downstream.
   * @param {EventSourceStreamEvent} event
   */
  processEvent (event) {
    if (event.retry && isASCIINumber(event.retry)) {
      this.state.reconnectionTime = parseInt(event.retry, 10)
    }

    if (event.id !== undefined && isValidLastEventId(event.id)) {
      this.state.lastEventId = event.id
    }

    // only dispatch event, when data is provided
    if (event.data !== undefined) {
      this.push({
        type: event.event || 'message',
        options: {
          data: event.data,
          lastEventId: this.state.lastEventId,
          origin: this.state.origin
        }
      })
    }
  }

  // Reset the per-event accumulator after a dispatch attempt.
  clearEvent () {
    this.event = {
      data: undefined,
      event: undefined,
      id: undefined,
      retry: undefined
    }
  }
}
|
||||
|
||||
// Consumed by the EventSource implementation to decode the response body.
module.exports = {
  EventSourceStream
}
|
||||
501
backend/node_modules/undici/lib/web/eventsource/eventsource.js
generated
vendored
Normal file
501
backend/node_modules/undici/lib/web/eventsource/eventsource.js
generated
vendored
Normal file
@@ -0,0 +1,501 @@
|
||||
'use strict'
|
||||
|
||||
const { pipeline } = require('node:stream')
|
||||
const { fetching } = require('../fetch')
|
||||
const { makeRequest } = require('../fetch/request')
|
||||
const { webidl } = require('../webidl')
|
||||
const { EventSourceStream } = require('./eventsource-stream')
|
||||
const { parseMIMEType } = require('../fetch/data-url')
|
||||
const { createFastMessageEvent } = require('../websocket/events')
|
||||
const { isNetworkError } = require('../fetch/response')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
const { environmentSettingsObject } = require('../fetch/util')
|
||||
|
||||
// Emit the experimental-feature warning at most once per process.
let experimentalWarned = false

/**
 * A reconnection time, in milliseconds. This must initially be an implementation-defined value,
 * probably in the region of a few seconds.
 *
 * In Comparison:
 * - Chrome uses 3000ms.
 * - Deno uses 5000ms.
 *
 * @type {3000}
 */
const defaultReconnectionTime = 3000

/**
 * The readyState attribute represents the state of the connection.
 * @typedef ReadyState
 * @type {0|1|2}
 * @readonly
 * @see https://html.spec.whatwg.org/multipage/server-sent-events.html#dom-eventsource-readystate-dev
 */

/**
 * The connection has not yet been established, or it was closed and the user
 * agent is reconnecting.
 * @type {0}
 */
const CONNECTING = 0

/**
 * The user agent has an open connection and is dispatching events as it
 * receives them.
 * @type {1}
 */
const OPEN = 1

/**
 * The connection is not open, and the user agent is not trying to reconnect.
 * @type {2}
 */
const CLOSED = 2

/**
 * Requests for the element will have their mode set to "cors" and their credentials mode set to "same-origin".
 * @type {'anonymous'}
 */
const ANONYMOUS = 'anonymous'

/**
 * Requests for the element will have their mode set to "cors" and their credentials mode set to "include".
 * @type {'use-credentials'}
 */
const USE_CREDENTIALS = 'use-credentials'
|
||||
|
||||
/**
|
||||
* The EventSource interface is used to receive server-sent events. It
|
||||
* connects to a server over HTTP and receives events in text/event-stream
|
||||
* format without closing the connection.
|
||||
* @extends {EventTarget}
|
||||
* @see https://html.spec.whatwg.org/multipage/server-sent-events.html#server-sent-events
|
||||
* @api public
|
||||
*/
|
||||
class EventSource extends EventTarget {
|
||||
#events = {
|
||||
open: null,
|
||||
error: null,
|
||||
message: null
|
||||
}
|
||||
|
||||
#url
|
||||
#withCredentials = false
|
||||
|
||||
/**
|
||||
* @type {ReadyState}
|
||||
*/
|
||||
#readyState = CONNECTING
|
||||
|
||||
#request = null
|
||||
#controller = null
|
||||
|
||||
#dispatcher
|
||||
|
||||
/**
|
||||
* @type {import('./eventsource-stream').eventSourceSettings}
|
||||
*/
|
||||
#state
|
||||
|
||||
/**
|
||||
* Creates a new EventSource object.
|
||||
* @param {string} url
|
||||
* @param {EventSourceInit} [eventSourceInitDict={}]
|
||||
* @see https://html.spec.whatwg.org/multipage/server-sent-events.html#the-eventsource-interface
|
||||
*/
|
||||
constructor (url, eventSourceInitDict = {}) {
|
||||
// 1. Let ev be a new EventSource object.
|
||||
super()
|
||||
|
||||
webidl.util.markAsUncloneable(this)
|
||||
|
||||
const prefix = 'EventSource constructor'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
if (!experimentalWarned) {
|
||||
experimentalWarned = true
|
||||
process.emitWarning('EventSource is experimental, expect them to change at any time.', {
|
||||
code: 'UNDICI-ES'
|
||||
})
|
||||
}
|
||||
|
||||
url = webidl.converters.USVString(url)
|
||||
eventSourceInitDict = webidl.converters.EventSourceInitDict(eventSourceInitDict, prefix, 'eventSourceInitDict')
|
||||
|
||||
this.#dispatcher = eventSourceInitDict.node.dispatcher || eventSourceInitDict.dispatcher
|
||||
this.#state = {
|
||||
lastEventId: '',
|
||||
reconnectionTime: eventSourceInitDict.node.reconnectionTime
|
||||
}
|
||||
|
||||
// 2. Let settings be ev's relevant settings object.
|
||||
// https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object
|
||||
const settings = environmentSettingsObject
|
||||
|
||||
let urlRecord
|
||||
|
||||
try {
|
||||
// 3. Let urlRecord be the result of encoding-parsing a URL given url, relative to settings.
|
||||
urlRecord = new URL(url, settings.settingsObject.baseUrl)
|
||||
this.#state.origin = urlRecord.origin
|
||||
} catch (e) {
|
||||
// 4. If urlRecord is failure, then throw a "SyntaxError" DOMException.
|
||||
throw new DOMException(e, 'SyntaxError')
|
||||
}
|
||||
|
||||
// 5. Set ev's url to urlRecord.
|
||||
this.#url = urlRecord.href
|
||||
|
||||
// 6. Let corsAttributeState be Anonymous.
|
||||
let corsAttributeState = ANONYMOUS
|
||||
|
||||
// 7. If the value of eventSourceInitDict's withCredentials member is true,
|
||||
// then set corsAttributeState to Use Credentials and set ev's
|
||||
// withCredentials attribute to true.
|
||||
if (eventSourceInitDict.withCredentials === true) {
|
||||
corsAttributeState = USE_CREDENTIALS
|
||||
this.#withCredentials = true
|
||||
}
|
||||
|
||||
// 8. Let request be the result of creating a potential-CORS request given
|
||||
// urlRecord, the empty string, and corsAttributeState.
|
||||
const initRequest = {
|
||||
redirect: 'follow',
|
||||
keepalive: true,
|
||||
// @see https://html.spec.whatwg.org/multipage/urls-and-fetching.html#cors-settings-attributes
|
||||
mode: 'cors',
|
||||
credentials: corsAttributeState === 'anonymous'
|
||||
? 'same-origin'
|
||||
: 'omit',
|
||||
referrer: 'no-referrer'
|
||||
}
|
||||
|
||||
// 9. Set request's client to settings.
|
||||
initRequest.client = environmentSettingsObject.settingsObject
|
||||
|
||||
// 10. User agents may set (`Accept`, `text/event-stream`) in request's header list.
|
||||
initRequest.headersList = [['accept', { name: 'accept', value: 'text/event-stream' }]]
|
||||
|
||||
// 11. Set request's cache mode to "no-store".
|
||||
initRequest.cache = 'no-store'
|
||||
|
||||
// 12. Set request's initiator type to "other".
|
||||
initRequest.initiator = 'other'
|
||||
|
||||
initRequest.urlList = [new URL(this.#url)]
|
||||
|
||||
// 13. Set ev's request to request.
|
||||
this.#request = makeRequest(initRequest)
|
||||
|
||||
this.#connect()
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the state of this EventSource object's connection. It can have the
|
||||
* values described below.
|
||||
* @returns {ReadyState}
|
||||
* @readonly
|
||||
*/
|
||||
get readyState () {
|
||||
return this.#readyState
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the URL providing the event stream.
|
||||
* @readonly
|
||||
* @returns {string}
|
||||
*/
|
||||
get url () {
|
||||
return this.#url
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a boolean indicating whether the EventSource object was
|
||||
* instantiated with CORS credentials set (true), or not (false, the default).
|
||||
*/
|
||||
get withCredentials () {
|
||||
return this.#withCredentials
|
||||
}
|
||||
|
||||
#connect () {
|
||||
if (this.#readyState === CLOSED) return
|
||||
|
||||
this.#readyState = CONNECTING
|
||||
|
||||
const fetchParams = {
|
||||
request: this.#request,
|
||||
dispatcher: this.#dispatcher
|
||||
}
|
||||
|
||||
// 14. Let processEventSourceEndOfBody given response res be the following step: if res is not a network error, then reestablish the connection.
|
||||
const processEventSourceEndOfBody = (response) => {
|
||||
if (!isNetworkError(response)) {
|
||||
return this.#reconnect()
|
||||
}
|
||||
}
|
||||
|
||||
// 15. Fetch request, with processResponseEndOfBody set to processEventSourceEndOfBody...
|
||||
fetchParams.processResponseEndOfBody = processEventSourceEndOfBody
|
||||
|
||||
// and processResponse set to the following steps given response res:
|
||||
fetchParams.processResponse = (response) => {
|
||||
// 1. If res is an aborted network error, then fail the connection.
|
||||
|
||||
if (isNetworkError(response)) {
|
||||
// 1. When a user agent is to fail the connection, the user agent
|
||||
// must queue a task which, if the readyState attribute is set to a
|
||||
// value other than CLOSED, sets the readyState attribute to CLOSED
|
||||
// and fires an event named error at the EventSource object. Once the
|
||||
// user agent has failed the connection, it does not attempt to
|
||||
// reconnect.
|
||||
if (response.aborted) {
|
||||
this.close()
|
||||
this.dispatchEvent(new Event('error'))
|
||||
return
|
||||
// 2. Otherwise, if res is a network error, then reestablish the
|
||||
// connection, unless the user agent knows that to be futile, in
|
||||
// which case the user agent may fail the connection.
|
||||
} else {
|
||||
this.#reconnect()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Otherwise, if res's status is not 200, or if res's `Content-Type`
|
||||
// is not `text/event-stream`, then fail the connection.
|
||||
const contentType = response.headersList.get('content-type', true)
|
||||
const mimeType = contentType !== null ? parseMIMEType(contentType) : 'failure'
|
||||
const contentTypeValid = mimeType !== 'failure' && mimeType.essence === 'text/event-stream'
|
||||
if (
|
||||
response.status !== 200 ||
|
||||
contentTypeValid === false
|
||||
) {
|
||||
this.close()
|
||||
this.dispatchEvent(new Event('error'))
|
||||
return
|
||||
}
|
||||
|
||||
// 4. Otherwise, announce the connection and interpret res's body
|
||||
// line by line.
|
||||
|
||||
// When a user agent is to announce the connection, the user agent
|
||||
// must queue a task which, if the readyState attribute is set to a
|
||||
// value other than CLOSED, sets the readyState attribute to OPEN
|
||||
// and fires an event named open at the EventSource object.
|
||||
// @see https://html.spec.whatwg.org/multipage/server-sent-events.html#sse-processing-model
|
||||
this.#readyState = OPEN
|
||||
this.dispatchEvent(new Event('open'))
|
||||
|
||||
// If redirected to a different origin, set the origin to the new origin.
|
||||
this.#state.origin = response.urlList[response.urlList.length - 1].origin
|
||||
|
||||
const eventSourceStream = new EventSourceStream({
|
||||
eventSourceSettings: this.#state,
|
||||
push: (event) => {
|
||||
this.dispatchEvent(createFastMessageEvent(
|
||||
event.type,
|
||||
event.options
|
||||
))
|
||||
}
|
||||
})
|
||||
|
||||
pipeline(response.body.stream,
|
||||
eventSourceStream,
|
||||
(error) => {
|
||||
if (
|
||||
error?.aborted === false
|
||||
) {
|
||||
this.close()
|
||||
this.dispatchEvent(new Event('error'))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
this.#controller = fetching(fetchParams)
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://html.spec.whatwg.org/multipage/server-sent-events.html#sse-processing-model
|
||||
* @returns {void}
|
||||
*/
|
||||
#reconnect () {
|
||||
// When a user agent is to reestablish the connection, the user agent must
|
||||
// run the following steps. These steps are run in parallel, not as part of
|
||||
// a task. (The tasks that it queues, of course, are run like normal tasks
|
||||
// and not themselves in parallel.)
|
||||
|
||||
// 1. Queue a task to run the following steps:
|
||||
|
||||
// 1. If the readyState attribute is set to CLOSED, abort the task.
|
||||
if (this.#readyState === CLOSED) return
|
||||
|
||||
// 2. Set the readyState attribute to CONNECTING.
|
||||
this.#readyState = CONNECTING
|
||||
|
||||
// 3. Fire an event named error at the EventSource object.
|
||||
this.dispatchEvent(new Event('error'))
|
||||
|
||||
// 2. Wait a delay equal to the reconnection time of the event source.
|
||||
setTimeout(() => {
|
||||
// 5. Queue a task to run the following steps:
|
||||
|
||||
// 1. If the EventSource object's readyState attribute is not set to
|
||||
// CONNECTING, then return.
|
||||
if (this.#readyState !== CONNECTING) return
|
||||
|
||||
// 2. Let request be the EventSource object's request.
|
||||
// 3. If the EventSource object's last event ID string is not the empty
|
||||
// string, then:
|
||||
// 1. Let lastEventIDValue be the EventSource object's last event ID
|
||||
// string, encoded as UTF-8.
|
||||
// 2. Set (`Last-Event-ID`, lastEventIDValue) in request's header
|
||||
// list.
|
||||
if (this.#state.lastEventId.length) {
|
||||
this.#request.headersList.set('last-event-id', this.#state.lastEventId, true)
|
||||
}
|
||||
|
||||
// 4. Fetch request and process the response obtained in this fashion, if any, as described earlier in this section.
|
||||
this.#connect()
|
||||
}, this.#state.reconnectionTime)?.unref()
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the connection, if any, and sets the readyState attribute to
|
||||
* CLOSED.
|
||||
*/
|
||||
close () {
|
||||
webidl.brandCheck(this, EventSource)
|
||||
|
||||
if (this.#readyState === CLOSED) return
|
||||
this.#readyState = CLOSED
|
||||
this.#controller.abort()
|
||||
this.#request = null
|
||||
}
|
||||
|
||||
get onopen () {
|
||||
return this.#events.open
|
||||
}
|
||||
|
||||
set onopen (fn) {
|
||||
if (this.#events.open) {
|
||||
this.removeEventListener('open', this.#events.open)
|
||||
}
|
||||
|
||||
const listener = webidl.converters.EventHandlerNonNull(fn)
|
||||
|
||||
if (listener !== null) {
|
||||
this.addEventListener('open', listener)
|
||||
this.#events.open = fn
|
||||
} else {
|
||||
this.#events.open = null
|
||||
}
|
||||
}
|
||||
|
||||
get onmessage () {
|
||||
return this.#events.message
|
||||
}
|
||||
|
||||
set onmessage (fn) {
|
||||
if (this.#events.message) {
|
||||
this.removeEventListener('message', this.#events.message)
|
||||
}
|
||||
|
||||
const listener = webidl.converters.EventHandlerNonNull(fn)
|
||||
|
||||
if (listener !== null) {
|
||||
this.addEventListener('message', listener)
|
||||
this.#events.message = fn
|
||||
} else {
|
||||
this.#events.message = null
|
||||
}
|
||||
}
|
||||
|
||||
get onerror () {
|
||||
return this.#events.error
|
||||
}
|
||||
|
||||
set onerror (fn) {
|
||||
if (this.#events.error) {
|
||||
this.removeEventListener('error', this.#events.error)
|
||||
}
|
||||
|
||||
const listener = webidl.converters.EventHandlerNonNull(fn)
|
||||
|
||||
if (listener !== null) {
|
||||
this.addEventListener('error', listener)
|
||||
this.#events.error = fn
|
||||
} else {
|
||||
this.#events.error = null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const constantsPropertyDescriptors = {
|
||||
CONNECTING: {
|
||||
__proto__: null,
|
||||
configurable: false,
|
||||
enumerable: true,
|
||||
value: CONNECTING,
|
||||
writable: false
|
||||
},
|
||||
OPEN: {
|
||||
__proto__: null,
|
||||
configurable: false,
|
||||
enumerable: true,
|
||||
value: OPEN,
|
||||
writable: false
|
||||
},
|
||||
CLOSED: {
|
||||
__proto__: null,
|
||||
configurable: false,
|
||||
enumerable: true,
|
||||
value: CLOSED,
|
||||
writable: false
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperties(EventSource, constantsPropertyDescriptors)
|
||||
Object.defineProperties(EventSource.prototype, constantsPropertyDescriptors)
|
||||
|
||||
Object.defineProperties(EventSource.prototype, {
|
||||
close: kEnumerableProperty,
|
||||
onerror: kEnumerableProperty,
|
||||
onmessage: kEnumerableProperty,
|
||||
onopen: kEnumerableProperty,
|
||||
readyState: kEnumerableProperty,
|
||||
url: kEnumerableProperty,
|
||||
withCredentials: kEnumerableProperty
|
||||
})
|
||||
|
||||
webidl.converters.EventSourceInitDict = webidl.dictionaryConverter([
|
||||
{
|
||||
key: 'withCredentials',
|
||||
converter: webidl.converters.boolean,
|
||||
defaultValue: () => false
|
||||
},
|
||||
{
|
||||
key: 'dispatcher', // undici only
|
||||
converter: webidl.converters.any
|
||||
},
|
||||
{
|
||||
key: 'node', // undici only
|
||||
converter: webidl.dictionaryConverter([
|
||||
{
|
||||
key: 'reconnectionTime',
|
||||
converter: webidl.converters['unsigned long'],
|
||||
defaultValue: () => defaultReconnectionTime
|
||||
},
|
||||
{
|
||||
key: 'dispatcher',
|
||||
converter: webidl.converters.any
|
||||
}
|
||||
]),
|
||||
defaultValue: () => ({})
|
||||
}
|
||||
])
|
||||
|
||||
module.exports = {
|
||||
EventSource,
|
||||
defaultReconnectionTime
|
||||
}
|
||||
29
backend/node_modules/undici/lib/web/eventsource/util.js
generated
vendored
Normal file
29
backend/node_modules/undici/lib/web/eventsource/util.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Checks if the given value is a valid LastEventId.
|
||||
* @param {string} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isValidLastEventId (value) {
|
||||
// LastEventId should not contain U+0000 NULL
|
||||
return value.indexOf('\u0000') === -1
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the given value is a base 10 digit.
|
||||
* @param {string} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isASCIINumber (value) {
|
||||
if (value.length === 0) return false
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
if (value.charCodeAt(i) < 0x30 || value.charCodeAt(i) > 0x39) return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
isValidLastEventId,
|
||||
isASCIINumber
|
||||
}
|
||||
21
backend/node_modules/undici/lib/web/fetch/LICENSE
generated
vendored
Normal file
21
backend/node_modules/undici/lib/web/fetch/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2020 Ethan Arrowood
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
513
backend/node_modules/undici/lib/web/fetch/body.js
generated
vendored
Normal file
513
backend/node_modules/undici/lib/web/fetch/body.js
generated
vendored
Normal file
@@ -0,0 +1,513 @@
|
||||
'use strict'
|
||||
|
||||
const util = require('../../core/util')
|
||||
const {
|
||||
ReadableStreamFrom,
|
||||
readableStreamClose,
|
||||
fullyReadBody,
|
||||
extractMimeType
|
||||
} = require('./util')
|
||||
const { FormData, setFormDataState } = require('./formdata')
|
||||
const { webidl } = require('../webidl')
|
||||
const assert = require('node:assert')
|
||||
const { isErrored, isDisturbed } = require('node:stream')
|
||||
const { isUint8Array } = require('node:util/types')
|
||||
const { serializeAMimeType } = require('./data-url')
|
||||
const { multipartFormDataParser } = require('./formdata-parser')
|
||||
const { createDeferredPromise } = require('../../util/promise')
|
||||
const { parseJSONFromBytes } = require('../infra')
|
||||
const { utf8DecodeBytes } = require('../../encoding')
|
||||
const { runtimeFeatures } = require('../../util/runtime-features.js')
|
||||
|
||||
const random = runtimeFeatures.has('crypto')
|
||||
? require('node:crypto').randomInt
|
||||
: (max) => Math.floor(Math.random() * max)
|
||||
|
||||
const textEncoder = new TextEncoder()
|
||||
function noop () {}
|
||||
|
||||
const streamRegistry = new FinalizationRegistry((weakRef) => {
|
||||
const stream = weakRef.deref()
|
||||
if (stream && !stream.locked && !isDisturbed(stream) && !isErrored(stream)) {
|
||||
stream.cancel('Response object has been garbage collected').catch(noop)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* Extract a body with type from a byte sequence or BodyInit object
|
||||
*
|
||||
* @param {import('../../../types').BodyInit} object - The BodyInit object to extract from
|
||||
* @param {boolean} [keepalive=false] - If true, indicates that the body
|
||||
* @returns {[{stream: ReadableStream, source: any, length: number | null}, string | null]} - Returns a tuple containing the body and its type
|
||||
*
|
||||
* @see https://fetch.spec.whatwg.org/#concept-bodyinit-extract
|
||||
*/
|
||||
function extractBody (object, keepalive = false) {
|
||||
// 1. Let stream be null.
|
||||
let stream = null
|
||||
let controller = null
|
||||
|
||||
// 2. If object is a ReadableStream object, then set stream to object.
|
||||
if (webidl.is.ReadableStream(object)) {
|
||||
stream = object
|
||||
} else if (webidl.is.Blob(object)) {
|
||||
// 3. Otherwise, if object is a Blob object, set stream to the
|
||||
// result of running object’s get stream.
|
||||
stream = object.stream()
|
||||
} else {
|
||||
// 4. Otherwise, set stream to a new ReadableStream object, and set
|
||||
// up stream with byte reading support.
|
||||
stream = new ReadableStream({
|
||||
pull () {},
|
||||
start (c) {
|
||||
controller = c
|
||||
},
|
||||
cancel () {},
|
||||
type: 'bytes'
|
||||
})
|
||||
}
|
||||
|
||||
// 5. Assert: stream is a ReadableStream object.
|
||||
assert(webidl.is.ReadableStream(stream))
|
||||
|
||||
// 6. Let action be null.
|
||||
let action = null
|
||||
|
||||
// 7. Let source be null.
|
||||
let source = null
|
||||
|
||||
// 8. Let length be null.
|
||||
let length = null
|
||||
|
||||
// 9. Let type be null.
|
||||
let type = null
|
||||
|
||||
// 10. Switch on object:
|
||||
if (typeof object === 'string') {
|
||||
// Set source to the UTF-8 encoding of object.
|
||||
// Note: setting source to a Uint8Array here breaks some mocking assumptions.
|
||||
source = object
|
||||
|
||||
// Set type to `text/plain;charset=UTF-8`.
|
||||
type = 'text/plain;charset=UTF-8'
|
||||
} else if (webidl.is.URLSearchParams(object)) {
|
||||
// URLSearchParams
|
||||
|
||||
// spec says to run application/x-www-form-urlencoded on body.list
|
||||
// this is implemented in Node.js as apart of an URLSearchParams instance toString method
|
||||
// See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490
|
||||
// and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100
|
||||
|
||||
// Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list.
|
||||
source = object.toString()
|
||||
|
||||
// Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
|
||||
type = 'application/x-www-form-urlencoded;charset=UTF-8'
|
||||
} else if (webidl.is.BufferSource(object)) {
|
||||
// Set source to a copy of the bytes held by object.
|
||||
source = webidl.util.getCopyOfBytesHeldByBufferSource(object)
|
||||
} else if (webidl.is.FormData(object)) {
|
||||
const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}`
|
||||
const prefix = `--${boundary}\r\nContent-Disposition: form-data`
|
||||
|
||||
/*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
|
||||
const formdataEscape = (str) =>
|
||||
str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
|
||||
const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
|
||||
|
||||
// Set action to this step: run the multipart/form-data
|
||||
// encoding algorithm, with object’s entry list and UTF-8.
|
||||
// - This ensures that the body is immutable and can't be changed afterwords
|
||||
// - That the content-length is calculated in advance.
|
||||
// - And that all parts are pre-encoded and ready to be sent.
|
||||
|
||||
const blobParts = []
|
||||
const rn = new Uint8Array([13, 10]) // '\r\n'
|
||||
length = 0
|
||||
let hasUnknownSizeValue = false
|
||||
|
||||
for (const [name, value] of object) {
|
||||
if (typeof value === 'string') {
|
||||
const chunk = textEncoder.encode(prefix +
|
||||
`; name="${formdataEscape(normalizeLinefeeds(name))}"` +
|
||||
`\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
|
||||
blobParts.push(chunk)
|
||||
length += chunk.byteLength
|
||||
} else {
|
||||
const chunk = textEncoder.encode(`${prefix}; name="${formdataEscape(normalizeLinefeeds(name))}"` +
|
||||
(value.name ? `; filename="${formdataEscape(value.name)}"` : '') + '\r\n' +
|
||||
`Content-Type: ${
|
||||
value.type || 'application/octet-stream'
|
||||
}\r\n\r\n`)
|
||||
blobParts.push(chunk, value, rn)
|
||||
if (typeof value.size === 'number') {
|
||||
length += chunk.byteLength + value.size + rn.byteLength
|
||||
} else {
|
||||
hasUnknownSizeValue = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// CRLF is appended to the body to function with legacy servers and match other implementations.
|
||||
// https://github.com/curl/curl/blob/3434c6b46e682452973972e8313613dfa58cd690/lib/mime.c#L1029-L1030
|
||||
// https://github.com/form-data/form-data/issues/63
|
||||
const chunk = textEncoder.encode(`--${boundary}--\r\n`)
|
||||
blobParts.push(chunk)
|
||||
length += chunk.byteLength
|
||||
if (hasUnknownSizeValue) {
|
||||
length = null
|
||||
}
|
||||
|
||||
// Set source to object.
|
||||
source = object
|
||||
|
||||
action = async function * () {
|
||||
for (const part of blobParts) {
|
||||
if (part.stream) {
|
||||
yield * part.stream()
|
||||
} else {
|
||||
yield part
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set type to `multipart/form-data; boundary=`,
|
||||
// followed by the multipart/form-data boundary string generated
|
||||
// by the multipart/form-data encoding algorithm.
|
||||
type = `multipart/form-data; boundary=${boundary}`
|
||||
} else if (webidl.is.Blob(object)) {
|
||||
// Blob
|
||||
|
||||
// Set source to object.
|
||||
source = object
|
||||
|
||||
// Set length to object’s size.
|
||||
length = object.size
|
||||
|
||||
// If object’s type attribute is not the empty byte sequence, set
|
||||
// type to its value.
|
||||
if (object.type) {
|
||||
type = object.type
|
||||
}
|
||||
} else if (typeof object[Symbol.asyncIterator] === 'function') {
|
||||
// If keepalive is true, then throw a TypeError.
|
||||
if (keepalive) {
|
||||
throw new TypeError('keepalive')
|
||||
}
|
||||
|
||||
// If object is disturbed or locked, then throw a TypeError.
|
||||
if (util.isDisturbed(object) || object.locked) {
|
||||
throw new TypeError(
|
||||
'Response body object should not be disturbed or locked'
|
||||
)
|
||||
}
|
||||
|
||||
stream =
|
||||
webidl.is.ReadableStream(object) ? object : ReadableStreamFrom(object)
|
||||
}
|
||||
|
||||
// 11. If source is a byte sequence, then set action to a
|
||||
// step that returns source and length to source’s length.
|
||||
if (typeof source === 'string' || isUint8Array(source)) {
|
||||
action = () => {
|
||||
length = typeof source === 'string' ? Buffer.byteLength(source) : source.length
|
||||
return source
|
||||
}
|
||||
}
|
||||
|
||||
// 12. If action is non-null, then run these steps in parallel:
|
||||
if (action != null) {
|
||||
;(async () => {
|
||||
// 1. Run action.
|
||||
const result = action()
|
||||
|
||||
// 2. Whenever one or more bytes are available and stream is not errored,
|
||||
// enqueue the result of creating a Uint8Array from the available bytes into stream.
|
||||
const iterator = result?.[Symbol.asyncIterator]?.()
|
||||
if (iterator) {
|
||||
for await (const bytes of iterator) {
|
||||
if (isErrored(stream)) break
|
||||
if (bytes.length) {
|
||||
controller.enqueue(new Uint8Array(bytes))
|
||||
}
|
||||
}
|
||||
} else if (result?.length && !isErrored(stream)) {
|
||||
controller.enqueue(typeof result === 'string' ? textEncoder.encode(result) : new Uint8Array(result))
|
||||
}
|
||||
|
||||
// 3. When running action is done, close stream.
|
||||
queueMicrotask(() => readableStreamClose(controller))
|
||||
})()
|
||||
}
|
||||
|
||||
// 13. Let body be a body whose stream is stream, source is source,
|
||||
// and length is length.
|
||||
const body = { stream, source, length }
|
||||
|
||||
// 14. Return (body, type).
|
||||
return [body, type]
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {object} ExtractBodyResult
|
||||
* @property {ReadableStream<Uint8Array<ArrayBuffer>>} stream - The ReadableStream containing the body data
|
||||
* @property {any} source - The original source of the body data
|
||||
* @property {number | null} length - The length of the body data, or null
|
||||
*/
|
||||
|
||||
/**
|
||||
* Safely extract a body with type from a byte sequence or BodyInit object.
|
||||
*
|
||||
* @param {import('../../../types').BodyInit} object - The BodyInit object to extract from
|
||||
* @param {boolean} [keepalive=false] - If true, indicates that the body
|
||||
* @returns {[ExtractBodyResult, string | null]} - Returns a tuple containing the body and its type
|
||||
*
|
||||
* @see https://fetch.spec.whatwg.org/#bodyinit-safely-extract
|
||||
*/
|
||||
function safelyExtractBody (object, keepalive = false) {
|
||||
// To safely extract a body and a `Content-Type` value from
|
||||
// a byte sequence or BodyInit object object, run these steps:
|
||||
|
||||
// 1. If object is a ReadableStream object, then:
|
||||
if (webidl.is.ReadableStream(object)) {
|
||||
// Assert: object is neither disturbed nor locked.
|
||||
assert(!util.isDisturbed(object), 'The body has already been consumed.')
|
||||
assert(!object.locked, 'The stream is locked.')
|
||||
}
|
||||
|
||||
// 2. Return the results of extracting object.
|
||||
return extractBody(object, keepalive)
|
||||
}
|
||||
|
||||
function cloneBody (body) {
|
||||
// To clone a body body, run these steps:
|
||||
|
||||
// https://fetch.spec.whatwg.org/#concept-body-clone
|
||||
|
||||
// 1. Let « out1, out2 » be the result of teeing body’s stream.
|
||||
const { 0: out1, 1: out2 } = body.stream.tee()
|
||||
|
||||
// 2. Set body’s stream to out1.
|
||||
body.stream = out1
|
||||
|
||||
// 3. Return a body whose stream is out2 and other members are copied from body.
|
||||
return {
|
||||
stream: out2,
|
||||
length: body.length,
|
||||
source: body.source
|
||||
}
|
||||
}
|
||||
|
||||
function bodyMixinMethods (instance, getInternalState) {
|
||||
const methods = {
|
||||
blob () {
|
||||
// The blob() method steps are to return the result of
|
||||
// running consume body with this and the following step
|
||||
// given a byte sequence bytes: return a Blob whose
|
||||
// contents are bytes and whose type attribute is this’s
|
||||
// MIME type.
|
||||
return consumeBody(this, (bytes) => {
|
||||
let mimeType = bodyMimeType(getInternalState(this))
|
||||
|
||||
if (mimeType === null) {
|
||||
mimeType = ''
|
||||
} else if (mimeType) {
|
||||
mimeType = serializeAMimeType(mimeType)
|
||||
}
|
||||
|
||||
// Return a Blob whose contents are bytes and type attribute
|
||||
// is mimeType.
|
||||
return new Blob([bytes], { type: mimeType })
|
||||
}, instance, getInternalState)
|
||||
},
|
||||
|
||||
arrayBuffer () {
|
||||
// The arrayBuffer() method steps are to return the result
|
||||
// of running consume body with this and the following step
|
||||
// given a byte sequence bytes: return a new ArrayBuffer
|
||||
// whose contents are bytes.
|
||||
return consumeBody(this, (bytes) => {
|
||||
return new Uint8Array(bytes).buffer
|
||||
}, instance, getInternalState)
|
||||
},
|
||||
|
||||
text () {
|
||||
// The text() method steps are to return the result of running
|
||||
// consume body with this and UTF-8 decode.
|
||||
return consumeBody(this, utf8DecodeBytes, instance, getInternalState)
|
||||
},
|
||||
|
||||
json () {
|
||||
// The json() method steps are to return the result of running
|
||||
// consume body with this and parse JSON from bytes.
|
||||
return consumeBody(this, parseJSONFromBytes, instance, getInternalState)
|
||||
},
|
||||
|
||||
formData () {
|
||||
// The formData() method steps are to return the result of running
|
||||
// consume body with this and the following step given a byte sequence bytes:
|
||||
return consumeBody(this, (value) => {
|
||||
// 1. Let mimeType be the result of get the MIME type with this.
|
||||
const mimeType = bodyMimeType(getInternalState(this))
|
||||
|
||||
// 2. If mimeType is non-null, then switch on mimeType’s essence and run
|
||||
// the corresponding steps:
|
||||
if (mimeType !== null) {
|
||||
switch (mimeType.essence) {
|
||||
case 'multipart/form-data': {
|
||||
// 1. ... [long step]
|
||||
// 2. If that fails for some reason, then throw a TypeError.
|
||||
const parsed = multipartFormDataParser(value, mimeType)
|
||||
|
||||
// 3. Return a new FormData object, appending each entry,
|
||||
// resulting from the parsing operation, to its entry list.
|
||||
const fd = new FormData()
|
||||
setFormDataState(fd, parsed)
|
||||
|
||||
return fd
|
||||
}
|
||||
case 'application/x-www-form-urlencoded': {
|
||||
// 1. Let entries be the result of parsing bytes.
|
||||
const entries = new URLSearchParams(value.toString())
|
||||
|
||||
// 2. If entries is failure, then throw a TypeError.
|
||||
|
||||
// 3. Return a new FormData object whose entry list is entries.
|
||||
const fd = new FormData()
|
||||
|
||||
for (const [name, value] of entries) {
|
||||
fd.append(name, value)
|
||||
}
|
||||
|
||||
return fd
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Throw a TypeError.
|
||||
throw new TypeError(
|
||||
'Content-Type was not one of "multipart/form-data" or "application/x-www-form-urlencoded".'
|
||||
)
|
||||
}, instance, getInternalState)
|
||||
},
|
||||
|
||||
bytes () {
|
||||
// The bytes() method steps are to return the result of running consume body
|
||||
// with this and the following step given a byte sequence bytes: return the
|
||||
// result of creating a Uint8Array from bytes in this’s relevant realm.
|
||||
return consumeBody(this, (bytes) => {
|
||||
return new Uint8Array(bytes)
|
||||
}, instance, getInternalState)
|
||||
}
|
||||
}
|
||||
|
||||
return methods
|
||||
}
|
||||
|
||||
function mixinBody (prototype, getInternalState) {
|
||||
Object.assign(prototype.prototype, bodyMixinMethods(prototype, getInternalState))
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://fetch.spec.whatwg.org/#concept-body-consume-body
|
||||
* @param {any} object internal state
|
||||
* @param {(value: unknown) => unknown} convertBytesToJSValue
|
||||
* @param {any} instance
|
||||
* @param {(target: any) => any} getInternalState
|
||||
*/
|
||||
function consumeBody (object, convertBytesToJSValue, instance, getInternalState) {
|
||||
try {
|
||||
webidl.brandCheck(object, instance)
|
||||
} catch (e) {
|
||||
return Promise.reject(e)
|
||||
}
|
||||
|
||||
const state = getInternalState(object)
|
||||
|
||||
// 1. If object is unusable, then return a promise rejected
|
||||
// with a TypeError.
|
||||
if (bodyUnusable(state)) {
|
||||
return Promise.reject(new TypeError('Body is unusable: Body has already been read'))
|
||||
}
|
||||
|
||||
if (state.aborted) {
|
||||
return Promise.reject(new DOMException('The operation was aborted.', 'AbortError'))
|
||||
}
|
||||
|
||||
// 2. Let promise be a new promise.
|
||||
const promise = createDeferredPromise()
|
||||
|
||||
// 3. Let errorSteps given error be to reject promise with error.
|
||||
const errorSteps = promise.reject
|
||||
|
||||
// 4. Let successSteps given a byte sequence data be to resolve
|
||||
// promise with the result of running convertBytesToJSValue
|
||||
// with data. If that threw an exception, then run errorSteps
|
||||
// with that exception.
|
||||
const successSteps = (data) => {
|
||||
try {
|
||||
promise.resolve(convertBytesToJSValue(data))
|
||||
} catch (e) {
|
||||
errorSteps(e)
|
||||
}
|
||||
}
|
||||
|
||||
// 5. If object’s body is null, then run successSteps with an
|
||||
// empty byte sequence.
|
||||
if (state.body == null) {
|
||||
successSteps(Buffer.allocUnsafe(0))
|
||||
return promise.promise
|
||||
}
|
||||
|
||||
// 6. Otherwise, fully read object’s body given successSteps,
|
||||
// errorSteps, and object’s relevant global object.
|
||||
fullyReadBody(state.body, successSteps, errorSteps)
|
||||
|
||||
// 7. Return promise.
|
||||
return promise.promise
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://fetch.spec.whatwg.org/#body-unusable
|
||||
* @param {any} object internal state
|
||||
*/
|
||||
function bodyUnusable (object) {
|
||||
const body = object.body
|
||||
|
||||
// An object including the Body interface mixin is
|
||||
// said to be unusable if its body is non-null and
|
||||
// its body’s stream is disturbed or locked.
|
||||
return body != null && (body.stream.locked || util.isDisturbed(body.stream))
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://fetch.spec.whatwg.org/#concept-body-mime-type
|
||||
* @param {any} requestOrResponse internal state
|
||||
*/
|
||||
function bodyMimeType (requestOrResponse) {
|
||||
// 1. Let headers be null.
|
||||
// 2. If requestOrResponse is a Request object, then set headers to requestOrResponse’s request’s header list.
|
||||
// 3. Otherwise, set headers to requestOrResponse’s response’s header list.
|
||||
/** @type {import('./headers').HeadersList} */
|
||||
const headers = requestOrResponse.headersList
|
||||
|
||||
// 4. Let mimeType be the result of extracting a MIME type from headers.
|
||||
const mimeType = extractMimeType(headers)
|
||||
|
||||
// 5. If mimeType is failure, then return null.
|
||||
if (mimeType === 'failure') {
|
||||
return null
|
||||
}
|
||||
|
||||
// 6. Return mimeType.
|
||||
return mimeType
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
extractBody,
|
||||
safelyExtractBody,
|
||||
cloneBody,
|
||||
mixinBody,
|
||||
streamRegistry,
|
||||
bodyUnusable
|
||||
}
|
||||
// ==== file: backend/node_modules/undici/lib/web/fetch/constants.js (131 lines, generated, vendored, new file) ====
'use strict'
|
||||
|
||||
const corsSafeListedMethods = /** @type {const} */ (['GET', 'HEAD', 'POST'])
|
||||
const corsSafeListedMethodsSet = new Set(corsSafeListedMethods)
|
||||
|
||||
const nullBodyStatus = /** @type {const} */ ([101, 204, 205, 304])
|
||||
|
||||
const redirectStatus = /** @type {const} */ ([301, 302, 303, 307, 308])
|
||||
const redirectStatusSet = new Set(redirectStatus)
|
||||
|
||||
/**
|
||||
* @see https://fetch.spec.whatwg.org/#block-bad-port
|
||||
*/
|
||||
const badPorts = /** @type {const} */ ([
|
||||
'1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',
|
||||
'87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',
|
||||
'139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',
|
||||
'540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',
|
||||
'2049', '3659', '4045', '4190', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6679',
|
||||
'6697', '10080'
|
||||
])
|
||||
const badPortsSet = new Set(badPorts)
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-header
|
||||
*/
|
||||
const referrerPolicyTokens = /** @type {const} */ ([
|
||||
'no-referrer',
|
||||
'no-referrer-when-downgrade',
|
||||
'same-origin',
|
||||
'origin',
|
||||
'strict-origin',
|
||||
'origin-when-cross-origin',
|
||||
'strict-origin-when-cross-origin',
|
||||
'unsafe-url'
|
||||
])
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
|
||||
*/
|
||||
const referrerPolicy = /** @type {const} */ ([
|
||||
'',
|
||||
...referrerPolicyTokens
|
||||
])
|
||||
const referrerPolicyTokensSet = new Set(referrerPolicyTokens)
|
||||
|
||||
const requestRedirect = /** @type {const} */ (['follow', 'manual', 'error'])
|
||||
|
||||
const safeMethods = /** @type {const} */ (['GET', 'HEAD', 'OPTIONS', 'TRACE'])
|
||||
const safeMethodsSet = new Set(safeMethods)
|
||||
|
||||
const requestMode = /** @type {const} */ (['navigate', 'same-origin', 'no-cors', 'cors'])
|
||||
|
||||
const requestCredentials = /** @type {const} */ (['omit', 'same-origin', 'include'])
|
||||
|
||||
const requestCache = /** @type {const} */ ([
|
||||
'default',
|
||||
'no-store',
|
||||
'reload',
|
||||
'no-cache',
|
||||
'force-cache',
|
||||
'only-if-cached'
|
||||
])
|
||||
|
||||
/**
|
||||
* @see https://fetch.spec.whatwg.org/#request-body-header-name
|
||||
*/
|
||||
const requestBodyHeader = /** @type {const} */ ([
|
||||
'content-encoding',
|
||||
'content-language',
|
||||
'content-location',
|
||||
'content-type',
|
||||
// See https://github.com/nodejs/undici/issues/2021
|
||||
// 'Content-Length' is a forbidden header name, which is typically
|
||||
// removed in the Headers implementation. However, undici doesn't
|
||||
// filter out headers, so we add it here.
|
||||
'content-length'
|
||||
])
|
||||
|
||||
/**
|
||||
* @see https://fetch.spec.whatwg.org/#enumdef-requestduplex
|
||||
*/
|
||||
const requestDuplex = /** @type {const} */ ([
|
||||
'half'
|
||||
])
|
||||
|
||||
/**
|
||||
* @see http://fetch.spec.whatwg.org/#forbidden-method
|
||||
*/
|
||||
const forbiddenMethods = /** @type {const} */ (['CONNECT', 'TRACE', 'TRACK'])
|
||||
const forbiddenMethodsSet = new Set(forbiddenMethods)
|
||||
|
||||
const subresource = /** @type {const} */ ([
|
||||
'audio',
|
||||
'audioworklet',
|
||||
'font',
|
||||
'image',
|
||||
'manifest',
|
||||
'paintworklet',
|
||||
'script',
|
||||
'style',
|
||||
'track',
|
||||
'video',
|
||||
'xslt',
|
||||
''
|
||||
])
|
||||
const subresourceSet = new Set(subresource)
|
||||
|
||||
module.exports = {
|
||||
subresource,
|
||||
forbiddenMethods,
|
||||
requestBodyHeader,
|
||||
referrerPolicy,
|
||||
requestRedirect,
|
||||
requestMode,
|
||||
requestCredentials,
|
||||
requestCache,
|
||||
redirectStatus,
|
||||
corsSafeListedMethods,
|
||||
nullBodyStatus,
|
||||
safeMethods,
|
||||
badPorts,
|
||||
requestDuplex,
|
||||
subresourceSet,
|
||||
badPortsSet,
|
||||
redirectStatusSet,
|
||||
corsSafeListedMethodsSet,
|
||||
safeMethodsSet,
|
||||
forbiddenMethodsSet,
|
||||
referrerPolicyTokens: referrerPolicyTokensSet
|
||||
}
|
||||
// ==== file: backend/node_modules/undici/lib/web/fetch/data-url.js (596 lines, generated, vendored, new file) ====
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { forgivingBase64, collectASequenceOfCodePoints, collectASequenceOfCodePointsFast, isomorphicDecode, removeASCIIWhitespace, removeChars } = require('../infra')
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
|
||||
/**
|
||||
* @see https://mimesniff.spec.whatwg.org/#http-token-code-point
|
||||
*/
|
||||
const HTTP_TOKEN_CODEPOINTS = /^[-!#$%&'*+.^_|~A-Za-z0-9]+$/u
|
||||
const HTTP_WHITESPACE_REGEX = /[\u000A\u000D\u0009\u0020]/u // eslint-disable-line
|
||||
|
||||
/**
|
||||
* @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
|
||||
*/
|
||||
const HTTP_QUOTED_STRING_TOKENS = /^[\u0009\u0020-\u007E\u0080-\u00FF]+$/u // eslint-disable-line
|
||||
|
||||
// https://fetch.spec.whatwg.org/#data-url-processor
|
||||
/** @param {URL} dataURL */
|
||||
function dataURLProcessor (dataURL) {
|
||||
// 1. Assert: dataURL’s scheme is "data".
|
||||
assert(dataURL.protocol === 'data:')
|
||||
|
||||
// 2. Let input be the result of running the URL
|
||||
// serializer on dataURL with exclude fragment
|
||||
// set to true.
|
||||
let input = URLSerializer(dataURL, true)
|
||||
|
||||
// 3. Remove the leading "data:" string from input.
|
||||
input = input.slice(5)
|
||||
|
||||
// 4. Let position point at the start of input.
|
||||
const position = { position: 0 }
|
||||
|
||||
// 5. Let mimeType be the result of collecting a
|
||||
// sequence of code points that are not equal
|
||||
// to U+002C (,), given position.
|
||||
let mimeType = collectASequenceOfCodePointsFast(
|
||||
',',
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 6. Strip leading and trailing ASCII whitespace
|
||||
// from mimeType.
|
||||
// Undici implementation note: we need to store the
|
||||
// length because if the mimetype has spaces removed,
|
||||
// the wrong amount will be sliced from the input in
|
||||
// step #9
|
||||
const mimeTypeLength = mimeType.length
|
||||
mimeType = removeASCIIWhitespace(mimeType, true, true)
|
||||
|
||||
// 7. If position is past the end of input, then
|
||||
// return failure
|
||||
if (position.position >= input.length) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 8. Advance position by 1.
|
||||
position.position++
|
||||
|
||||
// 9. Let encodedBody be the remainder of input.
|
||||
const encodedBody = input.slice(mimeTypeLength + 1)
|
||||
|
||||
// 10. Let body be the percent-decoding of encodedBody.
|
||||
let body = stringPercentDecode(encodedBody)
|
||||
|
||||
// 11. If mimeType ends with U+003B (;), followed by
|
||||
// zero or more U+0020 SPACE, followed by an ASCII
|
||||
// case-insensitive match for "base64", then:
|
||||
if (/;(?:\u0020*)base64$/ui.test(mimeType)) {
|
||||
// 1. Let stringBody be the isomorphic decode of body.
|
||||
const stringBody = isomorphicDecode(body)
|
||||
|
||||
// 2. Set body to the forgiving-base64 decode of
|
||||
// stringBody.
|
||||
body = forgivingBase64(stringBody)
|
||||
|
||||
// 3. If body is failure, then return failure.
|
||||
if (body === 'failure') {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 4. Remove the last 6 code points from mimeType.
|
||||
mimeType = mimeType.slice(0, -6)
|
||||
|
||||
// 5. Remove trailing U+0020 SPACE code points from mimeType,
|
||||
// if any.
|
||||
mimeType = mimeType.replace(/(\u0020+)$/u, '')
|
||||
|
||||
// 6. Remove the last U+003B (;) code point from mimeType.
|
||||
mimeType = mimeType.slice(0, -1)
|
||||
}
|
||||
|
||||
// 12. If mimeType starts with U+003B (;), then prepend
|
||||
// "text/plain" to mimeType.
|
||||
if (mimeType.startsWith(';')) {
|
||||
mimeType = 'text/plain' + mimeType
|
||||
}
|
||||
|
||||
// 13. Let mimeTypeRecord be the result of parsing
|
||||
// mimeType.
|
||||
let mimeTypeRecord = parseMIMEType(mimeType)
|
||||
|
||||
// 14. If mimeTypeRecord is failure, then set
|
||||
// mimeTypeRecord to text/plain;charset=US-ASCII.
|
||||
if (mimeTypeRecord === 'failure') {
|
||||
mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII')
|
||||
}
|
||||
|
||||
// 15. Return a new data: URL struct whose MIME
|
||||
// type is mimeTypeRecord and body is body.
|
||||
// https://fetch.spec.whatwg.org/#data-url-struct
|
||||
return { mimeType: mimeTypeRecord, body }
|
||||
}
|
||||
|
||||
// https://url.spec.whatwg.org/#concept-url-serializer
|
||||
/**
|
||||
* @param {URL} url
|
||||
* @param {boolean} excludeFragment
|
||||
*/
|
||||
function URLSerializer (url, excludeFragment = false) {
|
||||
if (!excludeFragment) {
|
||||
return url.href
|
||||
}
|
||||
|
||||
const href = url.href
|
||||
const hashLength = url.hash.length
|
||||
|
||||
const serialized = hashLength === 0 ? href : href.substring(0, href.length - hashLength)
|
||||
|
||||
if (!hashLength && href.endsWith('#')) {
|
||||
return serialized.slice(0, -1)
|
||||
}
|
||||
|
||||
return serialized
|
||||
}
|
||||
|
||||
// https://url.spec.whatwg.org/#string-percent-decode
|
||||
/** @param {string} input */
|
||||
function stringPercentDecode (input) {
|
||||
// 1. Let bytes be the UTF-8 encoding of input.
|
||||
const bytes = encoder.encode(input)
|
||||
|
||||
// 2. Return the percent-decoding of bytes.
|
||||
return percentDecode(bytes)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} byte
|
||||
*/
|
||||
function isHexCharByte (byte) {
|
||||
// 0-9 A-F a-f
|
||||
return (byte >= 0x30 && byte <= 0x39) || (byte >= 0x41 && byte <= 0x46) || (byte >= 0x61 && byte <= 0x66)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} byte
|
||||
*/
|
||||
function hexByteToNumber (byte) {
|
||||
return (
|
||||
// 0-9
|
||||
byte >= 0x30 && byte <= 0x39
|
||||
? (byte - 48)
|
||||
// Convert to uppercase
|
||||
// ((byte & 0xDF) - 65) + 10
|
||||
: ((byte & 0xDF) - 55)
|
||||
)
|
||||
}
|
||||
|
||||
// https://url.spec.whatwg.org/#percent-decode
|
||||
/** @param {Uint8Array} input */
|
||||
function percentDecode (input) {
|
||||
const length = input.length
|
||||
// 1. Let output be an empty byte sequence.
|
||||
/** @type {Uint8Array} */
|
||||
const output = new Uint8Array(length)
|
||||
let j = 0
|
||||
let i = 0
|
||||
// 2. For each byte byte in input:
|
||||
while (i < length) {
|
||||
const byte = input[i]
|
||||
|
||||
// 1. If byte is not 0x25 (%), then append byte to output.
|
||||
if (byte !== 0x25) {
|
||||
output[j++] = byte
|
||||
|
||||
// 2. Otherwise, if byte is 0x25 (%) and the next two bytes
|
||||
// after byte in input are not in the ranges
|
||||
// 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F),
|
||||
// and 0x61 (a) to 0x66 (f), all inclusive, append byte
|
||||
// to output.
|
||||
} else if (
|
||||
byte === 0x25 &&
|
||||
!(isHexCharByte(input[i + 1]) && isHexCharByte(input[i + 2]))
|
||||
) {
|
||||
output[j++] = 0x25
|
||||
|
||||
// 3. Otherwise:
|
||||
} else {
|
||||
// 1. Let bytePoint be the two bytes after byte in input,
|
||||
// decoded, and then interpreted as hexadecimal number.
|
||||
// 2. Append a byte whose value is bytePoint to output.
|
||||
output[j++] = (hexByteToNumber(input[i + 1]) << 4) | hexByteToNumber(input[i + 2])
|
||||
|
||||
// 3. Skip the next two bytes in input.
|
||||
i += 2
|
||||
}
|
||||
++i
|
||||
}
|
||||
|
||||
// 3. Return output.
|
||||
return length === j ? output : output.subarray(0, j)
|
||||
}
|
||||
|
||||
// https://mimesniff.spec.whatwg.org/#parse-a-mime-type
|
||||
/** @param {string} input */
|
||||
function parseMIMEType (input) {
|
||||
// 1. Remove any leading and trailing HTTP whitespace
|
||||
// from input.
|
||||
input = removeHTTPWhitespace(input, true, true)
|
||||
|
||||
// 2. Let position be a position variable for input,
|
||||
// initially pointing at the start of input.
|
||||
const position = { position: 0 }
|
||||
|
||||
// 3. Let type be the result of collecting a sequence
|
||||
// of code points that are not U+002F (/) from
|
||||
// input, given position.
|
||||
const type = collectASequenceOfCodePointsFast(
|
||||
'/',
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 4. If type is the empty string or does not solely
|
||||
// contain HTTP token code points, then return failure.
|
||||
// https://mimesniff.spec.whatwg.org/#http-token-code-point
|
||||
if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 5. If position is past the end of input, then return
|
||||
// failure
|
||||
if (position.position >= input.length) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
// 6. Advance position by 1. (This skips past U+002F (/).)
|
||||
position.position++
|
||||
|
||||
// 7. Let subtype be the result of collecting a sequence of
|
||||
// code points that are not U+003B (;) from input, given
|
||||
// position.
|
||||
let subtype = collectASequenceOfCodePointsFast(
|
||||
';',
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 8. Remove any trailing HTTP whitespace from subtype.
|
||||
subtype = removeHTTPWhitespace(subtype, false, true)
|
||||
|
||||
// 9. If subtype is the empty string or does not solely
|
||||
// contain HTTP token code points, then return failure.
|
||||
if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
const typeLowercase = type.toLowerCase()
|
||||
const subtypeLowercase = subtype.toLowerCase()
|
||||
|
||||
// 10. Let mimeType be a new MIME type record whose type
|
||||
// is type, in ASCII lowercase, and subtype is subtype,
|
||||
// in ASCII lowercase.
|
||||
// https://mimesniff.spec.whatwg.org/#mime-type
|
||||
const mimeType = {
|
||||
type: typeLowercase,
|
||||
subtype: subtypeLowercase,
|
||||
/** @type {Map<string, string>} */
|
||||
parameters: new Map(),
|
||||
// https://mimesniff.spec.whatwg.org/#mime-type-essence
|
||||
essence: `${typeLowercase}/${subtypeLowercase}`
|
||||
}
|
||||
|
||||
// 11. While position is not past the end of input:
|
||||
while (position.position < input.length) {
|
||||
// 1. Advance position by 1. (This skips past U+003B (;).)
|
||||
position.position++
|
||||
|
||||
// 2. Collect a sequence of code points that are HTTP
|
||||
// whitespace from input given position.
|
||||
collectASequenceOfCodePoints(
|
||||
// https://fetch.spec.whatwg.org/#http-whitespace
|
||||
char => HTTP_WHITESPACE_REGEX.test(char),
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 3. Let parameterName be the result of collecting a
|
||||
// sequence of code points that are not U+003B (;)
|
||||
// or U+003D (=) from input, given position.
|
||||
let parameterName = collectASequenceOfCodePoints(
|
||||
(char) => char !== ';' && char !== '=',
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 4. Set parameterName to parameterName, in ASCII
|
||||
// lowercase.
|
||||
parameterName = parameterName.toLowerCase()
|
||||
|
||||
// 5. If position is not past the end of input, then:
|
||||
if (position.position < input.length) {
|
||||
// 1. If the code point at position within input is
|
||||
// U+003B (;), then continue.
|
||||
if (input[position.position] === ';') {
|
||||
continue
|
||||
}
|
||||
|
||||
// 2. Advance position by 1. (This skips past U+003D (=).)
|
||||
position.position++
|
||||
}
|
||||
|
||||
// 6. If position is past the end of input, then break.
|
||||
if (position.position >= input.length) {
|
||||
break
|
||||
}
|
||||
|
||||
// 7. Let parameterValue be null.
|
||||
let parameterValue = null
|
||||
|
||||
// 8. If the code point at position within input is
|
||||
// U+0022 ("), then:
|
||||
if (input[position.position] === '"') {
|
||||
// 1. Set parameterValue to the result of collecting
|
||||
// an HTTP quoted string from input, given position
|
||||
// and the extract-value flag.
|
||||
parameterValue = collectAnHTTPQuotedString(input, position, true)
|
||||
|
||||
// 2. Collect a sequence of code points that are not
|
||||
// U+003B (;) from input, given position.
|
||||
collectASequenceOfCodePointsFast(
|
||||
';',
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 9. Otherwise:
|
||||
} else {
|
||||
// 1. Set parameterValue to the result of collecting
|
||||
// a sequence of code points that are not U+003B (;)
|
||||
// from input, given position.
|
||||
parameterValue = collectASequenceOfCodePointsFast(
|
||||
';',
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 2. Remove any trailing HTTP whitespace from parameterValue.
|
||||
parameterValue = removeHTTPWhitespace(parameterValue, false, true)
|
||||
|
||||
// 3. If parameterValue is the empty string, then continue.
|
||||
if (parameterValue.length === 0) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// 10. If all of the following are true
|
||||
// - parameterName is not the empty string
|
||||
// - parameterName solely contains HTTP token code points
|
||||
// - parameterValue solely contains HTTP quoted-string token code points
|
||||
// - mimeType’s parameters[parameterName] does not exist
|
||||
// then set mimeType’s parameters[parameterName] to parameterValue.
|
||||
if (
|
||||
parameterName.length !== 0 &&
|
||||
HTTP_TOKEN_CODEPOINTS.test(parameterName) &&
|
||||
(parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&
|
||||
!mimeType.parameters.has(parameterName)
|
||||
) {
|
||||
mimeType.parameters.set(parameterName, parameterValue)
|
||||
}
|
||||
}
|
||||
|
||||
// 12. Return mimeType.
|
||||
return mimeType
|
||||
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string
|
||||
// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string
|
||||
/**
|
||||
* @param {string} input
|
||||
* @param {{ position: number }} position
|
||||
* @param {boolean} [extractValue=false]
|
||||
*/
|
||||
function collectAnHTTPQuotedString (input, position, extractValue = false) {
|
||||
// 1. Let positionStart be position.
|
||||
const positionStart = position.position
|
||||
|
||||
// 2. Let value be the empty string.
|
||||
let value = ''
|
||||
|
||||
// 3. Assert: the code point at position within input
|
||||
// is U+0022 (").
|
||||
assert(input[position.position] === '"')
|
||||
|
||||
// 4. Advance position by 1.
|
||||
position.position++
|
||||
|
||||
// 5. While true:
|
||||
while (true) {
|
||||
// 1. Append the result of collecting a sequence of code points
|
||||
// that are not U+0022 (") or U+005C (\) from input, given
|
||||
// position, to value.
|
||||
value += collectASequenceOfCodePoints(
|
||||
(char) => char !== '"' && char !== '\\',
|
||||
input,
|
||||
position
|
||||
)
|
||||
|
||||
// 2. If position is past the end of input, then break.
|
||||
if (position.position >= input.length) {
|
||||
break
|
||||
}
|
||||
|
||||
// 3. Let quoteOrBackslash be the code point at position within
|
||||
// input.
|
||||
const quoteOrBackslash = input[position.position]
|
||||
|
||||
// 4. Advance position by 1.
|
||||
position.position++
|
||||
|
||||
// 5. If quoteOrBackslash is U+005C (\), then:
|
||||
if (quoteOrBackslash === '\\') {
|
||||
// 1. If position is past the end of input, then append
|
||||
// U+005C (\) to value and break.
|
||||
if (position.position >= input.length) {
|
||||
value += '\\'
|
||||
break
|
||||
}
|
||||
|
||||
// 2. Append the code point at position within input to value.
|
||||
value += input[position.position]
|
||||
|
||||
// 3. Advance position by 1.
|
||||
position.position++
|
||||
|
||||
// 6. Otherwise:
|
||||
} else {
|
||||
// 1. Assert: quoteOrBackslash is U+0022 (").
|
||||
assert(quoteOrBackslash === '"')
|
||||
|
||||
// 2. Break.
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// 6. If the extract-value flag is set, then return value.
|
||||
if (extractValue) {
|
||||
return value
|
||||
}
|
||||
|
||||
// 7. Return the code points from positionStart to position,
|
||||
// inclusive, within input.
|
||||
return input.slice(positionStart, position.position)
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type
|
||||
*/
|
||||
function serializeAMimeType (mimeType) {
|
||||
assert(mimeType !== 'failure')
|
||||
const { parameters, essence } = mimeType
|
||||
|
||||
// 1. Let serialization be the concatenation of mimeType’s
|
||||
// type, U+002F (/), and mimeType’s subtype.
|
||||
let serialization = essence
|
||||
|
||||
// 2. For each name → value of mimeType’s parameters:
|
||||
for (let [name, value] of parameters.entries()) {
|
||||
// 1. Append U+003B (;) to serialization.
|
||||
serialization += ';'
|
||||
|
||||
// 2. Append name to serialization.
|
||||
serialization += name
|
||||
|
||||
// 3. Append U+003D (=) to serialization.
|
||||
serialization += '='
|
||||
|
||||
// 4. If value does not solely contain HTTP token code
|
||||
// points or value is the empty string, then:
|
||||
if (!HTTP_TOKEN_CODEPOINTS.test(value)) {
|
||||
// 1. Precede each occurrence of U+0022 (") or
|
||||
// U+005C (\) in value with U+005C (\).
|
||||
value = value.replace(/[\\"]/ug, '\\$&')
|
||||
|
||||
// 2. Prepend U+0022 (") to value.
|
||||
value = '"' + value
|
||||
|
||||
// 3. Append U+0022 (") to value.
|
||||
value += '"'
|
||||
}
|
||||
|
||||
// 5. Append value to serialization.
|
||||
serialization += value
|
||||
}
|
||||
|
||||
// 3. Return serialization.
|
||||
return serialization
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://fetch.spec.whatwg.org/#http-whitespace
|
||||
* @param {number} char
|
||||
*/
|
||||
function isHTTPWhiteSpace (char) {
|
||||
// "\r\n\t "
|
||||
return char === 0x00d || char === 0x00a || char === 0x009 || char === 0x020
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://fetch.spec.whatwg.org/#http-whitespace
|
||||
* @param {string} str
|
||||
* @param {boolean} [leading=true]
|
||||
* @param {boolean} [trailing=true]
|
||||
*/
|
||||
function removeHTTPWhitespace (str, leading = true, trailing = true) {
|
||||
return removeChars(str, leading, trailing, isHTTPWhiteSpace)
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://mimesniff.spec.whatwg.org/#minimize-a-supported-mime-type
|
||||
* @param {Exclude<ReturnType<typeof parseMIMEType>, 'failure'>} mimeType
|
||||
*/
|
||||
function minimizeSupportedMimeType (mimeType) {
|
||||
switch (mimeType.essence) {
|
||||
case 'application/ecmascript':
|
||||
case 'application/javascript':
|
||||
case 'application/x-ecmascript':
|
||||
case 'application/x-javascript':
|
||||
case 'text/ecmascript':
|
||||
case 'text/javascript':
|
||||
case 'text/javascript1.0':
|
||||
case 'text/javascript1.1':
|
||||
case 'text/javascript1.2':
|
||||
case 'text/javascript1.3':
|
||||
case 'text/javascript1.4':
|
||||
case 'text/javascript1.5':
|
||||
case 'text/jscript':
|
||||
case 'text/livescript':
|
||||
case 'text/x-ecmascript':
|
||||
case 'text/x-javascript':
|
||||
// 1. If mimeType is a JavaScript MIME type, then return "text/javascript".
|
||||
return 'text/javascript'
|
||||
case 'application/json':
|
||||
case 'text/json':
|
||||
// 2. If mimeType is a JSON MIME type, then return "application/json".
|
||||
return 'application/json'
|
||||
case 'image/svg+xml':
|
||||
// 3. If mimeType’s essence is "image/svg+xml", then return "image/svg+xml".
|
||||
return 'image/svg+xml'
|
||||
case 'text/xml':
|
||||
case 'application/xml':
|
||||
// 4. If mimeType is an XML MIME type, then return "application/xml".
|
||||
return 'application/xml'
|
||||
}
|
||||
|
||||
// 2. If mimeType is a JSON MIME type, then return "application/json".
|
||||
if (mimeType.subtype.endsWith('+json')) {
|
||||
return 'application/json'
|
||||
}
|
||||
|
||||
// 4. If mimeType is an XML MIME type, then return "application/xml".
|
||||
if (mimeType.subtype.endsWith('+xml')) {
|
||||
return 'application/xml'
|
||||
}
|
||||
|
||||
// 5. If mimeType is supported by the user agent, then return mimeType’s essence.
|
||||
// Technically, node doesn't support any mimetypes.
|
||||
|
||||
// 6. Return the empty string.
|
||||
return ''
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
dataURLProcessor,
|
||||
URLSerializer,
|
||||
stringPercentDecode,
|
||||
parseMIMEType,
|
||||
collectAnHTTPQuotedString,
|
||||
serializeAMimeType,
|
||||
removeHTTPWhitespace,
|
||||
minimizeSupportedMimeType,
|
||||
HTTP_TOKEN_CODEPOINTS
|
||||
}
|
||||
// ==== file: backend/node_modules/undici/lib/web/fetch/formdata-parser.js (575 lines, generated, vendored, new file) ====
'use strict'
|
||||
|
||||
const { bufferToLowerCasedHeaderName } = require('../../core/util')
|
||||
const { HTTP_TOKEN_CODEPOINTS } = require('./data-url')
|
||||
const { makeEntry } = require('./formdata')
|
||||
const { webidl } = require('../webidl')
|
||||
const assert = require('node:assert')
|
||||
const { isomorphicDecode } = require('../infra')
|
||||
const { utf8DecodeBytes } = require('../../encoding')
|
||||
|
||||
const dd = Buffer.from('--')
|
||||
const decoder = new TextDecoder()
|
||||
|
||||
/**
|
||||
* @param {string} chars
|
||||
*/
|
||||
function isAsciiString (chars) {
|
||||
for (let i = 0; i < chars.length; ++i) {
|
||||
if ((chars.charCodeAt(i) & ~0x7F) !== 0) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://andreubotella.github.io/multipart-form-data/#multipart-form-data-boundary
|
||||
* @param {string} boundary
|
||||
*/
|
||||
function validateBoundary (boundary) {
|
||||
const length = boundary.length
|
||||
|
||||
// - its length is greater or equal to 27 and lesser or equal to 70, and
|
||||
if (length < 27 || length > 70) {
|
||||
return false
|
||||
}
|
||||
|
||||
// - it is composed by bytes in the ranges 0x30 to 0x39, 0x41 to 0x5A, or
|
||||
// 0x61 to 0x7A, inclusive (ASCII alphanumeric), or which are 0x27 ('),
|
||||
// 0x2D (-) or 0x5F (_).
|
||||
for (let i = 0; i < length; ++i) {
|
||||
const cp = boundary.charCodeAt(i)
|
||||
|
||||
if (!(
|
||||
(cp >= 0x30 && cp <= 0x39) ||
|
||||
(cp >= 0x41 && cp <= 0x5a) ||
|
||||
(cp >= 0x61 && cp <= 0x7a) ||
|
||||
cp === 0x27 ||
|
||||
cp === 0x2d ||
|
||||
cp === 0x5f
|
||||
)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://andreubotella.github.io/multipart-form-data/#multipart-form-data-parser
|
||||
* @param {Buffer} input
|
||||
* @param {ReturnType<import('./data-url')['parseMIMEType']>} mimeType
|
||||
*/
|
||||
function multipartFormDataParser (input, mimeType) {
|
||||
// 1. Assert: mimeType’s essence is "multipart/form-data".
|
||||
assert(mimeType !== 'failure' && mimeType.essence === 'multipart/form-data')
|
||||
|
||||
const boundaryString = mimeType.parameters.get('boundary')
|
||||
|
||||
// 2. If mimeType’s parameters["boundary"] does not exist, return failure.
|
||||
// Otherwise, let boundary be the result of UTF-8 decoding mimeType’s
|
||||
// parameters["boundary"].
|
||||
if (boundaryString === undefined) {
|
||||
throw parsingError('missing boundary in content-type header')
|
||||
}
|
||||
|
||||
const boundary = Buffer.from(`--${boundaryString}`, 'utf8')
|
||||
|
||||
// 3. Let entry list be an empty entry list.
|
||||
const entryList = []
|
||||
|
||||
// 4. Let position be a pointer to a byte in input, initially pointing at
|
||||
// the first byte.
|
||||
const position = { position: 0 }
|
||||
|
||||
// Note: Per RFC 2046 Section 5.1.1, we must ignore anything before the
|
||||
// first boundary delimiter line (preamble). Search for the first boundary.
|
||||
const firstBoundaryIndex = input.indexOf(boundary)
|
||||
|
||||
if (firstBoundaryIndex === -1) {
|
||||
throw parsingError('no boundary found in multipart body')
|
||||
}
|
||||
|
||||
// Start parsing from the first boundary, ignoring any preamble
|
||||
position.position = firstBoundaryIndex
|
||||
|
||||
// 5. While true:
|
||||
while (true) {
|
||||
// 5.1. If position points to a sequence of bytes starting with 0x2D 0x2D
|
||||
// (`--`) followed by boundary, advance position by 2 + the length of
|
||||
// boundary. Otherwise, return failure.
|
||||
// Note: boundary is padded with 2 dashes already, no need to add 2.
|
||||
if (input.subarray(position.position, position.position + boundary.length).equals(boundary)) {
|
||||
position.position += boundary.length
|
||||
} else {
|
||||
throw parsingError('expected a value starting with -- and the boundary')
|
||||
}
|
||||
|
||||
// 5.2. If position points to the sequence of bytes 0x2D 0x2D 0x0D 0x0A
|
||||
// (`--` followed by CR LF) followed by the end of input, return entry list.
|
||||
// Note: Per RFC 2046 Section 5.1.1, we must ignore anything after the
|
||||
// final boundary delimiter (epilogue). Check for -- or --CRLF and return
|
||||
// regardless of what follows.
|
||||
if (bufferStartsWith(input, dd, position)) {
|
||||
// Found closing boundary delimiter (--), ignore any epilogue
|
||||
return entryList
|
||||
}
|
||||
|
||||
// 5.3. If position does not point to a sequence of bytes starting with 0x0D
|
||||
// 0x0A (CR LF), return failure.
|
||||
if (input[position.position] !== 0x0d || input[position.position + 1] !== 0x0a) {
|
||||
throw parsingError('expected CRLF')
|
||||
}
|
||||
|
||||
// 5.4. Advance position by 2. (This skips past the newline.)
|
||||
position.position += 2
|
||||
|
||||
// 5.5. Let name, filename and contentType be the result of parsing
|
||||
// multipart/form-data headers on input and position, if the result
|
||||
// is not failure. Otherwise, return failure.
|
||||
const result = parseMultipartFormDataHeaders(input, position)
|
||||
|
||||
let { name, filename, contentType, encoding } = result
|
||||
|
||||
// 5.6. Advance position by 2. (This skips past the empty line that marks
|
||||
// the end of the headers.)
|
||||
position.position += 2
|
||||
|
||||
// 5.7. Let body be the empty byte sequence.
|
||||
let body
|
||||
|
||||
// 5.8. Body loop: While position is not past the end of input:
|
||||
// TODO: the steps here are completely wrong
|
||||
{
|
||||
const boundaryIndex = input.indexOf(boundary.subarray(2), position.position)
|
||||
|
||||
if (boundaryIndex === -1) {
|
||||
throw parsingError('expected boundary after body')
|
||||
}
|
||||
|
||||
body = input.subarray(position.position, boundaryIndex - 4)
|
||||
|
||||
position.position += body.length
|
||||
|
||||
// Note: position must be advanced by the body's length before being
|
||||
// decoded, otherwise the parsing will fail.
|
||||
if (encoding === 'base64') {
|
||||
body = Buffer.from(body.toString(), 'base64')
|
||||
}
|
||||
}
|
||||
|
||||
// 5.9. If position does not point to a sequence of bytes starting with
|
||||
// 0x0D 0x0A (CR LF), return failure. Otherwise, advance position by 2.
|
||||
if (input[position.position] !== 0x0d || input[position.position + 1] !== 0x0a) {
|
||||
throw parsingError('expected CRLF')
|
||||
} else {
|
||||
position.position += 2
|
||||
}
|
||||
|
||||
// 5.10. If filename is not null:
|
||||
let value
|
||||
|
||||
if (filename !== null) {
|
||||
// 5.10.1. If contentType is null, set contentType to "text/plain".
|
||||
contentType ??= 'text/plain'
|
||||
|
||||
// 5.10.2. If contentType is not an ASCII string, set contentType to the empty string.
|
||||
|
||||
// Note: `buffer.isAscii` can be used at zero-cost, but converting a string to a buffer is a high overhead.
|
||||
// Content-Type is a relatively small string, so it is faster to use `String#charCodeAt`.
|
||||
if (!isAsciiString(contentType)) {
|
||||
contentType = ''
|
||||
}
|
||||
|
||||
// 5.10.3. Let value be a new File object with name filename, type contentType, and body body.
|
||||
value = new File([body], filename, { type: contentType })
|
||||
} else {
|
||||
// 5.11. Otherwise:
|
||||
|
||||
// 5.11.1. Let value be the UTF-8 decoding without BOM of body.
|
||||
value = utf8DecodeBytes(Buffer.from(body))
|
||||
}
|
||||
|
||||
// 5.12. Assert: name is a scalar value string and value is either a scalar value string or a File object.
|
||||
assert(webidl.is.USVString(name))
|
||||
assert((typeof value === 'string' && webidl.is.USVString(value)) || webidl.is.File(value))
|
||||
|
||||
// 5.13. Create an entry with name and value, and append it to entry list.
|
||||
entryList.push(makeEntry(name, value, filename))
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Parses one content-disposition attribute (e.g. `name="value"`,
 * `filename*=utf-8''encoded`) starting at `position`, advancing `position`
 * past the attribute. Supports three value forms: RFC 5987 extended
 * (`attr*=utf-8''pct-encoded`), quoted-string, and bare token.
 * @param {Buffer} input
 * @param {{ position: number }} position
 * @returns {{ name: string, value: string }|null} null when no attribute
 *   name or no `=` is found at position (caller treats this as "stop").
 * @throws {TypeError} via parsingError() on a non-utf-8 extended value or
 *   an unterminated quoted string.
 */
function parseContentDispositionAttribute (input, position) {
  // Skip leading semicolon and whitespace
  if (input[position.position] === 0x3b /* ; */) {
    position.position++
  }

  // Skip whitespace
  collectASequenceOfBytes(
    (char) => char === 0x20 || char === 0x09,
    input,
    position
  )

  // Collect attribute name (token characters, excluding the `=` and `*`
  // delimiters that end the name)
  const attributeName = collectASequenceOfBytes(
    (char) => isToken(char) && char !== 0x3d && char !== 0x2a, // not = or *
    input,
    position
  )

  if (attributeName.length === 0) {
    return null
  }

  // Attribute names are matched case-insensitively ('name', 'filename').
  const attrNameStr = attributeName.toString('ascii').toLowerCase()

  // Check for extended notation (attribute*)
  const isExtended = input[position.position] === 0x2a /* * */
  if (isExtended) {
    position.position++ // skip *
  }

  // Expect = sign
  if (input[position.position] !== 0x3d /* = */) {
    return null
  }
  position.position++ // skip =

  // Skip whitespace
  collectASequenceOfBytes(
    (char) => char === 0x20 || char === 0x09,
    input,
    position
  )

  let value

  if (isExtended) {
    // Extended attribute format: charset'language'encoded-value
    const headerValue = collectASequenceOfBytes(
      (char) => char !== 0x20 && char !== 0x0d && char !== 0x0a && char !== 0x3b, // not space, CRLF, or ;
      input,
      position
    )

    // Check for utf-8'' prefix (case insensitive)
    // NOTE(review): only bytes 0-4 ("utf-8") are validated; bytes 5-6 are
    // assumed to be the two single quotes and skipped by subarray(7) below,
    // so a non-empty language tag would be mis-sliced — confirm upstream.
    if (
      (headerValue[0] !== 0x75 && headerValue[0] !== 0x55) || // u or U
      (headerValue[1] !== 0x74 && headerValue[1] !== 0x54) || // t or T
      (headerValue[2] !== 0x66 && headerValue[2] !== 0x46) || // f or F
      headerValue[3] !== 0x2d || // -
      headerValue[4] !== 0x38 // 8
    ) {
      throw parsingError('unknown encoding, expected utf-8\'\'')
    }

    // Skip utf-8'' and percent-decode the rest
    value = decodeURIComponent(decoder.decode(headerValue.subarray(7)))
  } else if (input[position.position] === 0x22 /* " */) {
    // Quoted string
    position.position++ // skip opening quote

    const quotedValue = collectASequenceOfBytes(
      (char) => char !== 0x0a && char !== 0x0d && char !== 0x22, // not LF, CR, or "
      input,
      position
    )

    if (input[position.position] !== 0x22) {
      throw parsingError('Closing quote not found')
    }
    position.position++ // skip closing quote

    // Browsers percent-encode LF/CR/quote inside quoted values; undo that.
    value = decoder.decode(quotedValue)
      .replace(/%0A/ig, '\n')
      .replace(/%0D/ig, '\r')
      .replace(/%22/g, '"')
  } else {
    // Token value (no quotes)
    const tokenValue = collectASequenceOfBytes(
      (char) => isToken(char) && char !== 0x3b, // not ;
      input,
      position
    )

    value = decoder.decode(tokenValue)
  }

  return { name: attrNameStr, value }
}
|
||||
|
||||
/**
 * @see https://andreubotella.github.io/multipart-form-data/#parse-multipart-form-data-headers
 * Parses the header section of a single multipart/form-data part, advancing
 * `position` past each `name: value` line. Recognizes content-disposition
 * (required; must carry a `name` attribute), content-type and
 * content-transfer-encoding; any other header is read and discarded.
 * @param {Buffer} input
 * @param {{ position: number }} position
 * @returns {{ name: string, filename: ?string, contentType: ?string, encoding: ?string }}
 * @throws {TypeError} via parsingError() on malformed headers
 */
function parseMultipartFormDataHeaders (input, position) {
  // 1. Let name, filename and contentType be null.
  let name = null
  let filename = null
  let contentType = null
  let encoding = null

  // 2. While true:
  while (true) {
    // 2.1. If position points to a sequence of bytes starting with 0x0D 0x0A (CR LF):
    if (input[position.position] === 0x0d && input[position.position + 1] === 0x0a) {
      // 2.1.1. If name is null, return failure.
      if (name === null) {
        throw parsingError('header name is null')
      }

      // 2.1.2. Return name, filename and contentType.
      return { name, filename, contentType, encoding }
    }

    // 2.2. Let header name be the result of collecting a sequence of bytes that are
    // not 0x0A (LF), 0x0D (CR) or 0x3A (:), given position.
    let headerName = collectASequenceOfBytes(
      (char) => char !== 0x0a && char !== 0x0d && char !== 0x3a,
      input,
      position
    )

    // 2.3. Remove any HTTP tab or space bytes from the start or end of header name.
    headerName = removeChars(headerName, true, true, (char) => char === 0x9 || char === 0x20)

    // 2.4. If header name does not match the field-name token production, return failure.
    if (!HTTP_TOKEN_CODEPOINTS.test(headerName.toString())) {
      throw parsingError('header name does not match the field-name token production')
    }

    // 2.5. If the byte at position is not 0x3A (:), return failure.
    if (input[position.position] !== 0x3a) {
      throw parsingError('expected :')
    }

    // 2.6. Advance position by 1.
    position.position++

    // 2.7. Collect a sequence of bytes that are HTTP tab or space bytes given position.
    // (Do nothing with those bytes.)
    collectASequenceOfBytes(
      (char) => char === 0x20 || char === 0x09,
      input,
      position
    )

    // 2.8. Byte-lowercase header name and switch on the result:
    switch (bufferToLowerCasedHeaderName(headerName)) {
      case 'content-disposition': {
        name = filename = null

        // Collect the disposition type (should be "form-data")
        const dispositionType = collectASequenceOfBytes(
          (char) => isToken(char),
          input,
          position
        )

        if (dispositionType.toString('ascii').toLowerCase() !== 'form-data') {
          throw parsingError('expected form-data for content-disposition header')
        }

        // Parse attributes until the terminating CRLF.
        // Fix: stop only when position points at CR *followed by* LF. The
        // previous `&&` between the negated byte checks exited the loop
        // whenever either byte matched on its own (e.g. a stray LF one
        // byte ahead), silently truncating the attribute list.
        while (
          position.position < input.length &&
          !(input[position.position] === 0x0d &&
            input[position.position + 1] === 0x0a)
        ) {
          const attribute = parseContentDispositionAttribute(input, position)

          if (!attribute) {
            break
          }

          if (attribute.name === 'name') {
            name = attribute.value
          } else if (attribute.name === 'filename') {
            filename = attribute.value
          }
        }

        if (name === null) {
          throw parsingError('name attribute is required in content-disposition header')
        }

        break
      }
      case 'content-type': {
        // 1. Let header value be the result of collecting a sequence of bytes that are
        // not 0x0A (LF) or 0x0D (CR), given position.
        let headerValue = collectASequenceOfBytes(
          (char) => char !== 0x0a && char !== 0x0d,
          input,
          position
        )

        // 2. Remove any HTTP tab or space bytes from the end of header value.
        headerValue = removeChars(headerValue, false, true, (char) => char === 0x9 || char === 0x20)

        // 3. Set contentType to the isomorphic decoding of header value.
        contentType = isomorphicDecode(headerValue)

        break
      }
      case 'content-transfer-encoding': {
        let headerValue = collectASequenceOfBytes(
          (char) => char !== 0x0a && char !== 0x0d,
          input,
          position
        )

        headerValue = removeChars(headerValue, false, true, (char) => char === 0x9 || char === 0x20)

        encoding = isomorphicDecode(headerValue)

        break
      }
      default: {
        // Collect a sequence of bytes that are not 0x0A (LF) or 0x0D (CR), given position.
        // (Do nothing with those bytes.)
        collectASequenceOfBytes(
          (char) => char !== 0x0a && char !== 0x0d,
          input,
          position
        )
      }
    }

    // 2.9. If position does not point to a sequence of bytes starting with 0x0D 0x0A
    // (CR LF), return failure. Otherwise, advance position by 2 (past the newline).
    // Fix: require *both* CR and LF. The previous `&&` only failed when both
    // bytes mismatched, so a bare CR or a stray LF was accepted as a header
    // terminator — matching the `||` form used by the CRLF checks elsewhere
    // in this file.
    if (input[position.position] !== 0x0d || input[position.position + 1] !== 0x0a) {
      throw parsingError('expected CRLF')
    }
    position.position += 2
  }
}
|
||||
|
||||
/**
 * Collects the longest run of bytes at `position` for which `condition`
 * holds, advancing `position` past the run.
 * @param {(char: number) => boolean} condition
 * @param {Buffer} input
 * @param {{ position: number }} position
 * @returns {Buffer} a subarray view over the matched bytes (may be empty)
 */
function collectASequenceOfBytes (condition, input, position) {
  const begin = position.position
  let end = begin

  while (end < input.length && condition(input[end])) {
    end++
  }

  // Advance the shared cursor, then hand back a zero-copy view.
  position.position = end
  return input.subarray(begin, end)
}
|
||||
|
||||
/**
 * Strips bytes matching `predicate` from the leading and/or trailing end of
 * `buf`, returning `buf` itself when nothing was stripped.
 * @param {Buffer} buf
 * @param {boolean} leading
 * @param {boolean} trailing
 * @param {(charCode: number) => boolean} predicate
 * @returns {Buffer}
 */
function removeChars (buf, leading, trailing, predicate) {
  let first = 0
  let last = buf.length - 1

  if (leading) {
    while (first < buf.length && predicate(buf[first])) {
      ++first
    }
  }

  if (trailing) {
    // Note: stops at index 0 — byte 0 is only ever removed by the leading pass.
    while (last > 0 && predicate(buf[last])) {
      --last
    }
  }

  // Avoid allocating a view when nothing was stripped.
  if (first === 0 && last === buf.length - 1) {
    return buf
  }
  return buf.subarray(first, last + 1)
}
|
||||
|
||||
/**
 * Checks whether {@param buffer}, read from {@param position}, starts with
 * the bytes of {@param start}.
 * @param {Buffer} buffer
 * @param {Buffer} start
 * @param {{ position: number }} position
 * @returns {boolean}
 */
function bufferStartsWith (buffer, start, position) {
  // A buffer shorter than `start` can never match.
  // NOTE(review): this check ignores position.position; reads past the end
  // below yield undefined and compare unequal, so the result is still false.
  if (buffer.length < start.length) {
    return false
  }

  let i = 0
  while (i < start.length) {
    if (buffer[position.position + i] !== start[i]) {
      return false
    }
    ++i
  }

  return true
}
|
||||
|
||||
/**
 * Builds the TypeError thrown for any FormData parse failure, attaching the
 * specific reason as the error's `cause`.
 * @param {string} cause - human-readable description of what went wrong
 * @returns {TypeError}
 */
function parsingError (cause) {
  const reason = new TypeError(cause)
  return new TypeError('Failed to parse body as FormData.', { cause: reason })
}
|
||||
|
||||
/**
 * CTL = <any US-ASCII control character
 *        (octets 0 - 31) and DEL (127)>
 * @param {number} char
 * @returns {boolean}
 */
function isCTL (char) {
  if (char === 0x7f) { // DEL
    return true
  }
  return char <= 0x1f
}
|
||||
|
||||
/**
 * tspecials := "(" / ")" / "<" / ">" / "@" /
 *              "," / ";" / ":" / "\" / <">
 *              "/" / "[" / "]" / "?" / "="
 *              ; Must be in quoted-string,
 *              ; to use within parameter values
 * @param {number} char
 * @returns {boolean}
 */
function isTSpecial (char) {
  return (
    char === 0x28 || // (
    char === 0x29 || // )
    char === 0x3c || // <
    char === 0x3e || // >
    char === 0x40 || // @
    char === 0x2c || // ,
    char === 0x3b || // ;
    char === 0x3a || // :
    char === 0x5c || // \
    char === 0x22 || // "
    char === 0x2f || // /
    char === 0x5b || // [
    char === 0x5d || // ]
    char === 0x3f || // ?
    char === 0x3d // =  (fixed: comment previously said "+", but 0x3d is "=")
  )
}
|
||||
|
||||
/**
 * token := 1*<any (US-ASCII) CHAR except SPACE, CTLs,
 *             or tspecials>
 * @param {number} char
 * @returns {boolean}
 */
function isToken (char) {
  if (char > 0x7f) { // non-ASCII
    return false
  }
  if (char === 0x20 || char === 0x09) { // space / tab
    return false
  }
  if (isCTL(char)) {
    return false
  }
  return !isTSpecial(char)
}
|
||||
|
||||
// Public surface of the multipart/form-data parser module; the internal
// helpers above stay private.
module.exports = {
  multipartFormDataParser,
  validateBoundary
}
|
||||
259
backend/node_modules/undici/lib/web/fetch/formdata.js
generated
vendored
Normal file
259
backend/node_modules/undici/lib/web/fetch/formdata.js
generated
vendored
Normal file
@@ -0,0 +1,259 @@
|
||||
'use strict'
|
||||
|
||||
const { iteratorMixin } = require('./util')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
const { webidl } = require('../webidl')
|
||||
const nodeUtil = require('node:util')
|
||||
|
||||
// https://xhr.spec.whatwg.org/#formdata
/**
 * Spec-compliant FormData implementation. The entry list is held in the
 * `#state` private field as an ordered array of `{ name, value }` records;
 * module-internal access to it goes through the static accessors extracted
 * (and then deleted from the class) below the class body.
 */
class FormData {
  // Ordered entry list: Array<{ name: string, value: string|File }>.
  #state = []

  constructor (form = undefined) {
    webidl.util.markAsUncloneable(this)

    // The spec constructor takes an optional <form> element; there is no DOM
    // here, so any argument is rejected.
    if (form !== undefined) {
      throw webidl.errors.conversionFailed({
        prefix: 'FormData constructor',
        argument: 'Argument 1',
        types: ['undefined']
      })
    }
  }

  append (name, value, filename = undefined) {
    webidl.brandCheck(this, FormData)

    const prefix = 'FormData.append'
    webidl.argumentLengthCheck(arguments, 2, prefix)

    name = webidl.converters.USVString(name)

    // Overload selection: a third argument (or a Blob value) selects the
    // (name, blobValue, filename) signature.
    if (arguments.length === 3 || webidl.is.Blob(value)) {
      value = webidl.converters.Blob(value, prefix, 'value')

      if (filename !== undefined) {
        filename = webidl.converters.USVString(filename)
      }
    } else {
      value = webidl.converters.USVString(value)
    }

    // 1. Let value be value if given; otherwise blobValue.

    // 2. Let entry be the result of creating an entry with
    // name, value, and filename if given.
    const entry = makeEntry(name, value, filename)

    // 3. Append entry to this’s entry list.
    this.#state.push(entry)
  }

  delete (name) {
    webidl.brandCheck(this, FormData)

    const prefix = 'FormData.delete'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    name = webidl.converters.USVString(name)

    // The delete(name) method steps are to remove all entries whose name
    // is name from this’s entry list.
    this.#state = this.#state.filter(entry => entry.name !== name)
  }

  get (name) {
    webidl.brandCheck(this, FormData)

    const prefix = 'FormData.get'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    name = webidl.converters.USVString(name)

    // 1. If there is no entry whose name is name in this’s entry list,
    // then return null.
    const idx = this.#state.findIndex((entry) => entry.name === name)
    if (idx === -1) {
      return null
    }

    // 2. Return the value of the first entry whose name is name from
    // this’s entry list.
    return this.#state[idx].value
  }

  getAll (name) {
    webidl.brandCheck(this, FormData)

    const prefix = 'FormData.getAll'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    name = webidl.converters.USVString(name)

    // 1. If there is no entry whose name is name in this’s entry list,
    // then return the empty list.
    // 2. Return the values of all entries whose name is name, in order,
    // from this’s entry list.
    return this.#state
      .filter((entry) => entry.name === name)
      .map((entry) => entry.value)
  }

  has (name) {
    webidl.brandCheck(this, FormData)

    const prefix = 'FormData.has'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    name = webidl.converters.USVString(name)

    // The has(name) method steps are to return true if there is an entry
    // whose name is name in this’s entry list; otherwise false.
    return this.#state.findIndex((entry) => entry.name === name) !== -1
  }

  set (name, value, filename = undefined) {
    webidl.brandCheck(this, FormData)

    const prefix = 'FormData.set'
    webidl.argumentLengthCheck(arguments, 2, prefix)

    name = webidl.converters.USVString(name)

    // Overload selection, as in append().
    if (arguments.length === 3 || webidl.is.Blob(value)) {
      value = webidl.converters.Blob(value, prefix, 'value')

      if (filename !== undefined) {
        filename = webidl.converters.USVString(filename)
      }
    } else {
      value = webidl.converters.USVString(value)
    }

    // The set(name, value) and set(name, blobValue, filename) method steps
    // are:

    // 1. Let value be value if given; otherwise blobValue.

    // 2. Let entry be the result of creating an entry with name, value, and
    // filename if given.
    const entry = makeEntry(name, value, filename)

    // 3. If there are entries in this’s entry list whose name is name, then
    // replace the first such entry with entry and remove the others.
    const idx = this.#state.findIndex((entry) => entry.name === name)
    if (idx !== -1) {
      this.#state = [
        ...this.#state.slice(0, idx),
        entry,
        ...this.#state.slice(idx + 1).filter((entry) => entry.name !== name)
      ]
    } else {
      // 4. Otherwise, append entry to this’s entry list.
      this.#state.push(entry)
    }
  }

  // Pretty-prints entries for console.log / util.inspect; repeated names
  // are collapsed into arrays.
  [nodeUtil.inspect.custom] (depth, options) {
    const state = this.#state.reduce((a, b) => {
      if (a[b.name]) {
        if (Array.isArray(a[b.name])) {
          a[b.name].push(b.value)
        } else {
          a[b.name] = [a[b.name], b.value]
        }
      } else {
        a[b.name] = b.value
      }

      return a
    }, { __proto__: null })

    options.depth ??= depth
    options.colors ??= true

    const output = nodeUtil.formatWithOptions(options, state)

    // remove [Object null prototype]
    return `FormData ${output.slice(output.indexOf(']') + 2)}`
  }

  /**
   * Module-internal accessor for the private entry list; extracted and then
   * deleted from the class after the declaration so it never leaks.
   * @param {FormData} formData
   */
  static getFormDataState (formData) {
    return formData.#state
  }

  /**
   * Module-internal setter for the private entry list (same extraction
   * trick as getFormDataState).
   * @param {FormData} formData
   * @param {any[]} newState
   */
  static setFormDataState (formData, newState) {
    formData.#state = newState
  }
}
|
||||
|
||||
// Capture the private-state accessors, then delete them from the class so
// #state stays inaccessible to consumers while remaining usable here.
const { getFormDataState, setFormDataState } = FormData
Reflect.deleteProperty(FormData, 'getFormDataState')
Reflect.deleteProperty(FormData, 'setFormDataState')

// Install entries()/keys()/values()/forEach()/[Symbol.iterator] backed by
// the entry list, exposing each entry as its 'name'/'value' pair.
iteratorMixin('FormData', FormData, getFormDataState, 'name', 'value')

// Per WebIDL, interface methods are enumerable; also brand the prototype
// with a configurable Symbol.toStringTag.
Object.defineProperties(FormData.prototype, {
  append: kEnumerableProperty,
  delete: kEnumerableProperty,
  get: kEnumerableProperty,
  getAll: kEnumerableProperty,
  has: kEnumerableProperty,
  set: kEnumerableProperty,
  [Symbol.toStringTag]: {
    value: 'FormData',
    configurable: true
  }
})
||||
|
||||
/**
 * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry
 * @param {string} name - already converted to a scalar value string by webidl
 * @param {string|Blob} value
 * @param {?string} filename
 * @returns {{ name: string, value: string|File }}
 */
function makeEntry (name, value, filename) {
  // 1-2. Scalar-value conversion of name and string values was already
  // performed by the USVString webidl converter, so strings pass through.
  if (typeof value !== 'string') {
    // 3.1. Wrap a non-File blob in a File named "blob" over the same bytes.
    if (!webidl.is.File(value)) {
      value = new File([value], 'blob', { type: value.type })
    }

    // 3.2. An explicit filename re-wraps the File under the new name,
    // preserving its type and lastModified.
    if (filename !== undefined) {
      /** @type {FilePropertyBag} */
      const options = {
        type: value.type,
        lastModified: value.lastModified
      }

      value = new File([value], filename, options)
    }
  }

  // 4. Return an entry whose name is name and whose value is value.
  return { name, value }
}
|
||||
|
||||
// Register the brand-check predicate used across the fetch implementation.
webidl.is.FormData = webidl.util.MakeTypeAssertion(FormData)

module.exports = { FormData, makeEntry, setFormDataState }
|
||||
40
backend/node_modules/undici/lib/web/fetch/global.js
generated
vendored
Normal file
40
backend/node_modules/undici/lib/web/fetch/global.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
'use strict'
|
||||
|
||||
// In case of breaking changes, increase the version
// number to avoid conflicts.
const globalOrigin = Symbol.for('undici.globalOrigin.1')

/**
 * @returns {URL|undefined} the currently configured global origin, if any
 */
function getGlobalOrigin () {
  return globalThis[globalOrigin]
}

/**
 * Sets (or clears, when called with undefined) the process-wide origin used
 * by the fetch implementation. Only http: and https: origins are accepted.
 * @param {string|URL|undefined} newOrigin
 * @throws {TypeError} when the origin uses any other protocol
 */
function setGlobalOrigin (newOrigin) {
  let value

  if (newOrigin !== undefined) {
    value = new URL(newOrigin)

    if (value.protocol !== 'http:' && value.protocol !== 'https:') {
      throw new TypeError(`Only http & https urls are allowed, received ${value.protocol}`)
    }
  }

  // The property stays non-enumerable and non-configurable but writable, so
  // repeated calls can keep redefining its value.
  Object.defineProperty(globalThis, globalOrigin, {
    value,
    writable: true,
    enumerable: false,
    configurable: false
  })
}
|
||||
|
||||
// Public API: read/replace the process-wide fetch origin.
module.exports = {
  getGlobalOrigin,
  setGlobalOrigin
}
|
||||
719
backend/node_modules/undici/lib/web/fetch/headers.js
generated
vendored
Normal file
719
backend/node_modules/undici/lib/web/fetch/headers.js
generated
vendored
Normal file
@@ -0,0 +1,719 @@
|
||||
// https://github.com/Ethan-Arrowood/undici-fetch
|
||||
|
||||
'use strict'
|
||||
|
||||
const { kConstruct } = require('../../core/symbols')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
const {
|
||||
iteratorMixin,
|
||||
isValidHeaderName,
|
||||
isValidHeaderValue
|
||||
} = require('./util')
|
||||
const { webidl } = require('../webidl')
|
||||
const assert = require('node:assert')
|
||||
const util = require('node:util')
|
||||
|
||||
/**
 * @param {number} code
 * @returns {code is (0x0a | 0x0d | 0x09 | 0x20)} true for the four HTTP
 *   whitespace bytes: LF, CR, TAB, SP
 */
function isHTTPWhiteSpaceCharCode (code) {
  switch (code) {
    case 0x0a: // LF
    case 0x0d: // CR
    case 0x09: // TAB
    case 0x20: // SP
      return true
    default:
      return false
  }
}
|
||||
|
||||
/**
 * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize
 * Removes leading and trailing HTTP whitespace bytes (LF, CR, TAB, SP) from
 * potentialValue, returning the original string unchanged when nothing was
 * trimmed.
 * @param {string} potentialValue
 * @returns {string}
 */
function headerValueNormalize (potentialValue) {
  let start = 0
  let end = potentialValue.length

  // HTTP whitespace check, inlined from isHTTPWhiteSpaceCharCode.
  const isWS = (c) => c === 0x0a || c === 0x0d || c === 0x09 || c === 0x20

  while (end > start && isWS(potentialValue.charCodeAt(end - 1))) --end
  while (end > start && isWS(potentialValue.charCodeAt(start))) ++start

  // Avoid allocating a substring when no trimming happened.
  return start === 0 && end === potentialValue.length
    ? potentialValue
    : potentialValue.substring(start, end)
}
|
||||
|
||||
/**
 * Fills a Headers object from a constructor-style init value: either a
 * sequence of [name, value] pairs or a record of name → value.
 * @param {Headers} headers
 * @param {Array|Object} object
 */
function fill (headers, object) {
  // 1. Sequence form: each item must be exactly a [name, value] pair.
  // Note: webidl conversion to array has already been done.
  if (Array.isArray(object)) {
    for (const header of object) {
      if (header.length !== 2) {
        throw webidl.errors.exception({
          header: 'Headers constructor',
          message: `expected name/value pair to be length 2, found ${header.length}.`
        })
      }

      appendHeader(headers, header[0], header[1])
    }
    return
  }

  // 2. Record form: append every own enumerable key → value.
  // Note: null must fall through to the conversion error below.
  if (typeof object === 'object' && object !== null) {
    for (const key of Object.keys(object)) {
      appendHeader(headers, key, object[key])
    }
    return
  }

  throw webidl.errors.conversionFailed({
    prefix: 'Headers constructor',
    argument: 'Argument 1',
    types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
  })
}
|
||||
|
||||
/**
 * @see https://fetch.spec.whatwg.org/#concept-headers-append
 * Validates and appends one (name, value) pair to a Headers object's
 * underlying header list.
 * @param {Headers} headers
 * @param {string} name
 * @param {string} value
 */
function appendHeader (headers, name, value) {
  // 1. Normalize value.
  value = headerValueNormalize(value)

  // 2. Validate the name first, then the value; either failure is a TypeError.
  if (!isValidHeaderName(name)) {
    throw webidl.errors.invalidArgument({
      prefix: 'Headers.append',
      value: name,
      type: 'header name'
    })
  }
  if (!isValidHeaderValue(value)) {
    throw webidl.errors.invalidArgument({
      prefix: 'Headers.append',
      value,
      type: 'header value'
    })
  }

  // 3-5. Guard handling. Undici does not implement forbidden header names,
  // so only the "immutable" guard is enforced here.
  if (getHeadersGuard(headers) === 'immutable') {
    throw new TypeError('immutable')
  }

  // 7. Append (name, value) to headers's header list.
  // (Steps 6 and 8 — forbidden response-header names and request-no-cors
  // cleanup — are intentionally not implemented.)
  return getHeadersList(headers).append(name, value, false)
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
/**
 * Returns the target Headers' list as sorted, combined [name, value] pairs,
 * expanding set-cookie into one pair per cookie. The result is memoized on
 * headersList.sortedMap; mutating operations elsewhere reset that cache.
 * @param {Headers} target
 * @returns {[string, string][]}
 */
function headersListSortAndCombine (target) {
  const headersList = getHeadersList(target)

  if (!headersList) {
    return []
  }

  // Serve the memoized result when the list is unchanged since last sort.
  if (headersList.sortedMap) {
    return headersList.sortedMap
  }

  // 1. Let headers be an empty list of headers with the key being the name
  // and value the value.
  const headers = []

  // 2. Let names be the result of convert header names to a sorted-lowercase
  // set with all the names of the headers in list.
  const names = headersList.toSortedArray()

  const cookies = headersList.cookies

  // fast-path: without multiple set-cookie values there is nothing to
  // expand, so the sorted array can be cached and returned directly.
  if (cookies === null || cookies.length === 1) {
    // Note: The non-null assertion of value has already been done by `HeadersList#toSortedArray`
    return (headersList.sortedMap = names)
  }

  // 3. For each name of names:
  for (let i = 0; i < names.length; ++i) {
    const { 0: name, 1: value } = names[i]
    // 1. If name is `set-cookie`, then:
    if (name === 'set-cookie') {
      // 1. Let values be a list of all values of headers in list whose name
      // is a byte-case-insensitive match for name, in order.

      // 2. For each value of values:
      // 1. Append (name, value) to headers.
      for (let j = 0; j < cookies.length; ++j) {
        headers.push([name, cookies[j]])
      }
    } else {
      // 2. Otherwise:

      // 1. Let value be the result of getting name from list.

      // 2. Assert: value is non-null.
      // Note: This operation was done by `HeadersList#toSortedArray`.

      // 3. Append (name, value) to headers.
      headers.push([name, value])
    }
  }

  // 4. Return headers.
  return (headersList.sortedMap = headers)
}
|
||||
|
||||
/**
 * Comparator ordering header entries by their name (element 0).
 * Never returns 0: equal names compare as "greater", keeping the sort stable
 * in practice while matching the original's behavior exactly.
 * @param {[string, string]} a
 * @param {[string, string]} b
 * @returns {-1|1}
 */
function compareHeaderName (a, b) {
  if (a[0] < b[0]) {
    return -1
  }
  return 1
}
|
||||
|
||||
/**
 * Internal backing store for the Headers class.
 *
 * Entries live in `headersMap`, a Map keyed by the lowercased header name,
 * whose values are `{ name, value }` records preserving the original casing
 * of the first appended name. `cookies` separately keeps every raw
 * `set-cookie` value (they must not be comma-combined), and `sortedMap`
 * caches the result of `toSortedArray()` — every mutation resets it to null.
 */
class HeadersList {
  /** @type {[string, string][]|null} */
  cookies = null

  // Cached sorted [name, value] array; invalidated (set to null) on mutation.
  sortedMap
  // Map<lowercased name, { name, value }>
  headersMap

  /**
   * @param {HeadersList|Iterable<[string, {name: string, value: string}]>} [init]
   *   Another HeadersList to copy, or raw entries for the internal map.
   */
  constructor (init) {
    if (init instanceof HeadersList) {
      this.headersMap = new Map(init.headersMap)
      this.sortedMap = init.sortedMap
      this.cookies = init.cookies === null ? null : [...init.cookies]
    } else {
      this.headersMap = new Map(init)
      this.sortedMap = null
    }
  }

  /**
   * @see https://fetch.spec.whatwg.org/#header-list-contains
   * @param {string} name
   * @param {boolean} isLowerCase - true if name is already lowercased
   */
  contains (name, isLowerCase) {
    // A header list list contains a header name name if list
    // contains a header whose name is a byte-case-insensitive
    // match for name.

    return this.headersMap.has(isLowerCase ? name : name.toLowerCase())
  }

  // Removes every entry and resets both caches.
  clear () {
    this.headersMap.clear()
    this.sortedMap = null
    this.cookies = null
  }

  /**
   * @see https://fetch.spec.whatwg.org/#concept-header-list-append
   * @param {string} name
   * @param {string} value
   * @param {boolean} isLowerCase - true if name is already lowercased
   */
  append (name, value, isLowerCase) {
    this.sortedMap = null

    // 1. If list contains name, then set name to the first such
    //    header’s name.
    const lowercaseName = isLowerCase ? name : name.toLowerCase()
    const exists = this.headersMap.get(lowercaseName)

    // 2. Append (name, value) to list.
    if (exists) {
      // Values for the same name are combined into one record;
      // `cookie` uses "; " as the join, everything else ", ".
      const delimiter = lowercaseName === 'cookie' ? '; ' : ', '
      this.headersMap.set(lowercaseName, {
        name: exists.name,
        value: `${exists.value}${delimiter}${value}`
      })
    } else {
      this.headersMap.set(lowercaseName, { name, value })
    }

    if (lowercaseName === 'set-cookie') {
      (this.cookies ??= []).push(value)
    }
  }

  /**
   * @see https://fetch.spec.whatwg.org/#concept-header-list-set
   * @param {string} name
   * @param {string} value
   * @param {boolean} isLowerCase - true if name is already lowercased
   */
  set (name, value, isLowerCase) {
    this.sortedMap = null
    const lowercaseName = isLowerCase ? name : name.toLowerCase()

    if (lowercaseName === 'set-cookie') {
      this.cookies = [value]
    }

    // 1. If list contains name, then set the value of
    //    the first such header to value and remove the
    //    others.
    // 2. Otherwise, append header (name, value) to list.
    this.headersMap.set(lowercaseName, { name, value })
  }

  /**
   * @see https://fetch.spec.whatwg.org/#concept-header-list-delete
   * @param {string} name
   * @param {boolean} isLowerCase - true if name is already lowercased
   */
  delete (name, isLowerCase) {
    this.sortedMap = null
    if (!isLowerCase) name = name.toLowerCase()

    if (name === 'set-cookie') {
      this.cookies = null
    }

    this.headersMap.delete(name)
  }

  /**
   * @see https://fetch.spec.whatwg.org/#concept-header-list-get
   * @param {string} name
   * @param {boolean} isLowerCase - true if name is already lowercased
   * @returns {string | null} combined value, or null when absent
   */
  get (name, isLowerCase) {
    // 1. If list does not contain name, then return null.
    // 2. Return the values of all headers in list whose name
    //    is a byte-case-insensitive match for name,
    //    separated from each other by 0x2C 0x20, in order.
    return this.headersMap.get(isLowerCase ? name : name.toLowerCase())?.value ?? null
  }

  // Yields [lowercasedName, combinedValue] pairs in insertion order.
  * [Symbol.iterator] () {
    // use the lowercased name
    for (const { 0: name, 1: { value } } of this.headersMap) {
      yield [name, value]
    }
  }

  // Plain object of { originalCasedName: combinedValue } — used for
  // util.inspect output of Headers.
  get entries () {
    const headers = {}

    if (this.headersMap.size !== 0) {
      for (const { name, value } of this.headersMap.values()) {
        headers[name] = value
      }
    }

    return headers
  }

  // Raw { name, value } records, without any cookie expansion.
  rawValues () {
    return this.headersMap.values()
  }

  // [originalCasedName, value] pairs; `set-cookie` is expanded back into
  // one entry per stored cookie value instead of the combined record.
  get entriesList () {
    const headers = []

    if (this.headersMap.size !== 0) {
      for (const { 0: lowerName, 1: { name, value } } of this.headersMap) {
        if (lowerName === 'set-cookie') {
          for (const cookie of this.cookies) {
            headers.push([name, cookie])
          }
        } else {
          headers.push([name, value])
        }
      }
    }

    return headers
  }

  // https://fetch.spec.whatwg.org/#convert-header-names-to-a-sorted-lowercase-set
  // Returns [lowercasedName, value] pairs sorted by name. Small lists
  // (<= 32) use an inlined binary insertion sort; larger lists fall back
  // to Array#sort with compareHeaderName.
  toSortedArray () {
    const size = this.headersMap.size
    const array = new Array(size)
    // In most cases, you will use the fast-path.
    // fast-path: Use binary insertion sort for small arrays.
    if (size <= 32) {
      if (size === 0) {
        // If empty, it is an empty array. To avoid the first index assignment.
        return array
      }
      // Improve performance by unrolling loop and avoiding double-loop.
      // Double-loop-less version of the binary insertion sort.
      const iterator = this.headersMap[Symbol.iterator]()
      const firstValue = iterator.next().value
      // set [name, value] to first index.
      array[0] = [firstValue[0], firstValue[1].value]
      // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
      // 3.2.2. Assert: value is non-null.
      assert(firstValue[1].value !== null)
      for (
        let i = 1, j = 0, right = 0, left = 0, pivot = 0, x, value;
        i < size;
        ++i
      ) {
        // get next value
        value = iterator.next().value
        // set [name, value] to current index.
        x = array[i] = [value[0], value[1].value]
        // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
        // 3.2.2. Assert: value is non-null.
        assert(x[1] !== null)
        left = 0
        right = i
        // binary search
        while (left < right) {
          // middle index
          pivot = left + ((right - left) >> 1)
          // compare header name
          if (array[pivot][0] <= x[0]) {
            left = pivot + 1
          } else {
            right = pivot
          }
        }
        if (i !== pivot) {
          j = i
          while (j > left) {
            array[j] = array[--j]
          }
          array[left] = x
        }
      }
      /* c8 ignore next 4 */
      if (!iterator.next().done) {
        // This is for debugging and will never be called.
        throw new TypeError('Unreachable')
      }
      return array
    } else {
      // This case would be a rare occurrence.
      // slow-path: fallback
      let i = 0
      for (const { 0: name, 1: { value } } of this.headersMap) {
        array[i++] = [name, value]
        // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
        // 3.2.2. Assert: value is non-null.
        assert(value !== null)
      }
      return array.sort(compareHeaderName)
    }
  }
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#headers-class
|
||||
/**
 * WHATWG fetch `Headers` class, backed by a HeadersList (#headersList).
 * #guard models the spec's header guard; only "immutable" is enforced here
 * (delete/set throw) — forbidden-header-name guards are not implemented,
 * as noted inline. Module-private access to the guard and list is exposed
 * via the static accessors, which the module strips from the public class
 * after capturing them.
 */
class Headers {
  #guard
  /**
   * @type {HeadersList}
   */
  #headersList

  /**
   * @param {HeadersInit|Symbol} [init] - kConstruct skips initialization so
   *   internal callers can attach their own list and guard.
   * @returns
   */
  constructor (init = undefined) {
    webidl.util.markAsUncloneable(this)

    if (init === kConstruct) {
      return
    }

    this.#headersList = new HeadersList()

    // The new Headers(init) constructor steps are:

    // 1. Set this’s guard to "none".
    this.#guard = 'none'

    // 2. If init is given, then fill this with init.
    if (init !== undefined) {
      init = webidl.converters.HeadersInit(init, 'Headers constructor', 'init')
      fill(this, init)
    }
  }

  // https://fetch.spec.whatwg.org/#dom-headers-append
  append (name, value) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 2, 'Headers.append')

    const prefix = 'Headers.append'
    name = webidl.converters.ByteString(name, prefix, 'name')
    value = webidl.converters.ByteString(value, prefix, 'value')

    // Validation, guard checks and normalization are delegated to the
    // shared appendHeader helper.
    return appendHeader(this, name, value)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-delete
  delete (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, 'Headers.delete')

    const prefix = 'Headers.delete'
    name = webidl.converters.ByteString(name, prefix, 'name')

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.delete',
        value: name,
        type: 'header name'
      })
    }

    // 2. If this’s guard is "immutable", then throw a TypeError.
    // 3. Otherwise, if this’s guard is "request" and name is a
    //    forbidden header name, return.
    // 4. Otherwise, if this’s guard is "request-no-cors", name
    //    is not a no-CORS-safelisted request-header name, and
    //    name is not a privileged no-CORS request-header name,
    //    return.
    // 5. Otherwise, if this’s guard is "response" and name is
    //    a forbidden response-header name, return.
    // Note: undici does not implement forbidden header names
    if (this.#guard === 'immutable') {
      throw new TypeError('immutable')
    }

    // 6. If this’s header list does not contain name, then
    //    return.
    if (!this.#headersList.contains(name, false)) {
      return
    }

    // 7. Delete name from this’s header list.
    // 8. If this’s guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from this.
    this.#headersList.delete(name, false)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-get
  get (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, 'Headers.get')

    const prefix = 'Headers.get'
    name = webidl.converters.ByteString(name, prefix, 'name')

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix,
        value: name,
        type: 'header name'
      })
    }

    // 2. Return the result of getting name from this’s header
    //    list.
    return this.#headersList.get(name, false)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-has
  has (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, 'Headers.has')

    const prefix = 'Headers.has'
    name = webidl.converters.ByteString(name, prefix, 'name')

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix,
        value: name,
        type: 'header name'
      })
    }

    // 2. Return true if this’s header list contains name;
    //    otherwise false.
    return this.#headersList.contains(name, false)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-set
  set (name, value) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 2, 'Headers.set')

    const prefix = 'Headers.set'
    name = webidl.converters.ByteString(name, prefix, 'name')
    value = webidl.converters.ByteString(value, prefix, 'value')

    // 1. Normalize value.
    value = headerValueNormalize(value)

    // 2. If name is not a header name or value is not a
    //    header value, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix,
        value: name,
        type: 'header name'
      })
    } else if (!isValidHeaderValue(value)) {
      throw webidl.errors.invalidArgument({
        prefix,
        value,
        type: 'header value'
      })
    }

    // 3. If this’s guard is "immutable", then throw a TypeError.
    // 4. Otherwise, if this’s guard is "request" and name is a
    //    forbidden header name, return.
    // 5. Otherwise, if this’s guard is "request-no-cors" and
    //    name/value is not a no-CORS-safelisted request-header,
    //    return.
    // 6. Otherwise, if this’s guard is "response" and name is a
    //    forbidden response-header name, return.
    // Note: undici does not implement forbidden header names
    if (this.#guard === 'immutable') {
      throw new TypeError('immutable')
    }

    // 7. Set (name, value) in this’s header list.
    // 8. If this’s guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from this
    this.#headersList.set(name, value, false)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
  getSetCookie () {
    webidl.brandCheck(this, Headers)

    // 1. If this’s header list does not contain `Set-Cookie`, then return « ».
    // 2. Return the values of all headers in this’s header list whose name is
    //    a byte-case-insensitive match for `Set-Cookie`, in order.

    const list = this.#headersList.cookies

    // Return a copy so callers cannot mutate the internal cookie list.
    if (list) {
      return [...list]
    }

    return []
  }

  [util.inspect.custom] (depth, options) {
    options.depth ??= depth

    return `Headers ${util.formatWithOptions(options, this.#headersList.entries)}`
  }

  // Module-private escape hatches for the #guard / #headersList private
  // fields; deleted from the public class right after the module captures
  // them below.
  static getHeadersGuard (o) {
    return o.#guard
  }

  static setHeadersGuard (o, guard) {
    o.#guard = guard
  }

  /**
   * @param {Headers} o
   */
  static getHeadersList (o) {
    return o.#headersList
  }

  /**
   * @param {Headers} target
   * @param {HeadersList} list
   */
  static setHeadersList (target, list) {
    target.#headersList = list
  }
}
|
||||
|
||||
// Capture the private-field accessor statics for module-internal use, then
// strip them from the public Headers constructor so consumers cannot reach
// the #guard / #headersList internals.
const { getHeadersGuard, setHeadersGuard, getHeadersList, setHeadersList } = Headers
for (const accessor of ['getHeadersGuard', 'setHeadersGuard', 'getHeadersList', 'setHeadersList']) {
  Reflect.deleteProperty(Headers, accessor)
}
|
||||
|
||||
// Install entries()/keys()/values()/forEach()/[Symbol.iterator] on Headers,
// iterating the sorted-and-combined view (name at index 0, value at index 1).
iteratorMixin('Headers', Headers, headersListSortAndCombine, 0, 1)

// Make the public methods enumerable, per WebIDL; keep the Node-specific
// inspect hook hidden from enumeration.
Object.defineProperties(Headers.prototype, {
  append: kEnumerableProperty,
  delete: kEnumerableProperty,
  get: kEnumerableProperty,
  has: kEnumerableProperty,
  set: kEnumerableProperty,
  getSetCookie: kEnumerableProperty,
  [Symbol.toStringTag]: {
    value: 'Headers',
    configurable: true
  },
  [util.inspect.custom]: {
    enumerable: false
  }
})
|
||||
|
||||
/**
 * WebIDL converter for the HeadersInit union: accepts a Headers object
 * (fast-pathed to its properly-cased entries list), any iterable of
 * [name, value] pairs, or a plain record; throws a conversion error for
 * everything else.
 * @param {*} V
 * @param {string} prefix - context for error messages
 * @param {string} argument - argument name for error messages
 */
webidl.converters.HeadersInit = function (V, prefix, argument) {
  if (webidl.util.Type(V) === webidl.util.Types.OBJECT) {
    const iterator = Reflect.get(V, Symbol.iterator)

    // A work-around to ensure we send the properly-cased Headers when V is a Headers object.
    // Read https://github.com/nodejs/undici/pull/3159#issuecomment-2075537226 before touching, please.
    if (!util.types.isProxy(V) && iterator === Headers.prototype.entries) { // Headers object
      try {
        return getHeadersList(V).entriesList
      } catch {
        // fall-through
      }
    }

    if (typeof iterator === 'function') {
      return webidl.converters['sequence<sequence<ByteString>>'](V, prefix, argument, iterator.bind(V))
    }

    return webidl.converters['record<ByteString, ByteString>'](V, prefix, argument)
  }

  throw webidl.errors.conversionFailed({
    prefix: 'Headers constructor',
    argument: 'Argument 1',
    types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
  })
}
|
||||
|
||||
// Public surface of this module; the guard/list accessors are module-private
// functions captured off the Headers class above.
module.exports = {
  fill,
  // for test.
  compareHeaderName,
  Headers,
  HeadersList,
  getHeadersGuard,
  setHeadersGuard,
  setHeadersList,
  getHeadersList
}
|
||||
2343
backend/node_modules/undici/lib/web/fetch/index.js
generated
vendored
Normal file
2343
backend/node_modules/undici/lib/web/fetch/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1115
backend/node_modules/undici/lib/web/fetch/request.js
generated
vendored
Normal file
1115
backend/node_modules/undici/lib/web/fetch/request.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
640
backend/node_modules/undici/lib/web/fetch/response.js
generated
vendored
Normal file
640
backend/node_modules/undici/lib/web/fetch/response.js
generated
vendored
Normal file
@@ -0,0 +1,640 @@
|
||||
'use strict'
|
||||
|
||||
const { Headers, HeadersList, fill, getHeadersGuard, setHeadersGuard, setHeadersList } = require('./headers')
|
||||
const { extractBody, cloneBody, mixinBody, streamRegistry, bodyUnusable } = require('./body')
|
||||
const util = require('../../core/util')
|
||||
const nodeUtil = require('node:util')
|
||||
const { kEnumerableProperty } = util
|
||||
const {
|
||||
isValidReasonPhrase,
|
||||
isCancelled,
|
||||
isAborted,
|
||||
isErrorLike,
|
||||
environmentSettingsObject: relevantRealm
|
||||
} = require('./util')
|
||||
const {
|
||||
redirectStatusSet,
|
||||
nullBodyStatus
|
||||
} = require('./constants')
|
||||
const { webidl } = require('../webidl')
|
||||
const { URLSerializer } = require('./data-url')
|
||||
const { kConstruct } = require('../../core/symbols')
|
||||
const assert = require('node:assert')
|
||||
const { isomorphicEncode, serializeJavascriptValueToJSONString } = require('../infra')
|
||||
|
||||
// Shared encoder for Response.json(). TextEncoder always emits UTF-8 and its
// constructor takes no arguments — the 'utf-8' string previously passed here
// was silently ignored.
const textEncoder = new TextEncoder()
|
||||
|
||||
// https://fetch.spec.whatwg.org/#response-class
|
||||
/**
 * WHATWG fetch `Response` class. #state holds the inner response record
 * (status, type, urlList, body, headersList, …) and #headers the public
 * Headers wrapper over that record's header list. Private-field access for
 * the rest of the module goes through the static accessors, which are
 * stripped from the public class after being captured below.
 */
class Response {
  /** @type {Headers} */
  #headers

  // Inner response record — see makeResponse() for its shape.
  #state

  // Creates network error Response.
  static error () {
    // The static error() method steps are to return the result of creating a
    // Response object, given a new network error, "immutable", and this’s
    // relevant Realm.
    const responseObject = fromInnerResponse(makeNetworkError(), 'immutable')

    return responseObject
  }

  // https://fetch.spec.whatwg.org/#dom-response-json
  static json (data, init = undefined) {
    webidl.argumentLengthCheck(arguments, 1, 'Response.json')

    if (init !== null) {
      init = webidl.converters.ResponseInit(init)
    }

    // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data.
    const bytes = textEncoder.encode(
      serializeJavascriptValueToJSONString(data)
    )

    // 2. Let body be the result of extracting bytes.
    const body = extractBody(bytes)

    // 3. Let responseObject be the result of creating a Response object, given a new response,
    //    "response", and this’s relevant Realm.
    const responseObject = fromInnerResponse(makeResponse({}), 'response')

    // 4. Perform initialize a response given responseObject, init, and (body, "application/json").
    initializeResponse(responseObject, init, { body: body[0], type: 'application/json' })

    // 5. Return responseObject.
    return responseObject
  }

  // Creates a redirect Response that redirects to url with status status.
  static redirect (url, status = 302) {
    webidl.argumentLengthCheck(arguments, 1, 'Response.redirect')

    url = webidl.converters.USVString(url)
    status = webidl.converters['unsigned short'](status)

    // 1. Let parsedURL be the result of parsing url with current settings
    //    object’s API base URL.
    // 2. If parsedURL is failure, then throw a TypeError.
    // TODO: base-URL?
    let parsedURL
    try {
      parsedURL = new URL(url, relevantRealm.settingsObject.baseUrl)
    } catch (err) {
      throw new TypeError(`Failed to parse URL from ${url}`, { cause: err })
    }

    // 3. If status is not a redirect status, then throw a RangeError.
    if (!redirectStatusSet.has(status)) {
      throw new RangeError(`Invalid status code ${status}`)
    }

    // 4. Let responseObject be the result of creating a Response object,
    //    given a new response, "immutable", and this’s relevant Realm.
    const responseObject = fromInnerResponse(makeResponse({}), 'immutable')

    // 5. Set responseObject’s response’s status to status.
    responseObject.#state.status = status

    // 6. Let value be parsedURL, serialized and isomorphic encoded.
    const value = isomorphicEncode(URLSerializer(parsedURL))

    // 7. Append `Location`/value to responseObject’s response’s header list.
    responseObject.#state.headersList.append('location', value, true)

    // 8. Return responseObject.
    return responseObject
  }

  // https://fetch.spec.whatwg.org/#dom-response
  constructor (body = null, init = undefined) {
    webidl.util.markAsUncloneable(this)

    // kConstruct skips initialization: fromInnerResponse attaches its own
    // state and headers afterwards.
    if (body === kConstruct) {
      return
    }

    if (body !== null) {
      body = webidl.converters.BodyInit(body, 'Response', 'body')
    }

    init = webidl.converters.ResponseInit(init)

    // 1. Set this’s response to a new response.
    this.#state = makeResponse({})

    // 2. Set this’s headers to a new Headers object with this’s relevant
    //    Realm, whose header list is this’s response’s header list and guard
    //    is "response".
    this.#headers = new Headers(kConstruct)
    setHeadersGuard(this.#headers, 'response')
    setHeadersList(this.#headers, this.#state.headersList)

    // 3. Let bodyWithType be null.
    let bodyWithType = null

    // 4. If body is non-null, then set bodyWithType to the result of extracting body.
    if (body != null) {
      const [extractedBody, type] = extractBody(body)
      bodyWithType = { body: extractedBody, type }
    }

    // 5. Perform initialize a response given this, init, and bodyWithType.
    initializeResponse(this, init, bodyWithType)
  }

  // Returns response’s type, e.g., "cors".
  get type () {
    webidl.brandCheck(this, Response)

    // The type getter steps are to return this’s response’s type.
    return this.#state.type
  }

  // Returns response’s URL, if it has one; otherwise the empty string.
  get url () {
    webidl.brandCheck(this, Response)

    const urlList = this.#state.urlList

    // The url getter steps are to return the empty string if this’s
    // response’s URL is null; otherwise this’s response’s URL,
    // serialized with exclude fragment set to true.
    const url = urlList[urlList.length - 1] ?? null

    if (url === null) {
      return ''
    }

    return URLSerializer(url, true)
  }

  // Returns whether response was obtained through a redirect.
  get redirected () {
    webidl.brandCheck(this, Response)

    // The redirected getter steps are to return true if this’s response’s URL
    // list has more than one item; otherwise false.
    return this.#state.urlList.length > 1
  }

  // Returns response’s status.
  get status () {
    webidl.brandCheck(this, Response)

    // The status getter steps are to return this’s response’s status.
    return this.#state.status
  }

  // Returns whether response’s status is an ok status.
  get ok () {
    webidl.brandCheck(this, Response)

    // The ok getter steps are to return true if this’s response’s status is an
    // ok status; otherwise false.
    return this.#state.status >= 200 && this.#state.status <= 299
  }

  // Returns response’s status message.
  get statusText () {
    webidl.brandCheck(this, Response)

    // The statusText getter steps are to return this’s response’s status
    // message.
    return this.#state.statusText
  }

  // Returns response’s headers as Headers.
  get headers () {
    webidl.brandCheck(this, Response)

    // The headers getter steps are to return this’s headers.
    return this.#headers
  }

  get body () {
    webidl.brandCheck(this, Response)

    return this.#state.body ? this.#state.body.stream : null
  }

  get bodyUsed () {
    webidl.brandCheck(this, Response)

    return !!this.#state.body && util.isDisturbed(this.#state.body.stream)
  }

  // Returns a clone of response.
  clone () {
    webidl.brandCheck(this, Response)

    // 1. If this is unusable, then throw a TypeError.
    if (bodyUnusable(this.#state)) {
      throw webidl.errors.exception({
        header: 'Response.clone',
        message: 'Body has already been consumed.'
      })
    }

    // 2. Let clonedResponse be the result of cloning this’s response.
    const clonedResponse = cloneResponse(this.#state)

    // Note: To re-register because of a new stream.
    if (this.#state.body?.stream) {
      streamRegistry.register(this, new WeakRef(this.#state.body.stream))
    }

    // 3. Return the result of creating a Response object, given
    //    clonedResponse, this’s headers’s guard, and this’s relevant Realm.
    return fromInnerResponse(clonedResponse, getHeadersGuard(this.#headers))
  }

  [nodeUtil.inspect.custom] (depth, options) {
    if (options.depth === null) {
      options.depth = 2
    }

    options.colors ??= true

    const properties = {
      status: this.status,
      statusText: this.statusText,
      headers: this.headers,
      body: this.body,
      bodyUsed: this.bodyUsed,
      ok: this.ok,
      redirected: this.redirected,
      type: this.type,
      url: this.url
    }

    return `Response ${nodeUtil.formatWithOptions(options, properties)}`
  }

  // Module-private escape hatches for the #headers / #state private fields;
  // deleted from the public class right after the module captures them below.
  /**
   * @param {Response} response
   */
  static getResponseHeaders (response) {
    return response.#headers
  }

  /**
   * @param {Response} response
   * @param {Headers} newHeaders
   */
  static setResponseHeaders (response, newHeaders) {
    response.#headers = newHeaders
  }

  /**
   * @param {Response} response
   */
  static getResponseState (response) {
    return response.#state
  }

  /**
   * @param {Response} response
   * @param {any} newState
   */
  static setResponseState (response, newState) {
    response.#state = newState
  }
}
|
||||
|
||||
// Capture the private-field accessor statics for module-internal use, then
// strip them from the public Response constructor so consumers cannot reach
// the #headers / #state internals.
const { getResponseHeaders, setResponseHeaders, getResponseState, setResponseState } = Response
for (const accessor of ['getResponseHeaders', 'setResponseHeaders', 'getResponseState', 'setResponseState']) {
  Reflect.deleteProperty(Response, accessor)
}
|
||||
|
||||
// Install the body-consumption mixin (arrayBuffer/blob/json/text/…), reading
// the inner state via the captured private-field accessor.
mixinBody(Response, getResponseState)

// Make the public accessors and clone() enumerable, per WebIDL.
Object.defineProperties(Response.prototype, {
  type: kEnumerableProperty,
  url: kEnumerableProperty,
  status: kEnumerableProperty,
  ok: kEnumerableProperty,
  redirected: kEnumerableProperty,
  statusText: kEnumerableProperty,
  headers: kEnumerableProperty,
  clone: kEnumerableProperty,
  body: kEnumerableProperty,
  bodyUsed: kEnumerableProperty,
  [Symbol.toStringTag]: {
    value: 'Response',
    configurable: true
  }
})

// The static factories are enumerable too.
Object.defineProperties(Response, {
  json: kEnumerableProperty,
  redirect: kEnumerableProperty,
  error: kEnumerableProperty
})
|
||||
|
||||
// https://fetch.spec.whatwg.org/#concept-response-clone
|
||||
// https://fetch.spec.whatwg.org/#concept-response-clone
/**
 * Clones an inner response record. A filtered response is cloned by cloning
 * its internal response and re-applying the same filter; otherwise the
 * record is shallow-copied and its body (if any) cloned separately.
 */
function cloneResponse (response) {
  const { internalResponse } = response

  // Filtered response: clone the wrapped response and re-wrap it with the
  // same filter type.
  if (internalResponse) {
    return filterResponse(cloneResponse(internalResponse), response.type)
  }

  // Copy every field except the body, which must not be shared.
  const clonedResponse = makeResponse({ ...response, body: null })

  if (response.body != null) {
    clonedResponse.body = cloneBody(response.body)
  }

  return clonedResponse
}
|
||||
|
||||
/**
 * Builds an inner response record with spec defaults, overlaid by any fields
 * in init. The header list and URL list are always freshly copied so the new
 * record never shares mutable state with init.
 */
function makeResponse (init) {
  const headersList = init?.headersList
    ? new HeadersList(init?.headersList)
    : new HeadersList()
  const urlList = init?.urlList ? [...init.urlList] : []

  return {
    aborted: false,
    rangeRequested: false,
    timingAllowPassed: false,
    requestIncludesCredentials: false,
    type: 'default',
    status: 200,
    timingInfo: null,
    cacheState: '',
    statusText: '',
    ...init,
    headersList,
    urlList
  }
}
|
||||
|
||||
/**
 * Creates an inner response representing a network error
 * (type "error", status 0).
 * @param {unknown} [reason] - stored as-is when error-like; otherwise
 *   stringified into a new Error. NOTE(review): when reason is falsy the
 *   raw value is passed to the Error constructor (e.g. Error(undefined)),
 *   which appears intentional to keep the message empty — confirm before
 *   changing.
 */
function makeNetworkError (reason) {
  const isError = isErrorLike(reason)
  return makeResponse({
    type: 'error',
    status: 0,
    error: isError
      ? reason
      : new Error(reason ? String(reason) : reason),
    // aborted is truthy only when the reason is an AbortError.
    aborted: reason && reason.name === 'AbortError'
  })
}
|
||||
|
||||
// @see https://fetch.spec.whatwg.org/#concept-network-error
|
||||
// @see https://fetch.spec.whatwg.org/#concept-network-error
/**
 * Returns true when response is a network error: type "error" AND status 0.
 * @param {{type: string, status: number}} response
 * @returns {boolean}
 */
function isNetworkError (response) {
  // A network error is a response whose type is "error",
  if (response.type !== 'error') {
    return false
  }
  // status is 0
  return response.status === 0
}
|
||||
|
||||
/**
 * Wraps response in a Proxy whose reads prefer the override fields in state
 * (plus `internalResponse`, pointing back at the wrapped response) and fall
 * through to the wrapped response otherwise. Writes go to the wrapped
 * response and assert that no override field is ever shadowed by a write.
 */
function makeFilteredResponse (response, state) {
  const overrides = {
    internalResponse: response,
    ...state
  }

  return new Proxy(response, {
    get (target, prop) {
      if (prop in overrides) {
        return overrides[prop]
      }
      return target[prop]
    },
    set (target, prop, value) {
      assert(!(prop in overrides))
      target[prop] = value
      return true
    }
  })
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#concept-filtered-response
|
||||
// https://fetch.spec.whatwg.org/#concept-filtered-response
/**
 * Wraps response in the filtered-response proxy matching the request's
 * response tainting. Asserts on an unknown filter type.
 */
function filterResponse (response, type) {
  // Set response to the following filtered response with response as its
  // internal response, depending on request’s response tainting:
  switch (type) {
    case 'basic':
      // A basic filtered response is a filtered response whose type is "basic"
      // and header list excludes any headers in internal response’s header list
      // whose name is a forbidden response-header name.

      // Note: undici does not implement forbidden response-header names
      return makeFilteredResponse(response, {
        type: 'basic',
        headersList: response.headersList
      })
    case 'cors':
      // A CORS filtered response is a filtered response whose type is "cors"
      // and header list excludes any headers in internal response’s header
      // list whose name is not a CORS-safelisted response-header name, given
      // internal response’s CORS-exposed header-name list.

      // Note: undici does not implement CORS-safelisted response-header names
      return makeFilteredResponse(response, {
        type: 'cors',
        headersList: response.headersList
      })
    case 'opaque':
      // An opaque filtered response is a filtered response whose type is
      // "opaque", URL list is the empty list, status is 0, status message
      // is the empty byte sequence, header list is empty, and body is null.
      return makeFilteredResponse(response, {
        type: 'opaque',
        urlList: [],
        status: 0,
        statusText: '',
        body: null
      })
    case 'opaqueredirect':
      // An opaque-redirect filtered response is a filtered response whose type
      // is "opaqueredirect", status is 0, status message is the empty byte
      // sequence, header list is empty, and body is null.
      return makeFilteredResponse(response, {
        type: 'opaqueredirect',
        status: 0,
        statusText: '',
        headersList: [],
        body: null
      })
    default:
      assert(false)
  }
}
|
||||
|
||||
/**
 * Returns an aborted network error when `fetchParams` is aborted, otherwise a
 * generic cancellation network error; `err` is attached as the cause.
 * @see https://fetch.spec.whatwg.org/#appropriate-network-error
 */
function makeAppropriateNetworkError (fetchParams, err = null) {
  // The spec requires fetchParams to already be canceled at this point.
  assert(isCancelled(fetchParams))

  const reason = isAborted(fetchParams)
    ? new DOMException('The operation was aborted.', 'AbortError')
    : new DOMException('Request was cancelled.')
  reason.cause = err

  return makeNetworkError(reason)
}
|
||||
|
||||
/**
 * Initializes `response` from a ResponseInit dictionary and an optional
 * extracted body, validating status and statusText up front.
 * @see https://whatpr.org/fetch/1392.html#initialize-a-response
 */
function initializeResponse (response, init, body) {
  // 1. A status outside 200..599 (inclusive) is a RangeError.
  if (init.status !== null && (init.status < 200 || init.status > 599)) {
    throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.')
  }

  // 2. statusText must match the HTTP reason-phrase production
  //    (https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2):
  //    reason-phrase = *( HTAB / SP / VCHAR / obs-text )
  if ('statusText' in init && init.statusText != null) {
    if (!isValidReasonPhrase(String(init.statusText))) {
      throw new TypeError('Invalid statusText')
    }
  }

  const state = getResponseState(response)

  // 3. Copy init["status"] into the response's inner state.
  if ('status' in init && init.status != null) {
    state.status = init.status
  }

  // 4. Copy init["statusText"] into the response's inner state.
  if ('statusText' in init && init.statusText != null) {
    state.statusText = init.statusText
  }

  // 5. Fill the response's headers from init["headers"], if present.
  if ('headers' in init && init.headers != null) {
    fill(getResponseHeaders(response), init.headers)
  }

  // 6. Attach the extracted body, if one was given.
  if (body) {
    // 6.1. A body is not allowed alongside a null body status.
    if (nullBodyStatus.includes(response.status)) {
      throw webidl.errors.exception({
        header: 'Response constructor',
        message: `Invalid response status code ${response.status}`
      })
    }

    // 6.2. Adopt the extracted body.
    state.body = body.body

    // 6.3. Default `Content-Type` from the body's type when the header is
    //      not already present.
    if (body.type != null && !state.headersList.contains('content-type', true)) {
      state.headersList.append('content-type', body.type, true)
    }
  }
}
|
||||
|
||||
/**
 * Creates a `Response` object wrapping the given inner (spec) response, with
 * its headers guarded by `guard`.
 * @see https://fetch.spec.whatwg.org/#response-create
 * @param {any} innerResponse
 * @param {'request' | 'immutable' | 'request-no-cors' | 'response' | 'none'} guard
 * @returns {Response}
 */
function fromInnerResponse (innerResponse, guard) {
  const response = new Response(kConstruct)
  const headers = new Headers(kConstruct)

  setResponseState(response, innerResponse)
  setResponseHeaders(response, headers)
  setHeadersList(headers, innerResponse.headersList)
  setHeadersGuard(headers, guard)

  // An inner response with a non-empty URL list came from an actual fetch.
  // Register its body stream so the cleanup callback can run if the Response
  // is reclaimed; the held value is a WeakRef (the registry keeps a strong
  // reference to the held value itself, so we must not hand it the stream
  // directly). See:
  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry
  if (innerResponse.urlList.length !== 0 && innerResponse.body?.stream) {
    streamRegistry.register(response, new WeakRef(innerResponse.body.stream))
  }

  return response
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit
webidl.converters.XMLHttpRequestBodyInit = function (V, prefix, name) {
  // Strings are converted as USVString.
  if (typeof V === 'string') {
    return webidl.converters.USVString(V, prefix, name)
  }

  // Blob, BufferSource, FormData and URLSearchParams values pass through
  // untouched (checked in that order).
  if (
    webidl.is.Blob(V) ||
    webidl.is.BufferSource(V) ||
    webidl.is.FormData(V) ||
    webidl.is.URLSearchParams(V)
  ) {
    return V
  }

  // Everything else falls back to a DOMString conversion.
  return webidl.converters.DOMString(V, prefix, name)
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#bodyinit
webidl.converters.BodyInit = function (V, prefix, argument) {
  // ReadableStream bodies — and, as an undici extension not present in the
  // spec, any async iterable — are passed through untouched.
  if (webidl.is.ReadableStream(V) || V?.[Symbol.asyncIterator]) {
    return V
  }

  return webidl.converters.XMLHttpRequestBodyInit(V, prefix, argument)
}
|
||||
|
||||
webidl.converters.ResponseInit = webidl.dictionaryConverter([
|
||||
{
|
||||
key: 'status',
|
||||
converter: webidl.converters['unsigned short'],
|
||||
defaultValue: () => 200
|
||||
},
|
||||
{
|
||||
key: 'statusText',
|
||||
converter: webidl.converters.ByteString,
|
||||
defaultValue: () => ''
|
||||
},
|
||||
{
|
||||
key: 'headers',
|
||||
converter: webidl.converters.HeadersInit
|
||||
}
|
||||
])
|
||||
|
||||
webidl.is.Response = webidl.util.MakeTypeAssertion(Response)
|
||||
|
||||
module.exports = {
|
||||
isNetworkError,
|
||||
makeNetworkError,
|
||||
makeResponse,
|
||||
makeAppropriateNetworkError,
|
||||
filterResponse,
|
||||
Response,
|
||||
cloneResponse,
|
||||
fromInnerResponse,
|
||||
getResponseState
|
||||
}
|
||||
1520
backend/node_modules/undici/lib/web/fetch/util.js
generated
vendored
Normal file
1520
backend/node_modules/undici/lib/web/fetch/util.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
229
backend/node_modules/undici/lib/web/infra/index.js
generated
vendored
Normal file
229
backend/node_modules/undici/lib/web/infra/index.js
generated
vendored
Normal file
@@ -0,0 +1,229 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { utf8DecodeBytes } = require('../../encoding')
|
||||
|
||||
/**
 * Collects code points from `input` starting at `position.position` for as
 * long as `condition` holds, advancing `position` past the collected run.
 *
 * @see https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
 * @param {(char: string) => boolean} condition
 * @param {string} input
 * @param {{ position: number }} position - advanced in place
 * @returns {string} the collected (possibly empty) run of code points
 */
function collectASequenceOfCodePoints (condition, input, position) {
  const start = position.position

  // Advance while the code point at the current position satisfies the
  // condition; position.position is mutated as we go, matching the spec.
  while (position.position < input.length && condition(input[position.position])) {
    position.position++
  }

  return input.slice(start, position.position)
}
|
||||
|
||||
/**
 * Fast path of collectASequenceOfCodePoints for the common case where the
 * condition is "not equal to a single character": collects everything up to
 * (but excluding) the next occurrence of `char`, using indexOf.
 *
 * @see https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
 * @param {string} char
 * @param {string} input
 * @param {{ position: number }} position - advanced in place
 * @returns {string}
 */
function collectASequenceOfCodePointsFast (char, input, position) {
  const start = position.position
  const idx = input.indexOf(char, start)

  // No further occurrence: consume the rest of the input.
  if (idx === -1) {
    position.position = input.length
    return input.slice(start)
  }

  position.position = idx
  return input.slice(start, idx)
}
|
||||
|
||||
// Matches every ASCII whitespace code point (TAB, LF, FF, CR, SPACE).
const ASCII_WHITESPACE_REPLACE_REGEX = /[\u0009\u000A\u000C\u000D\u0020]/g // eslint-disable-line no-control-regex

/**
 * Forgiving-base64 decode: strips ASCII whitespace, tolerates up to two
 * trailing `=` padding characters, and rejects malformed input.
 *
 * @see https://infra.spec.whatwg.org/#forgiving-base64-decode
 * @param {string} data
 * @returns {Uint8Array | 'failure'} decoded bytes, or the string 'failure'
 */
function forgivingBase64 (data) {
  // 1. Remove all ASCII whitespace from data.
  data = data.replace(ASCII_WHITESPACE_REPLACE_REGEX, '')

  // 2. If data's code point length is a multiple of 4, drop at most two
  //    trailing U+003D (=) padding code points. Only dataLength shrinks;
  //    data itself is left untouched.
  let dataLength = data.length
  if (dataLength % 4 === 0) {
    let removed = 0
    while (removed < 2 && dataLength > 0 && data.charCodeAt(dataLength - 1) === 0x003D) {
      --dataLength
      ++removed
    }
  }

  // 3. A remainder of 1 after removing padding cannot be valid base64.
  if (dataLength % 4 === 1) {
    return 'failure'
  }

  // 4. Any code point other than '+', '/' or ASCII alphanumeric (ignoring
  //    the stripped padding) is a failure.
  const payload = data.length === dataLength ? data : data.substring(0, dataLength)
  if (/[^+/0-9A-Za-z]/.test(payload)) {
    return 'failure'
  }

  // Decode via Buffer and expose exactly its byte range as a Uint8Array.
  const buffer = Buffer.from(data, 'base64')
  return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength)
}
|
||||
|
||||
/**
 * Whether `char` (a char code) is ASCII whitespace: TAB, LF, FF, CR or SPACE.
 *
 * @see https://infra.spec.whatwg.org/#ascii-whitespace
 * @param {number} char
 * @returns {boolean}
 */
function isASCIIWhitespace (char) {
  switch (char) {
    case 0x09: // \t
    case 0x0a: // \n
    case 0x0c: // \f
    case 0x0d: // \r
    case 0x20: // space
      return true
    default:
      return false
  }
}
|
||||
|
||||
/**
 * Isomorphic decode: maps each byte of `input` to the code point with the
 * same value, producing a string whose length equals the input's length.
 *
 * @see https://infra.spec.whatwg.org/#isomorphic-decode
 * @param {Uint8Array} input
 * @returns {string}
 */
function isomorphicDecode (input) {
  const length = input.length
  // fromCharCode.apply spreads the bytes as call arguments; keep each call
  // below the 65535 threshold to stay within engine argument limits.
  const CHUNK = (2 << 15) - 1

  if (length < CHUNK) {
    return String.fromCharCode.apply(null, input)
  }

  let result = ''
  for (let i = 0; i < length; i += CHUNK) {
    result += String.fromCharCode.apply(null, input.subarray(i, Math.min(i + CHUNK, length)))
  }
  return result
}
|
||||
|
||||
// Matches any code point above U+00FF, which isomorphic encode cannot represent.
const invalidIsomorphicEncodeValueRegex = /[^\x00-\xFF]/ // eslint-disable-line no-control-regex

/**
 * Isomorphic encode: asserts every code point of `input` is at most U+00FF
 * and returns the string unchanged (its code points ARE the byte values).
 *
 * @see https://infra.spec.whatwg.org/#isomorphic-encode
 * @param {string} input
 * @returns {string}
 */
function isomorphicEncode (input) {
  // A code point above U+00FF has no single-byte representation; hitting
  // this assertion indicates a caller bug.
  assert(!invalidIsomorphicEncodeValueRegex.test(input))
  return input
}
|
||||
|
||||
/**
 * Parses JSON bytes to a JavaScript value: UTF-8 decodes the bytes, then
 * parses the resulting text as JSON.
 *
 * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
 * @param {Uint8Array} bytes
 * @returns {any}
 */
function parseJSONFromBytes (bytes) {
  const text = utf8DecodeBytes(bytes)
  return JSON.parse(text)
}
|
||||
|
||||
/**
 * Strips leading and/or trailing ASCII whitespace from `str` by delegating to
 * removeChars with the isASCIIWhitespace predicate.
 *
 * @param {string} str
 * @param {boolean} [leading=true] - strip from the start when true
 * @param {boolean} [trailing=true] - strip from the end when true
 * @returns {string}
 *
 * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace
 */
function removeASCIIWhitespace (str, leading = true, trailing = true) {
  return removeChars(str, leading, trailing, isASCIIWhitespace)
}
|
||||
|
||||
/**
 * Trims characters matching `predicate` from the start and/or end of `str`.
 *
 * @param {string} str
 * @param {boolean} leading - trim from the start when true
 * @param {boolean} trailing - trim from the end when true
 * @param {(charCode: number) => boolean} predicate
 * @returns {string} the original string object when nothing was trimmed
 */
function removeChars (str, leading, trailing, predicate) {
  const length = str.length
  let start = 0
  let end = length - 1

  if (leading) {
    // Advance past every leading char code the predicate accepts.
    while (start < length && predicate(str.charCodeAt(start))) {
      ++start
    }
  }

  if (trailing) {
    // Back up past every trailing char code the predicate accepts; index 0
    // is never consumed from this side.
    while (end > 0 && predicate(str.charCodeAt(end))) {
      --end
    }
  }

  // Avoid allocating a new string when nothing was trimmed.
  if (start === 0 && end === length - 1) {
    return str
  }
  return str.slice(start, end + 1)
}
|
||||
|
||||
/**
 * Serializes a JavaScript value to a JSON string, throwing a TypeError when
 * the value is not JSON-serializable (e.g. undefined or a bare function).
 *
 * @see https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string
 * @param {any} value
 * @returns {string}
 */
function serializeJavascriptValueToJSONString (value) {
  // %JSON.stringify% yields undefined for unserializable values instead of
  // throwing; the spec requires surfacing that as a TypeError.
  const result = JSON.stringify(value)
  if (result === undefined) {
    throw new TypeError('Value is not JSON serializable')
  }

  assert(typeof result === 'string')
  return result
}
|
||||
|
||||
// Public surface of the WHATWG "infra" primitive helpers used by the fetch
// implementation (code-point collection, forgiving base64, isomorphic
// encode/decode, whitespace stripping, and JSON helpers).
module.exports = {
  collectASequenceOfCodePoints,
  collectASequenceOfCodePointsFast,
  forgivingBase64,
  isASCIIWhitespace,
  isomorphicDecode,
  isomorphicEncode,
  parseJSONFromBytes,
  removeASCIIWhitespace,
  removeChars,
  serializeJavascriptValueToJSONString
}
|
||||
9
backend/node_modules/undici/lib/web/subresource-integrity/Readme.md
generated
vendored
Normal file
9
backend/node_modules/undici/lib/web/subresource-integrity/Readme.md
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
# Subresource Integrity
|
||||
|
||||
Based on the Editor’s Draft of 12 June 2025.
|
||||
|
||||
This module provides support for Subresource Integrity (SRI) in the context of web fetch operations. SRI is a security feature that allows clients to verify that fetched resources are delivered without unexpected manipulation.
|
||||
|
||||
## Links
|
||||
|
||||
- [Subresource Integrity](https://w3c.github.io/webappsec-subresource-integrity/)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user