mirror of
https://gitlab.com/upRootNutrition/zookeeper.git
synced 2025-06-16 18:35:13 -05:00
feat: init
This commit is contained in:
parent
8379d09058
commit
2cfa016090
2929 changed files with 299087 additions and 3 deletions
290
node_modules/undici/lib/web/fileapi/encoding.js
generated
vendored
Normal file
290
node_modules/undici/lib/web/fileapi/encoding.js
generated
vendored
Normal file
|
@@ -0,0 +1,290 @@
|
|||
'use strict'
|
||||
|
||||
/**
 * Label → canonical encoding name lookup, built once at module load from
 * the table in the WHATWG Encoding Standard. Keys are the normalized
 * (lowercased) labels.
 * @see https://encoding.spec.whatwg.org/#concept-encoding-get
 */
const labelToEncoding = new Map()

for (const [encoding, labels] of [
  ['UTF-8', ['unicode-1-1-utf-8', 'unicode11utf8', 'unicode20utf8', 'utf-8', 'utf8', 'x-unicode20utf8']],
  ['IBM866', ['866', 'cp866', 'csibm866', 'ibm866']],
  ['ISO-8859-2', ['csisolatin2', 'iso-8859-2', 'iso-ir-101', 'iso8859-2', 'iso88592', 'iso_8859-2', 'iso_8859-2:1987', 'l2', 'latin2']],
  ['ISO-8859-3', ['csisolatin3', 'iso-8859-3', 'iso-ir-109', 'iso8859-3', 'iso88593', 'iso_8859-3', 'iso_8859-3:1988', 'l3', 'latin3']],
  ['ISO-8859-4', ['csisolatin4', 'iso-8859-4', 'iso-ir-110', 'iso8859-4', 'iso88594', 'iso_8859-4', 'iso_8859-4:1988', 'l4', 'latin4']],
  ['ISO-8859-5', ['csisolatincyrillic', 'cyrillic', 'iso-8859-5', 'iso-ir-144', 'iso8859-5', 'iso88595', 'iso_8859-5', 'iso_8859-5:1988']],
  ['ISO-8859-6', ['arabic', 'asmo-708', 'csiso88596e', 'csiso88596i', 'csisolatinarabic', 'ecma-114', 'iso-8859-6', 'iso-8859-6-e', 'iso-8859-6-i', 'iso-ir-127', 'iso8859-6', 'iso88596', 'iso_8859-6', 'iso_8859-6:1987']],
  ['ISO-8859-7', ['csisolatingreek', 'ecma-118', 'elot_928', 'greek', 'greek8', 'iso-8859-7', 'iso-ir-126', 'iso8859-7', 'iso88597', 'iso_8859-7', 'iso_8859-7:1987', 'sun_eu_greek']],
  ['ISO-8859-8', ['csiso88598e', 'csisolatinhebrew', 'hebrew', 'iso-8859-8', 'iso-8859-8-e', 'iso-ir-138', 'iso8859-8', 'iso88598', 'iso_8859-8', 'iso_8859-8:1988', 'visual']],
  ['ISO-8859-8-I', ['csiso88598i', 'iso-8859-8-i', 'logical']],
  ['ISO-8859-10', ['csisolatin6', 'iso-8859-10', 'iso-ir-157', 'iso8859-10', 'iso885910', 'l6', 'latin6']],
  ['ISO-8859-13', ['iso-8859-13', 'iso8859-13', 'iso885913']],
  ['ISO-8859-14', ['iso-8859-14', 'iso8859-14', 'iso885914']],
  ['ISO-8859-15', ['csisolatin9', 'iso-8859-15', 'iso8859-15', 'iso885915', 'iso_8859-15', 'l9']],
  ['ISO-8859-16', ['iso-8859-16']],
  ['KOI8-R', ['cskoi8r', 'koi', 'koi8', 'koi8-r', 'koi8_r']],
  ['KOI8-U', ['koi8-ru', 'koi8-u']],
  ['macintosh', ['csmacintosh', 'mac', 'macintosh', 'x-mac-roman']],
  ['windows-874', ['iso-8859-11', 'iso8859-11', 'iso885911', 'tis-620', 'windows-874']],
  ['windows-1250', ['cp1250', 'windows-1250', 'x-cp1250']],
  ['windows-1251', ['cp1251', 'windows-1251', 'x-cp1251']],
  ['windows-1252', ['ansi_x3.4-1968', 'ascii', 'cp1252', 'cp819', 'csisolatin1', 'ibm819', 'iso-8859-1', 'iso-ir-100', 'iso8859-1', 'iso88591', 'iso_8859-1', 'iso_8859-1:1987', 'l1', 'latin1', 'us-ascii', 'windows-1252', 'x-cp1252']],
  ['windows-1253', ['cp1253', 'windows-1253', 'x-cp1253']],
  ['windows-1254', ['cp1254', 'csisolatin5', 'iso-8859-9', 'iso-ir-148', 'iso8859-9', 'iso88599', 'iso_8859-9', 'iso_8859-9:1989', 'l5', 'latin5', 'windows-1254', 'x-cp1254']],
  ['windows-1255', ['cp1255', 'windows-1255', 'x-cp1255']],
  ['windows-1256', ['cp1256', 'windows-1256', 'x-cp1256']],
  ['windows-1257', ['cp1257', 'windows-1257', 'x-cp1257']],
  ['windows-1258', ['cp1258', 'windows-1258', 'x-cp1258']],
  ['x-mac-cyrillic', ['x-mac-cyrillic', 'x-mac-ukrainian']],
  ['GBK', ['chinese', 'csgb2312', 'csiso58gb231280', 'gb2312', 'gb_2312', 'gb_2312-80', 'gbk', 'iso-ir-58', 'x-gbk']],
  ['gb18030', ['gb18030']],
  ['Big5', ['big5', 'big5-hkscs', 'cn-big5', 'csbig5', 'x-x-big5']],
  ['EUC-JP', ['cseucpkdfmtjapanese', 'euc-jp', 'x-euc-jp']],
  ['ISO-2022-JP', ['csiso2022jp', 'iso-2022-jp']],
  ['Shift_JIS', ['csshiftjis', 'ms932', 'ms_kanji', 'shift-jis', 'shift_jis', 'sjis', 'windows-31j', 'x-sjis']],
  ['EUC-KR', ['cseuckr', 'csksc56011987', 'euc-kr', 'iso-ir-149', 'korean', 'ks_c_5601-1987', 'ks_c_5601-1989', 'ksc5601', 'ksc_5601', 'windows-949']],
  ['replacement', ['csiso2022kr', 'hz-gb-2312', 'iso-2022-cn', 'iso-2022-cn-ext', 'iso-2022-kr', 'replacement']],
  ['UTF-16BE', ['unicodefffe', 'utf-16be']],
  ['UTF-16LE', ['csunicode', 'iso-10646-ucs-2', 'ucs-2', 'unicode', 'unicodefeff', 'utf-16', 'utf-16le']],
  ['x-user-defined', ['x-user-defined']]
]) {
  for (const label of labels) {
    labelToEncoding.set(label, encoding)
  }
}

/**
 * Resolves an encoding label to its canonical encoding name.
 *
 * 1. Leading/trailing whitespace is removed from label.
 * 2. If label is an ASCII case-insensitive match for a label in the
 *    table above, the corresponding encoding is returned; otherwise
 *    the string 'failure' is returned.
 *
 * @see https://encoding.spec.whatwg.org/#concept-encoding-get
 * @param {string|undefined} label
 * @returns {string} canonical encoding name, or 'failure'
 */
function getEncoding (label) {
  if (!label) {
    return 'failure'
  }

  return labelToEncoding.get(label.trim().toLowerCase()) ?? 'failure'
}
|
||||
|
||||
// Public API of this module: the label -> encoding resolution algorithm.
module.exports = {
  getEncoding
}
|
344
node_modules/undici/lib/web/fileapi/filereader.js
generated
vendored
Normal file
344
node_modules/undici/lib/web/fileapi/filereader.js
generated
vendored
Normal file
|
@@ -0,0 +1,344 @@
|
|||
'use strict'
|
||||
|
||||
const {
|
||||
staticPropertyDescriptors,
|
||||
readOperation,
|
||||
fireAProgressEvent
|
||||
} = require('./util')
|
||||
const {
|
||||
kState,
|
||||
kError,
|
||||
kResult,
|
||||
kEvents,
|
||||
kAborted
|
||||
} = require('./symbols')
|
||||
const { webidl } = require('../fetch/webidl')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
|
||||
/**
 * @see https://w3c.github.io/FileAPI/#filereader-api
 */
class FileReader extends EventTarget {
  constructor () {
    super()

    // Internal slots: reader state, last result, last error, and the
    // callbacks currently installed through the on* attributes.
    this[kState] = 'empty'
    this[kResult] = null
    this[kError] = null
    this[kEvents] = {
      loadend: null,
      error: null,
      abort: null,
      load: null,
      progress: null,
      loadstart: null
    }
  }

  /**
   * Removes the previously installed listener for `type` (if any) and
   * installs `handler` in its place. Backs every on* event-handler
   * attribute below.
   * @param {string} type
   * @param {*} handler function to install, or anything else to clear
   */
  #setEventHandler (type, handler) {
    const previous = this[kEvents][type]

    if (previous) {
      this.removeEventListener(type, previous)
    }

    if (typeof handler === 'function') {
      this[kEvents][type] = handler
      this.addEventListener(type, handler)
    } else {
      this[kEvents][type] = null
    }
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
   * @param {import('buffer').Blob} blob
   */
  readAsArrayBuffer (blob) {
    webidl.brandCheck(this, FileReader)
    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' })

    blob = webidl.converters.Blob(blob, { strict: false })

    // Start a read operation for blob producing an ArrayBuffer.
    readOperation(this, blob, 'ArrayBuffer')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#readAsBinaryString
   * @param {import('buffer').Blob} blob
   */
  readAsBinaryString (blob) {
    webidl.brandCheck(this, FileReader)
    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' })

    blob = webidl.converters.Blob(blob, { strict: false })

    // Start a read operation for blob producing a binary string.
    readOperation(this, blob, 'BinaryString')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#readAsDataText
   * @param {import('buffer').Blob} blob
   * @param {string?} encoding
   */
  readAsText (blob, encoding = undefined) {
    webidl.brandCheck(this, FileReader)
    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' })

    blob = webidl.converters.Blob(blob, { strict: false })

    if (encoding !== undefined) {
      encoding = webidl.converters.DOMString(encoding)
    }

    // Start a read operation for blob producing text in `encoding`.
    readOperation(this, blob, 'Text', encoding)
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL
   * @param {import('buffer').Blob} blob
   */
  readAsDataURL (blob) {
    webidl.brandCheck(this, FileReader)
    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' })

    blob = webidl.converters.Blob(blob, { strict: false })

    // Start a read operation for blob producing a data: URL.
    readOperation(this, blob, 'DataURL')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-abort
   */
  abort () {
    // A reader that never started, or already finished, only has its
    // result cleared — no events fire.
    if (this[kState] === 'empty' || this[kState] === 'done') {
      this[kResult] = null
      return
    }

    if (this[kState] === 'loading') {
      this[kState] = 'done'
      this[kResult] = null
    }

    // Flag checked by the in-flight read operation (see util.js) so it
    // stops consuming the stream.
    this[kAborted] = true

    // TODO: terminate the algorithm for the read method being processed.

    fireAProgressEvent('abort', this)

    // State was just forced out of "loading" above, so loadend follows.
    if (this[kState] !== 'loading') {
      fireAProgressEvent('loadend', this)
    }
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate
   */
  get readyState () {
    webidl.brandCheck(this, FileReader)

    if (this[kState] === 'empty') return this.EMPTY
    if (this[kState] === 'loading') return this.LOADING
    if (this[kState] === 'done') return this.DONE
  }

  /**
   * Result of the last read operation, or null.
   * @see https://w3c.github.io/FileAPI/#dom-filereader-result
   */
  get result () {
    webidl.brandCheck(this, FileReader)

    return this[kResult]
  }

  /**
   * Error of the last failed read operation, or null.
   * @see https://w3c.github.io/FileAPI/#dom-filereader-error
   */
  get error () {
    webidl.brandCheck(this, FileReader)

    return this[kError]
  }

  get onloadend () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].loadend
  }

  set onloadend (fn) {
    webidl.brandCheck(this, FileReader)

    this.#setEventHandler('loadend', fn)
  }

  get onerror () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].error
  }

  set onerror (fn) {
    webidl.brandCheck(this, FileReader)

    this.#setEventHandler('error', fn)
  }

  get onloadstart () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].loadstart
  }

  set onloadstart (fn) {
    webidl.brandCheck(this, FileReader)

    this.#setEventHandler('loadstart', fn)
  }

  get onprogress () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].progress
  }

  set onprogress (fn) {
    webidl.brandCheck(this, FileReader)

    this.#setEventHandler('progress', fn)
  }

  get onload () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].load
  }

  set onload (fn) {
    webidl.brandCheck(this, FileReader)

    this.#setEventHandler('load', fn)
  }

  get onabort () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].abort
  }

  set onabort (fn) {
    webidl.brandCheck(this, FileReader)

    this.#setEventHandler('abort', fn)
  }
}
|
||||
|
||||
// Ready-state constants. Per WebIDL interface-constant semantics they are
// exposed on both the constructor and the prototype.
// https://w3c.github.io/FileAPI/#dom-filereader-empty
FileReader.EMPTY = FileReader.prototype.EMPTY = 0
// https://w3c.github.io/FileAPI/#dom-filereader-loading
FileReader.LOADING = FileReader.prototype.LOADING = 1
// https://w3c.github.io/FileAPI/#dom-filereader-done
FileReader.DONE = FileReader.prototype.DONE = 2

// Re-install members with the property descriptors the platform expects:
// constants use the read-only `staticPropertyDescriptors`, methods and
// accessors use `kEnumerableProperty` (see ../../core/util).
Object.defineProperties(FileReader.prototype, {
  EMPTY: staticPropertyDescriptors,
  LOADING: staticPropertyDescriptors,
  DONE: staticPropertyDescriptors,
  readAsArrayBuffer: kEnumerableProperty,
  readAsBinaryString: kEnumerableProperty,
  readAsText: kEnumerableProperty,
  readAsDataURL: kEnumerableProperty,
  abort: kEnumerableProperty,
  readyState: kEnumerableProperty,
  result: kEnumerableProperty,
  error: kEnumerableProperty,
  onloadstart: kEnumerableProperty,
  onprogress: kEnumerableProperty,
  onload: kEnumerableProperty,
  onabort: kEnumerableProperty,
  onerror: kEnumerableProperty,
  onloadend: kEnumerableProperty,
  // `Object.prototype.toString` tag: "[object FileReader]".
  [Symbol.toStringTag]: {
    value: 'FileReader',
    writable: false,
    enumerable: false,
    configurable: true
  }
})

// Constants on the constructor itself (FileReader.EMPTY etc.).
Object.defineProperties(FileReader, {
  EMPTY: staticPropertyDescriptors,
  LOADING: staticPropertyDescriptors,
  DONE: staticPropertyDescriptors
})

module.exports = {
  FileReader
}
|
78
node_modules/undici/lib/web/fileapi/progressevent.js
generated
vendored
Normal file
78
node_modules/undici/lib/web/fileapi/progressevent.js
generated
vendored
Normal file
|
@@ -0,0 +1,78 @@
|
|||
'use strict'
|
||||
|
||||
const { webidl } = require('../fetch/webidl')
|
||||
|
||||
const kState = Symbol('ProgressEvent state')
|
||||
|
||||
/**
 * Event fired to report progress of a FileReader read operation.
 * @see https://xhr.spec.whatwg.org/#progressevent
 */
class ProgressEvent extends Event {
  constructor (type, eventInitDict = {}) {
    // Convert the arguments per WebIDL before handing them to Event.
    const eventType = webidl.converters.DOMString(type)
    const init = webidl.converters.ProgressEventInit(eventInitDict ?? {})

    super(eventType, init)

    this[kState] = {
      lengthComputable: init.lengthComputable,
      loaded: init.loaded,
      total: init.total
    }
  }

  // Whether `total` is a meaningful number for this event.
  get lengthComputable () {
    webidl.brandCheck(this, ProgressEvent)

    return this[kState].lengthComputable
  }

  // Units of work done so far.
  get loaded () {
    webidl.brandCheck(this, ProgressEvent)

    return this[kState].loaded
  }

  // Total units of work expected.
  get total () {
    webidl.brandCheck(this, ProgressEvent)

    return this[kState].total
  }
}
|
||||
|
||||
// WebIDL dictionary converter for the ProgressEvent constructor's init
// argument. The first three members are ProgressEventInit proper; the
// last three are the inherited EventInit members, which `super(type, init)`
// in the constructor forwards to Event.
// https://xhr.spec.whatwg.org/#progresseventinit
webidl.converters.ProgressEventInit = webidl.dictionaryConverter([
  {
    key: 'lengthComputable',
    converter: webidl.converters.boolean,
    defaultValue: false
  },
  {
    key: 'loaded',
    converter: webidl.converters['unsigned long long'],
    defaultValue: 0
  },
  {
    key: 'total',
    converter: webidl.converters['unsigned long long'],
    defaultValue: 0
  },
  // EventInit members below.
  {
    key: 'bubbles',
    converter: webidl.converters.boolean,
    defaultValue: false
  },
  {
    key: 'cancelable',
    converter: webidl.converters.boolean,
    defaultValue: false
  },
  {
    key: 'composed',
    converter: webidl.converters.boolean,
    defaultValue: false
  }
])

module.exports = {
  ProgressEvent
}
|
10
node_modules/undici/lib/web/fileapi/symbols.js
generated
vendored
Normal file
10
node_modules/undici/lib/web/fileapi/symbols.js
generated
vendored
Normal file
|
@@ -0,0 +1,10 @@
|
|||
'use strict'

// Private symbols used to store FileReader internal slots without
// exposing them as public property names.
module.exports = {
  // Reader state: 'empty' | 'loading' | 'done' (see filereader.js)
  kState: Symbol('FileReader state'),
  // Result of the last read operation, or null
  kResult: Symbol('FileReader result'),
  // Error from the last failed read operation, or null
  kError: Symbol('FileReader error'),
  // Timestamp (ms) of the last 'progress' event; used in util.js to
  // throttle progress events to roughly one per 50ms
  kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'),
  // Callbacks installed via the on* event-handler attributes
  kEvents: Symbol('FileReader events'),
  // Set true by abort() so the in-flight read operation stops
  kAborted: Symbol('FileReader aborted')
}
|
391
node_modules/undici/lib/web/fileapi/util.js
generated
vendored
Normal file
391
node_modules/undici/lib/web/fileapi/util.js
generated
vendored
Normal file
|
@@ -0,0 +1,391 @@
|
|||
'use strict'
|
||||
|
||||
const {
|
||||
kState,
|
||||
kError,
|
||||
kResult,
|
||||
kAborted,
|
||||
kLastProgressEventFired
|
||||
} = require('./symbols')
|
||||
const { ProgressEvent } = require('./progressevent')
|
||||
const { getEncoding } = require('./encoding')
|
||||
const { serializeAMimeType, parseMIMEType } = require('../fetch/data-url')
|
||||
const { types } = require('node:util')
|
||||
const { StringDecoder } = require('string_decoder')
|
||||
const { btoa } = require('node:buffer')
|
||||
|
||||
/** @type {PropertyDescriptor} */
// Descriptor shared by the EMPTY/LOADING/DONE interface constants:
// visible to enumeration but read-only and non-configurable.
const staticPropertyDescriptors = {
  enumerable: true,
  writable: false,
  configurable: false
}
|
||||
|
||||
/**
 * Starts a read of `blob` on behalf of FileReader `fr`, consuming the
 * blob's stream chunk by chunk and firing loadstart/progress/load/error/
 * loadend events as it goes. Returns immediately; the actual reading
 * happens in a detached async IIFE ("in parallel" in spec terms).
 *
 * @see https://w3c.github.io/FileAPI/#readOperation
 * @param {import('./filereader').FileReader} fr
 * @param {import('buffer').Blob} blob
 * @param {string} type one of 'ArrayBuffer' | 'BinaryString' | 'Text' | 'DataURL'
 * @param {string?} encodingName
 * @throws {DOMException} InvalidStateError if a read is already in flight
 */
function readOperation (fr, blob, type, encodingName) {
  // 1. If fr’s state is "loading", throw an InvalidStateError
  // DOMException.
  if (fr[kState] === 'loading') {
    throw new DOMException('Invalid state', 'InvalidStateError')
  }

  // 2. Set fr’s state to "loading".
  fr[kState] = 'loading'

  // 3. Set fr’s result to null.
  fr[kResult] = null

  // 4. Set fr’s error to null.
  fr[kError] = null

  // 5. Let stream be the result of calling get stream on blob.
  /** @type {import('stream/web').ReadableStream} */
  const stream = blob.stream()

  // 6. Let reader be the result of getting a reader from stream.
  const reader = stream.getReader()

  // 7. Let bytes be an empty byte sequence.
  /** @type {Uint8Array[]} */
  const bytes = []

  // 8. Let chunkPromise be the result of reading a chunk from
  // stream with reader.
  let chunkPromise = reader.read()

  // 9. Let isFirstChunk be true.
  let isFirstChunk = true

  // 10. In parallel, while true:
  // Note: "In parallel" just means non-blocking
  // Note 2: readOperation itself cannot be async as double
  // reading the body would then reject the promise, instead
  // of throwing an error.
  ;(async () => {
    // The loop exits early if abort() flips fr[kAborted] between chunks.
    while (!fr[kAborted]) {
      // 1. Wait for chunkPromise to be fulfilled or rejected.
      try {
        const { done, value } = await chunkPromise

        // 2. If chunkPromise is fulfilled, and isFirstChunk is
        // true, queue a task to fire a progress event called
        // loadstart at fr.
        // (queueMicrotask stands in for "queue a task" here.)
        if (isFirstChunk && !fr[kAborted]) {
          queueMicrotask(() => {
            fireAProgressEvent('loadstart', fr)
          })
        }

        // 3. Set isFirstChunk to false.
        isFirstChunk = false

        // 4. If chunkPromise is fulfilled with an object whose
        // done property is false and whose value property is
        // a Uint8Array object, run these steps:
        if (!done && types.isUint8Array(value)) {
          // 1. Let bs be the byte sequence represented by the
          // Uint8Array object.

          // 2. Append bs to bytes.
          bytes.push(value)

          // 3. If roughly 50ms have passed since these steps
          // were last invoked, queue a task to fire a
          // progress event called progress at fr.
          if (
            (
              fr[kLastProgressEventFired] === undefined ||
              Date.now() - fr[kLastProgressEventFired] >= 50
            ) &&
            !fr[kAborted]
          ) {
            fr[kLastProgressEventFired] = Date.now()
            queueMicrotask(() => {
              fireAProgressEvent('progress', fr)
            })
          }

          // 4. Set chunkPromise to the result of reading a
          // chunk from stream with reader.
          chunkPromise = reader.read()
        } else if (done) {
          // 5. Otherwise, if chunkPromise is fulfilled with an
          // object whose done property is true, queue a task
          // to run the following steps and abort this algorithm:
          queueMicrotask(() => {
            // 1. Set fr’s state to "done".
            fr[kState] = 'done'

            // 2. Let result be the result of package data given
            // bytes, type, blob’s type, and encodingName.
            try {
              const result = packageData(bytes, type, blob.type, encodingName)

              // 4. Else:

              // An abort that raced the final chunk wins: drop the result.
              if (fr[kAborted]) {
                return
              }

              // 1. Set fr’s result to result.
              fr[kResult] = result

              // 2. Fire a progress event called load at the fr.
              fireAProgressEvent('load', fr)
            } catch (error) {
              // 3. If package data threw an exception error:

              // 1. Set fr’s error to error.
              fr[kError] = error

              // 2. Fire a progress event called error at fr.
              fireAProgressEvent('error', fr)
            }

            // 5. If fr’s state is not "loading", fire a progress
            // event called loadend at the fr.
            if (fr[kState] !== 'loading') {
              fireAProgressEvent('loadend', fr)
            }
          })

          break
        }
      } catch (error) {
        // Abort also surfaces as a stream error; in that case the abort
        // path in filereader.js already fired the events.
        if (fr[kAborted]) {
          return
        }

        // 6. Otherwise, if chunkPromise is rejected with an
        // error error, queue a task to run the following
        // steps and abort this algorithm:
        queueMicrotask(() => {
          // 1. Set fr’s state to "done".
          fr[kState] = 'done'

          // 2. Set fr’s error to error.
          fr[kError] = error

          // 3. Fire a progress event called error at fr.
          fireAProgressEvent('error', fr)

          // 4. If fr’s state is not "loading", fire a progress
          // event called loadend at fr.
          if (fr[kState] !== 'loading') {
            fireAProgressEvent('loadend', fr)
          }
        })

        break
      }
    }
  })()
}
|
||||
|
||||
/**
 * Dispatches a ProgressEvent named `e` at `reader`.
 * @see https://w3c.github.io/FileAPI/#fire-a-progress-event
 * @see https://dom.spec.whatwg.org/#concept-event-fire
 * @param {string} e The name of the event
 * @param {import('./filereader').FileReader} reader
 */
function fireAProgressEvent (e, reader) {
  // Progress events never bubble and are never cancelable (spec
  // requires e.bubbles === false and e.cancelable === false).
  reader.dispatchEvent(new ProgressEvent(e, {
    bubbles: false,
    cancelable: false
  }))
}
|
||||
|
||||
/**
 * Converts the raw chunks read from a blob into the result shape the
 * caller asked for ('DataURL' | 'Text' | 'ArrayBuffer' | 'BinaryString').
 *
 * @see https://w3c.github.io/FileAPI/#blob-package-data
 * @param {Uint8Array[]} bytes
 * @param {string} type
 * @param {string?} mimeType
 * @param {string?} encodingName
 */
function packageData (bytes, type, mimeType, encodingName) {
  // 1. A Blob has an associated package data algorithm, given
  // bytes, a type, a optional mimeType, and a optional
  // encodingName, which switches on type and runs the
  // associated steps:

  switch (type) {
    case 'DataURL': {
      // 1. Return bytes as a DataURL [RFC2397] subject to
      // the considerations below:
      // * Use mimeType as part of the Data URL if it is
      //   available in keeping with the Data URL
      //   specification [RFC2397].
      // * If mimeType is not available return a Data URL
      //   without a media-type. [RFC2397].

      // https://datatracker.ietf.org/doc/html/rfc2397#section-3
      // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
      // mediatype := [ type "/" subtype ] *( ";" parameter )
      // data := *urlchar
      // parameter := attribute "=" value
      let dataURL = 'data:'

      const parsed = parseMIMEType(mimeType || 'application/octet-stream')

      if (parsed !== 'failure') {
        dataURL += serializeAMimeType(parsed)
      }

      dataURL += ';base64,'

      // Decode all chunks to one latin1 string and base64-encode it in a
      // single btoa call. Encoding chunk-by-chunk (the previous approach)
      // is incorrect: btoa pads its output with '=' to a multiple of four
      // characters, so any chunk whose byte length is not a multiple of
      // three would inject padding into the middle of the data URL and
      // corrupt it.
      const decoder = new StringDecoder('latin1')

      let binary = ''
      for (const chunk of bytes) {
        binary += decoder.write(chunk)
      }
      binary += decoder.end()

      dataURL += btoa(binary)

      return dataURL
    }
    case 'Text': {
      // 1. Let encoding be failure
      let encoding = 'failure'

      // 2. If the encodingName is present, set encoding to the
      // result of getting an encoding from encodingName.
      if (encodingName) {
        encoding = getEncoding(encodingName)
      }

      // 3. If encoding is failure, and mimeType is present:
      if (encoding === 'failure' && mimeType) {
        // 1. Let type be the result of parse a MIME type
        // given mimeType.
        const type = parseMIMEType(mimeType)

        // 2. If type is not failure, set encoding to the result
        // of getting an encoding from type’s parameters["charset"].
        if (type !== 'failure') {
          encoding = getEncoding(type.parameters.get('charset'))
        }
      }

      // 4. If encoding is failure, then set encoding to UTF-8.
      if (encoding === 'failure') {
        encoding = 'UTF-8'
      }

      // 5. Decode bytes using fallback encoding encoding, and
      // return the result.
      return decode(bytes, encoding)
    }
    case 'ArrayBuffer': {
      // Return a new ArrayBuffer whose contents are bytes.
      const sequence = combineByteSequences(bytes)

      return sequence.buffer
    }
    case 'BinaryString': {
      // Return bytes as a binary string, in which every byte
      // is represented by a code unit of equal value [0..255].
      let binaryString = ''

      const decoder = new StringDecoder('latin1')

      for (const chunk of bytes) {
        binaryString += decoder.write(chunk)
      }

      binaryString += decoder.end()

      return binaryString
    }
  }
}
|
||||
|
||||
/**
 * Decodes the queued chunks into a string. A byte-order mark, when
 * present, overrides the caller-supplied encoding and is stripped from
 * the decoded output.
 * @see https://encoding.spec.whatwg.org/#decode
 * @param {Uint8Array[]} ioQueue
 * @param {string} encoding fallback encoding label
 */
function decode (ioQueue, encoding) {
  const bytes = combineByteSequences(ioQueue)

  // 1. BOM sniff the combined bytes.
  const bomEncoding = BOMSniffing(bytes)

  let effectiveEncoding = encoding
  let start = 0

  // 2. A detected BOM wins over the supplied encoding, and the BOM
  // bytes themselves (3 for UTF-8, 2 for UTF-16) are skipped.
  if (bomEncoding !== null) {
    effectiveEncoding = bomEncoding
    start = bomEncoding === 'UTF-8' ? 3 : 2
  }

  // 3./4. Decode the remaining bytes and return the output.
  return new TextDecoder(effectiveEncoding).decode(bytes.subarray(start))
}
|
||||
|
||||
/**
 * Inspects the first bytes of `ioQueue` for a byte-order mark.
 * @see https://encoding.spec.whatwg.org/#bom-sniff
 * @param {Uint8Array} ioQueue
 * @returns {'UTF-8'|'UTF-16BE'|'UTF-16LE'|null} the encoding signalled
 * by the BOM, or null when no BOM is present
 */
function BOMSniffing (ioQueue) {
  // Peek up to three bytes (reads past the end yield undefined and
  // simply fail every comparison below).
  const b0 = ioQueue[0]
  const b1 = ioQueue[1]
  const b2 = ioQueue[2]

  if (b0 === 0xEF && b1 === 0xBB && b2 === 0xBF) return 'UTF-8'
  if (b0 === 0xFE && b1 === 0xFF) return 'UTF-16BE'
  if (b0 === 0xFF && b1 === 0xFE) return 'UTF-16LE'

  return null
}
|
||||
|
||||
/**
 * Concatenates a list of byte chunks into a single Uint8Array.
 * @param {Uint8Array[]} sequences
 * @returns {Uint8Array}
 */
function combineByteSequences (sequences) {
  // First pass: total length of the combined buffer.
  let total = 0
  for (const sequence of sequences) {
    total += sequence.byteLength
  }

  // Second pass: copy each chunk into place.
  const combined = new Uint8Array(total)
  let cursor = 0
  for (const sequence of sequences) {
    combined.set(sequence, cursor)
    cursor += sequence.byteLength
  }

  return combined
}
|
||||
|
||||
// packageData/decode/BOMSniffing/combineByteSequences stay internal;
// only the helpers FileReader needs are exported.
module.exports = {
  staticPropertyDescriptors,
  readOperation,
  fireAProgressEvent
}
|
Loading…
Add table
Add a link
Reference in a new issue