mirror of https://github.com/ducbao414/win32.run.git
synced 2025-12-18 10:12:50 +09:00
11804 lines · 370 KiB · JavaScript
// This library is PATCHED to expose format objects for LoadingErrors.
// See "__PATCHED_LIB_TO_ADD_THIS__format" below.

/**
 * Modules in this bundle
 * @license
 *
 * anypalette:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Isaiah Odhner <isaiahodhner@gmail.com>
 *   homepage: https://1j01.github.io/anypalette.js/
 *   version: 0.6.0
 *
 * base64-js:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: T. Jameson Little <t.jameson.little@gmail.com>
 *   homepage: https://github.com/beatgammit/base64-js
 *   version: 1.5.1
 *
 * browser-resolve:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Roman Shtylman <shtylman@gmail.com>
 *   homepage: https://github.com/browserify/browser-resolve#readme
 *   version: 2.0.0
 *
 * buffer:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Feross Aboukhadijeh <feross@feross.org>
 *   contributors: Romain Beauxis <toots@rastageeks.org>, James Halliday <mail@substack.net>
 *   homepage: https://github.com/feross/buffer
 *   version: 5.2.1
 *
 * css.escape:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Mathias Bynens
 *   homepage: https://mths.be/cssescape
 *   version: 1.5.1
 *
 * events:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Irakli Gozalishvili <rfobic@gmail.com>
 *   homepage: https://github.com/Gozala/events#readme
 *   version: 3.2.0
 *
 * ieee754:
 *   license: BSD-3-Clause (http://opensource.org/licenses/BSD-3-Clause)
 *   author: Feross Aboukhadijeh <feross@feross.org>
 *   contributors: Romain Beauxis <toots@rastageeks.org>
 *   homepage: https://github.com/feross/ieee754#readme
 *   version: 1.2.1
 *
 * inherits:
 *   license: ISC (http://opensource.org/licenses/ISC)
 *   homepage: https://github.com/isaacs/inherits#readme
 *   version: 2.0.4
 *
 * jdataview:
 *   licenses: WTFPL (http://www.wtfpl.net/about/)
 *   author: Vjeux <vjeuxx@gmail.com>
 *   contributors: Vjeux <vjeuxx@gmail.com>, RReverser <me@rreverser.com>
 *   homepage: http://jDataView.github.io/
 *   version: 2.5.0
 *
 * process:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Roman Shtylman <shtylman@gmail.com>
 *   homepage: https://github.com/shtylman/node-process#readme
 *   version: 0.11.10
 *
 * readable-stream:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   homepage: https://github.com/nodejs/readable-stream#readme
 *   version: 3.6.0
 *
 * safe-buffer:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Feross Aboukhadijeh <feross@feross.org>
 *   homepage: https://github.com/feross/safe-buffer
 *   version: 5.1.2
 *
 * sax:
 *   license: ISC (http://opensource.org/licenses/ISC)
 *   author: Isaac Z. Schlueter <i@izs.me>
 *   homepage: https://github.com/isaacs/sax-js#readme
 *   version: 1.2.4
 *
 * stream-browserify:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: James Halliday <mail@substack.net>
 *   homepage: https://github.com/browserify/stream-browserify
 *   version: 3.0.0
 *
 * string_decoder:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   homepage: https://github.com/nodejs/string_decoder
 *   version: 1.1.1
 *
 * util-deprecate:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Nathan Rajlich <nathan@tootallnate.net>
 *   homepage: https://github.com/TooTallNate/util-deprecate
 *   version: 1.0.2
 *
 * xml-js:
 *   license: MIT (http://opensource.org/licenses/MIT)
 *   author: Yousuf Almarzooqi <ysf953@gmail.com>
 *   homepage: https://github.com/nashwaan/xml-js#readme
 *   version: 1.6.11
 *
 * This header is generated by licensify (https://github.com/twada/licensify)
 */
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.AnyPalette = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){
'use strict'

exports.byteLength = byteLength
exports.toByteArray = toByteArray
exports.fromByteArray = fromByteArray

var lookup = []
var revLookup = []
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array

var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
for (var i = 0, len = code.length; i < len; ++i) {
  lookup[i] = code[i]
  revLookup[code.charCodeAt(i)] = i
}

// Support decoding URL-safe base64 strings, as Node.js does.
// See: https://en.wikipedia.org/wiki/Base64#URL_applications
revLookup['-'.charCodeAt(0)] = 62
revLookup['_'.charCodeAt(0)] = 63

function getLens (b64) {
  var len = b64.length

  if (len % 4 > 0) {
    throw new Error('Invalid string. Length must be a multiple of 4')
  }

  // Trim off extra bytes after placeholder bytes are found
  // See: https://github.com/beatgammit/base64-js/issues/42
  var validLen = b64.indexOf('=')
  if (validLen === -1) validLen = len

  var placeHoldersLen = validLen === len
    ? 0
    : 4 - (validLen % 4)

  return [validLen, placeHoldersLen]
}

// base64 is 4/3 + up to two characters of the original data
function byteLength (b64) {
  var lens = getLens(b64)
  var validLen = lens[0]
  var placeHoldersLen = lens[1]
  return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
}

function _byteLength (b64, validLen, placeHoldersLen) {
  return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
}

function toByteArray (b64) {
  var tmp
  var lens = getLens(b64)
  var validLen = lens[0]
  var placeHoldersLen = lens[1]

  var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))

  var curByte = 0

  // if there are placeholders, only get up to the last complete 4 chars
  var len = placeHoldersLen > 0
    ? validLen - 4
    : validLen

  var i
  for (i = 0; i < len; i += 4) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 18) |
      (revLookup[b64.charCodeAt(i + 1)] << 12) |
      (revLookup[b64.charCodeAt(i + 2)] << 6) |
      revLookup[b64.charCodeAt(i + 3)]
    arr[curByte++] = (tmp >> 16) & 0xFF
    arr[curByte++] = (tmp >> 8) & 0xFF
    arr[curByte++] = tmp & 0xFF
  }

  if (placeHoldersLen === 2) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 2) |
      (revLookup[b64.charCodeAt(i + 1)] >> 4)
    arr[curByte++] = tmp & 0xFF
  }

  if (placeHoldersLen === 1) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 10) |
      (revLookup[b64.charCodeAt(i + 1)] << 4) |
      (revLookup[b64.charCodeAt(i + 2)] >> 2)
    arr[curByte++] = (tmp >> 8) & 0xFF
    arr[curByte++] = tmp & 0xFF
  }

  return arr
}

function tripletToBase64 (num) {
  return lookup[num >> 18 & 0x3F] +
    lookup[num >> 12 & 0x3F] +
    lookup[num >> 6 & 0x3F] +
    lookup[num & 0x3F]
}

function encodeChunk (uint8, start, end) {
  var tmp
  var output = []
  for (var i = start; i < end; i += 3) {
    tmp =
      ((uint8[i] << 16) & 0xFF0000) +
      ((uint8[i + 1] << 8) & 0xFF00) +
      (uint8[i + 2] & 0xFF)
    output.push(tripletToBase64(tmp))
  }
  return output.join('')
}

function fromByteArray (uint8) {
  var tmp
  var len = uint8.length
  var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
  var parts = []
  var maxChunkLength = 16383 // must be multiple of 3

  // go through the array every three bytes, we'll deal with trailing stuff later
  for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
    parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
  }

  // pad the end with zeros, but make sure to not forget the extra bytes
  if (extraBytes === 1) {
    tmp = uint8[len - 1]
    parts.push(
      lookup[tmp >> 2] +
      lookup[(tmp << 4) & 0x3F] +
      '=='
    )
  } else if (extraBytes === 2) {
    tmp = (uint8[len - 2] << 8) + uint8[len - 1]
    parts.push(
      lookup[tmp >> 10] +
      lookup[(tmp >> 4) & 0x3F] +
      lookup[(tmp << 2) & 0x3F] +
      '='
    )
  }

  return parts.join('')
}

},{}],2:[function(require,module,exports){

},{}],3:[function(require,module,exports){
(function (Buffer){(function (){
/*!
 * The buffer module from node.js, for the browser.
 *
 * @author   Feross Aboukhadijeh <https://feross.org>
 * @license  MIT
 */
/* eslint-disable no-proto */

'use strict'

var base64 = require('base64-js')
var ieee754 = require('ieee754')

exports.Buffer = Buffer
exports.SlowBuffer = SlowBuffer
exports.INSPECT_MAX_BYTES = 50

var K_MAX_LENGTH = 0x7fffffff
exports.kMaxLength = K_MAX_LENGTH

/**
 * If `Buffer.TYPED_ARRAY_SUPPORT`:
 *   === true    Use Uint8Array implementation (fastest)
 *   === false   Print warning and recommend using `buffer` v4.x which has an Object
 *               implementation (most compatible, even IE6)
 *
 * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
 * Opera 11.6+, iOS 4.2+.
 *
 * We report that the browser does not support typed arrays if the are not subclassable
 * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array`
 * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support
 * for __proto__ and has a buggy typed array implementation.
 */
Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport()

if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' &&
    typeof console.error === 'function') {
  console.error(
    'This browser lacks typed array (Uint8Array) support which is required by ' +
    '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.'
  )
}

function typedArraySupport () {
  // Can typed array instances can be augmented?
  try {
    var arr = new Uint8Array(1)
    arr.__proto__ = { __proto__: Uint8Array.prototype, foo: function () { return 42 } }
    return arr.foo() === 42
  } catch (e) {
    return false
  }
}

Object.defineProperty(Buffer.prototype, 'parent', {
|
||
enumerable: true,
|
||
get: function () {
|
||
if (!Buffer.isBuffer(this)) return undefined
|
||
return this.buffer
|
||
}
|
||
})
|
||
|
||
Object.defineProperty(Buffer.prototype, 'offset', {
|
||
enumerable: true,
|
||
get: function () {
|
||
if (!Buffer.isBuffer(this)) return undefined
|
||
return this.byteOffset
|
||
}
|
||
})
|
||
|
||
function createBuffer (length) {
|
||
if (length > K_MAX_LENGTH) {
|
||
throw new RangeError('The value "' + length + '" is invalid for option "size"')
|
||
}
|
||
// Return an augmented `Uint8Array` instance
|
||
var buf = new Uint8Array(length)
|
||
buf.__proto__ = Buffer.prototype
|
||
return buf
|
||
}
|
||
|
||
/**
|
||
* The Buffer constructor returns instances of `Uint8Array` that have their
|
||
* prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
|
||
* `Uint8Array`, so the returned instances will have all the node `Buffer` methods
|
||
* and the `Uint8Array` methods. Square bracket notation works as expected -- it
|
||
* returns a single octet.
|
||
*
|
||
* The `Uint8Array` prototype remains unmodified.
|
||
*/
|
||
|
||
function Buffer (arg, encodingOrOffset, length) {
|
||
// Common case.
|
||
if (typeof arg === 'number') {
|
||
if (typeof encodingOrOffset === 'string') {
|
||
throw new TypeError(
|
||
'The "string" argument must be of type string. Received type number'
|
||
)
|
||
}
|
||
return allocUnsafe(arg)
|
||
}
|
||
return from(arg, encodingOrOffset, length)
|
||
}
|
||
|
||
// Fix subarray() in ES2016. See: https://github.com/feross/buffer/pull/97
|
||
if (typeof Symbol !== 'undefined' && Symbol.species != null &&
|
||
Buffer[Symbol.species] === Buffer) {
|
||
Object.defineProperty(Buffer, Symbol.species, {
|
||
value: null,
|
||
configurable: true,
|
||
enumerable: false,
|
||
writable: false
|
||
})
|
||
}
|
||
|
||
Buffer.poolSize = 8192 // not used by this implementation
|
||
|
||
function from (value, encodingOrOffset, length) {
|
||
if (typeof value === 'string') {
|
||
return fromString(value, encodingOrOffset)
|
||
}
|
||
|
||
if (ArrayBuffer.isView(value)) {
|
||
return fromArrayLike(value)
|
||
}
|
||
|
||
if (value == null) {
|
||
throw TypeError(
|
||
'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
|
||
'or Array-like Object. Received type ' + (typeof value)
|
||
)
|
||
}
|
||
|
||
if (isInstance(value, ArrayBuffer) ||
|
||
(value && isInstance(value.buffer, ArrayBuffer))) {
|
||
return fromArrayBuffer(value, encodingOrOffset, length)
|
||
}
|
||
|
||
if (typeof value === 'number') {
|
||
throw new TypeError(
|
||
'The "value" argument must not be of type number. Received type number'
|
||
)
|
||
}
|
||
|
||
var valueOf = value.valueOf && value.valueOf()
|
||
if (valueOf != null && valueOf !== value) {
|
||
return Buffer.from(valueOf, encodingOrOffset, length)
|
||
}
|
||
|
||
var b = fromObject(value)
|
||
if (b) return b
|
||
|
||
if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null &&
|
||
typeof value[Symbol.toPrimitive] === 'function') {
|
||
return Buffer.from(
|
||
value[Symbol.toPrimitive]('string'), encodingOrOffset, length
|
||
)
|
||
}
|
||
|
||
throw new TypeError(
|
||
'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
|
||
'or Array-like Object. Received type ' + (typeof value)
|
||
)
|
||
}
|
||
|
||
/**
|
||
* Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
|
||
* if value is a number.
|
||
* Buffer.from(str[, encoding])
|
||
* Buffer.from(array)
|
||
* Buffer.from(buffer)
|
||
* Buffer.from(arrayBuffer[, byteOffset[, length]])
|
||
**/
|
||
Buffer.from = function (value, encodingOrOffset, length) {
|
||
return from(value, encodingOrOffset, length)
|
||
}
|
||
|
||
// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:
|
||
// https://github.com/feross/buffer/pull/148
|
||
Buffer.prototype.__proto__ = Uint8Array.prototype
|
||
Buffer.__proto__ = Uint8Array
|
||
|
||
function assertSize (size) {
|
||
if (typeof size !== 'number') {
|
||
throw new TypeError('"size" argument must be of type number')
|
||
} else if (size < 0) {
|
||
throw new RangeError('The value "' + size + '" is invalid for option "size"')
|
||
}
|
||
}
|
||
|
||
function alloc (size, fill, encoding) {
|
||
assertSize(size)
|
||
if (size <= 0) {
|
||
return createBuffer(size)
|
||
}
|
||
if (fill !== undefined) {
|
||
// Only pay attention to encoding if it's a string. This
|
||
// prevents accidentally sending in a number that would
|
||
// be interpretted as a start offset.
|
||
return typeof encoding === 'string'
|
||
? createBuffer(size).fill(fill, encoding)
|
||
: createBuffer(size).fill(fill)
|
||
}
|
||
return createBuffer(size)
|
||
}
|
||
|
||
/**
|
||
* Creates a new filled Buffer instance.
|
||
* alloc(size[, fill[, encoding]])
|
||
**/
|
||
Buffer.alloc = function (size, fill, encoding) {
|
||
return alloc(size, fill, encoding)
|
||
}
|
||
|
||
function allocUnsafe (size) {
|
||
assertSize(size)
|
||
return createBuffer(size < 0 ? 0 : checked(size) | 0)
|
||
}
|
||
|
||
/**
|
||
* Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
|
||
* */
|
||
Buffer.allocUnsafe = function (size) {
|
||
return allocUnsafe(size)
|
||
}
|
||
/**
|
||
* Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
||
*/
|
||
Buffer.allocUnsafeSlow = function (size) {
|
||
return allocUnsafe(size)
|
||
}
|
||
|
||
function fromString (string, encoding) {
|
||
if (typeof encoding !== 'string' || encoding === '') {
|
||
encoding = 'utf8'
|
||
}
|
||
|
||
if (!Buffer.isEncoding(encoding)) {
|
||
throw new TypeError('Unknown encoding: ' + encoding)
|
||
}
|
||
|
||
var length = byteLength(string, encoding) | 0
|
||
var buf = createBuffer(length)
|
||
|
||
var actual = buf.write(string, encoding)
|
||
|
||
if (actual !== length) {
|
||
// Writing a hex string, for example, that contains invalid characters will
|
||
// cause everything after the first invalid character to be ignored. (e.g.
|
||
// 'abxxcd' will be treated as 'ab')
|
||
buf = buf.slice(0, actual)
|
||
}
|
||
|
||
return buf
|
||
}
|
||
|
||
function fromArrayLike (array) {
|
||
var length = array.length < 0 ? 0 : checked(array.length) | 0
|
||
var buf = createBuffer(length)
|
||
for (var i = 0; i < length; i += 1) {
|
||
buf[i] = array[i] & 255
|
||
}
|
||
return buf
|
||
}
|
||
|
||
function fromArrayBuffer (array, byteOffset, length) {
|
||
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
||
throw new RangeError('"offset" is outside of buffer bounds')
|
||
}
|
||
|
||
if (array.byteLength < byteOffset + (length || 0)) {
|
||
throw new RangeError('"length" is outside of buffer bounds')
|
||
}
|
||
|
||
var buf
|
||
if (byteOffset === undefined && length === undefined) {
|
||
buf = new Uint8Array(array)
|
||
} else if (length === undefined) {
|
||
buf = new Uint8Array(array, byteOffset)
|
||
} else {
|
||
buf = new Uint8Array(array, byteOffset, length)
|
||
}
|
||
|
||
// Return an augmented `Uint8Array` instance
|
||
buf.__proto__ = Buffer.prototype
|
||
return buf
|
||
}
|
||
|
||
function fromObject (obj) {
|
||
if (Buffer.isBuffer(obj)) {
|
||
var len = checked(obj.length) | 0
|
||
var buf = createBuffer(len)
|
||
|
||
if (buf.length === 0) {
|
||
return buf
|
||
}
|
||
|
||
obj.copy(buf, 0, 0, len)
|
||
return buf
|
||
}
|
||
|
||
if (obj.length !== undefined) {
|
||
if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) {
|
||
return createBuffer(0)
|
||
}
|
||
return fromArrayLike(obj)
|
||
}
|
||
|
||
if (obj.type === 'Buffer' && Array.isArray(obj.data)) {
|
||
return fromArrayLike(obj.data)
|
||
}
|
||
}
|
||
|
||
function checked (length) {
|
||
// Note: cannot use `length < K_MAX_LENGTH` here because that fails when
|
||
// length is NaN (which is otherwise coerced to zero.)
|
||
if (length >= K_MAX_LENGTH) {
|
||
throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
|
||
'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes')
|
||
}
|
||
return length | 0
|
||
}
|
||
|
||
function SlowBuffer (length) {
|
||
if (+length != length) { // eslint-disable-line eqeqeq
|
||
length = 0
|
||
}
|
||
return Buffer.alloc(+length)
|
||
}
|
||
|
||
Buffer.isBuffer = function isBuffer (b) {
|
||
return b != null && b._isBuffer === true &&
|
||
b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false
|
||
}
|
||
|
||
Buffer.compare = function compare (a, b) {
|
||
if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength)
|
||
if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength)
|
||
if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
|
||
throw new TypeError(
|
||
'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array'
|
||
)
|
||
}
|
||
|
||
if (a === b) return 0
|
||
|
||
var x = a.length
|
||
var y = b.length
|
||
|
||
for (var i = 0, len = Math.min(x, y); i < len; ++i) {
|
||
if (a[i] !== b[i]) {
|
||
x = a[i]
|
||
y = b[i]
|
||
break
|
||
}
|
||
}
|
||
|
||
if (x < y) return -1
|
||
if (y < x) return 1
|
||
return 0
|
||
}
|
||
|
||
Buffer.isEncoding = function isEncoding (encoding) {
|
||
switch (String(encoding).toLowerCase()) {
|
||
case 'hex':
|
||
case 'utf8':
|
||
case 'utf-8':
|
||
case 'ascii':
|
||
case 'latin1':
|
||
case 'binary':
|
||
case 'base64':
|
||
case 'ucs2':
|
||
case 'ucs-2':
|
||
case 'utf16le':
|
||
case 'utf-16le':
|
||
return true
|
||
default:
|
||
return false
|
||
}
|
||
}
|
||
|
||
Buffer.concat = function concat (list, length) {
|
||
if (!Array.isArray(list)) {
|
||
throw new TypeError('"list" argument must be an Array of Buffers')
|
||
}
|
||
|
||
if (list.length === 0) {
|
||
return Buffer.alloc(0)
|
||
}
|
||
|
||
var i
|
||
if (length === undefined) {
|
||
length = 0
|
||
for (i = 0; i < list.length; ++i) {
|
||
length += list[i].length
|
||
}
|
||
}
|
||
|
||
var buffer = Buffer.allocUnsafe(length)
|
||
var pos = 0
|
||
for (i = 0; i < list.length; ++i) {
|
||
var buf = list[i]
|
||
if (isInstance(buf, Uint8Array)) {
|
||
buf = Buffer.from(buf)
|
||
}
|
||
if (!Buffer.isBuffer(buf)) {
|
||
throw new TypeError('"list" argument must be an Array of Buffers')
|
||
}
|
||
buf.copy(buffer, pos)
|
||
pos += buf.length
|
||
}
|
||
return buffer
|
||
}
|
||
|
||
function byteLength (string, encoding) {
|
||
if (Buffer.isBuffer(string)) {
|
||
return string.length
|
||
}
|
||
if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) {
|
||
return string.byteLength
|
||
}
|
||
if (typeof string !== 'string') {
|
||
throw new TypeError(
|
||
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' +
|
||
'Received type ' + typeof string
|
||
)
|
||
}
|
||
|
||
var len = string.length
|
||
var mustMatch = (arguments.length > 2 && arguments[2] === true)
|
||
if (!mustMatch && len === 0) return 0
|
||
|
||
// Use a for loop to avoid recursion
|
||
var loweredCase = false
|
||
for (;;) {
|
||
switch (encoding) {
|
||
case 'ascii':
|
||
case 'latin1':
|
||
case 'binary':
|
||
return len
|
||
case 'utf8':
|
||
case 'utf-8':
|
||
return utf8ToBytes(string).length
|
||
case 'ucs2':
|
||
case 'ucs-2':
|
||
case 'utf16le':
|
||
case 'utf-16le':
|
||
return len * 2
|
||
case 'hex':
|
||
return len >>> 1
|
||
case 'base64':
|
||
return base64ToBytes(string).length
|
||
default:
|
||
if (loweredCase) {
|
||
return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8
|
||
}
|
||
encoding = ('' + encoding).toLowerCase()
|
||
loweredCase = true
|
||
}
|
||
}
|
||
}
|
||
Buffer.byteLength = byteLength
|
||
|
||
function slowToString (encoding, start, end) {
|
||
var loweredCase = false
|
||
|
||
// No need to verify that "this.length <= MAX_UINT32" since it's a read-only
|
||
// property of a typed array.
|
||
|
||
// This behaves neither like String nor Uint8Array in that we set start/end
|
||
// to their upper/lower bounds if the value passed is out of range.
|
||
// undefined is handled specially as per ECMA-262 6th Edition,
|
||
// Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
|
||
if (start === undefined || start < 0) {
|
||
start = 0
|
||
}
|
||
// Return early if start > this.length. Done here to prevent potential uint32
|
||
// coercion fail below.
|
||
if (start > this.length) {
|
||
return ''
|
||
}
|
||
|
||
if (end === undefined || end > this.length) {
|
||
end = this.length
|
||
}
|
||
|
||
if (end <= 0) {
|
||
return ''
|
||
}
|
||
|
||
// Force coersion to uint32. This will also coerce falsey/NaN values to 0.
|
||
end >>>= 0
|
||
start >>>= 0
|
||
|
||
if (end <= start) {
|
||
return ''
|
||
}
|
||
|
||
if (!encoding) encoding = 'utf8'
|
||
|
||
while (true) {
|
||
switch (encoding) {
|
||
case 'hex':
|
||
return hexSlice(this, start, end)
|
||
|
||
case 'utf8':
|
||
case 'utf-8':
|
||
return utf8Slice(this, start, end)
|
||
|
||
case 'ascii':
|
||
return asciiSlice(this, start, end)
|
||
|
||
case 'latin1':
|
||
case 'binary':
|
||
return latin1Slice(this, start, end)
|
||
|
||
case 'base64':
|
||
return base64Slice(this, start, end)
|
||
|
||
case 'ucs2':
|
||
case 'ucs-2':
|
||
case 'utf16le':
|
||
case 'utf-16le':
|
||
return utf16leSlice(this, start, end)
|
||
|
||
default:
|
||
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
|
||
encoding = (encoding + '').toLowerCase()
|
||
loweredCase = true
|
||
}
|
||
}
|
||
}
|
||
|
||
// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package)
|
||
// to detect a Buffer instance. It's not possible to use `instanceof Buffer`
|
||
// reliably in a browserify context because there could be multiple different
|
||
// copies of the 'buffer' package in use. This method works even for Buffer
|
||
// instances that were created from another copy of the `buffer` package.
|
||
// See: https://github.com/feross/buffer/issues/154
|
||
Buffer.prototype._isBuffer = true
|
||
|
||
function swap (b, n, m) {
|
||
var i = b[n]
|
||
b[n] = b[m]
|
||
b[m] = i
|
||
}
|
||
|
||
Buffer.prototype.swap16 = function swap16 () {
|
||
var len = this.length
|
||
if (len % 2 !== 0) {
|
||
throw new RangeError('Buffer size must be a multiple of 16-bits')
|
||
}
|
||
for (var i = 0; i < len; i += 2) {
|
||
swap(this, i, i + 1)
|
||
}
|
||
return this
|
||
}
|
||
|
||
Buffer.prototype.swap32 = function swap32 () {
|
||
var len = this.length
|
||
if (len % 4 !== 0) {
|
||
throw new RangeError('Buffer size must be a multiple of 32-bits')
|
||
}
|
||
for (var i = 0; i < len; i += 4) {
|
||
swap(this, i, i + 3)
|
||
swap(this, i + 1, i + 2)
|
||
}
|
||
return this
|
||
}
|
||
|
||
Buffer.prototype.swap64 = function swap64 () {
|
||
var len = this.length
|
||
if (len % 8 !== 0) {
|
||
throw new RangeError('Buffer size must be a multiple of 64-bits')
|
||
}
|
||
for (var i = 0; i < len; i += 8) {
|
||
swap(this, i, i + 7)
|
||
swap(this, i + 1, i + 6)
|
||
swap(this, i + 2, i + 5)
|
||
swap(this, i + 3, i + 4)
|
||
}
|
||
return this
|
||
}
|
||
|
||
Buffer.prototype.toString = function toString () {
|
||
var length = this.length
|
||
if (length === 0) return ''
|
||
if (arguments.length === 0) return utf8Slice(this, 0, length)
|
||
return slowToString.apply(this, arguments)
|
||
}
|
||
|
||
Buffer.prototype.toLocaleString = Buffer.prototype.toString
|
||
|
||
Buffer.prototype.equals = function equals (b) {
|
||
if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
|
||
if (this === b) return true
|
||
return Buffer.compare(this, b) === 0
|
||
}
|
||
|
||
Buffer.prototype.inspect = function inspect () {
|
||
var str = ''
|
||
var max = exports.INSPECT_MAX_BYTES
|
||
str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim()
|
||
if (this.length > max) str += ' ... '
|
||
return '<Buffer ' + str + '>'
|
||
}
|
||
|
||
Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
|
||
if (isInstance(target, Uint8Array)) {
|
||
target = Buffer.from(target, target.offset, target.byteLength)
|
||
}
|
||
if (!Buffer.isBuffer(target)) {
|
||
throw new TypeError(
|
||
'The "target" argument must be one of type Buffer or Uint8Array. ' +
|
||
'Received type ' + (typeof target)
|
||
)
|
||
}
|
||
|
||
if (start === undefined) {
|
||
start = 0
|
||
}
|
||
if (end === undefined) {
|
||
end = target ? target.length : 0
|
||
}
|
||
if (thisStart === undefined) {
|
||
thisStart = 0
|
||
}
|
||
if (thisEnd === undefined) {
|
||
thisEnd = this.length
|
||
}
|
||
|
||
if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
|
||
throw new RangeError('out of range index')
|
||
}
|
||
|
||
if (thisStart >= thisEnd && start >= end) {
|
||
return 0
|
||
}
|
||
if (thisStart >= thisEnd) {
|
||
return -1
|
||
}
|
||
if (start >= end) {
|
||
return 1
|
||
}
|
||
|
||
start >>>= 0
|
||
end >>>= 0
|
||
thisStart >>>= 0
|
||
thisEnd >>>= 0
|
||
|
||
if (this === target) return 0
|
||
|
||
var x = thisEnd - thisStart
|
||
var y = end - start
|
||
var len = Math.min(x, y)
|
||
|
||
var thisCopy = this.slice(thisStart, thisEnd)
|
||
var targetCopy = target.slice(start, end)
|
||
|
||
for (var i = 0; i < len; ++i) {
|
||
if (thisCopy[i] !== targetCopy[i]) {
|
||
x = thisCopy[i]
|
||
y = targetCopy[i]
|
||
break
|
||
}
|
||
}
|
||
|
||
if (x < y) return -1
|
||
if (y < x) return 1
|
||
return 0
|
||
}
|
||
|
||
// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
|
||
// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
|
||
//
|
||
// Arguments:
|
||
// - buffer - a Buffer to search
|
||
// - val - a string, Buffer, or number
|
||
// - byteOffset - an index into `buffer`; will be clamped to an int32
|
||
// - encoding - an optional encoding, relevant is val is a string
|
||
// - dir - true for indexOf, false for lastIndexOf
|
||
function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
|
||
// Empty buffer means no match
|
||
if (buffer.length === 0) return -1
|
||
|
||
// Normalize byteOffset
|
||
if (typeof byteOffset === 'string') {
|
||
encoding = byteOffset
|
||
byteOffset = 0
|
||
} else if (byteOffset > 0x7fffffff) {
|
||
byteOffset = 0x7fffffff
|
||
} else if (byteOffset < -0x80000000) {
|
||
byteOffset = -0x80000000
|
||
}
|
||
byteOffset = +byteOffset // Coerce to Number.
|
||
if (numberIsNaN(byteOffset)) {
|
||
// byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer
|
||
byteOffset = dir ? 0 : (buffer.length - 1)
|
||
}
|
||
|
||
// Normalize byteOffset: negative offsets start from the end of the buffer
|
||
if (byteOffset < 0) byteOffset = buffer.length + byteOffset
|
||
if (byteOffset >= buffer.length) {
|
||
if (dir) return -1
|
||
else byteOffset = buffer.length - 1
|
||
} else if (byteOffset < 0) {
|
||
if (dir) byteOffset = 0
|
||
else return -1
|
||
}
|
||
|
||
// Normalize val
|
||
if (typeof val === 'string') {
|
||
val = Buffer.from(val, encoding)
|
||
}
|
||
|
||
// Finally, search either indexOf (if dir is true) or lastIndexOf
|
||
if (Buffer.isBuffer(val)) {
|
||
// Special case: looking for empty string/buffer always fails
|
||
if (val.length === 0) {
|
||
return -1
|
||
}
|
||
return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
|
||
} else if (typeof val === 'number') {
|
||
val = val & 0xFF // Search for a byte value [0-255]
|
||
if (typeof Uint8Array.prototype.indexOf === 'function') {
|
||
if (dir) {
|
||
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
|
||
} else {
|
||
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
|
||
}
|
||
}
|
||
return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)
|
||
}
|
||
|
||
throw new TypeError('val must be string, number or Buffer')
|
||
}
|
||
|
||
function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
|
||
var indexSize = 1
|
||
var arrLength = arr.length
|
||
var valLength = val.length
|
||
|
||
if (encoding !== undefined) {
|
||
encoding = String(encoding).toLowerCase()
|
||
if (encoding === 'ucs2' || encoding === 'ucs-2' ||
|
||
encoding === 'utf16le' || encoding === 'utf-16le') {
|
||
if (arr.length < 2 || val.length < 2) {
|
||
return -1
|
||
}
|
||
indexSize = 2
|
||
arrLength /= 2
|
||
valLength /= 2
|
||
byteOffset /= 2
|
||
}
|
||
}
|
||
|
||
function read (buf, i) {
|
||
if (indexSize === 1) {
|
||
return buf[i]
|
||
} else {
|
||
return buf.readUInt16BE(i * indexSize)
|
||
}
|
||
}
|
||
|
||
var i
|
||
if (dir) {
|
||
var foundIndex = -1
|
||
for (i = byteOffset; i < arrLength; i++) {
|
||
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
||
if (foundIndex === -1) foundIndex = i
|
||
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
|
||
} else {
|
||
if (foundIndex !== -1) i -= i - foundIndex
|
||
foundIndex = -1
|
||
}
|
||
}
|
||
} else {
|
||
if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength
|
||
for (i = byteOffset; i >= 0; i--) {
|
||
var found = true
|
||
for (var j = 0; j < valLength; j++) {
|
||
if (read(arr, i + j) !== read(val, j)) {
|
||
found = false
|
||
break
|
||
}
|
||
}
|
||
if (found) return i
|
||
}
|
||
}
|
||
|
||
return -1
|
||
}
|
||
|
||
Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
|
||
return this.indexOf(val, byteOffset, encoding) !== -1
|
||
}
|
||
|
||
Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
|
||
return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
|
||
}
|
||
|
||
Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
|
||
return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
|
||
}
|
||
|
||
function hexWrite (buf, string, offset, length) {
|
||
offset = Number(offset) || 0
|
||
var remaining = buf.length - offset
|
||
if (!length) {
|
||
length = remaining
|
||
} else {
|
||
length = Number(length)
|
||
if (length > remaining) {
|
||
length = remaining
|
||
}
|
||
}
|
||
|
||
var strLen = string.length
|
||
|
||
if (length > strLen / 2) {
|
||
length = strLen / 2
|
||
}
|
||
for (var i = 0; i < length; ++i) {
|
||
var parsed = parseInt(string.substr(i * 2, 2), 16)
|
||
if (numberIsNaN(parsed)) return i
|
||
buf[offset + i] = parsed
|
||
}
|
||
return i
|
||
}
|
||
|
||
function utf8Write (buf, string, offset, length) {
|
||
return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
|
||
}
|
||
|
||
function asciiWrite (buf, string, offset, length) {
|
||
return blitBuffer(asciiToBytes(string), buf, offset, length)
|
||
}
|
||
|
||
function latin1Write (buf, string, offset, length) {
|
||
return asciiWrite(buf, string, offset, length)
|
||
}
|
||
|
||
function base64Write (buf, string, offset, length) {
|
||
return blitBuffer(base64ToBytes(string), buf, offset, length)
|
||
}
|
||
|
||
function ucs2Write (buf, string, offset, length) {
|
||
return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
|
||
}
|
||
|
||
Buffer.prototype.write = function write (string, offset, length, encoding) {
|
||
// Buffer#write(string)
|
||
if (offset === undefined) {
|
||
encoding = 'utf8'
|
||
length = this.length
|
||
offset = 0
|
||
// Buffer#write(string, encoding)
|
||
} else if (length === undefined && typeof offset === 'string') {
|
||
encoding = offset
|
||
length = this.length
|
||
offset = 0
|
||
// Buffer#write(string, offset[, length][, encoding])
|
||
} else if (isFinite(offset)) {
|
||
offset = offset >>> 0
|
||
if (isFinite(length)) {
|
||
length = length >>> 0
|
||
if (encoding === undefined) encoding = 'utf8'
|
||
} else {
|
||
encoding = length
|
||
length = undefined
|
||
}
|
||
} else {
|
||
throw new Error(
|
||
'Buffer.write(string, encoding, offset[, length]) is no longer supported'
|
||
)
|
||
}
|
||
|
||
var remaining = this.length - offset
|
||
if (length === undefined || length > remaining) length = remaining
|
||
|
||
if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
|
||
throw new RangeError('Attempt to write outside buffer bounds')
|
||
}
|
||
|
||
if (!encoding) encoding = 'utf8'
|
||
|
||
var loweredCase = false
|
||
for (;;) {
|
||
switch (encoding) {
|
||
case 'hex':
|
||
return hexWrite(this, string, offset, length)
|
||
|
||
case 'utf8':
|
||
case 'utf-8':
|
||
return utf8Write(this, string, offset, length)
|
||
|
||
case 'ascii':
|
||
return asciiWrite(this, string, offset, length)
|
||
|
||
case 'latin1':
|
||
case 'binary':
|
||
return latin1Write(this, string, offset, length)
|
||
|
||
case 'base64':
|
||
// Warning: maxLength not taken into account in base64Write
|
||
return base64Write(this, string, offset, length)
|
||
|
||
case 'ucs2':
|
||
case 'ucs-2':
|
||
case 'utf16le':
|
||
case 'utf-16le':
|
||
return ucs2Write(this, string, offset, length)
|
||
|
||
default:
|
||
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
|
||
encoding = ('' + encoding).toLowerCase()
|
||
loweredCase = true
|
||
}
|
||
}
|
||
}
|
||
|
||
Buffer.prototype.toJSON = function toJSON () {
|
||
return {
|
||
type: 'Buffer',
|
||
data: Array.prototype.slice.call(this._arr || this, 0)
|
||
}
|
||
}
|
||
|
||
function base64Slice (buf, start, end) {
|
||
if (start === 0 && end === buf.length) {
|
||
return base64.fromByteArray(buf)
|
||
} else {
|
||
return base64.fromByteArray(buf.slice(start, end))
|
||
}
|
||
}
|
||
|
||
function utf8Slice (buf, start, end) {
|
||
end = Math.min(buf.length, end)
|
||
var res = []
|
||
|
||
var i = start
|
||
while (i < end) {
|
||
var firstByte = buf[i]
|
||
var codePoint = null
|
||
var bytesPerSequence = (firstByte > 0xEF) ? 4
|
||
: (firstByte > 0xDF) ? 3
|
||
: (firstByte > 0xBF) ? 2
|
||
: 1
|
||
|
||
if (i + bytesPerSequence <= end) {
|
||
var secondByte, thirdByte, fourthByte, tempCodePoint
|
||
|
||
switch (bytesPerSequence) {
|
||
case 1:
|
||
if (firstByte < 0x80) {
|
||
codePoint = firstByte
|
||
}
|
||
break
|
||
case 2:
|
||
secondByte = buf[i + 1]
|
||
if ((secondByte & 0xC0) === 0x80) {
|
||
tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)
|
||
if (tempCodePoint > 0x7F) {
|
||
codePoint = tempCodePoint
|
||
}
|
||
}
|
||
break
|
||
case 3:
|
||
secondByte = buf[i + 1]
|
||
thirdByte = buf[i + 2]
|
||
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
|
||
tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)
|
||
if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
|
||
codePoint = tempCodePoint
|
||
}
|
||
}
|
||
break
|
||
case 4:
|
||
secondByte = buf[i + 1]
|
||
thirdByte = buf[i + 2]
|
||
fourthByte = buf[i + 3]
|
||
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
|
||
tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)
|
||
if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
|
||
codePoint = tempCodePoint
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
if (codePoint === null) {
|
||
// we did not generate a valid codePoint so insert a
|
||
// replacement char (U+FFFD) and advance only 1 byte
|
||
codePoint = 0xFFFD
|
||
bytesPerSequence = 1
|
||
} else if (codePoint > 0xFFFF) {
|
||
// encode to utf16 (surrogate pair dance)
|
||
codePoint -= 0x10000
|
||
res.push(codePoint >>> 10 & 0x3FF | 0xD800)
|
||
codePoint = 0xDC00 | codePoint & 0x3FF
|
||
}
|
||
|
||
res.push(codePoint)
|
||
i += bytesPerSequence
|
||
}
|
||
|
||
return decodeCodePointsArray(res)
|
||
}
|
||
|
||
// Based on http://stackoverflow.com/a/22747272/680742, the browser with
|
||
// the lowest limit is Chrome, with 0x10000 args.
|
||
// We go 1 magnitude less, for safety
|
||
var MAX_ARGUMENTS_LENGTH = 0x1000
|
||
|
||
function decodeCodePointsArray (codePoints) {
|
||
var len = codePoints.length
|
||
if (len <= MAX_ARGUMENTS_LENGTH) {
|
||
return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
|
||
}
|
||
|
||
// Decode in chunks to avoid "call stack size exceeded".
|
||
var res = ''
|
||
var i = 0
|
||
while (i < len) {
|
||
res += String.fromCharCode.apply(
|
||
String,
|
||
codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
|
||
)
|
||
}
|
||
return res
|
||
}
|
||
|
||
function asciiSlice (buf, start, end) {
|
||
var ret = ''
|
||
end = Math.min(buf.length, end)
|
||
|
||
for (var i = start; i < end; ++i) {
|
||
ret += String.fromCharCode(buf[i] & 0x7F)
|
||
}
|
||
return ret
|
||
}
|
||
|
||
function latin1Slice (buf, start, end) {
|
||
var ret = ''
|
||
end = Math.min(buf.length, end)
|
||
|
||
for (var i = start; i < end; ++i) {
|
||
ret += String.fromCharCode(buf[i])
|
||
}
|
||
return ret
|
||
}
|
||
|
||
function hexSlice (buf, start, end) {
|
||
var len = buf.length
|
||
|
||
if (!start || start < 0) start = 0
|
||
if (!end || end < 0 || end > len) end = len
|
||
|
||
var out = ''
|
||
for (var i = start; i < end; ++i) {
|
||
out += toHex(buf[i])
|
||
}
|
||
return out
|
||
}
|
||
|
||
function utf16leSlice (buf, start, end) {
|
||
var bytes = buf.slice(start, end)
|
||
var res = ''
|
||
for (var i = 0; i < bytes.length; i += 2) {
|
||
res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256))
|
||
}
|
||
return res
|
||
}
|
||
|
||
Buffer.prototype.slice = function slice (start, end) {
|
||
var len = this.length
|
||
start = ~~start
|
||
end = end === undefined ? len : ~~end
|
||
|
||
if (start < 0) {
|
||
start += len
|
||
if (start < 0) start = 0
|
||
} else if (start > len) {
|
||
start = len
|
||
}
|
||
|
||
if (end < 0) {
|
||
end += len
|
||
if (end < 0) end = 0
|
||
} else if (end > len) {
|
||
end = len
|
||
}
|
||
|
||
if (end < start) end = start
|
||
|
||
var newBuf = this.subarray(start, end)
|
||
// Return an augmented `Uint8Array` instance
|
||
newBuf.__proto__ = Buffer.prototype
|
||
return newBuf
|
||
}
|
||
|
||
/*
|
||
* Need to make sure that buffer isn't trying to write out of bounds.
|
||
*/
|
||
function checkOffset (offset, ext, length) {
|
||
if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
|
||
if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
|
||
}
|
||
|
||
Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
|
||
offset = offset >>> 0
|
||
byteLength = byteLength >>> 0
|
||
if (!noAssert) checkOffset(offset, byteLength, this.length)
|
||
|
||
var val = this[offset]
|
||
var mul = 1
|
||
var i = 0
|
||
while (++i < byteLength && (mul *= 0x100)) {
|
||
val += this[offset + i] * mul
|
||
}
|
||
|
||
return val
|
||
}
|
||
|
||
Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
|
||
offset = offset >>> 0
|
||
byteLength = byteLength >>> 0
|
||
if (!noAssert) {
|
||
checkOffset(offset, byteLength, this.length)
|
||
}
|
||
|
||
var val = this[offset + --byteLength]
|
||
var mul = 1
|
||
while (byteLength > 0 && (mul *= 0x100)) {
|
||
val += this[offset + --byteLength] * mul
|
||
}
|
||
|
||
return val
|
||
}
|
||
|
||
Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 1, this.length)
|
||
return this[offset]
|
||
}
|
||
|
||
Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 2, this.length)
|
||
return this[offset] | (this[offset + 1] << 8)
|
||
}
|
||
|
||
Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 2, this.length)
|
||
return (this[offset] << 8) | this[offset + 1]
|
||
}
|
||
|
||
Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 4, this.length)
|
||
|
||
return ((this[offset]) |
|
||
(this[offset + 1] << 8) |
|
||
(this[offset + 2] << 16)) +
|
||
(this[offset + 3] * 0x1000000)
|
||
}
|
||
|
||
Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 4, this.length)
|
||
|
||
return (this[offset] * 0x1000000) +
|
||
((this[offset + 1] << 16) |
|
||
(this[offset + 2] << 8) |
|
||
this[offset + 3])
|
||
}
|
||
|
||
Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
|
||
offset = offset >>> 0
|
||
byteLength = byteLength >>> 0
|
||
if (!noAssert) checkOffset(offset, byteLength, this.length)
|
||
|
||
var val = this[offset]
|
||
var mul = 1
|
||
var i = 0
|
||
while (++i < byteLength && (mul *= 0x100)) {
|
||
val += this[offset + i] * mul
|
||
}
|
||
mul *= 0x80
|
||
|
||
if (val >= mul) val -= Math.pow(2, 8 * byteLength)
|
||
|
||
return val
|
||
}
|
||
|
||
Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
|
||
offset = offset >>> 0
|
||
byteLength = byteLength >>> 0
|
||
if (!noAssert) checkOffset(offset, byteLength, this.length)
|
||
|
||
var i = byteLength
|
||
var mul = 1
|
||
var val = this[offset + --i]
|
||
while (i > 0 && (mul *= 0x100)) {
|
||
val += this[offset + --i] * mul
|
||
}
|
||
mul *= 0x80
|
||
|
||
if (val >= mul) val -= Math.pow(2, 8 * byteLength)
|
||
|
||
return val
|
||
}
|
||
|
||
Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 1, this.length)
|
||
if (!(this[offset] & 0x80)) return (this[offset])
|
||
return ((0xff - this[offset] + 1) * -1)
|
||
}
|
||
|
||
Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 2, this.length)
|
||
var val = this[offset] | (this[offset + 1] << 8)
|
||
return (val & 0x8000) ? val | 0xFFFF0000 : val
|
||
}
|
||
|
||
Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 2, this.length)
|
||
var val = this[offset + 1] | (this[offset] << 8)
|
||
return (val & 0x8000) ? val | 0xFFFF0000 : val
|
||
}
|
||
|
||
Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 4, this.length)
|
||
|
||
return (this[offset]) |
|
||
(this[offset + 1] << 8) |
|
||
(this[offset + 2] << 16) |
|
||
(this[offset + 3] << 24)
|
||
}
|
||
|
||
Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 4, this.length)
|
||
|
||
return (this[offset] << 24) |
|
||
(this[offset + 1] << 16) |
|
||
(this[offset + 2] << 8) |
|
||
(this[offset + 3])
|
||
}
|
||
|
||
Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 4, this.length)
|
||
return ieee754.read(this, offset, true, 23, 4)
|
||
}
|
||
|
||
Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 4, this.length)
|
||
return ieee754.read(this, offset, false, 23, 4)
|
||
}
|
||
|
||
Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 8, this.length)
|
||
return ieee754.read(this, offset, true, 52, 8)
|
||
}
|
||
|
||
Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkOffset(offset, 8, this.length)
|
||
return ieee754.read(this, offset, false, 52, 8)
|
||
}
|
||
|
||
function checkInt (buf, value, offset, ext, max, min) {
|
||
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
|
||
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
|
||
if (offset + ext > buf.length) throw new RangeError('Index out of range')
|
||
}
|
||
|
||
Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
byteLength = byteLength >>> 0
|
||
if (!noAssert) {
|
||
var maxBytes = Math.pow(2, 8 * byteLength) - 1
|
||
checkInt(this, value, offset, byteLength, maxBytes, 0)
|
||
}
|
||
|
||
var mul = 1
|
||
var i = 0
|
||
this[offset] = value & 0xFF
|
||
while (++i < byteLength && (mul *= 0x100)) {
|
||
this[offset + i] = (value / mul) & 0xFF
|
||
}
|
||
|
||
return offset + byteLength
|
||
}
|
||
|
||
Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
byteLength = byteLength >>> 0
|
||
if (!noAssert) {
|
||
var maxBytes = Math.pow(2, 8 * byteLength) - 1
|
||
checkInt(this, value, offset, byteLength, maxBytes, 0)
|
||
}
|
||
|
||
var i = byteLength - 1
|
||
var mul = 1
|
||
this[offset + i] = value & 0xFF
|
||
while (--i >= 0 && (mul *= 0x100)) {
|
||
this[offset + i] = (value / mul) & 0xFF
|
||
}
|
||
|
||
return offset + byteLength
|
||
}
|
||
|
||
Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)
|
||
this[offset] = (value & 0xff)
|
||
return offset + 1
|
||
}
|
||
|
||
Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
|
||
this[offset] = (value & 0xff)
|
||
this[offset + 1] = (value >>> 8)
|
||
return offset + 2
|
||
}
|
||
|
||
Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
|
||
this[offset] = (value >>> 8)
|
||
this[offset + 1] = (value & 0xff)
|
||
return offset + 2
|
||
}
|
||
|
||
Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
|
||
this[offset + 3] = (value >>> 24)
|
||
this[offset + 2] = (value >>> 16)
|
||
this[offset + 1] = (value >>> 8)
|
||
this[offset] = (value & 0xff)
|
||
return offset + 4
|
||
}
|
||
|
||
Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
|
||
this[offset] = (value >>> 24)
|
||
this[offset + 1] = (value >>> 16)
|
||
this[offset + 2] = (value >>> 8)
|
||
this[offset + 3] = (value & 0xff)
|
||
return offset + 4
|
||
}
|
||
|
||
Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) {
|
||
var limit = Math.pow(2, (8 * byteLength) - 1)
|
||
|
||
checkInt(this, value, offset, byteLength, limit - 1, -limit)
|
||
}
|
||
|
||
var i = 0
|
||
var mul = 1
|
||
var sub = 0
|
||
this[offset] = value & 0xFF
|
||
while (++i < byteLength && (mul *= 0x100)) {
|
||
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
||
sub = 1
|
||
}
|
||
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
|
||
}
|
||
|
||
return offset + byteLength
|
||
}
|
||
|
||
Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) {
|
||
var limit = Math.pow(2, (8 * byteLength) - 1)
|
||
|
||
checkInt(this, value, offset, byteLength, limit - 1, -limit)
|
||
}
|
||
|
||
var i = byteLength - 1
|
||
var mul = 1
|
||
var sub = 0
|
||
this[offset + i] = value & 0xFF
|
||
while (--i >= 0 && (mul *= 0x100)) {
|
||
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
||
sub = 1
|
||
}
|
||
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
|
||
}
|
||
|
||
return offset + byteLength
|
||
}
|
||
|
||
Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)
|
||
if (value < 0) value = 0xff + value + 1
|
||
this[offset] = (value & 0xff)
|
||
return offset + 1
|
||
}
|
||
|
||
Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
|
||
this[offset] = (value & 0xff)
|
||
this[offset + 1] = (value >>> 8)
|
||
return offset + 2
|
||
}
|
||
|
||
Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
|
||
this[offset] = (value >>> 8)
|
||
this[offset + 1] = (value & 0xff)
|
||
return offset + 2
|
||
}
|
||
|
||
Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
|
||
this[offset] = (value & 0xff)
|
||
this[offset + 1] = (value >>> 8)
|
||
this[offset + 2] = (value >>> 16)
|
||
this[offset + 3] = (value >>> 24)
|
||
return offset + 4
|
||
}
|
||
|
||
Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
|
||
if (value < 0) value = 0xffffffff + value + 1
|
||
this[offset] = (value >>> 24)
|
||
this[offset + 1] = (value >>> 16)
|
||
this[offset + 2] = (value >>> 8)
|
||
this[offset + 3] = (value & 0xff)
|
||
return offset + 4
|
||
}
|
||
|
||
function checkIEEE754 (buf, value, offset, ext, max, min) {
|
||
if (offset + ext > buf.length) throw new RangeError('Index out of range')
|
||
if (offset < 0) throw new RangeError('Index out of range')
|
||
}
|
||
|
||
function writeFloat (buf, value, offset, littleEndian, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) {
|
||
checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
|
||
}
|
||
ieee754.write(buf, value, offset, littleEndian, 23, 4)
|
||
return offset + 4
|
||
}
|
||
|
||
Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
|
||
return writeFloat(this, value, offset, true, noAssert)
|
||
}
|
||
|
||
Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
|
||
return writeFloat(this, value, offset, false, noAssert)
|
||
}
|
||
|
||
function writeDouble (buf, value, offset, littleEndian, noAssert) {
|
||
value = +value
|
||
offset = offset >>> 0
|
||
if (!noAssert) {
|
||
checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
|
||
}
|
||
ieee754.write(buf, value, offset, littleEndian, 52, 8)
|
||
return offset + 8
|
||
}
|
||
|
||
Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
|
||
return writeDouble(this, value, offset, true, noAssert)
|
||
}
|
||
|
||
Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
|
||
return writeDouble(this, value, offset, false, noAssert)
|
||
}
|
||
|
||
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
|
||
Buffer.prototype.copy = function copy (target, targetStart, start, end) {
|
||
if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer')
|
||
if (!start) start = 0
|
||
if (!end && end !== 0) end = this.length
|
||
if (targetStart >= target.length) targetStart = target.length
|
||
if (!targetStart) targetStart = 0
|
||
if (end > 0 && end < start) end = start
|
||
|
||
// Copy 0 bytes; we're done
|
||
if (end === start) return 0
|
||
if (target.length === 0 || this.length === 0) return 0
|
||
|
||
// Fatal error conditions
|
||
if (targetStart < 0) {
|
||
throw new RangeError('targetStart out of bounds')
|
||
}
|
||
if (start < 0 || start >= this.length) throw new RangeError('Index out of range')
|
||
if (end < 0) throw new RangeError('sourceEnd out of bounds')
|
||
|
||
// Are we oob?
|
||
if (end > this.length) end = this.length
|
||
if (target.length - targetStart < end - start) {
|
||
end = target.length - targetStart + start
|
||
}
|
||
|
||
var len = end - start
|
||
|
||
if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') {
|
||
// Use built-in when available, missing from IE11
|
||
this.copyWithin(targetStart, start, end)
|
||
} else if (this === target && start < targetStart && targetStart < end) {
|
||
// descending copy from end
|
||
for (var i = len - 1; i >= 0; --i) {
|
||
target[i + targetStart] = this[i + start]
|
||
}
|
||
} else {
|
||
Uint8Array.prototype.set.call(
|
||
target,
|
||
this.subarray(start, end),
|
||
targetStart
|
||
)
|
||
}
|
||
|
||
return len
|
||
}
|
||
|
||
// Usage:
|
||
// buffer.fill(number[, offset[, end]])
|
||
// buffer.fill(buffer[, offset[, end]])
|
||
// buffer.fill(string[, offset[, end]][, encoding])
|
||
Buffer.prototype.fill = function fill (val, start, end, encoding) {
|
||
// Handle string cases:
|
||
if (typeof val === 'string') {
|
||
if (typeof start === 'string') {
|
||
encoding = start
|
||
start = 0
|
||
end = this.length
|
||
} else if (typeof end === 'string') {
|
||
encoding = end
|
||
end = this.length
|
||
}
|
||
if (encoding !== undefined && typeof encoding !== 'string') {
|
||
throw new TypeError('encoding must be a string')
|
||
}
|
||
if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
|
||
throw new TypeError('Unknown encoding: ' + encoding)
|
||
}
|
||
if (val.length === 1) {
|
||
var code = val.charCodeAt(0)
|
||
if ((encoding === 'utf8' && code < 128) ||
|
||
encoding === 'latin1') {
|
||
// Fast path: If `val` fits into a single byte, use that numeric value.
|
||
val = code
|
||
}
|
||
}
|
||
} else if (typeof val === 'number') {
|
||
val = val & 255
|
||
}
|
||
|
||
// Invalid ranges are not set to a default, so can range check early.
|
||
if (start < 0 || this.length < start || this.length < end) {
|
||
throw new RangeError('Out of range index')
|
||
}
|
||
|
||
if (end <= start) {
|
||
return this
|
||
}
|
||
|
||
start = start >>> 0
|
||
end = end === undefined ? this.length : end >>> 0
|
||
|
||
if (!val) val = 0
|
||
|
||
var i
|
||
if (typeof val === 'number') {
|
||
for (i = start; i < end; ++i) {
|
||
this[i] = val
|
||
}
|
||
} else {
|
||
var bytes = Buffer.isBuffer(val)
|
||
? val
|
||
: Buffer.from(val, encoding)
|
||
var len = bytes.length
|
||
if (len === 0) {
|
||
throw new TypeError('The value "' + val +
|
||
'" is invalid for argument "value"')
|
||
}
|
||
for (i = 0; i < end - start; ++i) {
|
||
this[i + start] = bytes[i % len]
|
||
}
|
||
}
|
||
|
||
return this
|
||
}
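
// [Added illustration] A minimal, hedged sketch of Buffer.prototype.fill above;
// not part of the original buffer module and never invoked by this bundle.
// The helper name is illustrative only.
function __exampleBufferFill () {
  var buf = Buffer.alloc(8)
  buf.fill(0xff)       // every byte becomes 0xff
  buf.fill('ab', 2, 6) // bytes 2..5 become the repeating pattern 61 62 61 62
  buf.fill(0, 6)       // bytes 6..7 are reset to zero
  return buf
}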
|
||
|
||
// HELPER FUNCTIONS
// ================

var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g

function base64clean (str) {
  // Node takes equal signs as end of the Base64 encoding
  str = str.split('=')[0]
  // Node strips out invalid characters like \n and \t from the string, base64-js does not
  str = str.trim().replace(INVALID_BASE64_RE, '')
  // Node converts strings with length < 2 to ''
  if (str.length < 2) return ''
  // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
  while (str.length % 4 !== 0) {
    str = str + '='
  }
  return str
}

function toHex (n) {
  if (n < 16) return '0' + n.toString(16)
  return n.toString(16)
}

function utf8ToBytes (string, units) {
|
||
units = units || Infinity
|
||
var codePoint
|
||
var length = string.length
|
||
var leadSurrogate = null
|
||
var bytes = []
|
||
|
||
for (var i = 0; i < length; ++i) {
|
||
codePoint = string.charCodeAt(i)
|
||
|
||
// is surrogate component
|
||
if (codePoint > 0xD7FF && codePoint < 0xE000) {
|
||
// last char was a lead
|
||
if (!leadSurrogate) {
|
||
// no lead yet
|
||
if (codePoint > 0xDBFF) {
|
||
// unexpected trail
|
||
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
|
||
continue
|
||
} else if (i + 1 === length) {
|
||
// unpaired lead
|
||
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
|
||
continue
|
||
}
|
||
|
||
// valid lead
|
||
leadSurrogate = codePoint
|
||
|
||
continue
|
||
}
|
||
|
||
// 2 leads in a row
|
||
if (codePoint < 0xDC00) {
|
||
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
|
||
leadSurrogate = codePoint
|
||
continue
|
||
}
|
||
|
||
// valid surrogate pair
|
||
codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000
|
||
} else if (leadSurrogate) {
|
||
// valid bmp char, but last char was a lead
|
||
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
|
||
}
|
||
|
||
leadSurrogate = null
|
||
|
||
// encode utf8
|
||
if (codePoint < 0x80) {
|
||
if ((units -= 1) < 0) break
|
||
bytes.push(codePoint)
|
||
} else if (codePoint < 0x800) {
|
||
if ((units -= 2) < 0) break
|
||
bytes.push(
|
||
codePoint >> 0x6 | 0xC0,
|
||
codePoint & 0x3F | 0x80
|
||
)
|
||
} else if (codePoint < 0x10000) {
|
||
if ((units -= 3) < 0) break
|
||
bytes.push(
|
||
codePoint >> 0xC | 0xE0,
|
||
codePoint >> 0x6 & 0x3F | 0x80,
|
||
codePoint & 0x3F | 0x80
|
||
)
|
||
} else if (codePoint < 0x110000) {
|
||
if ((units -= 4) < 0) break
|
||
bytes.push(
|
||
codePoint >> 0x12 | 0xF0,
|
||
codePoint >> 0xC & 0x3F | 0x80,
|
||
codePoint >> 0x6 & 0x3F | 0x80,
|
||
codePoint & 0x3F | 0x80
|
||
)
|
||
} else {
|
||
throw new Error('Invalid code point')
|
||
}
|
||
}
|
||
|
||
return bytes
|
||
}

function asciiToBytes (str) {
  var byteArray = []
  for (var i = 0; i < str.length; ++i) {
    // Node's code seems to be doing this and not & 0x7F..
    byteArray.push(str.charCodeAt(i) & 0xFF)
  }
  return byteArray
}

function utf16leToBytes (str, units) {
  var c, hi, lo
  var byteArray = []
  for (var i = 0; i < str.length; ++i) {
    if ((units -= 2) < 0) break

    c = str.charCodeAt(i)
    hi = c >> 8
    lo = c % 256
    byteArray.push(lo)
    byteArray.push(hi)
  }

  return byteArray
}

function base64ToBytes (str) {
  return base64.toByteArray(base64clean(str))
}

function blitBuffer (src, dst, offset, length) {
  for (var i = 0; i < length; ++i) {
    if ((i + offset >= dst.length) || (i >= src.length)) break
    dst[i + offset] = src[i]
  }
  return i
}

// ArrayBuffer or Uint8Array objects from other contexts (i.e. iframes) do not pass
// the `instanceof` check but they should be treated as of that type.
// See: https://github.com/feross/buffer/issues/166
function isInstance (obj, type) {
  return obj instanceof type ||
    (obj != null && obj.constructor != null && obj.constructor.name != null &&
      obj.constructor.name === type.name)
}
function numberIsNaN (obj) {
  // For IE11 support
  return obj !== obj // eslint-disable-line no-self-compare
}

}).call(this)}).call(this,require("buffer").Buffer)
|
||
|
||
},{"base64-js":1,"buffer":3,"ieee754":6}],4:[function(require,module,exports){
|
||
(function (global){(function (){
|
||
/*! https://mths.be/cssescape v1.5.1 by @mathias | MIT license */
|
||
;(function(root, factory) {
|
||
// https://github.com/umdjs/umd/blob/master/returnExports.js
|
||
if (typeof exports == 'object') {
|
||
// For Node.js.
|
||
module.exports = factory(root);
|
||
} else if (typeof define == 'function' && define.amd) {
|
||
// For AMD. Register as an anonymous module.
|
||
define([], factory.bind(root, root));
|
||
} else {
|
||
// For browser globals (not exposing the function separately).
|
||
factory(root);
|
||
}
|
||
}(typeof global != 'undefined' ? global : this, function(root) {
|
||
|
||
if (root.CSS && root.CSS.escape) {
|
||
return root.CSS.escape;
|
||
}
|
||
|
||
// https://drafts.csswg.org/cssom/#serialize-an-identifier
|
||
var cssEscape = function(value) {
|
||
if (arguments.length == 0) {
|
||
throw new TypeError('`CSS.escape` requires an argument.');
|
||
}
|
||
var string = String(value);
|
||
var length = string.length;
|
||
var index = -1;
|
||
var codeUnit;
|
||
var result = '';
|
||
var firstCodeUnit = string.charCodeAt(0);
|
||
while (++index < length) {
|
||
codeUnit = string.charCodeAt(index);
|
||
// Note: there’s no need to special-case astral symbols, surrogate
|
||
// pairs, or lone surrogates.
|
||
|
||
// If the character is NULL (U+0000), then the REPLACEMENT CHARACTER
|
||
// (U+FFFD).
|
||
if (codeUnit == 0x0000) {
|
||
result += '\uFFFD';
|
||
continue;
|
||
}
|
||
|
||
if (
|
||
// If the character is in the range [\1-\1F] (U+0001 to U+001F) or is
|
||
// U+007F, […]
|
||
(codeUnit >= 0x0001 && codeUnit <= 0x001F) || codeUnit == 0x007F ||
|
||
// If the character is the first character and is in the range [0-9]
|
||
// (U+0030 to U+0039), […]
|
||
(index == 0 && codeUnit >= 0x0030 && codeUnit <= 0x0039) ||
|
||
// If the character is the second character and is in the range [0-9]
|
||
// (U+0030 to U+0039) and the first character is a `-` (U+002D), […]
|
||
(
|
||
index == 1 &&
|
||
codeUnit >= 0x0030 && codeUnit <= 0x0039 &&
|
||
firstCodeUnit == 0x002D
|
||
)
|
||
) {
|
||
// https://drafts.csswg.org/cssom/#escape-a-character-as-code-point
|
||
result += '\\' + codeUnit.toString(16) + ' ';
|
||
continue;
|
||
}
|
||
|
||
if (
|
||
// If the character is the first character and is a `-` (U+002D), and
|
||
// there is no second character, […]
|
||
index == 0 &&
|
||
length == 1 &&
|
||
codeUnit == 0x002D
|
||
) {
|
||
result += '\\' + string.charAt(index);
|
||
continue;
|
||
}
|
||
|
||
// If the character is not handled by one of the above rules and is
|
||
// greater than or equal to U+0080, is `-` (U+002D) or `_` (U+005F), or
|
||
// is in one of the ranges [0-9] (U+0030 to U+0039), [A-Z] (U+0041 to
|
||
// U+005A), or [a-z] (U+0061 to U+007A), […]
|
||
if (
|
||
codeUnit >= 0x0080 ||
|
||
codeUnit == 0x002D ||
|
||
codeUnit == 0x005F ||
|
||
codeUnit >= 0x0030 && codeUnit <= 0x0039 ||
|
||
codeUnit >= 0x0041 && codeUnit <= 0x005A ||
|
||
codeUnit >= 0x0061 && codeUnit <= 0x007A
|
||
) {
|
||
// the character itself
|
||
result += string.charAt(index);
|
||
continue;
|
||
}
|
||
|
||
// Otherwise, the escaped character.
|
||
// https://drafts.csswg.org/cssom/#escape-a-character
|
||
result += '\\' + string.charAt(index);
|
||
|
||
}
|
||
return result;
|
||
};
|
||
|
||
if (!root.CSS) {
|
||
root.CSS = {};
|
||
}
|
||
|
||
root.CSS.escape = cssEscape;
|
||
return cssEscape;
|
||
|
||
}));
|
||
|
||
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
|
||
|
||
},{}],5:[function(require,module,exports){
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
'use strict';
|
||
|
||
var R = typeof Reflect === 'object' ? Reflect : null
|
||
var ReflectApply = R && typeof R.apply === 'function'
|
||
? R.apply
|
||
: function ReflectApply(target, receiver, args) {
|
||
return Function.prototype.apply.call(target, receiver, args);
|
||
}
|
||
|
||
var ReflectOwnKeys
|
||
if (R && typeof R.ownKeys === 'function') {
|
||
ReflectOwnKeys = R.ownKeys
|
||
} else if (Object.getOwnPropertySymbols) {
|
||
ReflectOwnKeys = function ReflectOwnKeys(target) {
|
||
return Object.getOwnPropertyNames(target)
|
||
.concat(Object.getOwnPropertySymbols(target));
|
||
};
|
||
} else {
|
||
ReflectOwnKeys = function ReflectOwnKeys(target) {
|
||
return Object.getOwnPropertyNames(target);
|
||
};
|
||
}
|
||
|
||
function ProcessEmitWarning(warning) {
|
||
if (console && console.warn) console.warn(warning);
|
||
}
|
||
|
||
var NumberIsNaN = Number.isNaN || function NumberIsNaN(value) {
|
||
return value !== value;
|
||
}
|
||
|
||
function EventEmitter() {
|
||
EventEmitter.init.call(this);
|
||
}
|
||
module.exports = EventEmitter;
|
||
module.exports.once = once;
|
||
|
||
// Backwards-compat with node 0.10.x
|
||
EventEmitter.EventEmitter = EventEmitter;
|
||
|
||
EventEmitter.prototype._events = undefined;
|
||
EventEmitter.prototype._eventsCount = 0;
|
||
EventEmitter.prototype._maxListeners = undefined;
|
||
|
||
// By default EventEmitters will print a warning if more than 10 listeners are
|
||
// added to it. This is a useful default which helps finding memory leaks.
|
||
var defaultMaxListeners = 10;
|
||
|
||
function checkListener(listener) {
|
||
if (typeof listener !== 'function') {
|
||
throw new TypeError('The "listener" argument must be of type Function. Received type ' + typeof listener);
|
||
}
|
||
}
|
||
|
||
Object.defineProperty(EventEmitter, 'defaultMaxListeners', {
|
||
enumerable: true,
|
||
get: function() {
|
||
return defaultMaxListeners;
|
||
},
|
||
set: function(arg) {
|
||
if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) {
|
||
throw new RangeError('The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + arg + '.');
|
||
}
|
||
defaultMaxListeners = arg;
|
||
}
|
||
});
|
||
|
||
EventEmitter.init = function() {
|
||
|
||
if (this._events === undefined ||
|
||
this._events === Object.getPrototypeOf(this)._events) {
|
||
this._events = Object.create(null);
|
||
this._eventsCount = 0;
|
||
}
|
||
|
||
this._maxListeners = this._maxListeners || undefined;
|
||
};
|
||
|
||
// Obviously not all Emitters should be limited to 10. This function allows
|
||
// that to be increased. Set to zero for unlimited.
|
||
EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
|
||
if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) {
|
||
throw new RangeError('The value of "n" is out of range. It must be a non-negative number. Received ' + n + '.');
|
||
}
|
||
this._maxListeners = n;
|
||
return this;
|
||
};
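
// [Added illustration] A minimal sketch of the listener-limit mechanism described
// in the comments above; not part of the original events module and never invoked
// by this bundle. The helper name is illustrative only.
function __exampleMaxListeners () {
  var ee = new EventEmitter()
  ee.setMaxListeners(50)      // raise the per-event warning threshold from the default of 10
  return ee.getMaxListeners() // 50
}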
|
||
|
||
function _getMaxListeners(that) {
|
||
if (that._maxListeners === undefined)
|
||
return EventEmitter.defaultMaxListeners;
|
||
return that._maxListeners;
|
||
}
|
||
|
||
EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
|
||
return _getMaxListeners(this);
|
||
};
|
||
|
||
EventEmitter.prototype.emit = function emit(type) {
|
||
var args = [];
|
||
for (var i = 1; i < arguments.length; i++) args.push(arguments[i]);
|
||
var doError = (type === 'error');
|
||
|
||
var events = this._events;
|
||
if (events !== undefined)
|
||
doError = (doError && events.error === undefined);
|
||
else if (!doError)
|
||
return false;
|
||
|
||
// If there is no 'error' event listener then throw.
|
||
if (doError) {
|
||
var er;
|
||
if (args.length > 0)
|
||
er = args[0];
|
||
if (er instanceof Error) {
|
||
// Note: The comments on the `throw` lines are intentional, they show
|
||
// up in Node's output if this results in an unhandled exception.
|
||
throw er; // Unhandled 'error' event
|
||
}
|
||
// At least give some kind of context to the user
|
||
var err = new Error('Unhandled error.' + (er ? ' (' + er.message + ')' : ''));
|
||
err.context = er;
|
||
throw err; // Unhandled 'error' event
|
||
}
|
||
|
||
var handler = events[type];
|
||
|
||
if (handler === undefined)
|
||
return false;
|
||
|
||
if (typeof handler === 'function') {
|
||
ReflectApply(handler, this, args);
|
||
} else {
|
||
var len = handler.length;
|
||
var listeners = arrayClone(handler, len);
|
||
for (var i = 0; i < len; ++i)
|
||
ReflectApply(listeners[i], this, args);
|
||
}
|
||
|
||
return true;
|
||
};
|
||
|
||
function _addListener(target, type, listener, prepend) {
|
||
var m;
|
||
var events;
|
||
var existing;
|
||
|
||
checkListener(listener);
|
||
|
||
events = target._events;
|
||
if (events === undefined) {
|
||
events = target._events = Object.create(null);
|
||
target._eventsCount = 0;
|
||
} else {
|
||
// To avoid recursion in the case that type === "newListener"! Before
|
||
// adding it to the listeners, first emit "newListener".
|
||
if (events.newListener !== undefined) {
|
||
target.emit('newListener', type,
|
||
listener.listener ? listener.listener : listener);
|
||
|
||
// Re-assign `events` because a newListener handler could have caused the
|
||
// this._events to be assigned to a new object
|
||
events = target._events;
|
||
}
|
||
existing = events[type];
|
||
}
|
||
|
||
if (existing === undefined) {
|
||
// Optimize the case of one listener. Don't need the extra array object.
|
||
existing = events[type] = listener;
|
||
++target._eventsCount;
|
||
} else {
|
||
if (typeof existing === 'function') {
|
||
// Adding the second element, need to change to array.
|
||
existing = events[type] =
|
||
prepend ? [listener, existing] : [existing, listener];
|
||
// If we've already got an array, just append.
|
||
} else if (prepend) {
|
||
existing.unshift(listener);
|
||
} else {
|
||
existing.push(listener);
|
||
}
|
||
|
||
// Check for listener leak
|
||
m = _getMaxListeners(target);
|
||
if (m > 0 && existing.length > m && !existing.warned) {
|
||
existing.warned = true;
|
||
// No error code for this since it is a Warning
|
||
// eslint-disable-next-line no-restricted-syntax
|
||
var w = new Error('Possible EventEmitter memory leak detected. ' +
|
||
existing.length + ' ' + String(type) + ' listeners ' +
|
||
'added. Use emitter.setMaxListeners() to ' +
|
||
'increase limit');
|
||
w.name = 'MaxListenersExceededWarning';
|
||
w.emitter = target;
|
||
w.type = type;
|
||
w.count = existing.length;
|
||
ProcessEmitWarning(w);
|
||
}
|
||
}
|
||
|
||
return target;
|
||
}
|
||
|
||
EventEmitter.prototype.addListener = function addListener(type, listener) {
|
||
return _addListener(this, type, listener, false);
|
||
};
|
||
|
||
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
|
||
|
||
EventEmitter.prototype.prependListener =
|
||
function prependListener(type, listener) {
|
||
return _addListener(this, type, listener, true);
|
||
};
|
||
|
||
function onceWrapper() {
|
||
if (!this.fired) {
|
||
this.target.removeListener(this.type, this.wrapFn);
|
||
this.fired = true;
|
||
if (arguments.length === 0)
|
||
return this.listener.call(this.target);
|
||
return this.listener.apply(this.target, arguments);
|
||
}
|
||
}
|
||
|
||
function _onceWrap(target, type, listener) {
|
||
var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener };
|
||
var wrapped = onceWrapper.bind(state);
|
||
wrapped.listener = listener;
|
||
state.wrapFn = wrapped;
|
||
return wrapped;
|
||
}
|
||
|
||
EventEmitter.prototype.once = function once(type, listener) {
|
||
checkListener(listener);
|
||
this.on(type, _onceWrap(this, type, listener));
|
||
return this;
|
||
};
|
||
|
||
EventEmitter.prototype.prependOnceListener =
|
||
function prependOnceListener(type, listener) {
|
||
checkListener(listener);
|
||
this.prependListener(type, _onceWrap(this, type, listener));
|
||
return this;
|
||
};
|
||
|
||
// Emits a 'removeListener' event if and only if the listener was removed.
|
||
EventEmitter.prototype.removeListener =
|
||
function removeListener(type, listener) {
|
||
var list, events, position, i, originalListener;
|
||
|
||
checkListener(listener);
|
||
|
||
events = this._events;
|
||
if (events === undefined)
|
||
return this;
|
||
|
||
list = events[type];
|
||
if (list === undefined)
|
||
return this;
|
||
|
||
if (list === listener || list.listener === listener) {
|
||
if (--this._eventsCount === 0)
|
||
this._events = Object.create(null);
|
||
else {
|
||
delete events[type];
|
||
if (events.removeListener)
|
||
this.emit('removeListener', type, list.listener || listener);
|
||
}
|
||
} else if (typeof list !== 'function') {
|
||
position = -1;
|
||
|
||
for (i = list.length - 1; i >= 0; i--) {
|
||
if (list[i] === listener || list[i].listener === listener) {
|
||
originalListener = list[i].listener;
|
||
position = i;
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (position < 0)
|
||
return this;
|
||
|
||
if (position === 0)
|
||
list.shift();
|
||
else {
|
||
spliceOne(list, position);
|
||
}
|
||
|
||
if (list.length === 1)
|
||
events[type] = list[0];
|
||
|
||
if (events.removeListener !== undefined)
|
||
this.emit('removeListener', type, originalListener || listener);
|
||
}
|
||
|
||
return this;
|
||
};
|
||
|
||
EventEmitter.prototype.off = EventEmitter.prototype.removeListener;
|
||
|
||
EventEmitter.prototype.removeAllListeners =
|
||
function removeAllListeners(type) {
|
||
var listeners, events, i;
|
||
|
||
events = this._events;
|
||
if (events === undefined)
|
||
return this;
|
||
|
||
// not listening for removeListener, no need to emit
|
||
if (events.removeListener === undefined) {
|
||
if (arguments.length === 0) {
|
||
this._events = Object.create(null);
|
||
this._eventsCount = 0;
|
||
} else if (events[type] !== undefined) {
|
||
if (--this._eventsCount === 0)
|
||
this._events = Object.create(null);
|
||
else
|
||
delete events[type];
|
||
}
|
||
return this;
|
||
}
|
||
|
||
// emit removeListener for all listeners on all events
|
||
if (arguments.length === 0) {
|
||
var keys = Object.keys(events);
|
||
var key;
|
||
for (i = 0; i < keys.length; ++i) {
|
||
key = keys[i];
|
||
if (key === 'removeListener') continue;
|
||
this.removeAllListeners(key);
|
||
}
|
||
this.removeAllListeners('removeListener');
|
||
this._events = Object.create(null);
|
||
this._eventsCount = 0;
|
||
return this;
|
||
}
|
||
|
||
listeners = events[type];
|
||
|
||
if (typeof listeners === 'function') {
|
||
this.removeListener(type, listeners);
|
||
} else if (listeners !== undefined) {
|
||
// LIFO order
|
||
for (i = listeners.length - 1; i >= 0; i--) {
|
||
this.removeListener(type, listeners[i]);
|
||
}
|
||
}
|
||
|
||
return this;
|
||
};
|
||
|
||
function _listeners(target, type, unwrap) {
|
||
var events = target._events;
|
||
|
||
if (events === undefined)
|
||
return [];
|
||
|
||
var evlistener = events[type];
|
||
if (evlistener === undefined)
|
||
return [];
|
||
|
||
if (typeof evlistener === 'function')
|
||
return unwrap ? [evlistener.listener || evlistener] : [evlistener];
|
||
|
||
return unwrap ?
|
||
unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length);
|
||
}
|
||
|
||
EventEmitter.prototype.listeners = function listeners(type) {
|
||
return _listeners(this, type, true);
|
||
};
|
||
|
||
EventEmitter.prototype.rawListeners = function rawListeners(type) {
|
||
return _listeners(this, type, false);
|
||
};
|
||
|
||
EventEmitter.listenerCount = function(emitter, type) {
|
||
if (typeof emitter.listenerCount === 'function') {
|
||
return emitter.listenerCount(type);
|
||
} else {
|
||
return listenerCount.call(emitter, type);
|
||
}
|
||
};
|
||
|
||
EventEmitter.prototype.listenerCount = listenerCount;
|
||
function listenerCount(type) {
|
||
var events = this._events;
|
||
|
||
if (events !== undefined) {
|
||
var evlistener = events[type];
|
||
|
||
if (typeof evlistener === 'function') {
|
||
return 1;
|
||
} else if (evlistener !== undefined) {
|
||
return evlistener.length;
|
||
}
|
||
}
|
||
|
||
return 0;
|
||
}
|
||
|
||
EventEmitter.prototype.eventNames = function eventNames() {
|
||
return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : [];
|
||
};
|
||
|
||
function arrayClone(arr, n) {
|
||
var copy = new Array(n);
|
||
for (var i = 0; i < n; ++i)
|
||
copy[i] = arr[i];
|
||
return copy;
|
||
}
|
||
|
||
function spliceOne(list, index) {
|
||
for (; index + 1 < list.length; index++)
|
||
list[index] = list[index + 1];
|
||
list.pop();
|
||
}
|
||
|
||
function unwrapListeners(arr) {
|
||
var ret = new Array(arr.length);
|
||
for (var i = 0; i < ret.length; ++i) {
|
||
ret[i] = arr[i].listener || arr[i];
|
||
}
|
||
return ret;
|
||
}
|
||
|
||
function once(emitter, name) {
|
||
return new Promise(function (resolve, reject) {
|
||
function eventListener() {
|
||
if (errorListener !== undefined) {
|
||
emitter.removeListener('error', errorListener);
|
||
}
|
||
resolve([].slice.call(arguments));
|
||
};
|
||
var errorListener;
|
||
|
||
// Adding an error listener is not optional because
|
||
// if an error is thrown on an event emitter we cannot
|
||
// guarantee that the actual event we are waiting will
|
||
// be fired. The result could be a silent way to create
|
||
// memory or file descriptor leaks, which is something
|
||
// we should avoid.
|
||
if (name !== 'error') {
|
||
errorListener = function errorListener(err) {
|
||
emitter.removeListener(name, eventListener);
|
||
reject(err);
|
||
};
|
||
|
||
emitter.once('error', errorListener);
|
||
}
|
||
|
||
emitter.once(name, eventListener);
|
||
});
|
||
}
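
// [Added illustration] A minimal sketch of the promise-based once(emitter, name)
// helper above; not part of the original events module and never invoked by this
// bundle. The helper name is illustrative only.
function __exampleOnce () {
  var ee = new EventEmitter()
  var ready = once(ee, 'ready').then(function (args) {
    return args[0] // resolves with the array of arguments passed to emit()
  })
  ee.emit('ready', 42)
  return ready // a Promise that resolves to 42
}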
|
||
|
||
},{}],6:[function(require,module,exports){
|
||
/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh <https://feross.org/opensource> */
|
||
exports.read = function (buffer, offset, isLE, mLen, nBytes) {
|
||
var e, m
|
||
var eLen = (nBytes * 8) - mLen - 1
|
||
var eMax = (1 << eLen) - 1
|
||
var eBias = eMax >> 1
|
||
var nBits = -7
|
||
var i = isLE ? (nBytes - 1) : 0
|
||
var d = isLE ? -1 : 1
|
||
var s = buffer[offset + i]
|
||
|
||
i += d
|
||
|
||
e = s & ((1 << (-nBits)) - 1)
|
||
s >>= (-nBits)
|
||
nBits += eLen
|
||
for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {}
|
||
|
||
m = e & ((1 << (-nBits)) - 1)
|
||
e >>= (-nBits)
|
||
nBits += mLen
|
||
for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {}
|
||
|
||
if (e === 0) {
|
||
e = 1 - eBias
|
||
} else if (e === eMax) {
|
||
return m ? NaN : ((s ? -1 : 1) * Infinity)
|
||
} else {
|
||
m = m + Math.pow(2, mLen)
|
||
e = e - eBias
|
||
}
|
||
return (s ? -1 : 1) * m * Math.pow(2, e - mLen)
|
||
}
|
||
|
||
exports.write = function (buffer, value, offset, isLE, mLen, nBytes) {
|
||
var e, m, c
|
||
var eLen = (nBytes * 8) - mLen - 1
|
||
var eMax = (1 << eLen) - 1
|
||
var eBias = eMax >> 1
|
||
var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0)
|
||
var i = isLE ? 0 : (nBytes - 1)
|
||
var d = isLE ? 1 : -1
|
||
var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0
|
||
|
||
value = Math.abs(value)
|
||
|
||
if (isNaN(value) || value === Infinity) {
|
||
m = isNaN(value) ? 1 : 0
|
||
e = eMax
|
||
} else {
|
||
e = Math.floor(Math.log(value) / Math.LN2)
|
||
if (value * (c = Math.pow(2, -e)) < 1) {
|
||
e--
|
||
c *= 2
|
||
}
|
||
if (e + eBias >= 1) {
|
||
value += rt / c
|
||
} else {
|
||
value += rt * Math.pow(2, 1 - eBias)
|
||
}
|
||
if (value * c >= 2) {
|
||
e++
|
||
c /= 2
|
||
}
|
||
|
||
if (e + eBias >= eMax) {
|
||
m = 0
|
||
e = eMax
|
||
} else if (e + eBias >= 1) {
|
||
m = ((value * c) - 1) * Math.pow(2, mLen)
|
||
e = e + eBias
|
||
} else {
|
||
m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen)
|
||
e = 0
|
||
}
|
||
}
|
||
|
||
for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {}
|
||
|
||
e = (e << mLen) | m
|
||
eLen += mLen
|
||
for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {}
|
||
|
||
buffer[offset + i - d] |= s * 128
|
||
}
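
// [Added illustration] A minimal sketch of a float32 round trip through the
// exports.write/exports.read pair above; not part of the original ieee754 module
// and never invoked by this bundle. The helper name is illustrative only.
function __exampleFloat32RoundTrip () {
  var bytes = [0, 0, 0, 0]
  // 23 mantissa bits over 4 bytes = IEEE-754 single precision, little-endian.
  exports.write(bytes, 1.5, 0, true, 23, 4)
  return exports.read(bytes, 0, true, 23, 4) // 1.5, which is exactly representable
}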
|
||
|
||
},{}],7:[function(require,module,exports){
|
||
(function (Buffer){(function (){
|
||
!function(factory) {
|
||
var global = this;
|
||
module.exports = factory(global);
|
||
}(function(global) {
|
||
"use strict";
|
||
function is(obj, Ctor) {
|
||
return "object" != typeof obj || null === obj ? !1 : obj.constructor === Ctor || Object.prototype.toString.call(obj) === "[object " + Ctor.name + "]";
|
||
}
|
||
function arrayFrom(arrayLike, forceCopy) {
|
||
return !forceCopy && is(arrayLike, Array) ? arrayLike : Array.prototype.slice.call(arrayLike);
|
||
}
|
||
function defined(value, defaultValue) {
|
||
return void 0 !== value ? value : defaultValue;
|
||
}
|
||
function jDataView(buffer, byteOffset, byteLength, littleEndian) {
|
||
if (jDataView.is(buffer)) {
|
||
var result = buffer.slice(byteOffset, byteOffset + byteLength);
|
||
return result._littleEndian = defined(littleEndian, result._littleEndian), result;
|
||
}
|
||
if (!jDataView.is(this)) return new jDataView(buffer, byteOffset, byteLength, littleEndian);
|
||
if (this.buffer = buffer = jDataView.wrapBuffer(buffer), this._isArrayBuffer = compatibility.ArrayBuffer && is(buffer, ArrayBuffer),
|
||
this._isPixelData = !1, this._isDataView = compatibility.DataView && this._isArrayBuffer,
|
||
this._isNodeBuffer = !0 && compatibility.NodeBuffer && is(buffer, Buffer), !this._isNodeBuffer && !this._isArrayBuffer && !is(buffer, Array)) throw new TypeError("jDataView buffer has an incompatible type");
|
||
this._littleEndian = !!littleEndian;
|
||
var bufferLength = "byteLength" in buffer ? buffer.byteLength : buffer.length;
|
||
this.byteOffset = byteOffset = defined(byteOffset, 0), this.byteLength = byteLength = defined(byteLength, bufferLength - byteOffset),
|
||
this._offset = this._bitOffset = 0, this._isDataView ? this._view = new DataView(buffer, byteOffset, byteLength) : this._checkBounds(byteOffset, byteLength, bufferLength),
|
||
this._engineAction = this._isDataView ? this._dataViewAction : this._isNodeBuffer ? this._nodeBufferAction : this._isArrayBuffer ? this._arrayBufferAction : this._arrayAction;
|
||
}
|
||
function getCharCodes(string) {
|
||
if (compatibility.NodeBuffer) return new Buffer(string, "binary");
|
||
for (var Type = compatibility.ArrayBuffer ? Uint8Array : Array, codes = new Type(string.length), i = 0, length = string.length; length > i; i++) codes[i] = 255 & string.charCodeAt(i);
|
||
return codes;
|
||
}
|
||
function pow2(n) {
|
||
return n >= 0 && 31 > n ? 1 << n : pow2[n] || (pow2[n] = Math.pow(2, n));
|
||
}
|
||
function Uint64(lo, hi) {
|
||
this.lo = lo, this.hi = hi;
|
||
}
|
||
function Int64() {
|
||
Uint64.apply(this, arguments);
|
||
}
|
||
var compatibility = {
|
||
NodeBuffer: !0 && "Buffer" in global,
|
||
DataView: "DataView" in global,
|
||
ArrayBuffer: "ArrayBuffer" in global,
|
||
PixelData: !1
|
||
}, TextEncoder = global.TextEncoder, TextDecoder = global.TextDecoder;
|
||
compatibility.NodeBuffer && !function(buffer) {
|
||
try {
|
||
buffer.writeFloatLE(1/0, 0);
|
||
} catch (e) {
|
||
compatibility.NodeBuffer = !1;
|
||
}
|
||
}(new Buffer(4));
|
||
var dataTypes = {
|
||
Int8: 1,
|
||
Int16: 2,
|
||
Int32: 4,
|
||
Uint8: 1,
|
||
Uint16: 2,
|
||
Uint32: 4,
|
||
Float32: 4,
|
||
Float64: 8
|
||
};
|
||
jDataView.wrapBuffer = function(buffer) {
|
||
switch (typeof buffer) {
|
||
case "number":
|
||
if (compatibility.NodeBuffer) buffer = new Buffer(buffer), buffer.fill(0); else if (compatibility.ArrayBuffer) buffer = new Uint8Array(buffer).buffer; else {
|
||
buffer = new Array(buffer);
|
||
for (var i = 0; i < buffer.length; i++) buffer[i] = 0;
|
||
}
|
||
return buffer;
|
||
|
||
case "string":
|
||
buffer = getCharCodes(buffer);
|
||
|
||
default:
|
||
return "length" in buffer && !(compatibility.NodeBuffer && is(buffer, Buffer) || compatibility.ArrayBuffer && is(buffer, ArrayBuffer)) && (compatibility.NodeBuffer ? buffer = new Buffer(buffer) : compatibility.ArrayBuffer ? is(buffer, ArrayBuffer) || (buffer = new Uint8Array(buffer).buffer,
|
||
is(buffer, ArrayBuffer) || (buffer = new Uint8Array(arrayFrom(buffer, !0)).buffer)) : buffer = arrayFrom(buffer)),
|
||
buffer;
|
||
}
|
||
}, jDataView.is = function(view) {
|
||
return view && view.jDataView;
|
||
}, jDataView.from = function() {
|
||
return new jDataView(arguments);
|
||
}, jDataView.Uint64 = Uint64, Uint64.prototype = {
|
||
valueOf: function() {
|
||
return this.lo + pow2(32) * this.hi;
|
||
},
|
||
toString: function() {
|
||
return Number.prototype.toString.apply(this.valueOf(), arguments);
|
||
}
|
||
}, Uint64.fromNumber = function(number) {
|
||
var hi = Math.floor(number / pow2(32)), lo = number - hi * pow2(32);
|
||
return new Uint64(lo, hi);
|
||
}, jDataView.Int64 = Int64, Int64.prototype = "create" in Object ? Object.create(Uint64.prototype) : new Uint64(),
|
||
Int64.prototype.valueOf = function() {
|
||
return this.hi < pow2(31) ? Uint64.prototype.valueOf.apply(this, arguments) : -(pow2(32) - this.lo + pow2(32) * (pow2(32) - 1 - this.hi));
|
||
}, Int64.fromNumber = function(number) {
|
||
var lo, hi;
|
||
if (number >= 0) {
|
||
var unsigned = Uint64.fromNumber(number);
|
||
lo = unsigned.lo, hi = unsigned.hi;
|
||
} else hi = Math.floor(number / pow2(32)), lo = number - hi * pow2(32), hi += pow2(32);
|
||
return new Int64(lo, hi);
|
||
};
|
||
var proto = jDataView.prototype = {
|
||
compatibility: compatibility,
|
||
jDataView: !0,
|
||
_checkBounds: function(byteOffset, byteLength, maxLength) {
|
||
if ("number" != typeof byteOffset) throw new TypeError("Offset is not a number.");
|
||
if ("number" != typeof byteLength) throw new TypeError("Size is not a number.");
|
||
if (0 > byteLength) throw new RangeError("Length is negative.");
|
||
if (0 > byteOffset || byteOffset + byteLength > defined(maxLength, this.byteLength)) throw new RangeError("Offsets are out of bounds.");
|
||
},
|
||
_action: function(type, isReadAction, byteOffset, littleEndian, value) {
|
||
return this._engineAction(type, isReadAction, defined(byteOffset, this._offset), defined(littleEndian, this._littleEndian), value);
|
||
},
|
||
_dataViewAction: function(type, isReadAction, byteOffset, littleEndian, value) {
|
||
return this._offset = byteOffset + dataTypes[type], isReadAction ? this._view["get" + type](byteOffset, littleEndian) : this._view["set" + type](byteOffset, value, littleEndian);
|
||
},
|
||
_arrayBufferAction: function(type, isReadAction, byteOffset, littleEndian, value) {
|
||
var typedArray, size = dataTypes[type], TypedArray = global[type + "Array"];
|
||
if (littleEndian = defined(littleEndian, this._littleEndian), 1 === size || (this.byteOffset + byteOffset) % size === 0 && littleEndian) return typedArray = new TypedArray(this.buffer, this.byteOffset + byteOffset, 1),
|
||
this._offset = byteOffset + size, isReadAction ? typedArray[0] : typedArray[0] = value;
|
||
var bytes = new Uint8Array(isReadAction ? this.getBytes(size, byteOffset, littleEndian, !0) : size);
|
||
return typedArray = new TypedArray(bytes.buffer, 0, 1), isReadAction ? typedArray[0] : (typedArray[0] = value,
|
||
void this._setBytes(byteOffset, bytes, littleEndian));
|
||
},
|
||
_arrayAction: function(type, isReadAction, byteOffset, littleEndian, value) {
|
||
return isReadAction ? this["_get" + type](byteOffset, littleEndian) : this["_set" + type](byteOffset, value, littleEndian);
|
||
},
|
||
_getBytes: function(length, byteOffset, littleEndian) {
|
||
littleEndian = defined(littleEndian, this._littleEndian), byteOffset = defined(byteOffset, this._offset),
|
||
length = defined(length, this.byteLength - byteOffset), this._checkBounds(byteOffset, length),
|
||
byteOffset += this.byteOffset, this._offset = byteOffset - this.byteOffset + length;
|
||
var result = this._isArrayBuffer ? new Uint8Array(this.buffer, byteOffset, length) : (this.buffer.slice || Array.prototype.slice).call(this.buffer, byteOffset, byteOffset + length);
|
||
return littleEndian || 1 >= length ? result : arrayFrom(result).reverse();
|
||
},
|
||
getBytes: function(length, byteOffset, littleEndian, toArray) {
|
||
var result = this._getBytes(length, byteOffset, defined(littleEndian, !0));
|
||
return toArray ? arrayFrom(result) : result;
|
||
},
|
||
_setBytes: function(byteOffset, bytes, littleEndian) {
|
||
var length = bytes.length;
|
||
if (0 !== length) {
|
||
if (littleEndian = defined(littleEndian, this._littleEndian), byteOffset = defined(byteOffset, this._offset),
|
||
this._checkBounds(byteOffset, length), !littleEndian && length > 1 && (bytes = arrayFrom(bytes, !0).reverse()),
|
||
byteOffset += this.byteOffset, this._isArrayBuffer) new Uint8Array(this.buffer, byteOffset, length).set(bytes); else if (this._isNodeBuffer) new Buffer(bytes).copy(this.buffer, byteOffset); else for (var i = 0; length > i; i++) this.buffer[byteOffset + i] = bytes[i];
|
||
this._offset = byteOffset - this.byteOffset + length;
|
||
}
|
||
},
|
||
setBytes: function(byteOffset, bytes, littleEndian) {
|
||
this._setBytes(byteOffset, bytes, defined(littleEndian, !0));
|
||
},
|
||
getString: function(byteLength, byteOffset, encoding) {
|
||
if (this._isNodeBuffer) return byteOffset = defined(byteOffset, this._offset), byteLength = defined(byteLength, this.byteLength - byteOffset),
|
||
this._checkBounds(byteOffset, byteLength), this._offset = byteOffset + byteLength,
|
||
this.buffer.toString(encoding || "binary", this.byteOffset + byteOffset, this.byteOffset + this._offset);
|
||
var bytes = this._getBytes(byteLength, byteOffset, !0);
|
||
if (encoding = "utf8" === encoding ? "utf-8" : encoding || "binary", TextDecoder && "binary" !== encoding) return new TextDecoder(encoding).decode(this._isArrayBuffer ? bytes : new Uint8Array(bytes));
|
||
var string = "";
|
||
byteLength = bytes.length;
|
||
for (var i = 0; byteLength > i; i++) string += String.fromCharCode(bytes[i]);
|
||
return "utf-8" === encoding && (string = decodeURIComponent(escape(string))), string;
|
||
},
|
||
setString: function(byteOffset, subString, encoding) {
|
||
if (this._isNodeBuffer) return byteOffset = defined(byteOffset, this._offset), this._checkBounds(byteOffset, subString.length),
|
||
void (this._offset = byteOffset + this.buffer.write(subString, this.byteOffset + byteOffset, encoding || "binary"));
|
||
encoding = "utf8" === encoding ? "utf-8" : encoding || "binary";
|
||
var bytes;
|
||
TextEncoder && "binary" !== encoding ? bytes = new TextEncoder(encoding).encode(subString) : ("utf-8" === encoding && (subString = unescape(encodeURIComponent(subString))),
|
||
bytes = getCharCodes(subString)), this._setBytes(byteOffset, bytes, !0);
|
||
},
|
||
getChar: function(byteOffset) {
|
||
return this.getString(1, byteOffset);
|
||
},
|
||
setChar: function(byteOffset, character) {
|
||
this.setString(byteOffset, character);
|
||
},
|
||
tell: function() {
|
||
return this._offset;
|
||
},
|
||
seek: function(byteOffset) {
|
||
return this._checkBounds(byteOffset, 0), this._offset = byteOffset;
|
||
},
|
||
skip: function(byteLength) {
|
||
return this.seek(this._offset + byteLength);
|
||
},
|
||
slice: function(start, end, forceCopy) {
|
||
function normalizeOffset(offset, byteLength) {
|
||
return 0 > offset ? offset + byteLength : offset;
|
||
}
|
||
return start = normalizeOffset(start, this.byteLength), end = normalizeOffset(defined(end, this.byteLength), this.byteLength),
|
||
forceCopy ? new jDataView(this.getBytes(end - start, start, !0, !0), void 0, void 0, this._littleEndian) : new jDataView(this.buffer, this.byteOffset + start, end - start, this._littleEndian);
|
||
},
|
||
alignBy: function(byteCount) {
|
||
return this._bitOffset = 0, 1 !== defined(byteCount, 1) ? this.skip(byteCount - (this._offset % byteCount || byteCount)) : this._offset;
|
||
},
|
||
_getFloat64: function(byteOffset, littleEndian) {
|
||
var b = this._getBytes(8, byteOffset, littleEndian), sign = 1 - 2 * (b[7] >> 7), exponent = ((b[7] << 1 & 255) << 3 | b[6] >> 4) - 1023, mantissa = (15 & b[6]) * pow2(48) + b[5] * pow2(40) + b[4] * pow2(32) + b[3] * pow2(24) + b[2] * pow2(16) + b[1] * pow2(8) + b[0];
|
||
return 1024 === exponent ? 0 !== mantissa ? 0/0 : 1/0 * sign : -1023 === exponent ? sign * mantissa * pow2(-1074) : sign * (1 + mantissa * pow2(-52)) * pow2(exponent);
|
||
},
|
||
_getFloat32: function(byteOffset, littleEndian) {
|
||
var b = this._getBytes(4, byteOffset, littleEndian), sign = 1 - 2 * (b[3] >> 7), exponent = (b[3] << 1 & 255 | b[2] >> 7) - 127, mantissa = (127 & b[2]) << 16 | b[1] << 8 | b[0];
|
||
return 128 === exponent ? 0 !== mantissa ? 0/0 : 1/0 * sign : -127 === exponent ? sign * mantissa * pow2(-149) : sign * (1 + mantissa * pow2(-23)) * pow2(exponent);
|
||
},
|
||
_get64: function(Type, byteOffset, littleEndian) {
|
||
littleEndian = defined(littleEndian, this._littleEndian), byteOffset = defined(byteOffset, this._offset);
|
||
for (var parts = littleEndian ? [ 0, 4 ] : [ 4, 0 ], i = 0; 2 > i; i++) parts[i] = this.getUint32(byteOffset + parts[i], littleEndian);
|
||
return this._offset = byteOffset + 8, new Type(parts[0], parts[1]);
|
||
},
|
||
getInt64: function(byteOffset, littleEndian) {
|
||
return this._get64(Int64, byteOffset, littleEndian);
|
||
},
|
||
getUint64: function(byteOffset, littleEndian) {
|
||
return this._get64(Uint64, byteOffset, littleEndian);
|
||
},
|
||
_getInt32: function(byteOffset, littleEndian) {
|
||
var b = this._getBytes(4, byteOffset, littleEndian);
|
||
return b[3] << 24 | b[2] << 16 | b[1] << 8 | b[0];
|
||
},
|
||
_getUint32: function(byteOffset, littleEndian) {
|
||
return this._getInt32(byteOffset, littleEndian) >>> 0;
|
||
},
|
||
_getInt16: function(byteOffset, littleEndian) {
|
||
return this._getUint16(byteOffset, littleEndian) << 16 >> 16;
|
||
},
|
||
_getUint16: function(byteOffset, littleEndian) {
|
||
var b = this._getBytes(2, byteOffset, littleEndian);
|
||
return b[1] << 8 | b[0];
|
||
},
|
||
_getInt8: function(byteOffset) {
|
||
return this._getUint8(byteOffset) << 24 >> 24;
|
||
},
|
||
_getUint8: function(byteOffset) {
|
||
return this._getBytes(1, byteOffset)[0];
|
||
},
|
||
_getBitRangeData: function(bitLength, byteOffset) {
|
||
var startBit = (defined(byteOffset, this._offset) << 3) + this._bitOffset, endBit = startBit + bitLength, start = startBit >>> 3, end = endBit + 7 >>> 3, b = this._getBytes(end - start, start, !0), wideValue = 0;
|
||
(this._bitOffset = 7 & endBit) && (this._bitOffset -= 8);
|
||
for (var i = 0, length = b.length; length > i; i++) wideValue = wideValue << 8 | b[i];
|
||
return {
|
||
start: start,
|
||
bytes: b,
|
||
wideValue: wideValue
|
||
};
|
||
},
|
||
getSigned: function(bitLength, byteOffset) {
|
||
var shift = 32 - bitLength;
|
||
return this.getUnsigned(bitLength, byteOffset) << shift >> shift;
|
||
},
|
||
getUnsigned: function(bitLength, byteOffset) {
|
||
var value = this._getBitRangeData(bitLength, byteOffset).wideValue >>> -this._bitOffset;
|
||
return 32 > bitLength ? value & ~(-1 << bitLength) : value;
|
||
},
|
||
_setBinaryFloat: function(byteOffset, value, mantSize, expSize, littleEndian) {
|
||
var exponent, mantissa, signBit = 0 > value ? 1 : 0, eMax = ~(-1 << expSize - 1), eMin = 1 - eMax;
|
||
0 > value && (value = -value), 0 === value ? (exponent = 0, mantissa = 0) : isNaN(value) ? (exponent = 2 * eMax + 1,
|
||
mantissa = 1) : 1/0 === value ? (exponent = 2 * eMax + 1, mantissa = 0) : (exponent = Math.floor(Math.log(value) / Math.LN2),
|
||
exponent >= eMin && eMax >= exponent ? (mantissa = Math.floor((value * pow2(-exponent) - 1) * pow2(mantSize)),
|
||
exponent += eMax) : (mantissa = Math.floor(value / pow2(eMin - mantSize)), exponent = 0));
|
||
for (var b = []; mantSize >= 8; ) b.push(mantissa % 256), mantissa = Math.floor(mantissa / 256),
|
||
mantSize -= 8;
|
||
for (exponent = exponent << mantSize | mantissa, expSize += mantSize; expSize >= 8; ) b.push(255 & exponent),
|
||
exponent >>>= 8, expSize -= 8;
|
||
b.push(signBit << expSize | exponent), this._setBytes(byteOffset, b, littleEndian);
|
||
},
|
||
_setFloat32: function(byteOffset, value, littleEndian) {
|
||
this._setBinaryFloat(byteOffset, value, 23, 8, littleEndian);
|
||
},
|
||
_setFloat64: function(byteOffset, value, littleEndian) {
|
||
this._setBinaryFloat(byteOffset, value, 52, 11, littleEndian);
|
||
},
|
||
_set64: function(Type, byteOffset, value, littleEndian) {
|
||
"object" != typeof value && (value = Type.fromNumber(value)), littleEndian = defined(littleEndian, this._littleEndian),
|
||
byteOffset = defined(byteOffset, this._offset);
|
||
var parts = littleEndian ? {
|
||
lo: 0,
|
||
hi: 4
|
||
} : {
|
||
lo: 4,
|
||
hi: 0
|
||
};
|
||
for (var partName in parts) this.setUint32(byteOffset + parts[partName], value[partName], littleEndian);
|
||
this._offset = byteOffset + 8;
|
||
},
|
||
setInt64: function(byteOffset, value, littleEndian) {
|
||
this._set64(Int64, byteOffset, value, littleEndian);
|
||
},
|
||
setUint64: function(byteOffset, value, littleEndian) {
|
||
this._set64(Uint64, byteOffset, value, littleEndian);
|
||
},
|
||
_setUint32: function(byteOffset, value, littleEndian) {
|
||
this._setBytes(byteOffset, [ 255 & value, value >>> 8 & 255, value >>> 16 & 255, value >>> 24 ], littleEndian);
|
||
},
|
||
_setUint16: function(byteOffset, value, littleEndian) {
|
||
this._setBytes(byteOffset, [ 255 & value, value >>> 8 & 255 ], littleEndian);
|
||
},
|
||
_setUint8: function(byteOffset, value) {
|
||
this._setBytes(byteOffset, [ 255 & value ]);
|
||
},
|
||
setUnsigned: function(byteOffset, value, bitLength) {
|
||
var data = this._getBitRangeData(bitLength, byteOffset), wideValue = data.wideValue, b = data.bytes;
|
||
wideValue &= ~(~(-1 << bitLength) << -this._bitOffset), wideValue |= (32 > bitLength ? value & ~(-1 << bitLength) : value) << -this._bitOffset;
|
||
for (var i = b.length - 1; i >= 0; i--) b[i] = 255 & wideValue, wideValue >>>= 8;
|
||
this._setBytes(data.start, b, !0);
|
||
}
|
||
}, nodeNaming = {
|
||
Int8: "Int8",
|
||
Int16: "Int16",
|
||
Int32: "Int32",
|
||
Uint8: "UInt8",
|
||
Uint16: "UInt16",
|
||
Uint32: "UInt32",
|
||
Float32: "Float",
|
||
Float64: "Double"
|
||
};
|
||
proto._nodeBufferAction = function(type, isReadAction, byteOffset, littleEndian, value) {
|
||
this._offset = byteOffset + dataTypes[type];
|
||
var nodeName = nodeNaming[type] + ("Int8" === type || "Uint8" === type ? "" : littleEndian ? "LE" : "BE");
|
||
return byteOffset += this.byteOffset, isReadAction ? this.buffer["read" + nodeName](byteOffset) : this.buffer["write" + nodeName](value, byteOffset);
|
||
};
|
||
for (var type in dataTypes) !function(type) {
|
||
proto["get" + type] = function(byteOffset, littleEndian) {
|
||
return this._action(type, !0, byteOffset, littleEndian);
|
||
}, proto["set" + type] = function(byteOffset, value, littleEndian) {
|
||
this._action(type, !1, byteOffset, littleEndian, value);
|
||
};
|
||
}(type);
|
||
proto._setInt32 = proto._setUint32, proto._setInt16 = proto._setUint16, proto._setInt8 = proto._setUint8,
|
||
proto.setSigned = proto.setUnsigned;
|
||
for (var method in proto) "set" === method.slice(0, 3) && !function(type) {
|
||
proto["write" + type] = function() {
|
||
Array.prototype.unshift.call(arguments, void 0), this["set" + type].apply(this, arguments);
|
||
};
|
||
}(method.slice(3));
|
||
return jDataView;
|
||
});
|
||
}).call(this)}).call(this,require("buffer").Buffer)
|
||
|
||
},{"buffer":3}],8:[function(require,module,exports){
|
||
// shim for using process in browser
|
||
var process = module.exports = {};
|
||
|
||
// cached from whatever global is present so that test runners that stub it
|
||
// don't break things. But we need to wrap it in a try catch in case it is
|
||
// wrapped in strict mode code which doesn't define any globals. It's inside a
|
||
// function because try/catches deoptimize in certain engines.
|
||
|
||
var cachedSetTimeout;
|
||
var cachedClearTimeout;
|
||
|
||
function defaultSetTimout() {
|
||
throw new Error('setTimeout has not been defined');
|
||
}
|
||
function defaultClearTimeout () {
|
||
throw new Error('clearTimeout has not been defined');
|
||
}
|
||
(function () {
|
||
try {
|
||
if (typeof setTimeout === 'function') {
|
||
cachedSetTimeout = setTimeout;
|
||
} else {
|
||
cachedSetTimeout = defaultSetTimout;
|
||
}
|
||
} catch (e) {
|
||
cachedSetTimeout = defaultSetTimout;
|
||
}
|
||
try {
|
||
if (typeof clearTimeout === 'function') {
|
||
cachedClearTimeout = clearTimeout;
|
||
} else {
|
||
cachedClearTimeout = defaultClearTimeout;
|
||
}
|
||
} catch (e) {
|
||
cachedClearTimeout = defaultClearTimeout;
|
||
}
|
||
} ())
|
||
function runTimeout(fun) {
|
||
if (cachedSetTimeout === setTimeout) {
|
||
// normal environments in sane situations
|
||
return setTimeout(fun, 0);
|
||
}
|
||
// if setTimeout wasn't available but was later defined
|
||
if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
|
||
cachedSetTimeout = setTimeout;
|
||
return setTimeout(fun, 0);
|
||
}
|
||
try {
|
||
// when somebody has screwed with setTimeout but without IE madness
|
||
return cachedSetTimeout(fun, 0);
|
||
} catch(e){
|
||
try {
|
||
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
|
||
return cachedSetTimeout.call(null, fun, 0);
|
||
} catch(e){
|
||
// same as above, but for a version of IE that needs the global object as 'this'; hopefully our context is correct, otherwise it will throw a global error
|
||
return cachedSetTimeout.call(this, fun, 0);
|
||
}
|
||
}
|
||
|
||
|
||
}
|
||
function runClearTimeout(marker) {
|
||
if (cachedClearTimeout === clearTimeout) {
|
||
// normal environments in sane situations
|
||
return clearTimeout(marker);
|
||
}
|
||
// if clearTimeout wasn't available but was later defined
|
||
if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
|
||
cachedClearTimeout = clearTimeout;
|
||
return clearTimeout(marker);
|
||
}
|
||
try {
|
||
// when somebody has screwed with clearTimeout but without IE madness
|
||
return cachedClearTimeout(marker);
|
||
} catch (e){
|
||
try {
|
||
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
|
||
return cachedClearTimeout.call(null, marker);
|
||
} catch (e){
|
||
// same as above, but for a version of IE that needs the global object as 'this'; hopefully our context is correct, otherwise it will throw a global error.
|
||
// Some versions of I.E. have different rules for clearTimeout vs setTimeout
|
||
return cachedClearTimeout.call(this, marker);
|
||
}
|
||
}
|
||
|
||
|
||
|
||
}
|
||
var queue = [];
|
||
var draining = false;
|
||
var currentQueue;
|
||
var queueIndex = -1;
|
||
|
||
function cleanUpNextTick() {
|
||
if (!draining || !currentQueue) {
|
||
return;
|
||
}
|
||
draining = false;
|
||
if (currentQueue.length) {
|
||
queue = currentQueue.concat(queue);
|
||
} else {
|
||
queueIndex = -1;
|
||
}
|
||
if (queue.length) {
|
||
drainQueue();
|
||
}
|
||
}
|
||
|
||
function drainQueue() {
|
||
if (draining) {
|
||
return;
|
||
}
|
||
var timeout = runTimeout(cleanUpNextTick);
|
||
draining = true;
|
||
|
||
var len = queue.length;
|
||
while(len) {
|
||
currentQueue = queue;
|
||
queue = [];
|
||
while (++queueIndex < len) {
|
||
if (currentQueue) {
|
||
currentQueue[queueIndex].run();
|
||
}
|
||
}
|
||
queueIndex = -1;
|
||
len = queue.length;
|
||
}
|
||
currentQueue = null;
|
||
draining = false;
|
||
runClearTimeout(timeout);
|
||
}
|
||
|
||
process.nextTick = function (fun) {
|
||
var args = new Array(arguments.length - 1);
|
||
if (arguments.length > 1) {
|
||
for (var i = 1; i < arguments.length; i++) {
|
||
args[i - 1] = arguments[i];
|
||
}
|
||
}
|
||
queue.push(new Item(fun, args));
|
||
if (queue.length === 1 && !draining) {
|
||
runTimeout(drainQueue);
|
||
}
|
||
};
|
||
|
||
// v8 likes predictable objects
|
||
function Item(fun, array) {
|
||
this.fun = fun;
|
||
this.array = array;
|
||
}
|
||
Item.prototype.run = function () {
|
||
this.fun.apply(null, this.array);
|
||
};
|
||
process.title = 'browser';
|
||
process.browser = true;
|
||
process.env = {};
|
||
process.argv = [];
|
||
process.version = ''; // empty string to avoid regexp issues
|
||
process.versions = {};
|
||
|
||
function noop() {}
|
||
|
||
process.on = noop;
|
||
process.addListener = noop;
|
||
process.once = noop;
|
||
process.off = noop;
|
||
process.removeListener = noop;
|
||
process.removeAllListeners = noop;
|
||
process.emit = noop;
|
||
process.prependListener = noop;
|
||
process.prependOnceListener = noop;
|
||
|
||
process.listeners = function (name) { return [] }
|
||
|
||
process.binding = function (name) {
|
||
throw new Error('process.binding is not supported');
|
||
};
|
||
|
||
process.cwd = function () { return '/' };
|
||
process.chdir = function (dir) {
|
||
throw new Error('process.chdir is not supported');
|
||
};
|
||
process.umask = function() { return 0; };
|
||
|
||
},{}],9:[function(require,module,exports){
|
||
/* eslint-disable node/no-deprecated-api */
|
||
var buffer = require('buffer')
|
||
var Buffer = buffer.Buffer
|
||
|
||
// alternative to using Object.keys for old browsers
|
||
function copyProps (src, dst) {
|
||
for (var key in src) {
|
||
dst[key] = src[key]
|
||
}
|
||
}
|
||
if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
|
||
module.exports = buffer
|
||
} else {
|
||
// Copy properties from require('buffer')
|
||
copyProps(buffer, exports)
|
||
exports.Buffer = SafeBuffer
|
||
}
|
||
|
||
function SafeBuffer (arg, encodingOrOffset, length) {
|
||
return Buffer(arg, encodingOrOffset, length)
|
||
}
|
||
|
||
// Copy static methods from Buffer
|
||
copyProps(Buffer, SafeBuffer)
|
||
|
||
SafeBuffer.from = function (arg, encodingOrOffset, length) {
|
||
if (typeof arg === 'number') {
|
||
throw new TypeError('Argument must not be a number')
|
||
}
|
||
return Buffer(arg, encodingOrOffset, length)
|
||
}
|
||
|
||
SafeBuffer.alloc = function (size, fill, encoding) {
|
||
if (typeof size !== 'number') {
|
||
throw new TypeError('Argument must be a number')
|
||
}
|
||
var buf = Buffer(size)
|
||
if (fill !== undefined) {
|
||
if (typeof encoding === 'string') {
|
||
buf.fill(fill, encoding)
|
||
} else {
|
||
buf.fill(fill)
|
||
}
|
||
} else {
|
||
buf.fill(0)
|
||
}
|
||
return buf
|
||
}
|
||
|
||
SafeBuffer.allocUnsafe = function (size) {
|
||
if (typeof size !== 'number') {
|
||
throw new TypeError('Argument must be a number')
|
||
}
|
||
return Buffer(size)
|
||
}
|
||
|
||
SafeBuffer.allocUnsafeSlow = function (size) {
|
||
if (typeof size !== 'number') {
|
||
throw new TypeError('Argument must be a number')
|
||
}
|
||
return buffer.SlowBuffer(size)
|
||
}
|
||
|
||
},{"buffer":3}],10:[function(require,module,exports){
|
||
(function (Buffer){(function (){
|
||
;(function (sax) { // wrapper for non-node envs
|
||
sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
|
||
sax.SAXParser = SAXParser
|
||
sax.SAXStream = SAXStream
|
||
sax.createStream = createStream
|
||
|
||
// When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
|
||
// When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),
|
||
// since that's the earliest that a buffer overrun could occur. This way, checks are
|
||
// as rare as required, but as often as necessary to ensure never crossing this bound.
|
||
// Furthermore, buffers are only tested at most once per write(), so passing a very
|
||
// large string into write() might have undesirable effects, but this is manageable by
|
||
// the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme
|
||
// edge case, result in creating at most one complete copy of the string passed in.
|
||
// Set to Infinity to have unlimited buffers.
|
||
sax.MAX_BUFFER_LENGTH = 64 * 1024
|
||
|
||
var buffers = [
|
||
'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
|
||
'procInstName', 'procInstBody', 'entity', 'attribName',
|
||
'attribValue', 'cdata', 'script'
|
||
]
|
||
|
||
sax.EVENTS = [
|
||
'text',
|
||
'processinginstruction',
|
||
'sgmldeclaration',
|
||
'doctype',
|
||
'comment',
|
||
'opentagstart',
|
||
'attribute',
|
||
'opentag',
|
||
'closetag',
|
||
'opencdata',
|
||
'cdata',
|
||
'closecdata',
|
||
'error',
|
||
'end',
|
||
'ready',
|
||
'script',
|
||
'opennamespace',
|
||
'closenamespace'
|
||
]
|
||
|
||
function SAXParser (strict, opt) {
|
||
if (!(this instanceof SAXParser)) {
|
||
return new SAXParser(strict, opt)
|
||
}
|
||
|
||
var parser = this
|
||
clearBuffers(parser)
|
||
parser.q = parser.c = ''
|
||
parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH
|
||
parser.opt = opt || {}
|
||
parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags
|
||
parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'
|
||
parser.tags = []
|
||
parser.closed = parser.closedRoot = parser.sawRoot = false
|
||
parser.tag = parser.error = null
|
||
parser.strict = !!strict
|
||
parser.noscript = !!(strict || parser.opt.noscript)
|
||
parser.state = S.BEGIN
|
||
parser.strictEntities = parser.opt.strictEntities
|
||
parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
|
||
parser.attribList = []
|
||
|
||
// namespaces form a prototype chain.
|
||
// it always points at the current tag,
|
||
// which protos to its parent tag.
|
||
if (parser.opt.xmlns) {
|
||
parser.ns = Object.create(rootNS)
|
||
}
|
||
|
||
// mostly just for error reporting
|
||
parser.trackPosition = parser.opt.position !== false
|
||
if (parser.trackPosition) {
|
||
parser.position = parser.line = parser.column = 0
|
||
}
|
||
emit(parser, 'onready')
|
||
}
|
||
|
||
if (!Object.create) {
|
||
Object.create = function (o) {
|
||
function F () {}
|
||
F.prototype = o
|
||
var newf = new F()
|
||
return newf
|
||
}
|
||
}
|
||
|
||
if (!Object.keys) {
|
||
Object.keys = function (o) {
|
||
var a = []
|
||
for (var i in o) if (o.hasOwnProperty(i)) a.push(i)
|
||
return a
|
||
}
|
||
}
|
||
|
||
  function checkBufferLength (parser) {
    var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)
    var maxActual = 0
    for (var i = 0, l = buffers.length; i < l; i++) {
      var len = parser[buffers[i]].length
      if (len > maxAllowed) {
        // Text/cdata nodes can get big, and since they're buffered,
        // we can get here under normal conditions.
        // Avoid issues by emitting the text node now,
        // so at least it won't get any bigger.
        switch (buffers[i]) {
          case 'textNode':
            closeText(parser)
            break

          case 'cdata':
            emitNode(parser, 'oncdata', parser.cdata)
            parser.cdata = ''
            break

          case 'script':
            emitNode(parser, 'onscript', parser.script)
            parser.script = ''
            break

          default:
            error(parser, 'Max buffer length exceeded: ' + buffers[i])
        }
      }
      maxActual = Math.max(maxActual, len)
    }
    // schedule the next check for the earliest possible buffer overrun.
    var m = sax.MAX_BUFFER_LENGTH - maxActual
    parser.bufferCheckPosition = m + parser.position
  }

  function clearBuffers (parser) {
    for (var i = 0, l = buffers.length; i < l; i++) {
      parser[buffers[i]] = ''
    }
  }

  function flushBuffers (parser) {
    closeText(parser)
    if (parser.cdata !== '') {
      emitNode(parser, 'oncdata', parser.cdata)
      parser.cdata = ''
    }
    if (parser.script !== '') {
      emitNode(parser, 'onscript', parser.script)
      parser.script = ''
    }
  }

  SAXParser.prototype = {
    end: function () { end(this) },
    write: write,
    resume: function () { this.error = null; return this },
    close: function () { return this.write(null) },
    flush: function () { flushBuffers(this) }
  }
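
  // Usage sketch, illustrative only and not executed as part of this bundle:
  // the low-level parser is driven by assigning handler properties and calling
  // write()/close(). Handler names follow the standard sax API (onready,
  // onopentag, ontext, onerror, onend, ...).
  //
  //   var p = sax.parser(true)                  // strict mode
  //   p.onopentag = function (node) { /* node.name, node.attributes */ }
  //   p.ontext = function (text) { /* character data */ }
  //   p.onerror = function (er) { p.resume() }  // clear the error to keep writing
  //   p.write('<root><a foo="bar">hi</a></root>').close()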

  var Stream
  try {
    Stream = require('stream').Stream
  } catch (ex) {
    Stream = function () {}
  }

  var streamWraps = sax.EVENTS.filter(function (ev) {
    return ev !== 'error' && ev !== 'end'
  })

  function createStream (strict, opt) {
    return new SAXStream(strict, opt)
  }

  function SAXStream (strict, opt) {
    if (!(this instanceof SAXStream)) {
      return new SAXStream(strict, opt)
    }

    Stream.apply(this)

    this._parser = new SAXParser(strict, opt)
    this.writable = true
    this.readable = true

    var me = this

    this._parser.onend = function () {
      me.emit('end')
    }

    this._parser.onerror = function (er) {
      me.emit('error', er)

      // if didn't throw, then means error was handled.
      // go ahead and clear error, so we can write again.
      me._parser.error = null
    }

    this._decoder = null

    streamWraps.forEach(function (ev) {
      Object.defineProperty(me, 'on' + ev, {
        get: function () {
          return me._parser['on' + ev]
        },
        set: function (h) {
          if (!h) {
            me.removeAllListeners(ev)
            me._parser['on' + ev] = h
            return h
          }
          me.on(ev, h)
        },
        enumerable: true,
        configurable: false
      })
    })
  }

  SAXStream.prototype = Object.create(Stream.prototype, {
    constructor: {
      value: SAXStream
    }
  })

  SAXStream.prototype.write = function (data) {
    if (typeof Buffer === 'function' &&
      typeof Buffer.isBuffer === 'function' &&
      Buffer.isBuffer(data)) {
      if (!this._decoder) {
        var SD = require('string_decoder').StringDecoder
        this._decoder = new SD('utf8')
      }
      data = this._decoder.write(data)
    }

    this._parser.write(data.toString())
    this.emit('data', data)
    return true
  }

  SAXStream.prototype.end = function (chunk) {
    if (chunk && chunk.length) {
      this.write(chunk)
    }
    this._parser.end()
    return true
  }

  SAXStream.prototype.on = function (ev, handler) {
    var me = this
    if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {
      me._parser['on' + ev] = function () {
        var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)
        args.splice(0, 0, ev)
        me.emit.apply(me, args)
      }
    }

    return Stream.prototype.on.call(me, ev, handler)
  }
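
  // Usage sketch, illustrative only and not executed as part of this bundle:
  // the stream wrapper re-emits parser events and accepts piped input. `src`
  // here stands for any Node-style readable stream supplying XML text.
  //
  //   var xmlStream = sax.createStream(true, { xmlns: true })
  //   xmlStream.on('opentag', function (node) { /* ... */ })
  //   xmlStream.on('error', function (er) { /* handled, parsing continues */ })
  //   src.pipe(xmlStream)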

  // this really needs to be replaced with character classes.
  // XML allows all manner of ridiculous numbers and digits.
  var CDATA = '[CDATA['
  var DOCTYPE = 'DOCTYPE'
  var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
  var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'
  var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }

// http://www.w3.org/TR/REC-xml/#NT-NameStartChar
|
||
// This implementation works on strings, a single character at a time
|
||
// as such, it cannot ever support astral-plane characters (10000-EFFFF)
|
||
// without a significant breaking change to either this parser, or the
|
||
// JavaScript language. Implementation of an emoji-capable xml parser
|
||
// is left as an exercise for the reader.
|
||
var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
|
||
|
||
var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
|
||
|
||
var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
|
||
var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
|
||
|
||
function isWhitespace (c) {
|
||
return c === ' ' || c === '\n' || c === '\r' || c === '\t'
|
||
}
|
||
|
||
function isQuote (c) {
|
||
return c === '"' || c === '\''
|
||
}
|
||
|
||
function isAttribEnd (c) {
|
||
return c === '>' || isWhitespace(c)
|
||
}
|
||
|
||
function isMatch (regex, c) {
|
||
return regex.test(c)
|
||
}
|
||
|
||
function notMatch (regex, c) {
|
||
return !isMatch(regex, c)
|
||
}
|
||
|
||
var S = 0
|
||
sax.STATE = {
|
||
BEGIN: S++, // leading byte order mark or whitespace
|
||
BEGIN_WHITESPACE: S++, // leading whitespace
|
||
TEXT: S++, // general stuff
|
||
TEXT_ENTITY: S++, // & and such.
|
||
OPEN_WAKA: S++, // <
|
||
SGML_DECL: S++, // <!BLARG
|
||
SGML_DECL_QUOTED: S++, // <!BLARG foo "bar
|
||
DOCTYPE: S++, // <!DOCTYPE
|
||
DOCTYPE_QUOTED: S++, // <!DOCTYPE "//blah
|
||
DOCTYPE_DTD: S++, // <!DOCTYPE "//blah" [ ...
|
||
DOCTYPE_DTD_QUOTED: S++, // <!DOCTYPE "//blah" [ "foo
|
||
COMMENT_STARTING: S++, // <!-
|
||
COMMENT: S++, // <!--
|
||
COMMENT_ENDING: S++, // <!-- blah -
|
||
COMMENT_ENDED: S++, // <!-- blah --
|
||
CDATA: S++, // <![CDATA[ something
|
||
CDATA_ENDING: S++, // ]
|
||
CDATA_ENDING_2: S++, // ]]
|
||
PROC_INST: S++, // <?hi
|
||
PROC_INST_BODY: S++, // <?hi there
|
||
PROC_INST_ENDING: S++, // <?hi "there" ?
|
||
OPEN_TAG: S++, // <strong
|
||
OPEN_TAG_SLASH: S++, // <strong /
|
||
ATTRIB: S++, // <a
|
||
ATTRIB_NAME: S++, // <a foo
|
||
ATTRIB_NAME_SAW_WHITE: S++, // <a foo _
|
||
ATTRIB_VALUE: S++, // <a foo=
|
||
ATTRIB_VALUE_QUOTED: S++, // <a foo="bar
|
||
ATTRIB_VALUE_CLOSED: S++, // <a foo="bar"
|
||
ATTRIB_VALUE_UNQUOTED: S++, // <a foo=bar
|
||
ATTRIB_VALUE_ENTITY_Q: S++, // <foo bar="""
|
||
ATTRIB_VALUE_ENTITY_U: S++, // <foo bar="
|
||
CLOSE_TAG: S++, // </a
|
||
CLOSE_TAG_SAW_WHITE: S++, // </a >
|
||
SCRIPT: S++, // <script> ...
|
||
SCRIPT_ENDING: S++ // <script> ... <
|
||
}
|
||
|
||
sax.XML_ENTITIES = {
|
||
'amp': '&',
|
||
'gt': '>',
|
||
'lt': '<',
|
||
'quot': '"',
|
||
'apos': "'"
|
||
}
|
||
|
||
sax.ENTITIES = {
|
||
'amp': '&',
|
||
'gt': '>',
|
||
'lt': '<',
|
||
'quot': '"',
|
||
'apos': "'",
|
||
'AElig': 198,
|
||
'Aacute': 193,
|
||
'Acirc': 194,
|
||
'Agrave': 192,
|
||
'Aring': 197,
|
||
'Atilde': 195,
|
||
'Auml': 196,
|
||
'Ccedil': 199,
|
||
'ETH': 208,
|
||
'Eacute': 201,
|
||
'Ecirc': 202,
|
||
'Egrave': 200,
|
||
'Euml': 203,
|
||
'Iacute': 205,
|
||
'Icirc': 206,
|
||
'Igrave': 204,
|
||
'Iuml': 207,
|
||
'Ntilde': 209,
|
||
'Oacute': 211,
|
||
'Ocirc': 212,
|
||
'Ograve': 210,
|
||
'Oslash': 216,
|
||
'Otilde': 213,
|
||
'Ouml': 214,
|
||
'THORN': 222,
|
||
'Uacute': 218,
|
||
'Ucirc': 219,
|
||
'Ugrave': 217,
|
||
'Uuml': 220,
|
||
'Yacute': 221,
|
||
'aacute': 225,
|
||
'acirc': 226,
|
||
'aelig': 230,
|
||
'agrave': 224,
|
||
'aring': 229,
|
||
'atilde': 227,
|
||
'auml': 228,
|
||
'ccedil': 231,
|
||
'eacute': 233,
|
||
'ecirc': 234,
|
||
'egrave': 232,
|
||
'eth': 240,
|
||
'euml': 235,
|
||
'iacute': 237,
|
||
'icirc': 238,
|
||
'igrave': 236,
|
||
'iuml': 239,
|
||
'ntilde': 241,
|
||
'oacute': 243,
|
||
'ocirc': 244,
|
||
'ograve': 242,
|
||
'oslash': 248,
|
||
'otilde': 245,
|
||
'ouml': 246,
|
||
'szlig': 223,
|
||
'thorn': 254,
|
||
'uacute': 250,
|
||
'ucirc': 251,
|
||
'ugrave': 249,
|
||
'uuml': 252,
|
||
'yacute': 253,
|
||
'yuml': 255,
|
||
'copy': 169,
|
||
'reg': 174,
|
||
'nbsp': 160,
|
||
'iexcl': 161,
|
||
'cent': 162,
|
||
'pound': 163,
|
||
'curren': 164,
|
||
'yen': 165,
|
||
'brvbar': 166,
|
||
'sect': 167,
|
||
'uml': 168,
|
||
'ordf': 170,
|
||
'laquo': 171,
|
||
'not': 172,
|
||
'shy': 173,
|
||
'macr': 175,
|
||
'deg': 176,
|
||
'plusmn': 177,
|
||
'sup1': 185,
|
||
'sup2': 178,
|
||
'sup3': 179,
|
||
'acute': 180,
|
||
'micro': 181,
|
||
'para': 182,
|
||
'middot': 183,
|
||
'cedil': 184,
|
||
'ordm': 186,
|
||
'raquo': 187,
|
||
'frac14': 188,
|
||
'frac12': 189,
|
||
'frac34': 190,
|
||
'iquest': 191,
|
||
'times': 215,
|
||
'divide': 247,
|
||
'OElig': 338,
|
||
'oelig': 339,
|
||
'Scaron': 352,
|
||
'scaron': 353,
|
||
'Yuml': 376,
|
||
'fnof': 402,
|
||
'circ': 710,
|
||
'tilde': 732,
|
||
'Alpha': 913,
|
||
'Beta': 914,
|
||
'Gamma': 915,
|
||
'Delta': 916,
|
||
'Epsilon': 917,
|
||
'Zeta': 918,
|
||
'Eta': 919,
|
||
'Theta': 920,
|
||
'Iota': 921,
|
||
'Kappa': 922,
|
||
'Lambda': 923,
|
||
'Mu': 924,
|
||
'Nu': 925,
|
||
'Xi': 926,
|
||
'Omicron': 927,
|
||
'Pi': 928,
|
||
'Rho': 929,
|
||
'Sigma': 931,
|
||
'Tau': 932,
|
||
'Upsilon': 933,
|
||
'Phi': 934,
|
||
'Chi': 935,
|
||
'Psi': 936,
|
||
'Omega': 937,
|
||
'alpha': 945,
|
||
'beta': 946,
|
||
'gamma': 947,
|
||
'delta': 948,
|
||
'epsilon': 949,
|
||
'zeta': 950,
|
||
'eta': 951,
|
||
'theta': 952,
|
||
'iota': 953,
|
||
'kappa': 954,
|
||
'lambda': 955,
|
||
'mu': 956,
|
||
'nu': 957,
|
||
'xi': 958,
|
||
'omicron': 959,
|
||
'pi': 960,
|
||
'rho': 961,
|
||
'sigmaf': 962,
|
||
'sigma': 963,
|
||
'tau': 964,
|
||
'upsilon': 965,
|
||
'phi': 966,
|
||
'chi': 967,
|
||
'psi': 968,
|
||
'omega': 969,
|
||
'thetasym': 977,
|
||
'upsih': 978,
|
||
'piv': 982,
|
||
'ensp': 8194,
|
||
'emsp': 8195,
|
||
'thinsp': 8201,
|
||
'zwnj': 8204,
|
||
'zwj': 8205,
|
||
'lrm': 8206,
|
||
'rlm': 8207,
|
||
'ndash': 8211,
|
||
'mdash': 8212,
|
||
'lsquo': 8216,
|
||
'rsquo': 8217,
|
||
'sbquo': 8218,
|
||
'ldquo': 8220,
|
||
'rdquo': 8221,
|
||
'bdquo': 8222,
|
||
'dagger': 8224,
|
||
'Dagger': 8225,
|
||
'bull': 8226,
|
||
'hellip': 8230,
|
||
'permil': 8240,
|
||
'prime': 8242,
|
||
'Prime': 8243,
|
||
'lsaquo': 8249,
|
||
'rsaquo': 8250,
|
||
'oline': 8254,
|
||
'frasl': 8260,
|
||
'euro': 8364,
|
||
'image': 8465,
|
||
'weierp': 8472,
|
||
'real': 8476,
|
||
'trade': 8482,
|
||
'alefsym': 8501,
|
||
'larr': 8592,
|
||
'uarr': 8593,
|
||
'rarr': 8594,
|
||
'darr': 8595,
|
||
'harr': 8596,
|
||
'crarr': 8629,
|
||
'lArr': 8656,
|
||
'uArr': 8657,
|
||
'rArr': 8658,
|
||
'dArr': 8659,
|
||
'hArr': 8660,
|
||
'forall': 8704,
|
||
'part': 8706,
|
||
'exist': 8707,
|
||
'empty': 8709,
|
||
'nabla': 8711,
|
||
'isin': 8712,
|
||
'notin': 8713,
|
||
'ni': 8715,
|
||
'prod': 8719,
|
||
'sum': 8721,
|
||
'minus': 8722,
|
||
'lowast': 8727,
|
||
'radic': 8730,
|
||
'prop': 8733,
|
||
'infin': 8734,
|
||
'ang': 8736,
|
||
'and': 8743,
|
||
'or': 8744,
|
||
'cap': 8745,
|
||
'cup': 8746,
|
||
'int': 8747,
|
||
'there4': 8756,
|
||
'sim': 8764,
|
||
'cong': 8773,
|
||
'asymp': 8776,
|
||
'ne': 8800,
|
||
'equiv': 8801,
|
||
'le': 8804,
|
||
'ge': 8805,
|
||
'sub': 8834,
|
||
'sup': 8835,
|
||
'nsub': 8836,
|
||
'sube': 8838,
|
||
'supe': 8839,
|
||
'oplus': 8853,
|
||
'otimes': 8855,
|
||
'perp': 8869,
|
||
'sdot': 8901,
|
||
'lceil': 8968,
|
||
'rceil': 8969,
|
||
'lfloor': 8970,
|
||
'rfloor': 8971,
|
||
'lang': 9001,
|
||
'rang': 9002,
|
||
'loz': 9674,
|
||
'spades': 9824,
|
||
'clubs': 9827,
|
||
'hearts': 9829,
|
||
'diams': 9830
|
||
}
|
||
|
||
Object.keys(sax.ENTITIES).forEach(function (key) {
|
||
var e = sax.ENTITIES[key]
|
||
var s = typeof e === 'number' ? String.fromCharCode(e) : e
|
||
sax.ENTITIES[key] = s
|
||
})
|
||
|
||
for (var s in sax.STATE) {
|
||
sax.STATE[sax.STATE[s]] = s
|
||
}
|
||
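// Illustrative note, not from the upstream sax source: after this loop the
// STATE table maps both ways, e.g. sax.STATE.BEGIN === 0 and
// sax.STATE[0] === 'BEGIN', which is handy when logging parser.state.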
|
||
// shorthand
|
||
S = sax.STATE
|
||
|
||
function emit (parser, event, data) {
|
||
parser[event] && parser[event](data)
|
||
}
|
||
|
||
function emitNode (parser, nodeType, data) {
|
||
if (parser.textNode) closeText(parser)
|
||
emit(parser, nodeType, data)
|
||
}
|
||
|
||
function closeText (parser) {
|
||
parser.textNode = textopts(parser.opt, parser.textNode)
|
||
if (parser.textNode) emit(parser, 'ontext', parser.textNode)
|
||
parser.textNode = ''
|
||
}
|
||
|
||
function textopts (opt, text) {
|
||
if (opt.trim) text = text.trim()
|
||
if (opt.normalize) text = text.replace(/\s+/g, ' ')
|
||
return text
|
||
}
|
||
|
||
function error (parser, er) {
|
||
closeText(parser)
|
||
if (parser.trackPosition) {
|
||
er += '\nLine: ' + parser.line +
|
||
'\nColumn: ' + parser.column +
|
||
'\nChar: ' + parser.c
|
||
}
|
||
er = new Error(er)
|
||
parser.error = er
|
||
emit(parser, 'onerror', er)
|
||
return parser
|
||
}
|
||
|
||
function end (parser) {
|
||
if (parser.sawRoot && !parser.closedRoot) strictFail(parser, 'Unclosed root tag')
|
||
if ((parser.state !== S.BEGIN) &&
|
||
(parser.state !== S.BEGIN_WHITESPACE) &&
|
||
(parser.state !== S.TEXT)) {
|
||
error(parser, 'Unexpected end')
|
||
}
|
||
closeText(parser)
|
||
parser.c = ''
|
||
parser.closed = true
|
||
emit(parser, 'onend')
|
||
SAXParser.call(parser, parser.strict, parser.opt)
|
||
return parser
|
||
}
|
||
|
||
function strictFail (parser, message) {
|
||
if (typeof parser !== 'object' || !(parser instanceof SAXParser)) {
|
||
throw new Error('bad call to strictFail')
|
||
}
|
||
if (parser.strict) {
|
||
error(parser, message)
|
||
}
|
||
}
|
||
|
||
function newTag (parser) {
|
||
if (!parser.strict) parser.tagName = parser.tagName[parser.looseCase]()
|
||
var parent = parser.tags[parser.tags.length - 1] || parser
|
||
var tag = parser.tag = { name: parser.tagName, attributes: {} }
|
||
|
||
// will be overridden if tag contains an xmlns="foo" or xmlns:foo="bar"
|
||
if (parser.opt.xmlns) {
|
||
tag.ns = parent.ns
|
||
}
|
||
parser.attribList.length = 0
|
||
emitNode(parser, 'onopentagstart', tag)
|
||
}
|
||
|
||
function qname (name, attribute) {
|
||
var i = name.indexOf(':')
|
||
var qualName = i < 0 ? [ '', name ] : name.split(':')
|
||
var prefix = qualName[0]
|
||
var local = qualName[1]
|
||
|
||
// <x "xmlns"="http://foo">
|
||
if (attribute && name === 'xmlns') {
|
||
prefix = 'xmlns'
|
||
local = ''
|
||
}
|
||
|
||
return { prefix: prefix, local: local }
|
||
}
|
||
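// Illustrative examples, not from the upstream sax source:
//   qname('svg:rect')    -> { prefix: 'svg', local: 'rect' }
//   qname('rect')        -> { prefix: '', local: 'rect' }
//   qname('xmlns', true) -> { prefix: 'xmlns', local: '' }   (attribute form)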
|
||
function attrib (parser) {
|
||
if (!parser.strict) {
|
||
parser.attribName = parser.attribName[parser.looseCase]()
|
||
}
|
||
|
||
if (parser.attribList.indexOf(parser.attribName) !== -1 ||
|
||
parser.tag.attributes.hasOwnProperty(parser.attribName)) {
|
||
parser.attribName = parser.attribValue = ''
|
||
return
|
||
}
|
||
|
||
if (parser.opt.xmlns) {
|
||
var qn = qname(parser.attribName, true)
|
||
var prefix = qn.prefix
|
||
var local = qn.local
|
||
|
||
if (prefix === 'xmlns') {
|
||
// namespace binding attribute. push the binding into scope
|
||
if (local === 'xml' && parser.attribValue !== XML_NAMESPACE) {
|
||
strictFail(parser,
|
||
'xml: prefix must be bound to ' + XML_NAMESPACE + '\n' +
|
||
'Actual: ' + parser.attribValue)
|
||
} else if (local === 'xmlns' && parser.attribValue !== XMLNS_NAMESPACE) {
|
||
strictFail(parser,
|
||
'xmlns: prefix must be bound to ' + XMLNS_NAMESPACE + '\n' +
|
||
'Actual: ' + parser.attribValue)
|
||
} else {
|
||
var tag = parser.tag
|
||
var parent = parser.tags[parser.tags.length - 1] || parser
|
||
if (tag.ns === parent.ns) {
|
||
tag.ns = Object.create(parent.ns)
|
||
}
|
||
tag.ns[local] = parser.attribValue
|
||
}
|
||
}
|
||
|
||
// defer onattribute events until all attributes have been seen
|
||
// so any new bindings can take effect. preserve attribute order
|
||
// so deferred events can be emitted in document order
|
||
parser.attribList.push([parser.attribName, parser.attribValue])
|
||
} else {
|
||
// in non-xmlns mode, we can emit the event right away
|
||
parser.tag.attributes[parser.attribName] = parser.attribValue
|
||
emitNode(parser, 'onattribute', {
|
||
name: parser.attribName,
|
||
value: parser.attribValue
|
||
})
|
||
}
|
||
|
||
parser.attribName = parser.attribValue = ''
|
||
}
|
||
|
||
function openTag (parser, selfClosing) {
|
||
if (parser.opt.xmlns) {
|
||
// emit namespace binding events
|
||
var tag = parser.tag
|
||
|
||
// add namespace info to tag
|
||
var qn = qname(parser.tagName)
|
||
tag.prefix = qn.prefix
|
||
tag.local = qn.local
|
||
tag.uri = tag.ns[qn.prefix] || ''
|
||
|
||
if (tag.prefix && !tag.uri) {
|
||
strictFail(parser, 'Unbound namespace prefix: ' +
|
||
JSON.stringify(parser.tagName))
|
||
tag.uri = qn.prefix
|
||
}
|
||
|
||
var parent = parser.tags[parser.tags.length - 1] || parser
|
||
if (tag.ns && parent.ns !== tag.ns) {
|
||
Object.keys(tag.ns).forEach(function (p) {
|
||
emitNode(parser, 'onopennamespace', {
|
||
prefix: p,
|
||
uri: tag.ns[p]
|
||
})
|
||
})
|
||
}
|
||
|
||
// handle deferred onattribute events
|
||
// Note: do not apply default ns to attributes:
|
||
// http://www.w3.org/TR/REC-xml-names/#defaulting
|
||
for (var i = 0, l = parser.attribList.length; i < l; i++) {
|
||
var nv = parser.attribList[i]
|
||
var name = nv[0]
|
||
var value = nv[1]
|
||
var qualName = qname(name, true)
|
||
var prefix = qualName.prefix
|
||
var local = qualName.local
|
||
var uri = prefix === '' ? '' : (tag.ns[prefix] || '')
|
||
var a = {
|
||
name: name,
|
||
value: value,
|
||
prefix: prefix,
|
||
local: local,
|
||
uri: uri
|
||
}
|
||
|
||
// if there's any attributes with an undefined namespace,
|
||
// then fail on them now.
|
||
if (prefix && prefix !== 'xmlns' && !uri) {
|
||
strictFail(parser, 'Unbound namespace prefix: ' +
|
||
JSON.stringify(prefix))
|
||
a.uri = prefix
|
||
}
|
||
parser.tag.attributes[name] = a
|
||
emitNode(parser, 'onattribute', a)
|
||
}
|
||
parser.attribList.length = 0
|
||
}
|
||
|
||
parser.tag.isSelfClosing = !!selfClosing
|
||
|
||
// process the tag
|
||
parser.sawRoot = true
|
||
parser.tags.push(parser.tag)
|
||
emitNode(parser, 'onopentag', parser.tag)
|
||
if (!selfClosing) {
|
||
// special case for <script> in non-strict mode.
|
||
if (!parser.noscript && parser.tagName.toLowerCase() === 'script') {
|
||
parser.state = S.SCRIPT
|
||
} else {
|
||
parser.state = S.TEXT
|
||
}
|
||
parser.tag = null
|
||
parser.tagName = ''
|
||
}
|
||
parser.attribName = parser.attribValue = ''
|
||
parser.attribList.length = 0
|
||
}
|
||
|
||
function closeTag (parser) {
|
||
if (!parser.tagName) {
|
||
strictFail(parser, 'Weird empty close tag.')
|
||
parser.textNode += '</>'
|
||
parser.state = S.TEXT
|
||
return
|
||
}
|
||
|
||
if (parser.script) {
|
||
if (parser.tagName !== 'script') {
|
||
parser.script += '</' + parser.tagName + '>'
|
||
parser.tagName = ''
|
||
parser.state = S.SCRIPT
|
||
return
|
||
}
|
||
emitNode(parser, 'onscript', parser.script)
|
||
parser.script = ''
|
||
}
|
||
|
||
// first make sure that the closing tag actually exists.
|
||
// <a><b></c></b></a> will close everything, otherwise.
|
||
var t = parser.tags.length
|
||
var tagName = parser.tagName
|
||
if (!parser.strict) {
|
||
tagName = tagName[parser.looseCase]()
|
||
}
|
||
var closeTo = tagName
|
||
while (t--) {
|
||
var close = parser.tags[t]
|
||
if (close.name !== closeTo) {
|
||
// fail the first time in strict mode
|
||
strictFail(parser, 'Unexpected close tag')
|
||
} else {
|
||
break
|
||
}
|
||
}
|
||
|
||
// didn't find it. we already failed for strict, so just abort.
|
||
if (t < 0) {
|
||
strictFail(parser, 'Unmatched closing tag: ' + parser.tagName)
|
||
parser.textNode += '</' + parser.tagName + '>'
|
||
parser.state = S.TEXT
|
||
return
|
||
}
|
||
parser.tagName = tagName
|
||
var s = parser.tags.length
|
||
while (s-- > t) {
|
||
var tag = parser.tag = parser.tags.pop()
|
||
parser.tagName = parser.tag.name
|
||
emitNode(parser, 'onclosetag', parser.tagName)
|
||
|
||
var x = {}
|
||
for (var i in tag.ns) {
|
||
x[i] = tag.ns[i]
|
||
}
|
||
|
||
var parent = parser.tags[parser.tags.length - 1] || parser
|
||
if (parser.opt.xmlns && tag.ns !== parent.ns) {
|
||
// remove namespace bindings introduced by tag
|
||
Object.keys(tag.ns).forEach(function (p) {
|
||
var n = tag.ns[p]
|
||
emitNode(parser, 'onclosenamespace', { prefix: p, uri: n })
|
||
})
|
||
}
|
||
}
|
||
if (t === 0) parser.closedRoot = true
|
||
parser.tagName = parser.attribValue = parser.attribName = ''
|
||
parser.attribList.length = 0
|
||
parser.state = S.TEXT
|
||
}
|
||
|
||
function parseEntity (parser) {
|
||
var entity = parser.entity
|
||
var entityLC = entity.toLowerCase()
|
||
var num
|
||
var numStr = ''
|
||
|
||
if (parser.ENTITIES[entity]) {
|
||
return parser.ENTITIES[entity]
|
||
}
|
||
if (parser.ENTITIES[entityLC]) {
|
||
return parser.ENTITIES[entityLC]
|
||
}
|
||
entity = entityLC
|
||
if (entity.charAt(0) === '#') {
|
||
if (entity.charAt(1) === 'x') {
|
||
entity = entity.slice(2)
|
||
num = parseInt(entity, 16)
|
||
numStr = num.toString(16)
|
||
} else {
|
||
entity = entity.slice(1)
|
||
num = parseInt(entity, 10)
|
||
numStr = num.toString(10)
|
||
}
|
||
}
|
||
entity = entity.replace(/^0+/, '')
|
||
if (isNaN(num) || numStr.toLowerCase() !== entity) {
|
||
strictFail(parser, 'Invalid character entity')
|
||
return '&' + parser.entity + ';'
|
||
}
|
||
|
||
return String.fromCodePoint(num)
|
||
}
|
||
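// Illustrative examples, not from the upstream sax source:
//   'amp'  resolves to '&' via the ENTITIES table
//   '#65'  resolves to 'A' (decimal character reference)
//   '#x41' resolves to 'A' (hexadecimal character reference)
// An unknown or malformed entity is returned as the literal '&...;' text and,
// in strict mode, reported via strictFail('Invalid character entity').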
|
||
function beginWhiteSpace (parser, c) {
|
||
if (c === '<') {
|
||
parser.state = S.OPEN_WAKA
|
||
parser.startTagPosition = parser.position
|
||
} else if (!isWhitespace(c)) {
|
||
// have to process this as a text node.
|
||
// weird, but happens.
|
||
strictFail(parser, 'Non-whitespace before first tag.')
|
||
parser.textNode = c
|
||
parser.state = S.TEXT
|
||
}
|
||
}
|
||
|
||
function charAt (chunk, i) {
|
||
var result = ''
|
||
if (i < chunk.length) {
|
||
result = chunk.charAt(i)
|
||
}
|
||
return result
|
||
}
|
||
|
||
function write (chunk) {
|
||
var parser = this
|
||
if (this.error) {
|
||
throw this.error
|
||
}
|
||
if (parser.closed) {
|
||
return error(parser,
|
||
'Cannot write after close. Assign an onready handler.')
|
||
}
|
||
if (chunk === null) {
|
||
return end(parser)
|
||
}
|
||
if (typeof chunk === 'object') {
|
||
chunk = chunk.toString()
|
||
}
|
||
var i = 0
|
||
var c = ''
|
||
while (true) {
|
||
c = charAt(chunk, i++)
|
||
parser.c = c
|
||
|
||
if (!c) {
|
||
break
|
||
}
|
||
|
||
if (parser.trackPosition) {
|
||
parser.position++
|
||
if (c === '\n') {
|
||
parser.line++
|
||
parser.column = 0
|
||
} else {
|
||
parser.column++
|
||
}
|
||
}
|
||
|
||
switch (parser.state) {
|
||
case S.BEGIN:
|
||
parser.state = S.BEGIN_WHITESPACE
|
||
if (c === '\uFEFF') {
|
||
continue
|
||
}
|
||
beginWhiteSpace(parser, c)
|
||
continue
|
||
|
||
case S.BEGIN_WHITESPACE:
|
||
beginWhiteSpace(parser, c)
|
||
continue
|
||
|
||
case S.TEXT:
|
||
if (parser.sawRoot && !parser.closedRoot) {
|
||
var starti = i - 1
|
||
while (c && c !== '<' && c !== '&') {
|
||
c = charAt(chunk, i++)
|
||
if (c && parser.trackPosition) {
|
||
parser.position++
|
||
if (c === '\n') {
|
||
parser.line++
|
||
parser.column = 0
|
||
} else {
|
||
parser.column++
|
||
}
|
||
}
|
||
}
|
||
parser.textNode += chunk.substring(starti, i - 1)
|
||
}
|
||
if (c === '<' && !(parser.sawRoot && parser.closedRoot && !parser.strict)) {
|
||
parser.state = S.OPEN_WAKA
|
||
parser.startTagPosition = parser.position
|
||
} else {
|
||
if (!isWhitespace(c) && (!parser.sawRoot || parser.closedRoot)) {
|
||
strictFail(parser, 'Text data outside of root node.')
|
||
}
|
||
if (c === '&') {
|
||
parser.state = S.TEXT_ENTITY
|
||
} else {
|
||
parser.textNode += c
|
||
}
|
||
}
|
||
continue
|
||
|
||
case S.SCRIPT:
|
||
// only non-strict
|
||
if (c === '<') {
|
||
parser.state = S.SCRIPT_ENDING
|
||
} else {
|
||
parser.script += c
|
||
}
|
||
continue
|
||
|
||
case S.SCRIPT_ENDING:
|
||
if (c === '/') {
|
||
parser.state = S.CLOSE_TAG
|
||
} else {
|
||
parser.script += '<' + c
|
||
parser.state = S.SCRIPT
|
||
}
|
||
continue
|
||
|
||
case S.OPEN_WAKA:
|
||
// either a /, ?, !, or text is coming next.
|
||
if (c === '!') {
|
||
parser.state = S.SGML_DECL
|
||
parser.sgmlDecl = ''
|
||
} else if (isWhitespace(c)) {
|
||
// wait for it...
|
||
} else if (isMatch(nameStart, c)) {
|
||
parser.state = S.OPEN_TAG
|
||
parser.tagName = c
|
||
} else if (c === '/') {
|
||
parser.state = S.CLOSE_TAG
|
||
parser.tagName = ''
|
||
} else if (c === '?') {
|
||
parser.state = S.PROC_INST
|
||
parser.procInstName = parser.procInstBody = ''
|
||
} else {
|
||
strictFail(parser, 'Unencoded <')
|
||
// if there was some whitespace, then add that in.
|
||
if (parser.startTagPosition + 1 < parser.position) {
|
||
var pad = parser.position - parser.startTagPosition
|
||
c = new Array(pad).join(' ') + c
|
||
}
|
||
parser.textNode += '<' + c
|
||
parser.state = S.TEXT
|
||
}
|
||
continue
|
||
|
||
case S.SGML_DECL:
|
||
if ((parser.sgmlDecl + c).toUpperCase() === CDATA) {
|
||
emitNode(parser, 'onopencdata')
|
||
parser.state = S.CDATA
|
||
parser.sgmlDecl = ''
|
||
parser.cdata = ''
|
||
} else if (parser.sgmlDecl + c === '--') {
|
||
parser.state = S.COMMENT
|
||
parser.comment = ''
|
||
parser.sgmlDecl = ''
|
||
} else if ((parser.sgmlDecl + c).toUpperCase() === DOCTYPE) {
|
||
parser.state = S.DOCTYPE
|
||
if (parser.doctype || parser.sawRoot) {
|
||
strictFail(parser,
|
||
'Inappropriately located doctype declaration')
|
||
}
|
||
parser.doctype = ''
|
||
parser.sgmlDecl = ''
|
||
} else if (c === '>') {
|
||
emitNode(parser, 'onsgmldeclaration', parser.sgmlDecl)
|
||
parser.sgmlDecl = ''
|
||
parser.state = S.TEXT
|
||
} else if (isQuote(c)) {
|
||
parser.state = S.SGML_DECL_QUOTED
|
||
parser.sgmlDecl += c
|
||
} else {
|
||
parser.sgmlDecl += c
|
||
}
|
||
continue
|
||
|
||
case S.SGML_DECL_QUOTED:
|
||
if (c === parser.q) {
|
||
parser.state = S.SGML_DECL
|
||
parser.q = ''
|
||
}
|
||
parser.sgmlDecl += c
|
||
continue
|
||
|
||
case S.DOCTYPE:
|
||
if (c === '>') {
|
||
parser.state = S.TEXT
|
||
emitNode(parser, 'ondoctype', parser.doctype)
|
||
parser.doctype = true // just remember that we saw it.
|
||
} else {
|
||
parser.doctype += c
|
||
if (c === '[') {
|
||
parser.state = S.DOCTYPE_DTD
|
||
} else if (isQuote(c)) {
|
||
parser.state = S.DOCTYPE_QUOTED
|
||
parser.q = c
|
||
}
|
||
}
|
||
continue
|
||
|
||
case S.DOCTYPE_QUOTED:
|
||
parser.doctype += c
|
||
if (c === parser.q) {
|
||
parser.q = ''
|
||
parser.state = S.DOCTYPE
|
||
}
|
||
continue
|
||
|
||
case S.DOCTYPE_DTD:
|
||
parser.doctype += c
|
||
if (c === ']') {
|
||
parser.state = S.DOCTYPE
|
||
} else if (isQuote(c)) {
|
||
parser.state = S.DOCTYPE_DTD_QUOTED
|
||
parser.q = c
|
||
}
|
||
continue
|
||
|
||
case S.DOCTYPE_DTD_QUOTED:
|
||
parser.doctype += c
|
||
if (c === parser.q) {
|
||
parser.state = S.DOCTYPE_DTD
|
||
parser.q = ''
|
||
}
|
||
continue
|
||
|
||
case S.COMMENT:
|
||
if (c === '-') {
|
||
parser.state = S.COMMENT_ENDING
|
||
} else {
|
||
parser.comment += c
|
||
}
|
||
continue
|
||
|
||
case S.COMMENT_ENDING:
|
||
if (c === '-') {
|
||
parser.state = S.COMMENT_ENDED
|
||
parser.comment = textopts(parser.opt, parser.comment)
|
||
if (parser.comment) {
|
||
emitNode(parser, 'oncomment', parser.comment)
|
||
}
|
||
parser.comment = ''
|
||
} else {
|
||
parser.comment += '-' + c
|
||
parser.state = S.COMMENT
|
||
}
|
||
continue
|
||
|
||
case S.COMMENT_ENDED:
|
||
if (c !== '>') {
|
||
strictFail(parser, 'Malformed comment')
|
||
// allow <!-- blah -- bloo --> in non-strict mode,
|
||
// which is a comment of " blah -- bloo "
|
||
parser.comment += '--' + c
|
||
parser.state = S.COMMENT
|
||
} else {
|
||
parser.state = S.TEXT
|
||
}
|
||
continue
|
||
|
||
case S.CDATA:
|
||
if (c === ']') {
|
||
parser.state = S.CDATA_ENDING
|
||
} else {
|
||
parser.cdata += c
|
||
}
|
||
continue
|
||
|
||
case S.CDATA_ENDING:
|
||
if (c === ']') {
|
||
parser.state = S.CDATA_ENDING_2
|
||
} else {
|
||
parser.cdata += ']' + c
|
||
parser.state = S.CDATA
|
||
}
|
||
continue
|
||
|
||
case S.CDATA_ENDING_2:
|
||
if (c === '>') {
|
||
if (parser.cdata) {
|
||
emitNode(parser, 'oncdata', parser.cdata)
|
||
}
|
||
emitNode(parser, 'onclosecdata')
|
||
parser.cdata = ''
|
||
parser.state = S.TEXT
|
||
} else if (c === ']') {
|
||
parser.cdata += ']'
|
||
} else {
|
||
parser.cdata += ']]' + c
|
||
parser.state = S.CDATA
|
||
}
|
||
continue
|
||
|
||
case S.PROC_INST:
|
||
if (c === '?') {
|
||
parser.state = S.PROC_INST_ENDING
|
||
} else if (isWhitespace(c)) {
|
||
parser.state = S.PROC_INST_BODY
|
||
} else {
|
||
parser.procInstName += c
|
||
}
|
||
continue
|
||
|
||
case S.PROC_INST_BODY:
|
||
if (!parser.procInstBody && isWhitespace(c)) {
|
||
continue
|
||
} else if (c === '?') {
|
||
parser.state = S.PROC_INST_ENDING
|
||
} else {
|
||
parser.procInstBody += c
|
||
}
|
||
continue
|
||
|
||
case S.PROC_INST_ENDING:
|
||
if (c === '>') {
|
||
emitNode(parser, 'onprocessinginstruction', {
|
||
name: parser.procInstName,
|
||
body: parser.procInstBody
|
||
})
|
||
parser.procInstName = parser.procInstBody = ''
|
||
parser.state = S.TEXT
|
||
} else {
|
||
parser.procInstBody += '?' + c
|
||
parser.state = S.PROC_INST_BODY
|
||
}
|
||
continue
|
||
|
||
case S.OPEN_TAG:
|
||
if (isMatch(nameBody, c)) {
|
||
parser.tagName += c
|
||
} else {
|
||
newTag(parser)
|
||
if (c === '>') {
|
||
openTag(parser)
|
||
} else if (c === '/') {
|
||
parser.state = S.OPEN_TAG_SLASH
|
||
} else {
|
||
if (!isWhitespace(c)) {
|
||
strictFail(parser, 'Invalid character in tag name')
|
||
}
|
||
parser.state = S.ATTRIB
|
||
}
|
||
}
|
||
continue
|
||
|
||
case S.OPEN_TAG_SLASH:
|
||
if (c === '>') {
|
||
openTag(parser, true)
|
||
closeTag(parser)
|
||
} else {
|
||
strictFail(parser, 'Forward-slash in opening tag not followed by >')
|
||
parser.state = S.ATTRIB
|
||
}
|
||
continue
|
||
|
||
case S.ATTRIB:
|
||
// haven't read the attribute name yet.
|
||
if (isWhitespace(c)) {
|
||
continue
|
||
} else if (c === '>') {
|
||
openTag(parser)
|
||
} else if (c === '/') {
|
||
parser.state = S.OPEN_TAG_SLASH
|
||
} else if (isMatch(nameStart, c)) {
|
||
parser.attribName = c
|
||
parser.attribValue = ''
|
||
parser.state = S.ATTRIB_NAME
|
||
} else {
|
||
strictFail(parser, 'Invalid attribute name')
|
||
}
|
||
continue
|
||
|
||
case S.ATTRIB_NAME:
|
||
if (c === '=') {
|
||
parser.state = S.ATTRIB_VALUE
|
||
} else if (c === '>') {
|
||
strictFail(parser, 'Attribute without value')
|
||
parser.attribValue = parser.attribName
|
||
attrib(parser)
|
||
openTag(parser)
|
||
} else if (isWhitespace(c)) {
|
||
parser.state = S.ATTRIB_NAME_SAW_WHITE
|
||
} else if (isMatch(nameBody, c)) {
|
||
parser.attribName += c
|
||
} else {
|
||
strictFail(parser, 'Invalid attribute name')
|
||
}
|
||
continue
|
||
|
||
case S.ATTRIB_NAME_SAW_WHITE:
|
||
if (c === '=') {
|
||
parser.state = S.ATTRIB_VALUE
|
||
} else if (isWhitespace(c)) {
|
||
continue
|
||
} else {
|
||
strictFail(parser, 'Attribute without value')
|
||
parser.tag.attributes[parser.attribName] = ''
|
||
parser.attribValue = ''
|
||
emitNode(parser, 'onattribute', {
|
||
name: parser.attribName,
|
||
value: ''
|
||
})
|
||
parser.attribName = ''
|
||
if (c === '>') {
|
||
openTag(parser)
|
||
} else if (isMatch(nameStart, c)) {
|
||
parser.attribName = c
|
||
parser.state = S.ATTRIB_NAME
|
||
} else {
|
||
strictFail(parser, 'Invalid attribute name')
|
||
parser.state = S.ATTRIB
|
||
}
|
||
}
|
||
continue
|
||
|
||
case S.ATTRIB_VALUE:
|
||
if (isWhitespace(c)) {
|
||
continue
|
||
} else if (isQuote(c)) {
|
||
parser.q = c
|
||
parser.state = S.ATTRIB_VALUE_QUOTED
|
||
} else {
|
||
strictFail(parser, 'Unquoted attribute value')
|
||
parser.state = S.ATTRIB_VALUE_UNQUOTED
|
||
parser.attribValue = c
|
||
}
|
||
continue
|
||
|
||
case S.ATTRIB_VALUE_QUOTED:
|
||
if (c !== parser.q) {
|
||
if (c === '&') {
|
||
parser.state = S.ATTRIB_VALUE_ENTITY_Q
|
||
} else {
|
||
parser.attribValue += c
|
||
}
|
||
continue
|
||
}
|
||
attrib(parser)
|
||
parser.q = ''
|
||
parser.state = S.ATTRIB_VALUE_CLOSED
|
||
continue
|
||
|
||
case S.ATTRIB_VALUE_CLOSED:
|
||
if (isWhitespace(c)) {
|
||
parser.state = S.ATTRIB
|
||
} else if (c === '>') {
|
||
openTag(parser)
|
||
} else if (c === '/') {
|
||
parser.state = S.OPEN_TAG_SLASH
|
||
} else if (isMatch(nameStart, c)) {
|
||
strictFail(parser, 'No whitespace between attributes')
|
||
parser.attribName = c
|
||
parser.attribValue = ''
|
||
parser.state = S.ATTRIB_NAME
|
||
} else {
|
||
strictFail(parser, 'Invalid attribute name')
|
||
}
|
||
continue
|
||
|
||
case S.ATTRIB_VALUE_UNQUOTED:
|
||
if (!isAttribEnd(c)) {
|
||
if (c === '&') {
|
||
parser.state = S.ATTRIB_VALUE_ENTITY_U
|
||
} else {
|
||
parser.attribValue += c
|
||
}
|
||
continue
|
||
}
|
||
attrib(parser)
|
||
if (c === '>') {
|
||
openTag(parser)
|
||
} else {
|
||
parser.state = S.ATTRIB
|
||
}
|
||
continue
|
||
|
||
case S.CLOSE_TAG:
|
||
if (!parser.tagName) {
|
||
if (isWhitespace(c)) {
|
||
continue
|
||
} else if (notMatch(nameStart, c)) {
|
||
if (parser.script) {
|
||
parser.script += '</' + c
|
||
parser.state = S.SCRIPT
|
||
} else {
|
||
strictFail(parser, 'Invalid tagname in closing tag.')
|
||
}
|
||
} else {
|
||
parser.tagName = c
|
||
}
|
||
} else if (c === '>') {
|
||
closeTag(parser)
|
||
} else if (isMatch(nameBody, c)) {
|
||
parser.tagName += c
|
||
} else if (parser.script) {
|
||
parser.script += '</' + parser.tagName
|
||
parser.tagName = ''
|
||
parser.state = S.SCRIPT
|
||
} else {
|
||
if (!isWhitespace(c)) {
|
||
strictFail(parser, 'Invalid tagname in closing tag')
|
||
}
|
||
parser.state = S.CLOSE_TAG_SAW_WHITE
|
||
}
|
||
continue
|
||
|
||
case S.CLOSE_TAG_SAW_WHITE:
|
||
if (isWhitespace(c)) {
|
||
continue
|
||
}
|
||
if (c === '>') {
|
||
closeTag(parser)
|
||
} else {
|
||
strictFail(parser, 'Invalid characters in closing tag')
|
||
}
|
||
continue
|
||
|
||
case S.TEXT_ENTITY:
|
||
case S.ATTRIB_VALUE_ENTITY_Q:
|
||
case S.ATTRIB_VALUE_ENTITY_U:
|
||
var returnState
|
||
var buffer
|
||
switch (parser.state) {
|
||
case S.TEXT_ENTITY:
|
||
returnState = S.TEXT
|
||
buffer = 'textNode'
|
||
break
|
||
|
||
case S.ATTRIB_VALUE_ENTITY_Q:
|
||
returnState = S.ATTRIB_VALUE_QUOTED
|
||
buffer = 'attribValue'
|
||
break
|
||
|
||
case S.ATTRIB_VALUE_ENTITY_U:
|
||
returnState = S.ATTRIB_VALUE_UNQUOTED
|
||
buffer = 'attribValue'
|
||
break
|
||
}
|
||
|
||
if (c === ';') {
|
||
parser[buffer] += parseEntity(parser)
|
||
parser.entity = ''
|
||
parser.state = returnState
|
||
} else if (isMatch(parser.entity.length ? entityBody : entityStart, c)) {
|
||
parser.entity += c
|
||
} else {
|
||
strictFail(parser, 'Invalid character in entity name')
|
||
parser[buffer] += '&' + parser.entity + c
|
||
parser.entity = ''
|
||
parser.state = returnState
|
||
}
|
||
|
||
continue
|
||
|
||
default:
|
||
throw new Error('Unknown state: ' + parser.state)
|
||
}
|
||
} // while
|
||
|
||
if (parser.position >= parser.bufferCheckPosition) {
|
||
checkBufferLength(parser)
|
||
}
|
||
return parser
|
||
}
|
||
|
||
/*! http://mths.be/fromcodepoint v0.1.0 by @mathias */
|
||
/* istanbul ignore next */
|
||
if (!String.fromCodePoint) {
|
||
(function () {
|
||
var stringFromCharCode = String.fromCharCode
|
||
var floor = Math.floor
|
||
var fromCodePoint = function () {
|
||
var MAX_SIZE = 0x4000
|
||
var codeUnits = []
|
||
var highSurrogate
|
||
var lowSurrogate
|
||
var index = -1
|
||
var length = arguments.length
|
||
if (!length) {
|
||
return ''
|
||
}
|
||
var result = ''
|
||
while (++index < length) {
|
||
var codePoint = Number(arguments[index])
|
||
if (
|
||
!isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity`
|
||
codePoint < 0 || // not a valid Unicode code point
|
||
codePoint > 0x10FFFF || // not a valid Unicode code point
|
||
floor(codePoint) !== codePoint // not an integer
|
||
) {
|
||
throw RangeError('Invalid code point: ' + codePoint)
|
||
}
|
||
if (codePoint <= 0xFFFF) { // BMP code point
|
||
codeUnits.push(codePoint)
|
||
} else { // Astral code point; split in surrogate halves
|
||
// http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
|
||
codePoint -= 0x10000
|
||
highSurrogate = (codePoint >> 10) + 0xD800
|
||
lowSurrogate = (codePoint % 0x400) + 0xDC00
|
||
codeUnits.push(highSurrogate, lowSurrogate)
|
||
}
|
||
if (index + 1 === length || codeUnits.length > MAX_SIZE) {
|
||
result += stringFromCharCode.apply(null, codeUnits)
|
||
codeUnits.length = 0
|
||
}
|
||
}
|
||
return result
|
||
}
|
||
/* istanbul ignore next */
|
||
if (Object.defineProperty) {
|
||
Object.defineProperty(String, 'fromCodePoint', {
|
||
value: fromCodePoint,
|
||
configurable: true,
|
||
writable: true
|
||
})
|
||
} else {
|
||
String.fromCodePoint = fromCodePoint
|
||
}
|
||
}())
|
||
}
|
||
})(typeof exports === 'undefined' ? this.sax = {} : exports)
|
||
|
||
}).call(this)}).call(this,require("buffer").Buffer)
|
||
|
||
},{"buffer":3,"stream":11,"string_decoder":27}],11:[function(require,module,exports){
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
module.exports = Stream;
|
||
|
||
var EE = require('events').EventEmitter;
|
||
var inherits = require('inherits');
|
||
|
||
inherits(Stream, EE);
|
||
Stream.Readable = require('readable-stream/lib/_stream_readable.js');
|
||
Stream.Writable = require('readable-stream/lib/_stream_writable.js');
|
||
Stream.Duplex = require('readable-stream/lib/_stream_duplex.js');
|
||
Stream.Transform = require('readable-stream/lib/_stream_transform.js');
|
||
Stream.PassThrough = require('readable-stream/lib/_stream_passthrough.js');
|
||
Stream.finished = require('readable-stream/lib/internal/streams/end-of-stream.js')
|
||
Stream.pipeline = require('readable-stream/lib/internal/streams/pipeline.js')
|
||
|
||
// Backwards-compat with node 0.4.x
|
||
Stream.Stream = Stream;
|
||
|
||
|
||
|
||
// old-style streams. Note that the pipe method (the only relevant
|
||
// part of this class) is overridden in the Readable class.
|
||
|
||
function Stream() {
|
||
EE.call(this);
|
||
}
|
||
|
||
Stream.prototype.pipe = function(dest, options) {
|
||
var source = this;
|
||
|
||
function ondata(chunk) {
|
||
if (dest.writable) {
|
||
if (false === dest.write(chunk) && source.pause) {
|
||
source.pause();
|
||
}
|
||
}
|
||
}
|
||
|
||
source.on('data', ondata);
|
||
|
||
function ondrain() {
|
||
if (source.readable && source.resume) {
|
||
source.resume();
|
||
}
|
||
}
|
||
|
||
dest.on('drain', ondrain);
|
||
|
||
// If the 'end' option is not supplied, dest.end() will be called when
|
||
// source gets the 'end' or 'close' events. Only dest.end() once.
|
||
if (!dest._isStdio && (!options || options.end !== false)) {
|
||
source.on('end', onend);
|
||
source.on('close', onclose);
|
||
}
|
||
|
||
var didOnEnd = false;
|
||
function onend() {
|
||
if (didOnEnd) return;
|
||
didOnEnd = true;
|
||
|
||
dest.end();
|
||
}
|
||
|
||
|
||
function onclose() {
|
||
if (didOnEnd) return;
|
||
didOnEnd = true;
|
||
|
||
if (typeof dest.destroy === 'function') dest.destroy();
|
||
}
|
||
|
||
// don't leave dangling pipes when there are errors.
|
||
function onerror(er) {
|
||
cleanup();
|
||
if (EE.listenerCount(this, 'error') === 0) {
|
||
throw er; // Unhandled stream error in pipe.
|
||
}
|
||
}
|
||
|
||
source.on('error', onerror);
|
||
dest.on('error', onerror);
|
||
|
||
// remove all the event listeners that were added.
|
||
function cleanup() {
|
||
source.removeListener('data', ondata);
|
||
dest.removeListener('drain', ondrain);
|
||
|
||
source.removeListener('end', onend);
|
||
source.removeListener('close', onclose);
|
||
|
||
source.removeListener('error', onerror);
|
||
dest.removeListener('error', onerror);
|
||
|
||
source.removeListener('end', cleanup);
|
||
source.removeListener('close', cleanup);
|
||
|
||
dest.removeListener('close', cleanup);
|
||
}
|
||
|
||
source.on('end', cleanup);
|
||
source.on('close', cleanup);
|
||
|
||
dest.on('close', cleanup);
|
||
|
||
dest.emit('pipe', source);
|
||
|
||
// Allow for unix-like usage: A.pipe(B).pipe(C)
|
||
return dest;
|
||
};
|
||
|
||
},{"events":5,"inherits":12,"readable-stream/lib/_stream_duplex.js":14,"readable-stream/lib/_stream_passthrough.js":15,"readable-stream/lib/_stream_readable.js":16,"readable-stream/lib/_stream_transform.js":17,"readable-stream/lib/_stream_writable.js":18,"readable-stream/lib/internal/streams/end-of-stream.js":22,"readable-stream/lib/internal/streams/pipeline.js":24}],12:[function(require,module,exports){
|
||
if (typeof Object.create === 'function') {
|
||
// implementation from standard node.js 'util' module
|
||
module.exports = function inherits(ctor, superCtor) {
|
||
if (superCtor) {
|
||
ctor.super_ = superCtor
|
||
ctor.prototype = Object.create(superCtor.prototype, {
|
||
constructor: {
|
||
value: ctor,
|
||
enumerable: false,
|
||
writable: true,
|
||
configurable: true
|
||
}
|
||
})
|
||
}
|
||
};
|
||
} else {
|
||
// old school shim for old browsers
|
||
module.exports = function inherits(ctor, superCtor) {
|
||
if (superCtor) {
|
||
ctor.super_ = superCtor
|
||
var TempCtor = function () {}
|
||
TempCtor.prototype = superCtor.prototype
|
||
ctor.prototype = new TempCtor()
|
||
ctor.prototype.constructor = ctor
|
||
}
|
||
}
|
||
}
|
||
|
||
},{}],13:[function(require,module,exports){
|
||
'use strict';
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
|
||
|
||
var codes = {};
|
||
|
||
function createErrorType(code, message, Base) {
|
||
if (!Base) {
|
||
Base = Error;
|
||
}
|
||
|
||
function getMessage(arg1, arg2, arg3) {
|
||
if (typeof message === 'string') {
|
||
return message;
|
||
} else {
|
||
return message(arg1, arg2, arg3);
|
||
}
|
||
}
|
||
|
||
var NodeError =
|
||
/*#__PURE__*/
|
||
function (_Base) {
|
||
_inheritsLoose(NodeError, _Base);
|
||
|
||
function NodeError(arg1, arg2, arg3) {
|
||
return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
|
||
}
|
||
|
||
return NodeError;
|
||
}(Base);
|
||
|
||
NodeError.prototype.name = Base.name;
|
||
NodeError.prototype.code = code;
|
||
codes[code] = NodeError;
|
||
} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
|
||
|
||
|
||
function oneOf(expected, thing) {
|
||
if (Array.isArray(expected)) {
|
||
var len = expected.length;
|
||
expected = expected.map(function (i) {
|
||
return String(i);
|
||
});
|
||
|
||
if (len > 2) {
|
||
return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
|
||
} else if (len === 2) {
|
||
return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]);
|
||
} else {
|
||
return "of ".concat(thing, " ").concat(expected[0]);
|
||
}
|
||
} else {
|
||
return "of ".concat(thing, " ").concat(String(expected));
|
||
}
|
||
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
|
||
|
||
|
||
function startsWith(str, search, pos) {
|
||
return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
|
||
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||
|
||
|
||
function endsWith(str, search, this_len) {
|
||
if (this_len === undefined || this_len > str.length) {
|
||
this_len = str.length;
|
||
}
|
||
|
||
return str.substring(this_len - search.length, this_len) === search;
|
||
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
|
||
|
||
|
||
function includes(str, search, start) {
|
||
if (typeof start !== 'number') {
|
||
start = 0;
|
||
}
|
||
|
||
if (start + search.length > str.length) {
|
||
return false;
|
||
} else {
|
||
return str.indexOf(search, start) !== -1;
|
||
}
|
||
}
|
||
|
||
createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
|
||
return 'The value "' + value + '" is invalid for option "' + name + '"';
|
||
}, TypeError);
|
||
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
|
||
// determiner: 'must be' or 'must not be'
|
||
var determiner;
|
||
|
||
if (typeof expected === 'string' && startsWith(expected, 'not ')) {
|
||
determiner = 'must not be';
|
||
expected = expected.replace(/^not /, '');
|
||
} else {
|
||
determiner = 'must be';
|
||
}
|
||
|
||
var msg;
|
||
|
||
if (endsWith(name, ' argument')) {
|
||
// For cases like 'first argument'
|
||
msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
|
||
} else {
|
||
var type = includes(name, '.') ? 'property' : 'argument';
|
||
msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
|
||
}
|
||
|
||
msg += ". Received type ".concat(typeof actual);
|
||
return msg;
|
||
}, TypeError);
|
||
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
|
||
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
|
||
return 'The ' + name + ' method is not implemented';
|
||
});
|
||
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
|
||
createErrorType('ERR_STREAM_DESTROYED', function (name) {
|
||
return 'Cannot call ' + name + ' after a stream was destroyed';
|
||
});
|
||
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
|
||
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
|
||
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
|
||
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
|
||
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
|
||
return 'Unknown encoding: ' + arg;
|
||
}, TypeError);
|
||
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
|
||
module.exports.codes = codes;
|
||
|
||
},{}],14:[function(require,module,exports){
|
||
(function (process){(function (){
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
// a duplex stream is just a stream that is both readable and writable.
|
||
// Since JS doesn't have multiple prototypal inheritance, this class
|
||
// prototypally inherits from Readable, and then parasitically from
|
||
// Writable.
|
||
'use strict';
|
||
/*<replacement>*/
|
||
|
||
var objectKeys = Object.keys || function (obj) {
|
||
var keys = [];
|
||
|
||
for (var key in obj) {
|
||
keys.push(key);
|
||
}
|
||
|
||
return keys;
|
||
};
|
||
/*</replacement>*/
|
||
|
||
|
||
module.exports = Duplex;
|
||
|
||
var Readable = require('./_stream_readable');
|
||
|
||
var Writable = require('./_stream_writable');
|
||
|
||
require('inherits')(Duplex, Readable);
|
||
|
||
{
|
||
// Allow the keys array to be GC'ed.
|
||
var keys = objectKeys(Writable.prototype);
|
||
|
||
for (var v = 0; v < keys.length; v++) {
|
||
var method = keys[v];
|
||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
|
||
}
|
||
}
|
||
|
||
function Duplex(options) {
|
||
if (!(this instanceof Duplex)) return new Duplex(options);
|
||
Readable.call(this, options);
|
||
Writable.call(this, options);
|
||
this.allowHalfOpen = true;
|
||
|
||
if (options) {
|
||
if (options.readable === false) this.readable = false;
|
||
if (options.writable === false) this.writable = false;
|
||
|
||
if (options.allowHalfOpen === false) {
|
||
this.allowHalfOpen = false;
|
||
this.once('end', onend);
|
||
}
|
||
}
|
||
}
|
||
|
||
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState.highWaterMark;
|
||
}
|
||
});
|
||
Object.defineProperty(Duplex.prototype, 'writableBuffer', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState && this._writableState.getBuffer();
|
||
}
|
||
});
|
||
Object.defineProperty(Duplex.prototype, 'writableLength', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState.length;
|
||
}
|
||
}); // the no-half-open enforcer
|
||
|
||
function onend() {
|
||
// If the writable side ended, then we're ok.
|
||
if (this._writableState.ended) return; // no more data can be written.
|
||
// But allow more writes to happen in this tick.
|
||
|
||
process.nextTick(onEndNT, this);
|
||
}
|
||
|
||
function onEndNT(self) {
|
||
self.end();
|
||
}
|
||
|
||
Object.defineProperty(Duplex.prototype, 'destroyed', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
if (this._readableState === undefined || this._writableState === undefined) {
|
||
return false;
|
||
}
|
||
|
||
return this._readableState.destroyed && this._writableState.destroyed;
|
||
},
|
||
set: function set(value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (this._readableState === undefined || this._writableState === undefined) {
|
||
return;
|
||
} // backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
|
||
|
||
this._readableState.destroyed = value;
|
||
this._writableState.destroyed = value;
|
||
}
|
||
});
|
||
}).call(this)}).call(this,require('_process'))
|
||
|
||
},{"./_stream_readable":16,"./_stream_writable":18,"_process":8,"inherits":12}],15:[function(require,module,exports){
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
// a passthrough stream.
|
||
// basically just the most minimal sort of Transform stream.
|
||
// Every written chunk gets output as-is.
|
||
'use strict';
|
||
|
||
module.exports = PassThrough;
|
||
|
||
var Transform = require('./_stream_transform');
|
||
|
||
require('inherits')(PassThrough, Transform);
|
||
|
||
function PassThrough(options) {
|
||
if (!(this instanceof PassThrough)) return new PassThrough(options);
|
||
Transform.call(this, options);
|
||
}
|
||
|
||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||
cb(null, chunk);
|
||
};
|
||
},{"./_stream_transform":17,"inherits":12}],16:[function(require,module,exports){
|
||
(function (process,global){(function (){
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
'use strict';
|
||
|
||
module.exports = Readable;
|
||
/*<replacement>*/
|
||
|
||
var Duplex;
|
||
/*</replacement>*/
|
||
|
||
Readable.ReadableState = ReadableState;
|
||
/*<replacement>*/
|
||
|
||
var EE = require('events').EventEmitter;
|
||
|
||
var EElistenerCount = function EElistenerCount(emitter, type) {
|
||
return emitter.listeners(type).length;
|
||
};
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
|
||
|
||
var Stream = require('./internal/streams/stream');
|
||
/*</replacement>*/
|
||
|
||
|
||
var Buffer = require('buffer').Buffer;
|
||
|
||
var OurUint8Array = global.Uint8Array || function () {};
|
||
|
||
function _uint8ArrayToBuffer(chunk) {
|
||
return Buffer.from(chunk);
|
||
}
|
||
|
||
function _isUint8Array(obj) {
|
||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||
}
|
||
/*<replacement>*/
|
||
|
||
|
||
var debugUtil = require('util');
|
||
|
||
var debug;
|
||
|
||
if (debugUtil && debugUtil.debuglog) {
|
||
debug = debugUtil.debuglog('stream');
|
||
} else {
|
||
debug = function debug() {};
|
||
}
|
||
/*</replacement>*/
|
||
|
||
|
||
var BufferList = require('./internal/streams/buffer_list');
|
||
|
||
var destroyImpl = require('./internal/streams/destroy');
|
||
|
||
var _require = require('./internal/streams/state'),
|
||
getHighWaterMark = _require.getHighWaterMark;
|
||
|
||
var _require$codes = require('../errors').codes,
|
||
ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
|
||
ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,
|
||
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
|
||
ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance.
|
||
|
||
|
||
var StringDecoder;
|
||
var createReadableStreamAsyncIterator;
|
||
var from;
|
||
|
||
require('inherits')(Readable, Stream);
|
||
|
||
var errorOrDestroy = destroyImpl.errorOrDestroy;
|
||
var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
|
||
|
||
function prependListener(emitter, event, fn) {
|
||
// Sadly this is not cacheable as some libraries bundle their own
|
||
// event emitter implementation with them.
|
||
if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any
|
||
// userland ones. NEVER DO THIS. This is here only because this code needs
|
||
// to continue to work with older versions of Node.js that do not include
|
||
// the prependListener() method. The goal is to eventually remove this hack.
|
||
|
||
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
|
||
}
|
||
|
||
function ReadableState(options, stream, isDuplex) {
|
||
Duplex = Duplex || require('./_stream_duplex');
|
||
options = options || {}; // Duplex streams are both readable and writable, but share
|
||
// the same options object.
|
||
// However, some cases require setting options to different
|
||
// values for the readable and the writable sides of the duplex stream.
|
||
// These options can be provided separately as readableXXX and writableXXX.
|
||
|
||
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to
|
||
// make all the buffer merging and length checks go away
|
||
|
||
this.objectMode = !!options.objectMode;
|
||
if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer
|
||
// Note: 0 is a valid value, means "don't call _read preemptively ever"
|
||
|
||
this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the
|
||
// linked list can remove elements from the beginning faster than
|
||
// array.shift()
|
||
|
||
this.buffer = new BufferList();
|
||
this.length = 0;
|
||
this.pipes = null;
|
||
this.pipesCount = 0;
|
||
this.flowing = null;
|
||
this.ended = false;
|
||
this.endEmitted = false;
|
||
this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted
|
||
// immediately, or on a later tick. We set this to true at first, because
|
||
// any actions that shouldn't happen until "later" should generally also
|
||
// not happen before the first read call.
|
||
|
||
this.sync = true; // whenever we return null, then we set a flag to say
|
||
// that we're awaiting a 'readable' event emission.
|
||
|
||
this.needReadable = false;
|
||
this.emittedReadable = false;
|
||
this.readableListening = false;
|
||
this.resumeScheduled = false;
|
||
this.paused = true; // Should close be emitted on destroy. Defaults to true.
|
||
|
||
this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish')
|
||
|
||
this.autoDestroy = !!options.autoDestroy; // has it been destroyed
|
||
|
||
this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string
|
||
// encoding is 'binary' so we have to make this configurable.
|
||
// Everything else in the universe uses 'utf8', though.
|
||
|
||
this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s
|
||
|
||
this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled
|
||
|
||
this.readingMore = false;
|
||
this.decoder = null;
|
||
this.encoding = null;
|
||
|
||
if (options.encoding) {
|
||
if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
|
||
this.decoder = new StringDecoder(options.encoding);
|
||
this.encoding = options.encoding;
|
||
}
|
||
}
|
||
|
||
function Readable(options) {
|
||
Duplex = Duplex || require('./_stream_duplex');
|
||
if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside
|
||
// the ReadableState constructor, at least with V8 6.5
|
||
|
||
var isDuplex = this instanceof Duplex;
|
||
this._readableState = new ReadableState(options, this, isDuplex); // legacy
|
||
|
||
this.readable = true;
|
||
|
||
if (options) {
|
||
if (typeof options.read === 'function') this._read = options.read;
|
||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||
}
|
||
|
||
Stream.call(this);
|
||
}
|
||
|
||
Object.defineProperty(Readable.prototype, 'destroyed', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
if (this._readableState === undefined) {
|
||
return false;
|
||
}
|
||
|
||
return this._readableState.destroyed;
|
||
},
|
||
set: function set(value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (!this._readableState) {
|
||
return;
|
||
} // backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
|
||
|
||
this._readableState.destroyed = value;
|
||
}
|
||
});
|
||
Readable.prototype.destroy = destroyImpl.destroy;
|
||
Readable.prototype._undestroy = destroyImpl.undestroy;
|
||
|
||
Readable.prototype._destroy = function (err, cb) {
|
||
cb(err);
|
||
}; // Manually shove something into the read() buffer.
|
||
// This returns true if the highWaterMark has not been hit yet,
|
||
// similar to how Writable.write() returns true if you should
|
||
// write() some more.
|
||
|
||
|
||
Readable.prototype.push = function (chunk, encoding) {
|
||
var state = this._readableState;
|
||
var skipChunkCheck;
|
||
|
||
if (!state.objectMode) {
|
||
if (typeof chunk === 'string') {
|
||
encoding = encoding || state.defaultEncoding;
|
||
|
||
if (encoding !== state.encoding) {
|
||
chunk = Buffer.from(chunk, encoding);
|
||
encoding = '';
|
||
}
|
||
|
||
skipChunkCheck = true;
|
||
}
|
||
} else {
|
||
skipChunkCheck = true;
|
||
}
|
||
|
||
return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
|
||
}; // Unshift should *always* be something directly out of read()
|
||
|
||
|
||
Readable.prototype.unshift = function (chunk) {
|
||
return readableAddChunk(this, chunk, null, true, false);
|
||
};
|
||
|
||
function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
|
||
debug('readableAddChunk', chunk);
|
||
var state = stream._readableState;
|
||
|
||
if (chunk === null) {
|
||
state.reading = false;
|
||
onEofChunk(stream, state);
|
||
} else {
|
||
var er;
|
||
if (!skipChunkCheck) er = chunkInvalid(state, chunk);
|
||
|
||
if (er) {
|
||
errorOrDestroy(stream, er);
|
||
} else if (state.objectMode || chunk && chunk.length > 0) {
|
||
if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
|
||
chunk = _uint8ArrayToBuffer(chunk);
|
||
}
|
||
|
||
if (addToFront) {
|
||
if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);
|
||
} else if (state.ended) {
|
||
errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());
|
||
} else if (state.destroyed) {
|
||
return false;
|
||
} else {
|
||
state.reading = false;
|
||
|
||
if (state.decoder && !encoding) {
|
||
chunk = state.decoder.write(chunk);
|
||
if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
|
||
} else {
|
||
addChunk(stream, state, chunk, false);
|
||
}
|
||
}
|
||
} else if (!addToFront) {
|
||
state.reading = false;
|
||
maybeReadMore(stream, state);
|
||
}
|
||
} // We can push more data if we are below the highWaterMark.
|
||
// Also, if we have no data yet, we can stand some more bytes.
|
||
// This is to work around cases where hwm=0, such as the repl.
|
||
|
||
|
||
return !state.ended && (state.length < state.highWaterMark || state.length === 0);
|
||
}
|
||
|
||
function addChunk(stream, state, chunk, addToFront) {
|
||
if (state.flowing && state.length === 0 && !state.sync) {
|
||
state.awaitDrain = 0;
|
||
stream.emit('data', chunk);
|
||
} else {
|
||
// update the buffer info.
|
||
state.length += state.objectMode ? 1 : chunk.length;
|
||
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
|
||
if (state.needReadable) emitReadable(stream);
|
||
}
|
||
|
||
maybeReadMore(stream, state);
|
||
}
|
||
|
||
function chunkInvalid(state, chunk) {
|
||
var er;
|
||
|
||
if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
|
||
er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);
|
||
}
|
||
|
||
return er;
|
||
}
|
||
|
||
Readable.prototype.isPaused = function () {
|
||
return this._readableState.flowing === false;
|
||
}; // backwards compatibility.
|
||
|
||
|
||
Readable.prototype.setEncoding = function (enc) {
|
||
if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
|
||
var decoder = new StringDecoder(enc);
|
||
this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8
|
||
|
||
this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers:
|
||
|
||
var p = this._readableState.buffer.head;
|
||
var content = '';
|
||
|
||
while (p !== null) {
|
||
content += decoder.write(p.data);
|
||
p = p.next;
|
||
}
|
||
|
||
this._readableState.buffer.clear();
|
||
|
||
if (content !== '') this._readableState.buffer.push(content);
|
||
this._readableState.length = content.length;
|
||
return this;
|
||
}; // Don't raise the hwm > 1GB
|
||
|
||
|
||
var MAX_HWM = 0x40000000;
|
||
|
||
function computeNewHighWaterMark(n) {
|
||
if (n >= MAX_HWM) {
|
||
// TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.
|
||
n = MAX_HWM;
|
||
} else {
|
||
// Get the next highest power of 2 to prevent increasing hwm excessively in
|
||
// tiny amounts
|
||
n--;
|
||
n |= n >>> 1;
|
||
n |= n >>> 2;
|
||
n |= n >>> 4;
|
||
n |= n >>> 8;
|
||
n |= n >>> 16;
|
||
n++;
|
||
}
|
||
|
||
return n;
|
||
} // This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
|
||
|
||
function howMuchToRead(n, state) {
|
||
if (n <= 0 || state.length === 0 && state.ended) return 0;
|
||
if (state.objectMode) return 1;
|
||
|
||
if (n !== n) {
|
||
// Only flow one buffer at a time
|
||
if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
|
||
} // If we're asking for more than the current hwm, then raise the hwm.
|
||
|
||
|
||
if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
|
||
if (n <= state.length) return n; // Don't have enough
|
||
|
||
if (!state.ended) {
|
||
state.needReadable = true;
|
||
return 0;
|
||
}
|
||
|
||
return state.length;
|
||
} // you can override either this method, or the async _read(n) below.
|
||
|
||
|
||
Readable.prototype.read = function (n) {
|
||
debug('read', n);
|
||
n = parseInt(n, 10);
|
||
var state = this._readableState;
|
||
var nOrig = n;
|
||
if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we
|
||
// already have a bunch of data in the buffer, then just trigger
|
||
// the 'readable' event and move on.
|
||
|
||
if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {
|
||
debug('read: emitReadable', state.length, state.ended);
|
||
if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
|
||
return null;
|
||
}
|
||
|
||
n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.
|
||
|
||
if (n === 0 && state.ended) {
|
||
if (state.length === 0) endReadable(this);
|
||
return null;
|
||
} // All the actual chunk generation logic needs to be
|
||
// *below* the call to _read. The reason is that in certain
|
||
// synthetic stream cases, such as passthrough streams, _read
|
||
// may be a completely synchronous operation which may change
|
||
// the state of the read buffer, providing enough data when
|
||
// before there was *not* enough.
|
||
//
|
||
// So, the steps are:
|
||
// 1. Figure out what the state of things will be after we do
|
||
// a read from the buffer.
|
||
//
|
||
// 2. If that resulting state will trigger a _read, then call _read.
|
||
// Note that this may be asynchronous, or synchronous. Yes, it is
|
||
// deeply ugly to write APIs this way, but that still doesn't mean
|
||
// that the Readable class should behave improperly, as streams are
|
||
// designed to be sync/async agnostic.
|
||
// Take note if the _read call is sync or async (ie, if the read call
|
||
// has returned yet), so that we know whether or not it's safe to emit
|
||
// 'readable' etc.
|
||
//
|
||
// 3. Actually pull the requested chunks out of the buffer and return.
|
||
// if we need a readable event, then we need to do some reading.
|
||
|
||
|
||
var doRead = state.needReadable;
|
||
debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some
|
||
|
||
if (state.length === 0 || state.length - n < state.highWaterMark) {
|
||
doRead = true;
|
||
debug('length less than watermark', doRead);
|
||
} // however, if we've ended, then there's no point, and if we're already
|
||
// reading, then it's unnecessary.
|
||
|
||
|
||
if (state.ended || state.reading) {
|
||
doRead = false;
|
||
debug('reading or ended', doRead);
|
||
} else if (doRead) {
|
||
debug('do read');
|
||
state.reading = true;
|
||
state.sync = true; // if the length is currently zero, then we *need* a readable event.
|
||
|
||
if (state.length === 0) state.needReadable = true; // call internal read method
|
||
|
||
this._read(state.highWaterMark);
|
||
|
||
state.sync = false; // If _read pushed data synchronously, then `reading` will be false,
|
||
// and we need to re-evaluate how much data we can return to the user.
|
||
|
||
if (!state.reading) n = howMuchToRead(nOrig, state);
|
||
}
|
||
|
||
var ret;
|
||
if (n > 0) ret = fromList(n, state);else ret = null;
|
||
|
||
if (ret === null) {
|
||
state.needReadable = state.length <= state.highWaterMark;
|
||
n = 0;
|
||
} else {
|
||
state.length -= n;
|
||
state.awaitDrain = 0;
|
||
}
|
||
|
||
if (state.length === 0) {
|
||
// If we have nothing in the buffer, then we want to know
|
||
// as soon as we *do* get something into the buffer.
|
||
if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.
|
||
|
||
if (nOrig !== n && state.ended) endReadable(this);
|
||
}
|
||
|
||
if (ret !== null) this.emit('data', ret);
|
||
return ret;
|
||
};
|
||
|
||
function onEofChunk(stream, state) {
|
||
debug('onEofChunk');
|
||
if (state.ended) return;
|
||
|
||
if (state.decoder) {
|
||
var chunk = state.decoder.end();
|
||
|
||
if (chunk && chunk.length) {
|
||
state.buffer.push(chunk);
|
||
state.length += state.objectMode ? 1 : chunk.length;
|
||
}
|
||
}
|
||
|
||
state.ended = true;
|
||
|
||
if (state.sync) {
|
||
// if we are sync, wait until next tick to emit the data.
|
||
// Otherwise we risk emitting data in the flow()
|
||
// the readable code triggers during a read() call
|
||
emitReadable(stream);
|
||
} else {
|
||
// emit 'readable' now to make sure it gets picked up.
|
||
state.needReadable = false;
|
||
|
||
if (!state.emittedReadable) {
|
||
state.emittedReadable = true;
|
||
emitReadable_(stream);
|
||
}
|
||
}
|
||
} // Don't emit readable right away in sync mode, because this can trigger
|
||
// another read() call => stack overflow. This way, it might trigger
|
||
// a nextTick recursion warning, but that's not so bad.
|
||
|
||
|
||
function emitReadable(stream) {
|
||
var state = stream._readableState;
|
||
debug('emitReadable', state.needReadable, state.emittedReadable);
|
||
state.needReadable = false;
|
||
|
||
if (!state.emittedReadable) {
|
||
debug('emitReadable', state.flowing);
|
||
state.emittedReadable = true;
|
||
process.nextTick(emitReadable_, stream);
|
||
}
|
||
}
|
||
|
||
function emitReadable_(stream) {
|
||
var state = stream._readableState;
|
||
debug('emitReadable_', state.destroyed, state.length, state.ended);
|
||
|
||
if (!state.destroyed && (state.length || state.ended)) {
|
||
stream.emit('readable');
|
||
state.emittedReadable = false;
|
||
} // The stream needs another readable event if
|
||
// 1. It is not flowing, as the flow mechanism will take
|
||
// care of it.
|
||
// 2. It is not ended.
|
||
// 3. It is below the highWaterMark, so we can schedule
|
||
// another readable later.
|
||
|
||
|
||
state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
|
||
flow(stream);
|
||
} // at this point, the user has presumably seen the 'readable' event,
|
||
// and called read() to consume some data. that may have triggered
|
||
// in turn another _read(n) call, in which case reading = true if
|
||
// it's in progress.
|
||
// However, if we're not ended, or reading, and the length < hwm,
|
||
// then go ahead and try to read some more preemptively.
|
||
|
||
|
||
function maybeReadMore(stream, state) {
|
||
if (!state.readingMore) {
|
||
state.readingMore = true;
|
||
process.nextTick(maybeReadMore_, stream, state);
|
||
}
|
||
}
|
||
|
||
function maybeReadMore_(stream, state) {
|
||
// Attempt to read more data if we should.
|
||
//
|
||
// The conditions for reading more data are (one of):
|
||
// - Not enough data buffered (state.length < state.highWaterMark). The loop
|
||
// is responsible for filling the buffer with enough data if such data
|
||
// is available. If highWaterMark is 0 and we are not in the flowing mode
|
||
// we should _not_ attempt to buffer any extra data. We'll get more data
|
||
// when the stream consumer calls read() instead.
|
||
// - No data in the buffer, and the stream is in flowing mode. In this mode
|
||
// the loop below is responsible for ensuring read() is called. Failing to
|
||
// call read here would abort the flow and there's no other mechanism for
|
||
// continuing the flow if the stream consumer has just subscribed to the
|
||
// 'data' event.
|
||
//
|
||
// In addition to the above conditions to keep reading data, the following
|
||
// conditions prevent the data from being read:
|
||
// - The stream has ended (state.ended).
|
||
// - There is already a pending 'read' operation (state.reading). This is a
|
||
// case where the the stream has called the implementation defined _read()
|
||
// method, but they are processing the call asynchronously and have _not_
|
||
// called push() with new data. In this case we skip performing more
|
||
// read()s. The execution ends in this method again after the _read() ends
|
||
// up calling push() with more data.
|
||
while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {
|
||
var len = state.length;
|
||
debug('maybeReadMore read 0');
|
||
stream.read(0);
|
||
if (len === state.length) // didn't get any data, stop spinning.
|
||
break;
|
||
}
|
||
|
||
state.readingMore = false;
|
||
} // abstract method. to be overridden in specific implementation classes.
|
||
// call cb(er, data) where data is <= n in length.
|
||
// for virtual (non-string, non-buffer) streams, "length" is somewhat
|
||
// arbitrary, and perhaps not very meaningful.
|
||
|
||
|
||
Readable.prototype._read = function (n) {
|
||
errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));
|
||
};
|
||
|
||
Readable.prototype.pipe = function (dest, pipeOpts) {
|
||
var src = this;
|
||
var state = this._readableState;
|
||
|
||
switch (state.pipesCount) {
|
||
case 0:
|
||
state.pipes = dest;
|
||
break;
|
||
|
||
case 1:
|
||
state.pipes = [state.pipes, dest];
|
||
break;
|
||
|
||
default:
|
||
state.pipes.push(dest);
|
||
break;
|
||
}
|
||
|
||
state.pipesCount += 1;
|
||
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
|
||
var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
|
||
var endFn = doEnd ? onend : unpipe;
|
||
if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);
|
||
dest.on('unpipe', onunpipe);
|
||
|
||
function onunpipe(readable, unpipeInfo) {
|
||
debug('onunpipe');
|
||
|
||
if (readable === src) {
|
||
if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
|
||
unpipeInfo.hasUnpiped = true;
|
||
cleanup();
|
||
}
|
||
}
|
||
}
|
||
|
||
function onend() {
|
||
debug('onend');
|
||
dest.end();
|
||
} // when the dest drains, it reduces the awaitDrain counter
|
||
// on the source. This would be more elegant with a .once()
|
||
// handler in flow(), but adding and removing repeatedly is
|
||
// too slow.
|
||
|
||
|
||
var ondrain = pipeOnDrain(src);
|
||
dest.on('drain', ondrain);
|
||
var cleanedUp = false;
|
||
|
||
function cleanup() {
|
||
debug('cleanup'); // cleanup event handlers once the pipe is broken
|
||
|
||
dest.removeListener('close', onclose);
|
||
dest.removeListener('finish', onfinish);
|
||
dest.removeListener('drain', ondrain);
|
||
dest.removeListener('error', onerror);
|
||
dest.removeListener('unpipe', onunpipe);
|
||
src.removeListener('end', onend);
|
||
src.removeListener('end', unpipe);
|
||
src.removeListener('data', ondata);
|
||
cleanedUp = true; // if the reader is waiting for a drain event from this
|
||
// specific writer, then it would cause it to never start
|
||
// flowing again.
|
||
// So, if this is awaiting a drain, then we just call it now.
|
||
// If we don't know, then assume that we are waiting for one.
|
||
|
||
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
|
||
}
|
||
|
||
src.on('data', ondata);
|
||
|
||
function ondata(chunk) {
|
||
debug('ondata');
|
||
var ret = dest.write(chunk);
|
||
debug('dest.write', ret);
|
||
|
||
if (ret === false) {
|
||
// If the user unpiped during `dest.write()`, it is possible
|
||
// to get stuck in a permanently paused state if that write
|
||
// also returned false.
|
||
// => Check whether `dest` is still a piping destination.
|
||
if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
|
||
debug('false write response, pause', state.awaitDrain);
|
||
state.awaitDrain++;
|
||
}
|
||
|
||
src.pause();
|
||
}
|
||
} // if the dest has an error, then stop piping into it.
|
||
// however, don't suppress the throwing behavior for this.
|
||
|
||
|
||
function onerror(er) {
|
||
debug('onerror', er);
|
||
unpipe();
|
||
dest.removeListener('error', onerror);
|
||
if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);
|
||
} // Make sure our error handler is attached before userland ones.
|
||
|
||
|
||
prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.
|
||
|
||
function onclose() {
|
||
dest.removeListener('finish', onfinish);
|
||
unpipe();
|
||
}
|
||
|
||
dest.once('close', onclose);
|
||
|
||
function onfinish() {
|
||
debug('onfinish');
|
||
dest.removeListener('close', onclose);
|
||
unpipe();
|
||
}
|
||
|
||
dest.once('finish', onfinish);
|
||
|
||
function unpipe() {
|
||
debug('unpipe');
|
||
src.unpipe(dest);
|
||
} // tell the dest that it's being piped to
|
||
|
||
|
||
dest.emit('pipe', src); // start the flow if it hasn't been started already.
|
||
|
||
if (!state.flowing) {
|
||
debug('pipe resume');
|
||
src.resume();
|
||
}
|
||
|
||
return dest;
|
||
};
|
||
|
||
function pipeOnDrain(src) {
|
||
return function pipeOnDrainFunctionResult() {
|
||
var state = src._readableState;
|
||
debug('pipeOnDrain', state.awaitDrain);
|
||
if (state.awaitDrain) state.awaitDrain--;
|
||
|
||
if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
|
||
state.flowing = true;
|
||
flow(src);
|
||
}
|
||
};
|
||
}
|
||
|
||
Readable.prototype.unpipe = function (dest) {
|
||
var state = this._readableState;
|
||
var unpipeInfo = {
|
||
hasUnpiped: false
|
||
}; // if we're not piping anywhere, then do nothing.
|
||
|
||
if (state.pipesCount === 0) return this; // just one destination. most common case.
|
||
|
||
if (state.pipesCount === 1) {
|
||
// passed in one, but it's not the right one.
|
||
if (dest && dest !== state.pipes) return this;
|
||
if (!dest) dest = state.pipes; // got a match.
|
||
|
||
state.pipes = null;
|
||
state.pipesCount = 0;
|
||
state.flowing = false;
|
||
if (dest) dest.emit('unpipe', this, unpipeInfo);
|
||
return this;
|
||
} // slow case. multiple pipe destinations.
|
||
|
||
|
||
if (!dest) {
|
||
// remove all.
|
||
var dests = state.pipes;
|
||
var len = state.pipesCount;
|
||
state.pipes = null;
|
||
state.pipesCount = 0;
|
||
state.flowing = false;
|
||
|
||
for (var i = 0; i < len; i++) {
|
||
dests[i].emit('unpipe', this, {
|
||
hasUnpiped: false
|
||
});
|
||
}
|
||
|
||
return this;
|
||
} // try to find the right one.
|
||
|
||
|
||
var index = indexOf(state.pipes, dest);
|
||
if (index === -1) return this;
|
||
state.pipes.splice(index, 1);
|
||
state.pipesCount -= 1;
|
||
if (state.pipesCount === 1) state.pipes = state.pipes[0];
|
||
dest.emit('unpipe', this, unpipeInfo);
|
||
return this;
|
||
}; // set up data events if they are asked for
|
||
// Ensure readable listeners eventually get something
|
||
|
||
|
||
Readable.prototype.on = function (ev, fn) {
|
||
var res = Stream.prototype.on.call(this, ev, fn);
|
||
var state = this._readableState;
|
||
|
||
if (ev === 'data') {
|
||
// update readableListening so that resume() may be a no-op
|
||
// a few lines down. This is needed to support once('readable').
|
||
state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused
|
||
|
||
if (state.flowing !== false) this.resume();
|
||
} else if (ev === 'readable') {
|
||
if (!state.endEmitted && !state.readableListening) {
|
||
state.readableListening = state.needReadable = true;
|
||
state.flowing = false;
|
||
state.emittedReadable = false;
|
||
debug('on readable', state.length, state.reading);
|
||
|
||
if (state.length) {
|
||
emitReadable(this);
|
||
} else if (!state.reading) {
|
||
process.nextTick(nReadingNextTick, this);
|
||
}
|
||
}
|
||
}
|
||
|
||
return res;
|
||
};
|
||
|
||
Readable.prototype.addListener = Readable.prototype.on;
|
||
|
||
Readable.prototype.removeListener = function (ev, fn) {
|
||
var res = Stream.prototype.removeListener.call(this, ev, fn);
|
||
|
||
if (ev === 'readable') {
|
||
// We need to check if there is someone still listening to
|
||
// readable and reset the state. However this needs to happen
|
||
// after readable has been emitted but before I/O (nextTick) to
|
||
// support once('readable', fn) cycles. This means that calling
|
||
// resume within the same tick will have no
|
||
// effect.
|
||
process.nextTick(updateReadableListening, this);
|
||
}
|
||
|
||
return res;
|
||
};
|
||
|
||
Readable.prototype.removeAllListeners = function (ev) {
|
||
var res = Stream.prototype.removeAllListeners.apply(this, arguments);
|
||
|
||
if (ev === 'readable' || ev === undefined) {
|
||
// We need to check if there is someone still listening to
|
||
// readable and reset the state. However this needs to happen
|
||
// after readable has been emitted but before I/O (nextTick) to
|
||
// support once('readable', fn) cycles. This means that calling
|
||
// resume within the same tick will have no
|
||
// effect.
|
||
process.nextTick(updateReadableListening, this);
|
||
}
|
||
|
||
return res;
|
||
};
|
||
|
||
function updateReadableListening(self) {
|
||
var state = self._readableState;
|
||
state.readableListening = self.listenerCount('readable') > 0;
|
||
|
||
if (state.resumeScheduled && !state.paused) {
|
||
// flowing needs to be set to true now, otherwise
|
||
// the upcoming resume will not flow.
|
||
state.flowing = true; // crude way to check if we should resume
|
||
} else if (self.listenerCount('data') > 0) {
|
||
self.resume();
|
||
}
|
||
}
|
||
|
||
function nReadingNextTick(self) {
|
||
debug('readable nexttick read 0');
|
||
self.read(0);
|
||
} // pause() and resume() are remnants of the legacy readable stream API
|
||
// If the user uses them, then switch into old mode.
|
||
|
||
|
||
Readable.prototype.resume = function () {
|
||
var state = this._readableState;
|
||
|
||
if (!state.flowing) {
|
||
debug('resume'); // we flow only if there is no one listening
|
||
// for readable, but we still have to call
|
||
// resume()
|
||
|
||
state.flowing = !state.readableListening;
|
||
resume(this, state);
|
||
}
|
||
|
||
state.paused = false;
|
||
return this;
|
||
};
|
||
|
||
function resume(stream, state) {
|
||
if (!state.resumeScheduled) {
|
||
state.resumeScheduled = true;
|
||
process.nextTick(resume_, stream, state);
|
||
}
|
||
}
|
||
|
||
function resume_(stream, state) {
|
||
debug('resume', state.reading);
|
||
|
||
if (!state.reading) {
|
||
stream.read(0);
|
||
}
|
||
|
||
state.resumeScheduled = false;
|
||
stream.emit('resume');
|
||
flow(stream);
|
||
if (state.flowing && !state.reading) stream.read(0);
|
||
}
|
||
|
||
Readable.prototype.pause = function () {
|
||
debug('call pause flowing=%j', this._readableState.flowing);
|
||
|
||
if (this._readableState.flowing !== false) {
|
||
debug('pause');
|
||
this._readableState.flowing = false;
|
||
this.emit('pause');
|
||
}
|
||
|
||
this._readableState.paused = true;
|
||
return this;
|
||
};
|
||
|
||
function flow(stream) {
|
||
var state = stream._readableState;
|
||
debug('flow', state.flowing);
|
||
|
||
while (state.flowing && stream.read() !== null) {
|
||
;
|
||
}
|
||
} // wrap an old-style stream as the async data source.
|
||
// This is *not* part of the readable stream interface.
|
||
// It is an ugly unfortunate mess of history.
|
||
|
||
|
||
Readable.prototype.wrap = function (stream) {
|
||
var _this = this;
|
||
|
||
var state = this._readableState;
|
||
var paused = false;
|
||
stream.on('end', function () {
|
||
debug('wrapped end');
|
||
|
||
if (state.decoder && !state.ended) {
|
||
var chunk = state.decoder.end();
|
||
if (chunk && chunk.length) _this.push(chunk);
|
||
}
|
||
|
||
_this.push(null);
|
||
});
|
||
stream.on('data', function (chunk) {
|
||
debug('wrapped data');
|
||
if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode
|
||
|
||
if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
|
||
|
||
var ret = _this.push(chunk);
|
||
|
||
if (!ret) {
|
||
paused = true;
|
||
stream.pause();
|
||
}
|
||
}); // proxy all the other methods.
|
||
// important when wrapping filters and duplexes.
|
||
|
||
for (var i in stream) {
|
||
if (this[i] === undefined && typeof stream[i] === 'function') {
|
||
this[i] = function methodWrap(method) {
|
||
return function methodWrapReturnFunction() {
|
||
return stream[method].apply(stream, arguments);
|
||
};
|
||
}(i);
|
||
}
|
||
} // proxy certain important events.
|
||
|
||
|
||
for (var n = 0; n < kProxyEvents.length; n++) {
|
||
stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
|
||
} // when we try to consume some more bytes, simply unpause the
|
||
// underlying stream.
|
||
|
||
|
||
this._read = function (n) {
|
||
debug('wrapped _read', n);
|
||
|
||
if (paused) {
|
||
paused = false;
|
||
stream.resume();
|
||
}
|
||
};
|
||
|
||
return this;
|
||
};
|
||
|
||
if (typeof Symbol === 'function') {
|
||
Readable.prototype[Symbol.asyncIterator] = function () {
|
||
if (createReadableStreamAsyncIterator === undefined) {
|
||
createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');
|
||
}
|
||
|
||
return createReadableStreamAsyncIterator(this);
|
||
};
|
||
}
|
||
|
||
Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._readableState.highWaterMark;
|
||
}
|
||
});
|
||
Object.defineProperty(Readable.prototype, 'readableBuffer', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._readableState && this._readableState.buffer;
|
||
}
|
||
});
|
||
Object.defineProperty(Readable.prototype, 'readableFlowing', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._readableState.flowing;
|
||
},
|
||
set: function set(state) {
|
||
if (this._readableState) {
|
||
this._readableState.flowing = state;
|
||
}
|
||
}
|
||
}); // exposed for testing purposes only.
|
||
|
||
Readable._fromList = fromList;
|
||
Object.defineProperty(Readable.prototype, 'readableLength', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._readableState.length;
|
||
}
|
||
}); // Pluck off n bytes from an array of buffers.
|
||
// Length is the combined lengths of all the buffers in the list.
|
||
// This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
|
||
function fromList(n, state) {
|
||
// nothing buffered
|
||
if (state.length === 0) return null;
|
||
var ret;
|
||
if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
|
||
// read it all, truncate the list
|
||
if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);
|
||
state.buffer.clear();
|
||
} else {
|
||
// read part of list
|
||
ret = state.buffer.consume(n, state.decoder);
|
||
}
|
||
return ret;
|
||
}
|
||
|
||
function endReadable(stream) {
|
||
var state = stream._readableState;
|
||
debug('endReadable', state.endEmitted);
|
||
|
||
if (!state.endEmitted) {
|
||
state.ended = true;
|
||
process.nextTick(endReadableNT, state, stream);
|
||
}
|
||
}
|
||
|
||
function endReadableNT(state, stream) {
|
||
debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.
|
||
|
||
if (!state.endEmitted && state.length === 0) {
|
||
state.endEmitted = true;
|
||
stream.readable = false;
|
||
stream.emit('end');
|
||
|
||
if (state.autoDestroy) {
|
||
// In case of duplex streams we need a way to detect
|
||
// if the writable side is ready for autoDestroy as well
|
||
var wState = stream._writableState;
|
||
|
||
if (!wState || wState.autoDestroy && wState.finished) {
|
||
stream.destroy();
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
if (typeof Symbol === 'function') {
|
||
Readable.from = function (iterable, opts) {
|
||
if (from === undefined) {
|
||
from = require('./internal/streams/from');
|
||
}
|
||
|
||
return from(Readable, iterable, opts);
|
||
};
|
||
}
|
||
|
||
function indexOf(xs, x) {
|
||
for (var i = 0, l = xs.length; i < l; i++) {
|
||
if (xs[i] === x) return i;
|
||
}
|
||
|
||
return -1;
|
||
}
|
||
}).call(this)}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
|
||
|
||
},{"../errors":13,"./_stream_duplex":14,"./internal/streams/async_iterator":19,"./internal/streams/buffer_list":20,"./internal/streams/destroy":21,"./internal/streams/from":23,"./internal/streams/state":25,"./internal/streams/stream":26,"_process":8,"buffer":3,"events":5,"inherits":12,"string_decoder/":27,"util":2}],17:[function(require,module,exports){
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
// a transform stream is a readable/writable stream where you do
|
||
// something with the data. Sometimes it's called a "filter",
|
||
// but that's not a great name for it, since that implies a thing where
|
||
// some bits pass through, and others are simply ignored. (That would
|
||
// be a valid example of a transform, of course.)
|
||
//
|
||
// While the output is causally related to the input, it's not a
|
||
// necessarily symmetric or synchronous transformation. For example,
|
||
// a zlib stream might take multiple plain-text writes(), and then
|
||
// emit a single compressed chunk some time in the future.
|
||
//
|
||
// Here's how this works:
|
||
//
|
||
// The Transform stream has all the aspects of the readable and writable
|
||
// stream classes. When you write(chunk), that calls _write(chunk,cb)
|
||
// internally, and returns false if there's a lot of pending writes
|
||
// buffered up. When you call read(), that calls _read(n) until
|
||
// there's enough pending readable data buffered up.
|
||
//
|
||
// In a transform stream, the written data is placed in a buffer. When
|
||
// _read(n) is called, it transforms the queued up data, calling the
|
||
// buffered _write cb's as it consumes chunks. If consuming a single
|
||
// written chunk would result in multiple output chunks, then the first
|
||
// outputted bit calls the readcb, and subsequent chunks just go into
|
||
// the read buffer, and will cause it to emit 'readable' if necessary.
|
||
//
|
||
// This way, back-pressure is actually determined by the reading side,
|
||
// since _read has to be called to start processing a new chunk. However,
|
||
// a pathological inflate type of transform can cause excessive buffering
|
||
// here. For example, imagine a stream where every byte of input is
|
||
// interpreted as an integer from 0-255, and then results in that many
|
||
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
|
||
// 1kb of data being output. In this case, you could write a very small
|
||
// amount of input, and end up with a very large amount of output. In
|
||
// such a pathological inflating mechanism, there'd be no way to tell
|
||
// the system to stop doing the transform. A single 4MB write could
|
||
// cause the system to run out of memory.
|
||
//
|
||
// However, even in such a pathological case, only a single written chunk
|
||
// would be consumed, and then the rest would wait (un-transformed) until
|
||
// the results of the previous transformed chunk were consumed.
|
||
'use strict';
|
||
|
||
module.exports = Transform;
|
||
|
||
var _require$codes = require('../errors').codes,
|
||
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
|
||
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
|
||
ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
|
||
ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
|
||
|
||
var Duplex = require('./_stream_duplex');
|
||
|
||
require('inherits')(Transform, Duplex);
|
||
|
||
function afterTransform(er, data) {
|
||
var ts = this._transformState;
|
||
ts.transforming = false;
|
||
var cb = ts.writecb;
|
||
|
||
if (cb === null) {
|
||
return this.emit('error', new ERR_MULTIPLE_CALLBACK());
|
||
}
|
||
|
||
ts.writechunk = null;
|
||
ts.writecb = null;
|
||
if (data != null) // single equals check for both `null` and `undefined`
|
||
this.push(data);
|
||
cb(er);
|
||
var rs = this._readableState;
|
||
rs.reading = false;
|
||
|
||
if (rs.needReadable || rs.length < rs.highWaterMark) {
|
||
this._read(rs.highWaterMark);
|
||
}
|
||
}
|
||
|
||
function Transform(options) {
|
||
if (!(this instanceof Transform)) return new Transform(options);
|
||
Duplex.call(this, options);
|
||
this._transformState = {
|
||
afterTransform: afterTransform.bind(this),
|
||
needTransform: false,
|
||
transforming: false,
|
||
writecb: null,
|
||
writechunk: null,
|
||
writeencoding: null
|
||
}; // start out asking for a readable event once data is transformed.
|
||
|
||
this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
|
||
// that Readable wants before the first _read call, so unset the
|
||
// sync guard flag.
|
||
|
||
this._readableState.sync = false;
|
||
|
||
if (options) {
|
||
if (typeof options.transform === 'function') this._transform = options.transform;
|
||
if (typeof options.flush === 'function') this._flush = options.flush;
|
||
} // When the writable side finishes, then flush out anything remaining.
|
||
|
||
|
||
this.on('prefinish', prefinish);
|
||
}
|
||
|
||
function prefinish() {
|
||
var _this = this;
|
||
|
||
if (typeof this._flush === 'function' && !this._readableState.destroyed) {
|
||
this._flush(function (er, data) {
|
||
done(_this, er, data);
|
||
});
|
||
} else {
|
||
done(this, null, null);
|
||
}
|
||
}
|
||
|
||
Transform.prototype.push = function (chunk, encoding) {
|
||
this._transformState.needTransform = false;
|
||
return Duplex.prototype.push.call(this, chunk, encoding);
|
||
}; // This is the part where you do stuff!
|
||
// override this function in implementation classes.
|
||
// 'chunk' is an input chunk.
|
||
//
|
||
// Call `push(newChunk)` to pass along transformed output
|
||
// to the readable side. You may call 'push' zero or more times.
|
||
//
|
||
// Call `cb(err)` when you are done with this chunk. If you pass
|
||
// an error, then that'll put the hurt on the whole operation. If you
|
||
// never call cb(), then you'll never get another chunk.
|
||
|
||
|
||
Transform.prototype._transform = function (chunk, encoding, cb) {
|
||
cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
|
||
};
|
||
|
||
Transform.prototype._write = function (chunk, encoding, cb) {
|
||
var ts = this._transformState;
|
||
ts.writecb = cb;
|
||
ts.writechunk = chunk;
|
||
ts.writeencoding = encoding;
|
||
|
||
if (!ts.transforming) {
|
||
var rs = this._readableState;
|
||
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
|
||
}
|
||
}; // Doesn't matter what the args are here.
|
||
// _transform does all the work.
|
||
// That we got here means that the readable side wants more data.
|
||
|
||
|
||
Transform.prototype._read = function (n) {
|
||
var ts = this._transformState;
|
||
|
||
if (ts.writechunk !== null && !ts.transforming) {
|
||
ts.transforming = true;
|
||
|
||
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
|
||
} else {
|
||
// mark that we need a transform, so that any data that comes in
|
||
// will get processed, now that we've asked for it.
|
||
ts.needTransform = true;
|
||
}
|
||
};
|
||
|
||
Transform.prototype._destroy = function (err, cb) {
|
||
Duplex.prototype._destroy.call(this, err, function (err2) {
|
||
cb(err2);
|
||
});
|
||
};
|
||
|
||
function done(stream, er, data) {
|
||
if (er) return stream.emit('error', er);
|
||
if (data != null) // single equals check for both `null` and `undefined`
|
||
stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
|
||
// if there's nothing in the write buffer, then that means
|
||
// that nothing more will ever be provided
|
||
|
||
if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
|
||
if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
|
||
return stream.push(null);
|
||
}
|
||
},{"../errors":13,"./_stream_duplex":14,"inherits":12}],18:[function(require,module,exports){
|
||
(function (process,global){(function (){
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
// A bit simpler than readable streams.
|
||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||
// the drain event emission and buffering.
|
||
'use strict';
|
||
|
||
module.exports = Writable;
|
||
/* <replacement> */
|
||
|
||
function WriteReq(chunk, encoding, cb) {
|
||
this.chunk = chunk;
|
||
this.encoding = encoding;
|
||
this.callback = cb;
|
||
this.next = null;
|
||
} // It seems a linked list but it is not
|
||
// there will be only 2 of these for each stream
|
||
|
||
|
||
function CorkedRequest(state) {
|
||
var _this = this;
|
||
|
||
this.next = null;
|
||
this.entry = null;
|
||
|
||
this.finish = function () {
|
||
onCorkedFinish(_this, state);
|
||
};
|
||
}
|
||
/* </replacement> */
|
||
|
||
/*<replacement>*/
|
||
|
||
|
||
var Duplex;
|
||
/*</replacement>*/
|
||
|
||
Writable.WritableState = WritableState;
|
||
/*<replacement>*/
|
||
|
||
var internalUtil = {
|
||
deprecate: require('util-deprecate')
|
||
};
|
||
/*</replacement>*/
|
||
|
||
/*<replacement>*/
|
||
|
||
var Stream = require('./internal/streams/stream');
|
||
/*</replacement>*/
|
||
|
||
|
||
var Buffer = require('buffer').Buffer;
|
||
|
||
var OurUint8Array = global.Uint8Array || function () {};
|
||
|
||
function _uint8ArrayToBuffer(chunk) {
|
||
return Buffer.from(chunk);
|
||
}
|
||
|
||
function _isUint8Array(obj) {
|
||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||
}
|
||
|
||
var destroyImpl = require('./internal/streams/destroy');
|
||
|
||
var _require = require('./internal/streams/state'),
|
||
getHighWaterMark = _require.getHighWaterMark;
|
||
|
||
var _require$codes = require('../errors').codes,
|
||
ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
|
||
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
|
||
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
|
||
ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
|
||
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
|
||
ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
|
||
ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
|
||
ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
|
||
|
||
var errorOrDestroy = destroyImpl.errorOrDestroy;
|
||
|
||
require('inherits')(Writable, Stream);
|
||
|
||
function nop() {}
|
||
|
||
function WritableState(options, stream, isDuplex) {
|
||
Duplex = Duplex || require('./_stream_duplex');
|
||
options = options || {}; // Duplex streams are both readable and writable, but share
|
||
// the same options object.
|
||
// However, some cases require setting options to different
|
||
// values for the readable and the writable sides of the duplex stream,
|
||
// e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
|
||
|
||
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
|
||
// contains buffers or objects.
|
||
|
||
this.objectMode = !!options.objectMode;
|
||
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
|
||
// Note: 0 is a valid value, means that we always return false if
|
||
// the entire buffer is not flushed immediately on write()
|
||
|
||
this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
|
||
|
||
this.finalCalled = false; // drain event flag.
|
||
|
||
this.needDrain = false; // at the start of calling end()
|
||
|
||
this.ending = false; // when end() has been called, and returned
|
||
|
||
this.ended = false; // when 'finish' is emitted
|
||
|
||
this.finished = false; // has it been destroyed
|
||
|
||
this.destroyed = false; // should we decode strings into buffers before passing to _write?
|
||
// this is here so that some node-core streams can optimize string
|
||
// handling at a lower level.
|
||
|
||
var noDecode = options.decodeStrings === false;
|
||
this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string
|
||
// encoding is 'binary' so we have to make this configurable.
|
||
// Everything else in the universe uses 'utf8', though.
|
||
|
||
this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
|
||
// of how much we're waiting to get pushed to some underlying
|
||
// socket or file.
|
||
|
||
this.length = 0; // a flag to see when we're in the middle of a write.
|
||
|
||
this.writing = false; // when true all writes will be buffered until .uncork() call
|
||
|
||
this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
|
||
// or on a later tick. We set this to true at first, because any
|
||
// actions that shouldn't happen until "later" should generally also
|
||
// not happen before the first write call.
|
||
|
||
this.sync = true; // a flag to know if we're processing previously buffered items, which
|
||
// may call the _write() callback in the same tick, so that we don't
|
||
// end up in an overlapped onwrite situation.
|
||
|
||
this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
|
||
|
||
this.onwrite = function (er) {
|
||
onwrite(stream, er);
|
||
}; // the callback that the user supplies to write(chunk,encoding,cb)
|
||
|
||
|
||
this.writecb = null; // the amount that is being written when _write is called.
|
||
|
||
this.writelen = 0;
|
||
this.bufferedRequest = null;
|
||
this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
|
||
// this must be 0 before 'finish' can be emitted
|
||
|
||
this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
|
||
// This is relevant for synchronous Transform streams
|
||
|
||
this.prefinished = false; // True if the error was already emitted and should not be thrown again
|
||
|
||
this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
|
||
|
||
this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')
|
||
|
||
this.autoDestroy = !!options.autoDestroy; // count buffered requests
|
||
|
||
this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
|
||
// one allocated and free to use, and we maintain at most two
|
||
|
||
this.corkedRequestsFree = new CorkedRequest(this);
|
||
}
|
||
|
||
WritableState.prototype.getBuffer = function getBuffer() {
|
||
var current = this.bufferedRequest;
|
||
var out = [];
|
||
|
||
while (current) {
|
||
out.push(current);
|
||
current = current.next;
|
||
}
|
||
|
||
return out;
|
||
};
|
||
|
||
(function () {
|
||
try {
|
||
Object.defineProperty(WritableState.prototype, 'buffer', {
|
||
get: internalUtil.deprecate(function writableStateBufferGetter() {
|
||
return this.getBuffer();
|
||
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
|
||
});
|
||
} catch (_) {}
|
||
})(); // Test _writableState for inheritance to account for Duplex streams,
|
||
// whose prototype chain only points to Readable.
|
||
|
||
|
||
var realHasInstance;
|
||
|
||
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
|
||
realHasInstance = Function.prototype[Symbol.hasInstance];
|
||
Object.defineProperty(Writable, Symbol.hasInstance, {
|
||
value: function value(object) {
|
||
if (realHasInstance.call(this, object)) return true;
|
||
if (this !== Writable) return false;
|
||
return object && object._writableState instanceof WritableState;
|
||
}
|
||
});
|
||
} else {
|
||
realHasInstance = function realHasInstance(object) {
|
||
return object instanceof this;
|
||
};
|
||
}
|
||
|
||
function Writable(options) {
|
||
Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.
|
||
// `realHasInstance` is necessary because using plain `instanceof`
|
||
// would return false, as no `_writableState` property is attached.
|
||
// Trying to use the custom `instanceof` for Writable here will also break the
|
||
// Node.js LazyTransform implementation, which has a non-trivial getter for
|
||
// `_writableState` that would lead to infinite recursion.
|
||
// Checking for a Stream.Duplex instance is faster here instead of inside
|
||
// the WritableState constructor, at least with V8 6.5
|
||
|
||
var isDuplex = this instanceof Duplex;
|
||
if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
|
||
this._writableState = new WritableState(options, this, isDuplex); // legacy.
|
||
|
||
this.writable = true;
|
||
|
||
if (options) {
|
||
if (typeof options.write === 'function') this._write = options.write;
|
||
if (typeof options.writev === 'function') this._writev = options.writev;
|
||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||
if (typeof options.final === 'function') this._final = options.final;
|
||
}
|
||
|
||
Stream.call(this);
|
||
} // Otherwise people can pipe Writable streams, which is just wrong.
|
||
|
||
|
||
Writable.prototype.pipe = function () {
|
||
errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
|
||
};
|
||
|
||
function writeAfterEnd(stream, cb) {
|
||
var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
|
||
|
||
errorOrDestroy(stream, er);
|
||
process.nextTick(cb, er);
|
||
} // Checks that a user-supplied chunk is valid, especially for the particular
|
||
// mode the stream is in. Currently this means that `null` is never accepted
|
||
// and undefined/non-string values are only allowed in object mode.
|
||
|
||
|
||
function validChunk(stream, state, chunk, cb) {
|
||
var er;
|
||
|
||
if (chunk === null) {
|
||
er = new ERR_STREAM_NULL_VALUES();
|
||
} else if (typeof chunk !== 'string' && !state.objectMode) {
|
||
er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
|
||
}
|
||
|
||
if (er) {
|
||
errorOrDestroy(stream, er);
|
||
process.nextTick(cb, er);
|
||
return false;
|
||
}
|
||
|
||
return true;
|
||
}
|
||
|
||
Writable.prototype.write = function (chunk, encoding, cb) {
|
||
var state = this._writableState;
|
||
var ret = false;
|
||
|
||
var isBuf = !state.objectMode && _isUint8Array(chunk);
|
||
|
||
if (isBuf && !Buffer.isBuffer(chunk)) {
|
||
chunk = _uint8ArrayToBuffer(chunk);
|
||
}
|
||
|
||
if (typeof encoding === 'function') {
|
||
cb = encoding;
|
||
encoding = null;
|
||
}
|
||
|
||
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
|
||
if (typeof cb !== 'function') cb = nop;
|
||
if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
|
||
state.pendingcb++;
|
||
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
|
||
}
|
||
return ret;
|
||
};
|
||
|
||
Writable.prototype.cork = function () {
|
||
this._writableState.corked++;
|
||
};
|
||
|
||
Writable.prototype.uncork = function () {
|
||
var state = this._writableState;
|
||
|
||
if (state.corked) {
|
||
state.corked--;
|
||
if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
|
||
}
|
||
};
|
||
|
||
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||
// node::ParseEncoding() requires lower case.
|
||
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
|
||
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
|
||
this._writableState.defaultEncoding = encoding;
|
||
return this;
|
||
};
|
||
|
||
Object.defineProperty(Writable.prototype, 'writableBuffer', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState && this._writableState.getBuffer();
|
||
}
|
||
});
|
||
|
||
function decodeChunk(state, chunk, encoding) {
|
||
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
|
||
chunk = Buffer.from(chunk, encoding);
|
||
}
|
||
|
||
return chunk;
|
||
}
|
||
|
||
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState.highWaterMark;
|
||
}
|
||
}); // if we're already writing something, then just put this
|
||
// in the queue, and wait our turn. Otherwise, call _write
|
||
// If we return false, then we need a drain event, so set that flag.
|
||
|
||
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
|
||
if (!isBuf) {
|
||
var newChunk = decodeChunk(state, chunk, encoding);
|
||
|
||
if (chunk !== newChunk) {
|
||
isBuf = true;
|
||
encoding = 'buffer';
|
||
chunk = newChunk;
|
||
}
|
||
}
|
||
|
||
var len = state.objectMode ? 1 : chunk.length;
|
||
state.length += len;
|
||
var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
|
||
|
||
if (!ret) state.needDrain = true;
|
||
|
||
if (state.writing || state.corked) {
|
||
var last = state.lastBufferedRequest;
|
||
state.lastBufferedRequest = {
|
||
chunk: chunk,
|
||
encoding: encoding,
|
||
isBuf: isBuf,
|
||
callback: cb,
|
||
next: null
|
||
};
|
||
|
||
if (last) {
|
||
last.next = state.lastBufferedRequest;
|
||
} else {
|
||
state.bufferedRequest = state.lastBufferedRequest;
|
||
}
|
||
|
||
state.bufferedRequestCount += 1;
|
||
} else {
|
||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||
}
|
||
|
||
return ret;
|
||
}
|
||
|
||
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||
state.writelen = len;
|
||
state.writecb = cb;
|
||
state.writing = true;
|
||
state.sync = true;
|
||
if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
|
||
state.sync = false;
|
||
}
|
||
|
||
function onwriteError(stream, state, sync, er, cb) {
|
||
--state.pendingcb;
|
||
|
||
if (sync) {
|
||
// defer the callback if we are being called synchronously
|
||
// to avoid piling up things on the stack
|
||
process.nextTick(cb, er); // this can emit finish, and it will always happen
|
||
// after error
|
||
|
||
process.nextTick(finishMaybe, stream, state);
|
||
stream._writableState.errorEmitted = true;
|
||
errorOrDestroy(stream, er);
|
||
} else {
|
||
// the caller expect this to happen before if
|
||
// it is async
|
||
cb(er);
|
||
stream._writableState.errorEmitted = true;
|
||
errorOrDestroy(stream, er); // this can emit finish, but finish must
|
||
// always follow error
|
||
|
||
finishMaybe(stream, state);
|
||
}
|
||
}
|
||
|
||
function onwriteStateUpdate(state) {
|
||
state.writing = false;
|
||
state.writecb = null;
|
||
state.length -= state.writelen;
|
||
state.writelen = 0;
|
||
}
|
||
|
||
function onwrite(stream, er) {
|
||
var state = stream._writableState;
|
||
var sync = state.sync;
|
||
var cb = state.writecb;
|
||
if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
|
||
onwriteStateUpdate(state);
|
||
if (er) onwriteError(stream, state, sync, er, cb);else {
|
||
// Check if we're actually ready to finish, but don't emit yet
|
||
var finished = needFinish(state) || stream.destroyed;
|
||
|
||
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
|
||
clearBuffer(stream, state);
|
||
}
|
||
|
||
if (sync) {
|
||
process.nextTick(afterWrite, stream, state, finished, cb);
|
||
} else {
|
||
afterWrite(stream, state, finished, cb);
|
||
}
|
||
}
|
||
}
|
||
|
||
function afterWrite(stream, state, finished, cb) {
|
||
if (!finished) onwriteDrain(stream, state);
|
||
state.pendingcb--;
|
||
cb();
|
||
finishMaybe(stream, state);
|
||
} // Must force callback to be called on nextTick, so that we don't
|
||
// emit 'drain' before the write() consumer gets the 'false' return
|
||
// value, and has a chance to attach a 'drain' listener.
|
||
|
||
|
||
function onwriteDrain(stream, state) {
|
||
if (state.length === 0 && state.needDrain) {
|
||
state.needDrain = false;
|
||
stream.emit('drain');
|
||
}
|
||
} // if there's something in the buffer waiting, then process it
|
||
|
||
|
||
function clearBuffer(stream, state) {
|
||
state.bufferProcessing = true;
|
||
var entry = state.bufferedRequest;
|
||
|
||
if (stream._writev && entry && entry.next) {
|
||
// Fast case, write everything using _writev()
|
||
var l = state.bufferedRequestCount;
|
||
var buffer = new Array(l);
|
||
var holder = state.corkedRequestsFree;
|
||
holder.entry = entry;
|
||
var count = 0;
|
||
var allBuffers = true;
|
||
|
||
while (entry) {
|
||
buffer[count] = entry;
|
||
if (!entry.isBuf) allBuffers = false;
|
||
entry = entry.next;
|
||
count += 1;
|
||
}
|
||
|
||
buffer.allBuffers = allBuffers;
|
||
doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
|
||
// as the hot path ends with doWrite
|
||
|
||
state.pendingcb++;
|
||
state.lastBufferedRequest = null;
|
||
|
||
if (holder.next) {
|
||
state.corkedRequestsFree = holder.next;
|
||
holder.next = null;
|
||
} else {
|
||
state.corkedRequestsFree = new CorkedRequest(state);
|
||
}
|
||
|
||
state.bufferedRequestCount = 0;
|
||
} else {
|
||
// Slow case, write chunks one-by-one
|
||
while (entry) {
|
||
var chunk = entry.chunk;
|
||
var encoding = entry.encoding;
|
||
var cb = entry.callback;
|
||
var len = state.objectMode ? 1 : chunk.length;
|
||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||
entry = entry.next;
|
||
state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
|
||
// it means that we need to wait until it does.
|
||
// also, that means that the chunk and cb are currently
|
||
// being processed, so move the buffer counter past them.
|
||
|
||
if (state.writing) {
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (entry === null) state.lastBufferedRequest = null;
|
||
}
|
||
|
||
state.bufferedRequest = entry;
|
||
state.bufferProcessing = false;
|
||
}
|
||
|
||
Writable.prototype._write = function (chunk, encoding, cb) {
|
||
cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
|
||
};
|
||
|
||
Writable.prototype._writev = null;
|
||
|
||
Writable.prototype.end = function (chunk, encoding, cb) {
|
||
var state = this._writableState;
|
||
|
||
if (typeof chunk === 'function') {
|
||
cb = chunk;
|
||
chunk = null;
|
||
encoding = null;
|
||
} else if (typeof encoding === 'function') {
|
||
cb = encoding;
|
||
encoding = null;
|
||
}
|
||
|
||
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks
|
||
|
||
if (state.corked) {
|
||
state.corked = 1;
|
||
this.uncork();
|
||
} // ignore unnecessary end() calls.
|
||
|
||
|
||
if (!state.ending) endWritable(this, state, cb);
|
||
return this;
|
||
};
|
||
|
||
Object.defineProperty(Writable.prototype, 'writableLength', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
return this._writableState.length;
|
||
}
|
||
});
|
||
|
||
function needFinish(state) {
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
|
||
|
||
function callFinal(stream, state) {
|
||
stream._final(function (err) {
|
||
state.pendingcb--;
|
||
|
||
if (err) {
|
||
errorOrDestroy(stream, err);
|
||
}
|
||
|
||
state.prefinished = true;
|
||
stream.emit('prefinish');
|
||
finishMaybe(stream, state);
|
||
});
|
||
}
|
||
|
||
function prefinish(stream, state) {
|
||
if (!state.prefinished && !state.finalCalled) {
|
||
if (typeof stream._final === 'function' && !state.destroyed) {
|
||
state.pendingcb++;
|
||
state.finalCalled = true;
|
||
process.nextTick(callFinal, stream, state);
|
||
} else {
|
||
state.prefinished = true;
|
||
stream.emit('prefinish');
|
||
}
|
||
}
|
||
}
|
||
|
||
function finishMaybe(stream, state) {
|
||
var need = needFinish(state);
|
||
|
||
if (need) {
|
||
prefinish(stream, state);
|
||
|
||
if (state.pendingcb === 0) {
|
||
state.finished = true;
|
||
stream.emit('finish');
|
||
|
||
if (state.autoDestroy) {
|
||
// In case of duplex streams we need a way to detect
|
||
// if the readable side is ready for autoDestroy as well
|
||
var rState = stream._readableState;
|
||
|
||
if (!rState || rState.autoDestroy && rState.endEmitted) {
|
||
stream.destroy();
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
return need;
|
||
}
|
||
|
||
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);

if (cb) {
if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
}

state.ended = true;
stream.writable = false;
}

function onCorkedFinish(corkReq, state, err) {
var entry = corkReq.entry;
corkReq.entry = null;

while (entry) {
var cb = entry.callback;
state.pendingcb--;
cb(err);
entry = entry.next;
} // reuse the free corkReq.


state.corkedRequestsFree.next = corkReq;
}
|
||
|
||
Object.defineProperty(Writable.prototype, 'destroyed', {
|
||
// making it explicit this property is not enumerable
|
||
// because otherwise some prototype manipulation in
|
||
// userland will fail
|
||
enumerable: false,
|
||
get: function get() {
|
||
if (this._writableState === undefined) {
|
||
return false;
|
||
}
|
||
|
||
return this._writableState.destroyed;
|
||
},
|
||
set: function set(value) {
|
||
// we ignore the value if the stream
|
||
// has not been initialized yet
|
||
if (!this._writableState) {
|
||
return;
|
||
} // backward compatibility, the user is explicitly
|
||
// managing destroyed
|
||
|
||
|
||
this._writableState.destroyed = value;
|
||
}
|
||
});
|
||
Writable.prototype.destroy = destroyImpl.destroy;
|
||
Writable.prototype._undestroy = destroyImpl.undestroy;
|
||
|
||
Writable.prototype._destroy = function (err, cb) {
|
||
cb(err);
|
||
};
|
||
}).call(this)}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
|
||
|
||
},{"../errors":13,"./_stream_duplex":14,"./internal/streams/destroy":21,"./internal/streams/state":25,"./internal/streams/stream":26,"_process":8,"buffer":3,"inherits":12,"util-deprecate":28}],19:[function(require,module,exports){
|
||
(function (process){(function (){
|
||
'use strict';
|
||
|
||
var _Object$setPrototypeO;
|
||
|
||
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
||
|
||
var finished = require('./end-of-stream');
|
||
|
||
var kLastResolve = Symbol('lastResolve');
|
||
var kLastReject = Symbol('lastReject');
|
||
var kError = Symbol('error');
|
||
var kEnded = Symbol('ended');
|
||
var kLastPromise = Symbol('lastPromise');
|
||
var kHandlePromise = Symbol('handlePromise');
|
||
var kStream = Symbol('stream');
|
||
|
||
function createIterResult(value, done) {
|
||
return {
|
||
value: value,
|
||
done: done
|
||
};
|
||
}
|
||
|
||
function readAndResolve(iter) {
|
||
var resolve = iter[kLastResolve];
|
||
|
||
if (resolve !== null) {
|
||
var data = iter[kStream].read(); // we defer if data is null
|
||
// we can be expecting either 'end' or
|
||
// 'error'
|
||
|
||
if (data !== null) {
|
||
iter[kLastPromise] = null;
|
||
iter[kLastResolve] = null;
|
||
iter[kLastReject] = null;
|
||
resolve(createIterResult(data, false));
|
||
}
|
||
}
|
||
}
|
||
|
||
function onReadable(iter) {
|
||
// we wait for the next tick, because it might
|
||
// emit an error with process.nextTick
|
||
process.nextTick(readAndResolve, iter);
|
||
}
|
||
|
||
function wrapForNext(lastPromise, iter) {
|
||
return function (resolve, reject) {
|
||
lastPromise.then(function () {
|
||
if (iter[kEnded]) {
|
||
resolve(createIterResult(undefined, true));
|
||
return;
|
||
}
|
||
|
||
iter[kHandlePromise](resolve, reject);
|
||
}, reject);
|
||
};
|
||
}
|
||
|
||
var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
|
||
var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
|
||
get stream() {
|
||
return this[kStream];
|
||
},
|
||
|
||
next: function next() {
|
||
var _this = this;
|
||
|
||
// if we have detected an error in the meanwhile
|
||
// reject straight away
|
||
var error = this[kError];
|
||
|
||
if (error !== null) {
|
||
return Promise.reject(error);
|
||
}
|
||
|
||
if (this[kEnded]) {
|
||
return Promise.resolve(createIterResult(undefined, true));
|
||
}
|
||
|
||
if (this[kStream].destroyed) {
|
||
// We need to defer via nextTick because if .destroy(err) is
|
||
// called, the error will be emitted via nextTick, and
|
||
// we cannot guarantee that there is no error lingering around
|
||
// waiting to be emitted.
|
||
return new Promise(function (resolve, reject) {
|
||
process.nextTick(function () {
|
||
if (_this[kError]) {
|
||
reject(_this[kError]);
|
||
} else {
|
||
resolve(createIterResult(undefined, true));
|
||
}
|
||
});
|
||
});
|
||
} // if we have multiple next() calls
|
||
// we will wait for the previous Promise to finish
|
||
// this logic is optimized to support for await loops,
|
||
// where next() is only called once at a time
|
||
|
||
|
||
var lastPromise = this[kLastPromise];
|
||
var promise;
|
||
|
||
if (lastPromise) {
|
||
promise = new Promise(wrapForNext(lastPromise, this));
|
||
} else {
|
||
// fast path needed to support multiple this.push()
|
||
// without triggering the next() queue
|
||
var data = this[kStream].read();
|
||
|
||
if (data !== null) {
|
||
return Promise.resolve(createIterResult(data, false));
|
||
}
|
||
|
||
promise = new Promise(this[kHandlePromise]);
|
||
}
|
||
|
||
this[kLastPromise] = promise;
|
||
return promise;
|
||
}
|
||
}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
|
||
return this;
|
||
}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
|
||
var _this2 = this;
|
||
|
||
// destroy(err, cb) is a private API
|
||
// we can guarantee we have that here, because we control the
|
||
// Readable class this is attached to
|
||
return new Promise(function (resolve, reject) {
|
||
_this2[kStream].destroy(null, function (err) {
|
||
if (err) {
|
||
reject(err);
|
||
return;
|
||
}
|
||
|
||
resolve(createIterResult(undefined, true));
|
||
});
|
||
});
|
||
}), _Object$setPrototypeO), AsyncIteratorPrototype);
|
||
|
||
var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
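// Note: per-iterator state (stream, pending resolve/reject, last promise, error,
// ended flag) lives under the symbol keys defined above; 'readable' events feed
// pending next() promises, and end-of-stream settles or rejects the last one.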
|
||
var _Object$create;
|
||
|
||
var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
|
||
value: stream,
|
||
writable: true
|
||
}), _defineProperty(_Object$create, kLastResolve, {
|
||
value: null,
|
||
writable: true
|
||
}), _defineProperty(_Object$create, kLastReject, {
|
||
value: null,
|
||
writable: true
|
||
}), _defineProperty(_Object$create, kError, {
|
||
value: null,
|
||
writable: true
|
||
}), _defineProperty(_Object$create, kEnded, {
|
||
value: stream._readableState.endEmitted,
|
||
writable: true
|
||
}), _defineProperty(_Object$create, kHandlePromise, {
|
||
value: function value(resolve, reject) {
|
||
var data = iterator[kStream].read();
|
||
|
||
if (data) {
|
||
iterator[kLastPromise] = null;
|
||
iterator[kLastResolve] = null;
|
||
iterator[kLastReject] = null;
|
||
resolve(createIterResult(data, false));
|
||
} else {
|
||
iterator[kLastResolve] = resolve;
|
||
iterator[kLastReject] = reject;
|
||
}
|
||
},
|
||
writable: true
|
||
}), _Object$create));
|
||
iterator[kLastPromise] = null;
|
||
finished(stream, function (err) {
|
||
if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
|
||
var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
|
||
// returned by next() and store the error
|
||
|
||
if (reject !== null) {
|
||
iterator[kLastPromise] = null;
|
||
iterator[kLastResolve] = null;
|
||
iterator[kLastReject] = null;
|
||
reject(err);
|
||
}
|
||
|
||
iterator[kError] = err;
|
||
return;
|
||
}
|
||
|
||
var resolve = iterator[kLastResolve];
|
||
|
||
if (resolve !== null) {
|
||
iterator[kLastPromise] = null;
|
||
iterator[kLastResolve] = null;
|
||
iterator[kLastReject] = null;
|
||
resolve(createIterResult(undefined, true));
|
||
}
|
||
|
||
iterator[kEnded] = true;
|
||
});
|
||
stream.on('readable', onReadable.bind(null, iterator));
|
||
return iterator;
|
||
};
|
||
|
||
module.exports = createReadableStreamAsyncIterator;
|
||
}).call(this)}).call(this,require('_process'))
|
||
|
||
},{"./end-of-stream":22,"_process":8}],20:[function(require,module,exports){
|
||
'use strict';
|
||
|
||
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
|
||
|
||
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
|
||
|
||
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
||
|
||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
var _require = require('buffer'),
|
||
Buffer = _require.Buffer;
|
||
|
||
var _require2 = require('util'),
|
||
inspect = _require2.inspect;
|
||
|
||
var custom = inspect && inspect.custom || 'inspect';
|
||
|
||
function copyBuffer(src, target, offset) {
|
||
Buffer.prototype.copy.call(src, target, offset);
|
||
}
|
||
|
||
module.exports =
|
||
/*#__PURE__*/
|
||
function () {
|
||
function BufferList() {
|
||
_classCallCheck(this, BufferList);
|
||
|
||
this.head = null;
|
||
this.tail = null;
|
||
this.length = 0;
|
||
}
|
||
|
||
_createClass(BufferList, [{
|
||
key: "push",
|
||
value: function push(v) {
|
||
var entry = {
|
||
data: v,
|
||
next: null
|
||
};
|
||
if (this.length > 0) this.tail.next = entry;else this.head = entry;
|
||
this.tail = entry;
|
||
++this.length;
|
||
}
|
||
}, {
|
||
key: "unshift",
|
||
value: function unshift(v) {
|
||
var entry = {
|
||
data: v,
|
||
next: this.head
|
||
};
|
||
if (this.length === 0) this.tail = entry;
|
||
this.head = entry;
|
||
++this.length;
|
||
}
|
||
}, {
|
||
key: "shift",
|
||
value: function shift() {
|
||
if (this.length === 0) return;
|
||
var ret = this.head.data;
|
||
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
|
||
--this.length;
|
||
return ret;
|
||
}
|
||
}, {
|
||
key: "clear",
|
||
value: function clear() {
|
||
this.head = this.tail = null;
|
||
this.length = 0;
|
||
}
|
||
}, {
|
||
key: "join",
|
||
value: function join(s) {
|
||
if (this.length === 0) return '';
|
||
var p = this.head;
|
||
var ret = '' + p.data;
|
||
|
||
while (p = p.next) {
|
||
ret += s + p.data;
|
||
}
|
||
|
||
return ret;
|
||
}
|
||
}, {
|
||
key: "concat",
|
||
value: function concat(n) {
|
||
if (this.length === 0) return Buffer.alloc(0);
|
||
var ret = Buffer.allocUnsafe(n >>> 0);
|
||
var p = this.head;
|
||
var i = 0;
|
||
|
||
while (p) {
|
||
copyBuffer(p.data, ret, i);
|
||
i += p.data.length;
|
||
p = p.next;
|
||
}
|
||
|
||
return ret;
|
||
} // Consumes a specified amount of bytes or characters from the buffered data.
|
||
|
||
}, {
|
||
key: "consume",
|
||
value: function consume(n, hasStrings) {
|
||
var ret;
|
||
|
||
if (n < this.head.data.length) {
|
||
// `slice` is the same for buffers and strings.
|
||
ret = this.head.data.slice(0, n);
|
||
this.head.data = this.head.data.slice(n);
|
||
} else if (n === this.head.data.length) {
|
||
// First chunk is a perfect match.
|
||
ret = this.shift();
|
||
} else {
|
||
// Result spans more than one buffer.
|
||
ret = hasStrings ? this._getString(n) : this._getBuffer(n);
|
||
}
|
||
|
||
return ret;
|
||
}
|
||
}, {
|
||
key: "first",
|
||
value: function first() {
|
||
return this.head.data;
|
||
} // Consumes a specified amount of characters from the buffered data.
|
||
|
||
}, {
|
||
key: "_getString",
|
||
value: function _getString(n) {
|
||
var p = this.head;
|
||
var c = 1;
|
||
var ret = p.data;
|
||
n -= ret.length;
|
||
|
||
while (p = p.next) {
|
||
var str = p.data;
|
||
var nb = n > str.length ? str.length : n;
|
||
if (nb === str.length) ret += str;else ret += str.slice(0, n);
|
||
n -= nb;
|
||
|
||
if (n === 0) {
|
||
if (nb === str.length) {
|
||
++c;
|
||
if (p.next) this.head = p.next;else this.head = this.tail = null;
|
||
} else {
|
||
this.head = p;
|
||
p.data = str.slice(nb);
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
++c;
|
||
}
|
||
|
||
this.length -= c;
|
||
return ret;
|
||
} // Consumes a specified amount of bytes from the buffered data.
|
||
|
||
}, {
|
||
key: "_getBuffer",
|
||
value: function _getBuffer(n) {
|
||
var ret = Buffer.allocUnsafe(n);
|
||
var p = this.head;
|
||
var c = 1;
|
||
p.data.copy(ret);
|
||
n -= p.data.length;
|
||
|
||
while (p = p.next) {
|
||
var buf = p.data;
|
||
var nb = n > buf.length ? buf.length : n;
|
||
buf.copy(ret, ret.length - n, 0, nb);
|
||
n -= nb;
|
||
|
||
if (n === 0) {
|
||
if (nb === buf.length) {
|
||
++c;
|
||
if (p.next) this.head = p.next;else this.head = this.tail = null;
|
||
} else {
|
||
this.head = p;
|
||
p.data = buf.slice(nb);
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
++c;
|
||
}
|
||
|
||
this.length -= c;
|
||
return ret;
|
||
} // Make sure the linked list only shows the minimal necessary information.
|
||
|
||
}, {
|
||
key: custom,
|
||
value: function value(_, options) {
|
||
return inspect(this, _objectSpread({}, options, {
|
||
// Only inspect one level.
|
||
depth: 0,
|
||
// It should not recurse.
|
||
customInspect: false
|
||
}));
|
||
}
|
||
}]);
|
||
|
||
return BufferList;
|
||
}();
|
||
},{"buffer":3,"util":2}],21:[function(require,module,exports){
|
||
(function (process){(function (){
|
||
'use strict'; // undocumented cb() API, needed for core, not for public API
|
||
|
||
function destroy(err, cb) {
|
||
var _this = this;
|
||
|
||
var readableDestroyed = this._readableState && this._readableState.destroyed;
|
||
var writableDestroyed = this._writableState && this._writableState.destroyed;
|
||
|
||
if (readableDestroyed || writableDestroyed) {
|
||
if (cb) {
|
||
cb(err);
|
||
} else if (err) {
|
||
if (!this._writableState) {
|
||
process.nextTick(emitErrorNT, this, err);
|
||
} else if (!this._writableState.errorEmitted) {
|
||
this._writableState.errorEmitted = true;
|
||
process.nextTick(emitErrorNT, this, err);
|
||
}
|
||
}
|
||
|
||
return this;
|
||
} // we set destroyed to true before firing error callbacks in order
|
||
// to make it re-entrance safe in case destroy() is called within callbacks
|
||
|
||
|
||
if (this._readableState) {
|
||
this._readableState.destroyed = true;
|
||
} // if this is a duplex stream mark the writable part as destroyed as well
|
||
|
||
|
||
if (this._writableState) {
|
||
this._writableState.destroyed = true;
|
||
}
|
||
|
||
this._destroy(err || null, function (err) {
|
||
if (!cb && err) {
|
||
if (!_this._writableState) {
|
||
process.nextTick(emitErrorAndCloseNT, _this, err);
|
||
} else if (!_this._writableState.errorEmitted) {
|
||
_this._writableState.errorEmitted = true;
|
||
process.nextTick(emitErrorAndCloseNT, _this, err);
|
||
} else {
|
||
process.nextTick(emitCloseNT, _this);
|
||
}
|
||
} else if (cb) {
|
||
process.nextTick(emitCloseNT, _this);
|
||
cb(err);
|
||
} else {
|
||
process.nextTick(emitCloseNT, _this);
|
||
}
|
||
});
|
||
|
||
return this;
|
||
}
|
||
|
||
function emitErrorAndCloseNT(self, err) {
|
||
emitErrorNT(self, err);
|
||
emitCloseNT(self);
|
||
}
|
||
|
||
function emitCloseNT(self) {
|
||
if (self._writableState && !self._writableState.emitClose) return;
|
||
if (self._readableState && !self._readableState.emitClose) return;
|
||
self.emit('close');
|
||
}
|
||
|
||
function undestroy() {
|
||
if (this._readableState) {
|
||
this._readableState.destroyed = false;
|
||
this._readableState.reading = false;
|
||
this._readableState.ended = false;
|
||
this._readableState.endEmitted = false;
|
||
}
|
||
|
||
if (this._writableState) {
|
||
this._writableState.destroyed = false;
|
||
this._writableState.ended = false;
|
||
this._writableState.ending = false;
|
||
this._writableState.finalCalled = false;
|
||
this._writableState.prefinished = false;
|
||
this._writableState.finished = false;
|
||
this._writableState.errorEmitted = false;
|
||
}
|
||
}
|
||
|
||
function emitErrorNT(self, err) {
|
||
self.emit('error', err);
|
||
}
|
||
|
||
function errorOrDestroy(stream, err) {
// We have tests that rely on errors being emitted
// in the same tick, so changing this is semver major.
// For now when you opt-in to autoDestroy we allow
// the error to be emitted nextTick. In a future
// semver major update we should change the default to this.
var rState = stream._readableState;
var wState = stream._writableState;
if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
}

module.exports = {
destroy: destroy,
undestroy: undestroy,
errorOrDestroy: errorOrDestroy
};
}).call(this)}).call(this,require('_process'))
|
||
|
||
},{"_process":8}],22:[function(require,module,exports){
|
||
// Ported from https://github.com/mafintosh/end-of-stream with
|
||
// permission from the author, Mathias Buus (@mafintosh).
|
||
'use strict';
|
||
|
||
var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;
|
||
|
||
function once(callback) {
|
||
var called = false;
|
||
return function () {
|
||
if (called) return;
|
||
called = true;
|
||
|
||
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
|
||
args[_key] = arguments[_key];
|
||
}
|
||
|
||
callback.apply(this, args);
|
||
};
|
||
}
|
||
|
||
function noop() {}
|
||
|
||
function isRequest(stream) {
|
||
return stream.setHeader && typeof stream.abort === 'function';
|
||
}
|
||
|
||
function eos(stream, opts, callback) {
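// Note: invokes `callback` at most once, when the stream has finished writing
// and ended reading (as applicable), errored, or closed prematurely; returns a
// function that detaches every listener added here.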
|
||
if (typeof opts === 'function') return eos(stream, null, opts);
|
||
if (!opts) opts = {};
|
||
callback = once(callback || noop);
|
||
var readable = opts.readable || opts.readable !== false && stream.readable;
|
||
var writable = opts.writable || opts.writable !== false && stream.writable;
|
||
|
||
var onlegacyfinish = function onlegacyfinish() {
|
||
if (!stream.writable) onfinish();
|
||
};
|
||
|
||
var writableEnded = stream._writableState && stream._writableState.finished;
|
||
|
||
var onfinish = function onfinish() {
|
||
writable = false;
|
||
writableEnded = true;
|
||
if (!readable) callback.call(stream);
|
||
};
|
||
|
||
var readableEnded = stream._readableState && stream._readableState.endEmitted;
|
||
|
||
var onend = function onend() {
|
||
readable = false;
|
||
readableEnded = true;
|
||
if (!writable) callback.call(stream);
|
||
};
|
||
|
||
var onerror = function onerror(err) {
|
||
callback.call(stream, err);
|
||
};
|
||
|
||
var onclose = function onclose() {
|
||
var err;
|
||
|
||
if (readable && !readableEnded) {
|
||
if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
|
||
return callback.call(stream, err);
|
||
}
|
||
|
||
if (writable && !writableEnded) {
|
||
if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
|
||
return callback.call(stream, err);
|
||
}
|
||
};
|
||
|
||
var onrequest = function onrequest() {
|
||
stream.req.on('finish', onfinish);
|
||
};
|
||
|
||
if (isRequest(stream)) {
|
||
stream.on('complete', onfinish);
|
||
stream.on('abort', onclose);
|
||
if (stream.req) onrequest();else stream.on('request', onrequest);
|
||
} else if (writable && !stream._writableState) {
|
||
// legacy streams
|
||
stream.on('end', onlegacyfinish);
|
||
stream.on('close', onlegacyfinish);
|
||
}
|
||
|
||
stream.on('end', onend);
|
||
stream.on('finish', onfinish);
|
||
if (opts.error !== false) stream.on('error', onerror);
|
||
stream.on('close', onclose);
|
||
return function () {
|
||
stream.removeListener('complete', onfinish);
|
||
stream.removeListener('abort', onclose);
|
||
stream.removeListener('request', onrequest);
|
||
if (stream.req) stream.req.removeListener('finish', onfinish);
|
||
stream.removeListener('end', onlegacyfinish);
|
||
stream.removeListener('close', onlegacyfinish);
|
||
stream.removeListener('finish', onfinish);
|
||
stream.removeListener('end', onend);
|
||
stream.removeListener('error', onerror);
|
||
stream.removeListener('close', onclose);
|
||
};
|
||
}
|
||
|
||
module.exports = eos;
|
||
},{"../../../errors":13}],23:[function(require,module,exports){
|
||
module.exports = function () {
throw new Error('Readable.from is not available in the browser')
};
|
||
|
||
},{}],24:[function(require,module,exports){
|
||
// Ported from https://github.com/mafintosh/pump with
|
||
// permission from the author, Mathias Buus (@mafintosh).
|
||
'use strict';
|
||
|
||
var eos;
|
||
|
||
function once(callback) {
|
||
var called = false;
|
||
return function () {
|
||
if (called) return;
|
||
called = true;
|
||
callback.apply(void 0, arguments);
|
||
};
|
||
}
|
||
|
||
var _require$codes = require('../../../errors').codes,
|
||
ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
|
||
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
|
||
|
||
function noop(err) {
|
||
// Rethrow the error if it exists to avoid swallowing it
|
||
if (err) throw err;
|
||
}
|
||
|
||
function isRequest(stream) {
|
||
return stream.setHeader && typeof stream.abort === 'function';
|
||
}
|
||
|
||
function destroyer(stream, reading, writing, callback) {
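// Note: returns a teardown function for `stream`; it is a no-op once the stream
// has already closed or finished cleanly, otherwise it aborts request streams,
// calls destroy() when available, or reports ERR_STREAM_DESTROYED('pipe').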
|
||
callback = once(callback);
|
||
var closed = false;
|
||
stream.on('close', function () {
|
||
closed = true;
|
||
});
|
||
if (eos === undefined) eos = require('./end-of-stream');
|
||
eos(stream, {
|
||
readable: reading,
|
||
writable: writing
|
||
}, function (err) {
|
||
if (err) return callback(err);
|
||
closed = true;
|
||
callback();
|
||
});
|
||
var destroyed = false;
|
||
return function (err) {
|
||
if (closed) return;
|
||
if (destroyed) return;
|
||
destroyed = true; // request.destroy just do .end - .abort is what we want
|
||
|
||
if (isRequest(stream)) return stream.abort();
|
||
if (typeof stream.destroy === 'function') return stream.destroy();
|
||
callback(err || new ERR_STREAM_DESTROYED('pipe'));
|
||
};
|
||
}
|
||
|
||
function call(fn) {
|
||
fn();
|
||
}
|
||
|
||
function pipe(from, to) {
|
||
return from.pipe(to);
|
||
}
|
||
|
||
function popCallback(streams) {
|
||
if (!streams.length) return noop;
|
||
if (typeof streams[streams.length - 1] !== 'function') return noop;
|
||
return streams.pop();
|
||
}
|
||
|
||
function pipeline() {
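// Note: accepts streams (optionally as a single array) plus an optional final
// callback; each stream gets a destroyer so an error or premature close anywhere
// tears down the whole chain, and the first error is passed to the callback.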
|
||
for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
streams[_key] = arguments[_key];
}

var callback = popCallback(streams);
if (Array.isArray(streams[0])) streams = streams[0];

if (streams.length < 2) {
throw new ERR_MISSING_ARGS('streams');
}

var error;
var destroys = streams.map(function (stream, i) {
var reading = i < streams.length - 1;
var writing = i > 0;
return destroyer(stream, reading, writing, function (err) {
if (!error) error = err;
if (err) destroys.forEach(call);
if (reading) return;
destroys.forEach(call);
callback(error);
});
});
return streams.reduce(pipe);
}

module.exports = pipeline;
|
||
},{"../../../errors":13,"./end-of-stream":22}],25:[function(require,module,exports){
|
||
'use strict';

var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;

function highWaterMarkFrom(options, isDuplex, duplexKey) {
return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
}

function getHighWaterMark(state, options, duplexKey, isDuplex) {
var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);

if (hwm != null) {
if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
var name = isDuplex ? duplexKey : 'highWaterMark';
throw new ERR_INVALID_OPT_VALUE(name, hwm);
}

return Math.floor(hwm);
} // Default value


return state.objectMode ? 16 : 16 * 1024;
}

module.exports = {
getHighWaterMark: getHighWaterMark
};
|
||
},{"../../../errors":13}],26:[function(require,module,exports){
|
||
module.exports = require('events').EventEmitter;
},{"events":5}],27:[function(require,module,exports){
|
||
// Copyright Joyent, Inc. and other Node contributors.
|
||
//
|
||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
// copy of this software and associated documentation files (the
|
||
// "Software"), to deal in the Software without restriction, including
|
||
// without limitation the rights to use, copy, modify, merge, publish,
|
||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
// persons to whom the Software is furnished to do so, subject to the
|
||
// following conditions:
|
||
//
|
||
// The above copyright notice and this permission notice shall be included
|
||
// in all copies or substantial portions of the Software.
|
||
//
|
||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
||
'use strict';
|
||
|
||
/*<replacement>*/
|
||
|
||
var Buffer = require('safe-buffer').Buffer;
|
||
/*</replacement>*/
|
||
|
||
var isEncoding = Buffer.isEncoding || function (encoding) {
|
||
encoding = '' + encoding;
|
||
switch (encoding && encoding.toLowerCase()) {
|
||
case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
|
||
return true;
|
||
default:
|
||
return false;
|
||
}
|
||
};
|
||
|
||
function _normalizeEncoding(enc) {
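// Note: collapses the accepted spellings ('utf-8', 'UCS-2', 'binary', ...) to a
// canonical name ('utf8', 'utf16le', 'latin1', ...); returns undefined for
// encodings it does not recognize.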
|
||
if (!enc) return 'utf8';
|
||
var retried;
|
||
while (true) {
|
||
switch (enc) {
|
||
case 'utf8':
|
||
case 'utf-8':
|
||
return 'utf8';
|
||
case 'ucs2':
|
||
case 'ucs-2':
|
||
case 'utf16le':
|
||
case 'utf-16le':
|
||
return 'utf16le';
|
||
case 'latin1':
|
||
case 'binary':
|
||
return 'latin1';
|
||
case 'base64':
|
||
case 'ascii':
|
||
case 'hex':
|
||
return enc;
|
||
default:
|
||
if (retried) return; // undefined
|
||
enc = ('' + enc).toLowerCase();
|
||
retried = true;
|
||
}
|
||
}
|
||
};
|
||
|
||
// Do not cache `Buffer.isEncoding` when checking encoding names as some
|
||
// modules monkey-patch it to support additional encodings
|
||
function normalizeEncoding(enc) {
|
||
var nenc = _normalizeEncoding(enc);
|
||
if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
|
||
return nenc || enc;
|
||
}
|
||
|
||
// StringDecoder provides an interface for efficiently splitting a series of
|
||
// buffers into a series of JS strings without breaking apart multi-byte
|
||
// characters.
|
||
exports.StringDecoder = StringDecoder;
|
||
function StringDecoder(encoding) {
|
||
this.encoding = normalizeEncoding(encoding);
|
||
var nb;
|
||
switch (this.encoding) {
|
||
case 'utf16le':
|
||
this.text = utf16Text;
|
||
this.end = utf16End;
|
||
nb = 4;
|
||
break;
|
||
case 'utf8':
|
||
this.fillLast = utf8FillLast;
|
||
nb = 4;
|
||
break;
|
||
case 'base64':
|
||
this.text = base64Text;
|
||
this.end = base64End;
|
||
nb = 3;
|
||
break;
|
||
default:
|
||
this.write = simpleWrite;
|
||
this.end = simpleEnd;
|
||
return;
|
||
}
|
||
this.lastNeed = 0;
|
||
this.lastTotal = 0;
|
||
this.lastChar = Buffer.allocUnsafe(nb);
|
||
}
|
||
|
||
StringDecoder.prototype.write = function (buf) {
|
||
if (buf.length === 0) return '';
|
||
var r;
|
||
var i;
|
||
if (this.lastNeed) {
|
||
r = this.fillLast(buf);
|
||
if (r === undefined) return '';
|
||
i = this.lastNeed;
|
||
this.lastNeed = 0;
|
||
} else {
|
||
i = 0;
|
||
}
|
||
if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
|
||
return r || '';
|
||
};
|
||
|
||
StringDecoder.prototype.end = utf8End;
|
||
|
||
// Returns only complete characters in a Buffer
|
||
StringDecoder.prototype.text = utf8Text;
|
||
|
||
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
|
||
StringDecoder.prototype.fillLast = function (buf) {
|
||
if (this.lastNeed <= buf.length) {
|
||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
|
||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||
}
|
||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
|
||
this.lastNeed -= buf.length;
|
||
};
|
||
|
||
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
|
||
// continuation byte. If an invalid byte is detected, -2 is returned.
|
||
function utf8CheckByte(byte) {
|
||
if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
|
||
return byte >> 6 === 0x02 ? -1 : -2;
|
||
}
|
||
|
||
// Checks at most 3 bytes at the end of a Buffer in order to detect an
|
||
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
|
||
// needed to complete the UTF-8 character (if applicable) are returned.
|
||
function utf8CheckIncomplete(self, buf, i) {
|
||
var j = buf.length - 1;
|
||
if (j < i) return 0;
|
||
var nb = utf8CheckByte(buf[j]);
|
||
if (nb >= 0) {
|
||
if (nb > 0) self.lastNeed = nb - 1;
|
||
return nb;
|
||
}
|
||
if (--j < i || nb === -2) return 0;
|
||
nb = utf8CheckByte(buf[j]);
|
||
if (nb >= 0) {
|
||
if (nb > 0) self.lastNeed = nb - 2;
|
||
return nb;
|
||
}
|
||
if (--j < i || nb === -2) return 0;
|
||
nb = utf8CheckByte(buf[j]);
|
||
if (nb >= 0) {
|
||
if (nb > 0) {
|
||
if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
|
||
}
|
||
return nb;
|
||
}
|
||
return 0;
|
||
}
|
||
|
||
// Validates as many continuation bytes for a multi-byte UTF-8 character as
|
||
// needed or are available. If we see a non-continuation byte where we expect
|
||
// one, we "replace" the validated continuation bytes we've seen so far with
|
||
// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
|
||
// behavior. The continuation byte check is included three times in the case
|
||
// where all of the continuation bytes for a character exist in the same buffer.
|
||
// It is also done this way as a slight performance increase instead of using a
|
||
// loop.
|
||
function utf8CheckExtraBytes(self, buf, p) {
|
||
if ((buf[0] & 0xC0) !== 0x80) {
|
||
self.lastNeed = 0;
|
||
return '\ufffd';
|
||
}
|
||
if (self.lastNeed > 1 && buf.length > 1) {
|
||
if ((buf[1] & 0xC0) !== 0x80) {
|
||
self.lastNeed = 1;
|
||
return '\ufffd';
|
||
}
|
||
if (self.lastNeed > 2 && buf.length > 2) {
|
||
if ((buf[2] & 0xC0) !== 0x80) {
|
||
self.lastNeed = 2;
|
||
return '\ufffd';
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
|
||
function utf8FillLast(buf) {
|
||
var p = this.lastTotal - this.lastNeed;
|
||
var r = utf8CheckExtraBytes(this, buf, p);
|
||
if (r !== undefined) return r;
|
||
if (this.lastNeed <= buf.length) {
|
||
buf.copy(this.lastChar, p, 0, this.lastNeed);
|
||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||
}
|
||
buf.copy(this.lastChar, p, 0, buf.length);
|
||
this.lastNeed -= buf.length;
|
||
}
|
||
|
||
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
|
||
// partial character, the character's bytes are buffered until the required
|
||
// number of bytes are available.
|
||
function utf8Text(buf, i) {
|
||
var total = utf8CheckIncomplete(this, buf, i);
|
||
if (!this.lastNeed) return buf.toString('utf8', i);
|
||
this.lastTotal = total;
|
||
var end = buf.length - (total - this.lastNeed);
|
||
buf.copy(this.lastChar, 0, end);
|
||
return buf.toString('utf8', i, end);
|
||
}
|
||
|
||
// For UTF-8, a replacement character is added when ending on a partial
|
||
// character.
|
||
function utf8End(buf) {
|
||
var r = buf && buf.length ? this.write(buf) : '';
|
||
if (this.lastNeed) return r + '\ufffd';
|
||
return r;
|
||
}
|
||
|
||
// UTF-16LE typically needs two bytes per character, but even if we have an even
|
||
// number of bytes available, we need to check if we end on a leading/high
|
||
// surrogate. In that case, we need to wait for the next two bytes in order to
|
||
// decode the last character properly.
|
||
function utf16Text(buf, i) {
|
||
if ((buf.length - i) % 2 === 0) {
|
||
var r = buf.toString('utf16le', i);
|
||
if (r) {
|
||
var c = r.charCodeAt(r.length - 1);
|
||
if (c >= 0xD800 && c <= 0xDBFF) {
|
||
this.lastNeed = 2;
|
||
this.lastTotal = 4;
|
||
this.lastChar[0] = buf[buf.length - 2];
|
||
this.lastChar[1] = buf[buf.length - 1];
|
||
return r.slice(0, -1);
|
||
}
|
||
}
|
||
return r;
|
||
}
|
||
this.lastNeed = 1;
|
||
this.lastTotal = 2;
|
||
this.lastChar[0] = buf[buf.length - 1];
|
||
return buf.toString('utf16le', i, buf.length - 1);
|
||
}
|
||
|
||
// For UTF-16LE we do not explicitly append special replacement characters if we
|
||
// end on a partial character, we simply let v8 handle that.
|
||
function utf16End(buf) {
|
||
var r = buf && buf.length ? this.write(buf) : '';
|
||
if (this.lastNeed) {
|
||
var end = this.lastTotal - this.lastNeed;
|
||
return r + this.lastChar.toString('utf16le', 0, end);
|
||
}
|
||
return r;
|
||
}
|
||
|
||
function base64Text(buf, i) {
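// Note: only complete 3-byte groups can be emitted as base64 directly; the 1-2
// leftover bytes are stashed in lastChar until the next chunk completes them.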
|
||
var n = (buf.length - i) % 3;
if (n === 0) return buf.toString('base64', i);
this.lastNeed = 3 - n;
this.lastTotal = 3;
if (n === 1) {
this.lastChar[0] = buf[buf.length - 1];
} else {
this.lastChar[0] = buf[buf.length - 2];
this.lastChar[1] = buf[buf.length - 1];
}
return buf.toString('base64', i, buf.length - n);
}

function base64End(buf) {
var r = buf && buf.length ? this.write(buf) : '';
if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
return r;
}

// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
function simpleWrite(buf) {
return buf.toString(this.encoding);
}

function simpleEnd(buf) {
return buf && buf.length ? this.write(buf) : '';
}
|
||
},{"safe-buffer":9}],28:[function(require,module,exports){
|
||
(function (global){(function (){
|
||
|
||
/**
|
||
* Module exports.
|
||
*/
|
||
|
||
module.exports = deprecate;
|
||
|
||
/**
|
||
* Mark that a method should not be used.
|
||
* Returns a modified function which warns once by default.
|
||
*
|
||
* If `localStorage.noDeprecation = true` is set, then it is a no-op.
|
||
*
|
||
* If `localStorage.throwDeprecation = true` is set, then deprecated functions
|
||
* will throw an Error when invoked.
|
||
*
|
||
* If `localStorage.traceDeprecation = true` is set, then deprecated functions
|
||
* will invoke `console.trace()` instead of `console.error()`.
|
||
*
|
||
* @param {Function} fn - the function to deprecate
|
||
* @param {String} msg - the string to print to the console when `fn` is invoked
|
||
* @returns {Function} a new "deprecated" version of `fn`
|
||
* @api public
|
||
*/
|
||
|
||
function deprecate (fn, msg) {
|
||
if (config('noDeprecation')) {
|
||
return fn;
|
||
}
|
||
|
||
var warned = false;
|
||
function deprecated() {
|
||
if (!warned) {
|
||
if (config('throwDeprecation')) {
|
||
throw new Error(msg);
|
||
} else if (config('traceDeprecation')) {
|
||
console.trace(msg);
|
||
} else {
|
||
console.warn(msg);
|
||
}
|
||
warned = true;
|
||
}
|
||
return fn.apply(this, arguments);
|
||
}
|
||
|
||
return deprecated;
|
||
}
|
||
|
||
/**
|
||
* Checks `localStorage` for boolean values for the given `name`.
|
||
*
|
||
* @param {String} name
|
||
* @returns {Boolean}
|
||
* @api private
|
||
*/
|
||
|
||
function config (name) {
|
||
// accessing global.localStorage can trigger a DOMException in sandboxed iframes
|
||
try {
|
||
if (!global.localStorage) return false;
|
||
} catch (_) {
|
||
return false;
|
||
}
|
||
var val = global.localStorage[name];
|
||
if (null == val) return false;
|
||
return String(val).toLowerCase() === 'true';
|
||
}
|
||
|
||
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
|
||
|
||
},{}],29:[function(require,module,exports){
|
||
module.exports = {

isArray: function(value) {
if (Array.isArray) {
return Array.isArray(value);
}
// fallback for older browsers like IE 8
return Object.prototype.toString.call( value ) === '[object Array]';
}

};
|
||
|
||
},{}],30:[function(require,module,exports){
|
||
/*jslint node:true */

var xml2js = require('./xml2js');
var xml2json = require('./xml2json');
var js2xml = require('./js2xml');
var json2xml = require('./json2xml');

module.exports = {
xml2js: xml2js,
xml2json: xml2json,
js2xml: js2xml,
json2xml: json2xml
};
|
||
|
||
},{"./js2xml":31,"./json2xml":32,"./xml2js":34,"./xml2json":35}],31:[function(require,module,exports){
|
||
var helper = require('./options-helper');
|
||
var isArray = require('./array-helper').isArray;
|
||
|
||
var currentElement, currentElementName;
|
||
|
||
function validateOptions(userOptions) {
|
||
var options = helper.copyOptions(userOptions);
|
||
helper.ensureFlagExists('ignoreDeclaration', options);
|
||
helper.ensureFlagExists('ignoreInstruction', options);
|
||
helper.ensureFlagExists('ignoreAttributes', options);
|
||
helper.ensureFlagExists('ignoreText', options);
|
||
helper.ensureFlagExists('ignoreComment', options);
|
||
helper.ensureFlagExists('ignoreCdata', options);
|
||
helper.ensureFlagExists('ignoreDoctype', options);
|
||
helper.ensureFlagExists('compact', options);
|
||
helper.ensureFlagExists('indentText', options);
|
||
helper.ensureFlagExists('indentCdata', options);
|
||
helper.ensureFlagExists('indentAttributes', options);
|
||
helper.ensureFlagExists('indentInstruction', options);
|
||
helper.ensureFlagExists('fullTagEmptyElement', options);
|
||
helper.ensureFlagExists('noQuotesForNativeAttributes', options);
|
||
helper.ensureSpacesExists(options);
|
||
if (typeof options.spaces === 'number') {
|
||
options.spaces = Array(options.spaces + 1).join(' ');
|
||
}
|
||
helper.ensureKeyExists('declaration', options);
|
||
helper.ensureKeyExists('instruction', options);
|
||
helper.ensureKeyExists('attributes', options);
|
||
helper.ensureKeyExists('text', options);
|
||
helper.ensureKeyExists('comment', options);
|
||
helper.ensureKeyExists('cdata', options);
|
||
helper.ensureKeyExists('doctype', options);
|
||
helper.ensureKeyExists('type', options);
|
||
helper.ensureKeyExists('name', options);
|
||
helper.ensureKeyExists('elements', options);
|
||
helper.checkFnExists('doctype', options);
|
||
helper.checkFnExists('instruction', options);
|
||
helper.checkFnExists('cdata', options);
|
||
helper.checkFnExists('comment', options);
|
||
helper.checkFnExists('text', options);
|
||
helper.checkFnExists('instructionName', options);
|
||
helper.checkFnExists('elementName', options);
|
||
helper.checkFnExists('attributeName', options);
|
||
helper.checkFnExists('attributeValue', options);
|
||
helper.checkFnExists('attributes', options);
|
||
helper.checkFnExists('fullTagEmptyElement', options);
|
||
return options;
|
||
}
|
||
|
||
function writeIndentation(options, depth, firstLine) {
|
||
return (!firstLine && options.spaces ? '\n' : '') + Array(depth + 1).join(options.spaces);
|
||
}
|
||
|
||
function writeAttributes(attributes, options, depth) {
|
||
if (options.ignoreAttributes) {
|
||
return '';
|
||
}
|
||
if ('attributesFn' in options) {
|
||
attributes = options.attributesFn(attributes, currentElementName, currentElement);
|
||
}
|
||
var key, attr, attrName, quote, result = [];
|
||
for (key in attributes) {
|
||
if (attributes.hasOwnProperty(key) && attributes[key] !== null && attributes[key] !== undefined) {
|
||
quote = options.noQuotesForNativeAttributes && typeof attributes[key] !== 'string' ? '' : '"';
|
||
attr = '' + attributes[key]; // ensure number and boolean are converted to String
|
||
attr = attr.replace(/"/g, '"');
|
||
attrName = 'attributeNameFn' in options ? options.attributeNameFn(key, attr, currentElementName, currentElement) : key;
|
||
result.push((options.spaces && options.indentAttributes? writeIndentation(options, depth+1, false) : ' '));
|
||
result.push(attrName + '=' + quote + ('attributeValueFn' in options ? options.attributeValueFn(attr, key, currentElementName, currentElement) : attr) + quote);
|
||
}
|
||
}
|
||
if (attributes && Object.keys(attributes).length && options.spaces && options.indentAttributes) {
|
||
result.push(writeIndentation(options, depth, false));
|
||
}
|
||
return result.join('');
|
||
}
|
||
|
||
function writeDeclaration(declaration, options, depth) {
|
||
currentElement = declaration;
|
||
currentElementName = 'xml';
|
||
return options.ignoreDeclaration ? '' : '<?' + 'xml' + writeAttributes(declaration[options.attributesKey], options, depth) + '?>';
|
||
}
|
||
|
||
function writeInstruction(instruction, options, depth) {
|
||
if (options.ignoreInstruction) {
|
||
return '';
|
||
}
|
||
var key;
|
||
for (key in instruction) {
|
||
if (instruction.hasOwnProperty(key)) {
|
||
break;
|
||
}
|
||
}
|
||
var instructionName = 'instructionNameFn' in options ? options.instructionNameFn(key, instruction[key], currentElementName, currentElement) : key;
|
||
if (typeof instruction[key] === 'object') {
|
||
currentElement = instruction;
|
||
currentElementName = instructionName;
|
||
return '<?' + instructionName + writeAttributes(instruction[key][options.attributesKey], options, depth) + '?>';
|
||
} else {
|
||
var instructionValue = instruction[key] ? instruction[key] : '';
|
||
if ('instructionFn' in options) instructionValue = options.instructionFn(instructionValue, key, currentElementName, currentElement);
|
||
return '<?' + instructionName + (instructionValue ? ' ' + instructionValue : '') + '?>';
|
||
}
|
||
}
|
||
|
||
function writeComment(comment, options) {
|
||
return options.ignoreComment ? '' : '<!--' + ('commentFn' in options ? options.commentFn(comment, currentElementName, currentElement) : comment) + '-->';
|
||
}
|
||
|
||
function writeCdata(cdata, options) {
|
||
return options.ignoreCdata ? '' : '<![CDATA[' + ('cdataFn' in options ? options.cdataFn(cdata, currentElementName, currentElement) : cdata.replace(']]>', ']]]]><![CDATA[>')) + ']]>';
|
||
}
|
||
|
||
function writeDoctype(doctype, options) {
|
||
return options.ignoreDoctype ? '' : '<!DOCTYPE ' + ('doctypeFn' in options ? options.doctypeFn(doctype, currentElementName, currentElement) : doctype) + '>';
|
||
}
|
||
|
||
function writeText(text, options) {
|
||
if (options.ignoreText) return '';
|
||
text = '' + text; // ensure Number and Boolean are converted to String
|
||
text = text.replace(/&amp;/g, '&'); // desanitize to avoid double sanitization
|
||
text = text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
|
||
return 'textFn' in options ? options.textFn(text, currentElementName, currentElement) : text;
|
||
}
|
||
|
||
function hasContent(element, options) {
|
||
var i;
|
||
if (element.elements && element.elements.length) {
|
||
for (i = 0; i < element.elements.length; ++i) {
|
||
switch (element.elements[i][options.typeKey]) {
|
||
case 'text':
|
||
if (options.indentText) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case 'cdata':
|
||
if (options.indentCdata) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case 'instruction':
|
||
if (options.indentInstruction) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case 'doctype':
|
||
case 'comment':
|
||
case 'element':
|
||
return true;
|
||
default:
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
|
||
function writeElement(element, options, depth) {
|
||
currentElement = element;
|
||
currentElementName = element.name;
|
||
var xml = [], elementName = 'elementNameFn' in options ? options.elementNameFn(element.name, element) : element.name;
|
||
xml.push('<' + elementName);
|
||
if (element[options.attributesKey]) {
|
||
xml.push(writeAttributes(element[options.attributesKey], options, depth));
|
||
}
|
||
var withClosingTag = element[options.elementsKey] && element[options.elementsKey].length || element[options.attributesKey] && element[options.attributesKey]['xml:space'] === 'preserve';
|
||
if (!withClosingTag) {
|
||
if ('fullTagEmptyElementFn' in options) {
|
||
withClosingTag = options.fullTagEmptyElementFn(element.name, element);
|
||
} else {
|
||
withClosingTag = options.fullTagEmptyElement;
|
||
}
|
||
}
|
||
if (withClosingTag) {
|
||
xml.push('>');
|
||
if (element[options.elementsKey] && element[options.elementsKey].length) {
|
||
xml.push(writeElements(element[options.elementsKey], options, depth + 1));
|
||
currentElement = element;
|
||
currentElementName = element.name;
|
||
}
|
||
xml.push(options.spaces && hasContent(element, options) ? '\n' + Array(depth + 1).join(options.spaces) : '');
|
||
xml.push('</' + elementName + '>');
|
||
} else {
|
||
xml.push('/>');
|
||
}
|
||
return xml.join('');
|
||
}
|
||
|
||
function writeElements(elements, options, depth, firstLine) {
|
||
return elements.reduce(function (xml, element) {
|
||
var indent = writeIndentation(options, depth, firstLine && !xml);
|
||
switch (element.type) {
|
||
case 'element': return xml + indent + writeElement(element, options, depth);
|
||
case 'comment': return xml + indent + writeComment(element[options.commentKey], options);
|
||
case 'doctype': return xml + indent + writeDoctype(element[options.doctypeKey], options);
|
||
case 'cdata': return xml + (options.indentCdata ? indent : '') + writeCdata(element[options.cdataKey], options);
|
||
case 'text': return xml + (options.indentText ? indent : '') + writeText(element[options.textKey], options);
|
||
case 'instruction':
|
||
var instruction = {};
|
||
instruction[element[options.nameKey]] = element[options.attributesKey] ? element : element[options.instructionKey];
|
||
return xml + (options.indentInstruction ? indent : '') + writeInstruction(instruction, options, depth);
|
||
}
|
||
}, '');
|
||
}
|
||
|
||
function hasContentCompact(element, options, anyContent) {
|
||
var key;
|
||
for (key in element) {
|
||
if (element.hasOwnProperty(key)) {
|
||
switch (key) {
|
||
case options.parentKey:
|
||
case options.attributesKey:
|
||
break; // skip to next key
|
||
case options.textKey:
|
||
if (options.indentText || anyContent) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case options.cdataKey:
|
||
if (options.indentCdata || anyContent) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case options.instructionKey:
|
||
if (options.indentInstruction || anyContent) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case options.doctypeKey:
|
||
case options.commentKey:
|
||
return true;
|
||
default:
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
|
||
function writeElementCompact(element, name, options, depth, indent) {
|
||
currentElement = element;
|
||
currentElementName = name;
|
||
var elementName = 'elementNameFn' in options ? options.elementNameFn(name, element) : name;
|
||
if (typeof element === 'undefined' || element === null || element === '') {
|
||
return 'fullTagEmptyElementFn' in options && options.fullTagEmptyElementFn(name, element) || options.fullTagEmptyElement ? '<' + elementName + '></' + elementName + '>' : '<' + elementName + '/>';
|
||
}
|
||
var xml = [];
|
||
if (name) {
|
||
xml.push('<' + elementName);
|
||
if (typeof element !== 'object') {
|
||
xml.push('>' + writeText(element,options) + '</' + elementName + '>');
|
||
return xml.join('');
|
||
}
|
||
if (element[options.attributesKey]) {
|
||
xml.push(writeAttributes(element[options.attributesKey], options, depth));
|
||
}
|
||
var withClosingTag = hasContentCompact(element, options, true) || element[options.attributesKey] && element[options.attributesKey]['xml:space'] === 'preserve';
|
||
if (!withClosingTag) {
|
||
if ('fullTagEmptyElementFn' in options) {
|
||
withClosingTag = options.fullTagEmptyElementFn(name, element);
|
||
} else {
|
||
withClosingTag = options.fullTagEmptyElement;
|
||
}
|
||
}
|
||
if (withClosingTag) {
|
||
xml.push('>');
|
||
} else {
|
||
xml.push('/>');
|
||
return xml.join('');
|
||
}
|
||
}
|
||
xml.push(writeElementsCompact(element, options, depth + 1, false));
|
||
currentElement = element;
|
||
currentElementName = name;
|
||
if (name) {
|
||
xml.push((indent ? writeIndentation(options, depth, false) : '') + '</' + elementName + '>');
|
||
}
|
||
return xml.join('');
|
||
}
|
||
|
||
function writeElementsCompact(element, options, depth, firstLine) {
|
||
var i, key, nodes, xml = [];
|
||
for (key in element) {
|
||
if (element.hasOwnProperty(key)) {
|
||
nodes = isArray(element[key]) ? element[key] : [element[key]];
|
||
for (i = 0; i < nodes.length; ++i) {
|
||
switch (key) {
|
||
case options.declarationKey: xml.push(writeDeclaration(nodes[i], options, depth)); break;
|
||
case options.instructionKey: xml.push((options.indentInstruction ? writeIndentation(options, depth, firstLine) : '') + writeInstruction(nodes[i], options, depth)); break;
|
||
case options.attributesKey: case options.parentKey: break; // skip
|
||
case options.textKey: xml.push((options.indentText ? writeIndentation(options, depth, firstLine) : '') + writeText(nodes[i], options)); break;
|
||
case options.cdataKey: xml.push((options.indentCdata ? writeIndentation(options, depth, firstLine) : '') + writeCdata(nodes[i], options)); break;
|
||
case options.doctypeKey: xml.push(writeIndentation(options, depth, firstLine) + writeDoctype(nodes[i], options)); break;
|
||
case options.commentKey: xml.push(writeIndentation(options, depth, firstLine) + writeComment(nodes[i], options)); break;
|
||
default: xml.push(writeIndentation(options, depth, firstLine) + writeElementCompact(nodes[i], key, options, depth, hasContentCompact(nodes[i], options)));
|
||
}
|
||
firstLine = firstLine && !xml.length;
|
||
}
|
||
}
|
||
}
|
||
return xml.join('');
|
||
}
|
||
|
||
module.exports = function (js, options) {
|
||
options = validateOptions(options);
|
||
var xml = [];
|
||
currentElement = js;
|
||
currentElementName = '_root_';
|
||
if (options.compact) {
|
||
xml.push(writeElementsCompact(js, options, 0, true));
|
||
} else {
|
||
if (js[options.declarationKey]) {
|
||
xml.push(writeDeclaration(js[options.declarationKey], options, 0));
|
||
}
|
||
if (js[options.elementsKey] && js[options.elementsKey].length) {
|
||
xml.push(writeElements(js[options.elementsKey], options, 0, !xml.length));
|
||
}
|
||
}
|
||
return xml.join('');
|
||
};
|
||
|
||
},{"./array-helper":29,"./options-helper":33}],32:[function(require,module,exports){
|
||
(function (Buffer){(function (){
|
||
var js2xml = require('./js2xml.js');
|
||
|
||
module.exports = function (json, options) {
|
||
if (json instanceof Buffer) {
|
||
json = json.toString();
|
||
}
|
||
var js = null;
|
||
if (typeof (json) === 'string') {
|
||
try {
|
||
js = JSON.parse(json);
|
||
} catch (e) {
|
||
throw new Error('The JSON structure is invalid');
|
||
}
|
||
} else {
|
||
js = json;
|
||
}
|
||
return js2xml(js, options);
|
||
};
|
||
|
||
}).call(this)}).call(this,require("buffer").Buffer)
|
||
|
||
},{"./js2xml.js":31,"buffer":3}],33:[function(require,module,exports){
|
||
var isArray = require('./array-helper').isArray;
|
||
|
||
module.exports = {
|
||
|
||
copyOptions: function (options) {
|
||
var key, copy = {};
|
||
for (key in options) {
|
||
if (options.hasOwnProperty(key)) {
|
||
copy[key] = options[key];
|
||
}
|
||
}
|
||
return copy;
|
||
},
|
||
|
||
ensureFlagExists: function (item, options) {
|
||
if (!(item in options) || typeof options[item] !== 'boolean') {
|
||
options[item] = false;
|
||
}
|
||
},
|
||
|
||
ensureSpacesExists: function (options) {
|
||
if (!('spaces' in options) || (typeof options.spaces !== 'number' && typeof options.spaces !== 'string')) {
|
||
options.spaces = 0;
|
||
}
|
||
},
|
||
|
||
ensureAlwaysArrayExists: function (options) {
|
||
if (!('alwaysArray' in options) || (typeof options.alwaysArray !== 'boolean' && !isArray(options.alwaysArray))) {
|
||
options.alwaysArray = false;
|
||
}
|
||
},
|
||
|
||
ensureKeyExists: function (key, options) {
|
||
if (!(key + 'Key' in options) || typeof options[key + 'Key'] !== 'string') {
|
||
options[key + 'Key'] = options.compact ? '_' + key : key;
|
||
}
|
||
},
|
||
|
||
checkFnExists: function (key, options) {
|
||
return key + 'Fn' in options;
|
||
}
|
||
|
||
};
|
||
|
||
},{"./array-helper":29}],34:[function(require,module,exports){
|
||
var sax = require('sax');
|
||
var expat /*= require('node-expat');*/ = { on: function () { }, parse: function () { } };
|
||
var helper = require('./options-helper');
|
||
var isArray = require('./array-helper').isArray;
|
||
|
||
var options;
|
||
var pureJsParser = true;
|
||
var currentElement;
|
||
|
||
function validateOptions(userOptions) {
|
||
options = helper.copyOptions(userOptions);
|
||
helper.ensureFlagExists('ignoreDeclaration', options);
|
||
helper.ensureFlagExists('ignoreInstruction', options);
|
||
helper.ensureFlagExists('ignoreAttributes', options);
|
||
helper.ensureFlagExists('ignoreText', options);
|
||
helper.ensureFlagExists('ignoreComment', options);
|
||
helper.ensureFlagExists('ignoreCdata', options);
|
||
helper.ensureFlagExists('ignoreDoctype', options);
|
||
helper.ensureFlagExists('compact', options);
|
||
helper.ensureFlagExists('alwaysChildren', options);
|
||
helper.ensureFlagExists('addParent', options);
|
||
helper.ensureFlagExists('trim', options);
|
||
helper.ensureFlagExists('nativeType', options);
|
||
helper.ensureFlagExists('nativeTypeAttributes', options);
|
||
helper.ensureFlagExists('sanitize', options);
|
||
helper.ensureFlagExists('instructionHasAttributes', options);
|
||
helper.ensureFlagExists('captureSpacesBetweenElements', options);
|
||
helper.ensureAlwaysArrayExists(options);
|
||
helper.ensureKeyExists('declaration', options);
|
||
helper.ensureKeyExists('instruction', options);
|
||
helper.ensureKeyExists('attributes', options);
|
||
helper.ensureKeyExists('text', options);
|
||
helper.ensureKeyExists('comment', options);
|
||
helper.ensureKeyExists('cdata', options);
|
||
helper.ensureKeyExists('doctype', options);
|
||
helper.ensureKeyExists('type', options);
|
||
helper.ensureKeyExists('name', options);
|
||
helper.ensureKeyExists('elements', options);
|
||
helper.ensureKeyExists('parent', options);
|
||
helper.checkFnExists('doctype', options);
|
||
helper.checkFnExists('instruction', options);
|
||
helper.checkFnExists('cdata', options);
|
||
helper.checkFnExists('comment', options);
|
||
helper.checkFnExists('text', options);
|
||
helper.checkFnExists('instructionName', options);
|
||
helper.checkFnExists('elementName', options);
|
||
helper.checkFnExists('attributeName', options);
|
||
helper.checkFnExists('attributeValue', options);
|
||
helper.checkFnExists('attributes', options);
|
||
return options;
|
||
}
|
||
|
||
function nativeType(value) {
|
||
var nValue = Number(value);
|
||
if (!isNaN(nValue)) {
|
||
return nValue;
|
||
}
|
||
var bValue = value.toLowerCase();
|
||
if (bValue === 'true') {
|
||
return true;
|
||
} else if (bValue === 'false') {
|
||
return false;
|
||
}
|
||
return value;
|
||
}
|
||
|
||
function addField(type, value) {
|
||
var key;
|
||
if (options.compact) {
|
||
if (
|
||
!currentElement[options[type + 'Key']] &&
|
||
(isArray(options.alwaysArray) ? options.alwaysArray.indexOf(options[type + 'Key']) !== -1 : options.alwaysArray)
|
||
) {
|
||
currentElement[options[type + 'Key']] = [];
|
||
}
|
||
if (currentElement[options[type + 'Key']] && !isArray(currentElement[options[type + 'Key']])) {
|
||
currentElement[options[type + 'Key']] = [currentElement[options[type + 'Key']]];
|
||
}
|
||
if (type + 'Fn' in options && typeof value === 'string') {
|
||
value = options[type + 'Fn'](value, currentElement);
|
||
}
|
||
if (type === 'instruction' && ('instructionFn' in options || 'instructionNameFn' in options)) {
|
||
for (key in value) {
|
||
if (value.hasOwnProperty(key)) {
|
||
if ('instructionFn' in options) {
|
||
value[key] = options.instructionFn(value[key], key, currentElement);
|
||
} else {
|
||
var temp = value[key];
|
||
delete value[key];
|
||
value[options.instructionNameFn(key, temp, currentElement)] = temp;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
if (isArray(currentElement[options[type + 'Key']])) {
|
||
currentElement[options[type + 'Key']].push(value);
|
||
} else {
|
||
currentElement[options[type + 'Key']] = value;
|
||
}
|
||
} else {
|
||
if (!currentElement[options.elementsKey]) {
|
||
currentElement[options.elementsKey] = [];
|
||
}
|
||
var element = {};
|
||
element[options.typeKey] = type;
|
||
if (type === 'instruction') {
|
||
for (key in value) {
|
||
if (value.hasOwnProperty(key)) {
|
||
break;
|
||
}
|
||
}
|
||
element[options.nameKey] = 'instructionNameFn' in options ? options.instructionNameFn(key, value, currentElement) : key;
|
||
if (options.instructionHasAttributes) {
|
||
element[options.attributesKey] = value[key][options.attributesKey];
|
||
if ('instructionFn' in options) {
|
||
element[options.attributesKey] = options.instructionFn(element[options.attributesKey], key, currentElement);
|
||
}
|
||
} else {
|
||
if ('instructionFn' in options) {
|
||
value[key] = options.instructionFn(value[key], key, currentElement);
|
||
}
|
||
element[options.instructionKey] = value[key];
|
||
}
|
||
} else {
|
||
if (type + 'Fn' in options) {
|
||
value = options[type + 'Fn'](value, currentElement);
|
||
}
|
||
element[options[type + 'Key']] = value;
|
||
}
|
||
if (options.addParent) {
|
||
element[options.parentKey] = currentElement;
|
||
}
|
||
currentElement[options.elementsKey].push(element);
|
||
}
|
||
}
|
||
|
||
function manipulateAttributes(attributes) {
|
||
if ('attributesFn' in options && attributes) {
|
||
attributes = options.attributesFn(attributes, currentElement);
|
||
}
|
||
if ((options.trim || 'attributeValueFn' in options || 'attributeNameFn' in options || options.nativeTypeAttributes) && attributes) {
|
||
var key;
|
||
for (key in attributes) {
|
||
if (attributes.hasOwnProperty(key)) {
|
||
if (options.trim) attributes[key] = attributes[key].trim();
|
||
if (options.nativeTypeAttributes) {
|
||
attributes[key] = nativeType(attributes[key]);
|
||
}
|
||
if ('attributeValueFn' in options) attributes[key] = options.attributeValueFn(attributes[key], key, currentElement);
|
||
if ('attributeNameFn' in options) {
|
||
var temp = attributes[key];
|
||
delete attributes[key];
|
||
attributes[options.attributeNameFn(key, temp, currentElement)] = temp;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
return attributes;
|
||
}
|
||
|
||
function onInstruction(instruction) {
|
||
var attributes = {};
|
||
if (instruction.body && (instruction.name.toLowerCase() === 'xml' || options.instructionHasAttributes)) {
|
||
var attrsRegExp = /([\w:-]+)\s*=\s*(?:"([^"]*)"|'([^']*)'|(\w+))\s*/g;
|
||
var match;
|
||
while ((match = attrsRegExp.exec(instruction.body)) !== null) {
|
||
attributes[match[1]] = match[2] || match[3] || match[4];
|
||
}
|
||
attributes = manipulateAttributes(attributes);
|
||
}
|
||
if (instruction.name.toLowerCase() === 'xml') {
|
||
if (options.ignoreDeclaration) {
|
||
return;
|
||
}
|
||
currentElement[options.declarationKey] = {};
|
||
if (Object.keys(attributes).length) {
|
||
currentElement[options.declarationKey][options.attributesKey] = attributes;
|
||
}
|
||
if (options.addParent) {
|
||
currentElement[options.declarationKey][options.parentKey] = currentElement;
|
||
}
|
||
} else {
|
||
if (options.ignoreInstruction) {
|
||
return;
|
||
}
|
||
if (options.trim) {
|
||
instruction.body = instruction.body.trim();
|
||
}
|
||
var value = {};
|
||
if (options.instructionHasAttributes && Object.keys(attributes).length) {
|
||
value[instruction.name] = {};
|
||
value[instruction.name][options.attributesKey] = attributes;
|
||
} else {
|
||
value[instruction.name] = instruction.body;
|
||
}
|
||
addField('instruction', value);
|
||
}
|
||
}
|
||
|
||
function onStartElement(name, attributes) {
|
||
var element;
|
||
if (typeof name === 'object') {
|
||
attributes = name.attributes;
|
||
name = name.name;
|
||
}
|
||
attributes = manipulateAttributes(attributes);
|
||
if ('elementNameFn' in options) {
|
||
name = options.elementNameFn(name, currentElement);
|
||
}
|
||
if (options.compact) {
|
||
element = {};
|
||
if (!options.ignoreAttributes && attributes && Object.keys(attributes).length) {
|
||
element[options.attributesKey] = {};
|
||
var key;
|
||
for (key in attributes) {
|
||
if (attributes.hasOwnProperty(key)) {
|
||
element[options.attributesKey][key] = attributes[key];
|
||
}
|
||
}
|
||
}
|
||
if (
|
||
!(name in currentElement) &&
|
||
(isArray(options.alwaysArray) ? options.alwaysArray.indexOf(name) !== -1 : options.alwaysArray)
|
||
) {
|
||
currentElement[name] = [];
|
||
}
|
||
if (currentElement[name] && !isArray(currentElement[name])) {
|
||
currentElement[name] = [currentElement[name]];
|
||
}
|
||
if (isArray(currentElement[name])) {
|
||
currentElement[name].push(element);
|
||
} else {
|
||
currentElement[name] = element;
|
||
}
|
||
} else {
|
||
if (!currentElement[options.elementsKey]) {
|
||
currentElement[options.elementsKey] = [];
|
||
}
|
||
element = {};
|
||
element[options.typeKey] = 'element';
|
||
element[options.nameKey] = name;
|
||
if (!options.ignoreAttributes && attributes && Object.keys(attributes).length) {
|
||
element[options.attributesKey] = attributes;
|
||
}
|
||
if (options.alwaysChildren) {
|
||
element[options.elementsKey] = [];
|
||
}
|
||
currentElement[options.elementsKey].push(element);
|
||
}
|
||
element[options.parentKey] = currentElement; // will be deleted in onEndElement() if !options.addParent
|
||
currentElement = element;
|
||
}
|
||
|
||
function onText(text) {
|
||
if (options.ignoreText) {
|
||
return;
|
||
}
|
||
if (!text.trim() && !options.captureSpacesBetweenElements) {
|
||
return;
|
||
}
|
||
if (options.trim) {
|
||
text = text.trim();
|
||
}
|
||
if (options.nativeType) {
|
||
text = nativeType(text);
|
||
}
|
||
if (options.sanitize) {
|
||
text = text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
|
||
}
|
||
addField('text', text);
|
||
}
|
||
|
||
function onComment(comment) {
|
||
if (options.ignoreComment) {
|
||
return;
|
||
}
|
||
if (options.trim) {
|
||
comment = comment.trim();
|
||
}
|
||
addField('comment', comment);
|
||
}
|
||
|
||
function onEndElement(name) {
|
||
var parentElement = currentElement[options.parentKey];
|
||
if (!options.addParent) {
|
||
delete currentElement[options.parentKey];
|
||
}
|
||
currentElement = parentElement;
|
||
}
|
||
|
||
function onCdata(cdata) {
|
||
if (options.ignoreCdata) {
|
||
return;
|
||
}
|
||
if (options.trim) {
|
||
cdata = cdata.trim();
|
||
}
|
||
addField('cdata', cdata);
|
||
}
|
||
|
||
function onDoctype(doctype) {
|
||
if (options.ignoreDoctype) {
|
||
return;
|
||
}
|
||
doctype = doctype.replace(/^ /, '');
|
||
if (options.trim) {
|
||
doctype = doctype.trim();
|
||
}
|
||
addField('doctype', doctype);
|
||
}
|
||
|
||
function onError(error) {
|
||
error.note = error; //console.error(error);
|
||
}
|
||
|
||
module.exports = function (xml, userOptions) {
|
||
|
||
var parser = pureJsParser ? sax.parser(true, {}) : new expat.Parser('UTF-8');
|
||
var result = {};
|
||
currentElement = result;
|
||
|
||
options = validateOptions(userOptions);
|
||
|
||
if (pureJsParser) {
|
||
parser.opt = {strictEntities: true};
|
||
parser.onopentag = onStartElement;
|
||
parser.ontext = onText;
|
||
parser.oncomment = onComment;
|
||
parser.onclosetag = onEndElement;
|
||
parser.onerror = onError;
|
||
parser.oncdata = onCdata;
|
||
parser.ondoctype = onDoctype;
|
||
parser.onprocessinginstruction = onInstruction;
|
||
} else {
|
||
parser.on('startElement', onStartElement);
|
||
parser.on('text', onText);
|
||
parser.on('comment', onComment);
|
||
parser.on('endElement', onEndElement);
|
||
parser.on('error', onError);
|
||
//parser.on('startCdata', onStartCdata);
|
||
//parser.on('endCdata', onEndCdata);
|
||
//parser.on('entityDecl', onEntityDecl);
|
||
}
|
||
|
||
if (pureJsParser) {
|
||
parser.write(xml).close();
|
||
} else {
|
||
if (!parser.parse(xml)) {
|
||
throw new Error('XML parsing error: ' + parser.getError());
|
||
}
|
||
}
|
||
|
||
if (result[options.elementsKey]) {
|
||
var temp = result[options.elementsKey];
|
||
delete result[options.elementsKey];
|
||
result[options.elementsKey] = temp;
|
||
delete result.text;
|
||
}
|
||
|
||
return result;
|
||
|
||
};
|
||
|
||
},{"./array-helper":29,"./options-helper":33,"sax":10}],35:[function(require,module,exports){
|
||
var helper = require('./options-helper');
|
||
var xml2js = require('./xml2js');
|
||
|
||
function validateOptions (userOptions) {
|
||
var options = helper.copyOptions(userOptions);
|
||
helper.ensureSpacesExists(options);
|
||
return options;
|
||
}
|
||
|
||
module.exports = function(xml, userOptions) {
|
||
var options, js, json, parentKey;
|
||
options = validateOptions(userOptions);
|
||
js = xml2js(xml, options);
|
||
parentKey = 'compact' in options && options.compact ? '_parent' : 'parent';
|
||
// parentKey = options.compact ? '_parent' : 'parent'; // consider this
|
||
if ('addParent' in options && options.addParent) {
|
||
json = JSON.stringify(js, function (k, v) { return k === parentKey? '_' : v; }, options.spaces);
|
||
} else {
|
||
json = JSON.stringify(js, null, options.spaces);
|
||
}
|
||
return json.replace(/\u2028/g, '\\u2028').replace(/\u2029/g, '\\u2029');
|
||
};
|
||
|
||
},{"./options-helper":33,"./xml2js":34}],36:[function(require,module,exports){
|
||
// color value ranges are all 0 to 1
|
||
|
||
// input: hue,saturation,lightness in [0,1] - output: red,green,blue in [0,1]
|
||
var Color, component_names, hsl2rgb;
|
||
|
||
hsl2rgb = function(hue, saturation, lightness) {
|
||
var a, f;
|
||
a = saturation * Math.min(lightness, 1 - lightness);
|
||
f = function(n, k = (n + hue * 12) % 12) {
|
||
return lightness - a * Math.max(Math.min(k - 3, 9 - k, 1), -1);
|
||
};
|
||
return [f(0), f(8), f(4)].map(function(component) {
|
||
return component;
|
||
});
|
||
};
|
||
|
||
component_names = ["red", "green", "blue", "hue", "saturation", "lightness", "value", "cyan", "magenta", "yellow", "key", "alpha", "x", "y", "z", "l", "a", "b"];
|
||
|
||
module.exports = Color = class Color {
|
||
constructor(options) {
|
||
var c, component_name, cyan, i, j, key, l, len, len1, len2, magenta, powed, ref, ref1, reject_args, rgb, white_D50, x, xyz, y, yellow, z;
|
||
// @TODO: don't assign all of {@red, @green, @blue, @hue, @saturation, @value, @lightness} right away
|
||
// only assign the properties that are used
|
||
({red: this.red, green: this.green, blue: this.blue, hue: this.hue, saturation: this.saturation, value: this.value, lightness: this.lightness, cyan, magenta, yellow, key, alpha: this.alpha, name: this.name} = options);
|
||
for (i = 0, len = component_names.length; i < len; i++) {
|
||
component_name = component_names[i];
|
||
if (!(options[component_name] != null)) {
|
||
continue;
|
||
}
|
||
if ((!isFinite(options[component_name])) || (typeof options[component_name] !== "number")) {
|
||
throw new TypeError(`Color component option ${component_name} is not a finite number: ${JSON.stringify(options[component_name])}`);
|
||
}
|
||
if (options[component_name] < 0 || options[component_name] > 1) {
|
||
throw new TypeError(`Color component option ${component_name} outside range of [0,1]: ${options[component_name]}`);
|
||
}
|
||
}
|
||
reject_args = function() {
|
||
throw new TypeError(`Color constructor must be called with {red,green,blue} or {hue,saturation,value} or {hue,saturation,lightness} or {cyan,magenta,yellow,key} or {x,y,z} or {l,a,b}, ${(function() {
|
||
try {
|
||
return `got ${JSON.stringify(options)}`;
|
||
} catch (error) {
|
||
return "got something that couldn't be displayed with JSON.stringify for this error message";
|
||
}
|
||
})()}`);
|
||
};
|
||
if ((this.red != null) && (this.green != null) && (this.blue != null)) {
|
||
|
||
// Red Green Blue
|
||
// (no conversions needed here)
|
||
} else if ((this.hue != null) && (this.saturation != null)) {
|
||
// Cylindrical Color Space
|
||
if (this.value != null) {
|
||
// Hue Saturation Value
|
||
this.lightness = (2 - this.saturation) * this.value / 2;
|
||
this.saturation = this.saturation * this.value / (this.lightness < 0.5 ? this.lightness * 2 : 2 - this.lightness * 2);
|
||
if (isNaN(this.saturation)) {
|
||
this.saturation = 0;
|
||
}
|
||
} else if (this.lightness != null) {
|
||
|
||
// Hue Saturation Lightness
|
||
// (no conversions needed here)
|
||
} else if (options.brightness != null) {
|
||
throw new TypeError("{hue, saturation, brightness} not supported. Use {hue, saturation, value} instead for an equivalent color space");
|
||
} else {
|
||
reject_args();
|
||
}
|
||
[this.red, this.green, this.blue] = hsl2rgb(this.hue, this.saturation, this.lightness);
|
||
} else if ((cyan != null) && (magenta != null) && (yellow != null) && (key != null)) {
|
||
// Cyan Magenta Yellow blacK
|
||
throw new Error("CMYK color space is not currently supported");
|
||
this.red = 1 - Math.min(1, cyan * (1 - key) + key);
|
||
this.green = 1 - Math.min(1, magenta * (1 - key) + key);
|
||
this.blue = 1 - Math.min(1, yellow * (1 - key) + key);
|
||
} else {
|
||
// TODO: rename l -> lightness?
|
||
// a/b -> aChroma/bChroma? aChrominance/bChrominance??
|
||
if ((options.l != null) && (options.a != null) && (options.b != null)) {
|
||
throw new Error("L*a*b* color space is not currently supported");
|
||
white_D50 = {
|
||
x: 96.422,
|
||
y: 100.000,
|
||
z: 82.521
|
||
};
|
||
// white_D65 =
|
||
// x: 95.047
|
||
// y: 100.000
|
||
// z: 108.883
|
||
options.a -= 1 / 2;
|
||
options.b -= 1 / 2;
|
||
// TODO: Get this actually working, using Information and Math instead of Fiddling Around
|
||
// It would be nice if I could find some XYZ palettes,
|
||
// since the LAB handling depends on the XYZ handling.
|
||
options.l = Math.pow(options.l, 2); // messing around
|
||
options.l *= 15; // messing around
|
||
options.a *= 80; // messing around
|
||
options.b *= 80; // messing around
|
||
xyz = {
|
||
y: (options.l + 16) / 116
|
||
};
|
||
xyz.x = options.a / 500 + xyz.y;
|
||
xyz.z = xyz.y - options.b / 200;
|
||
ref = "xyz";
|
||
for (j = 0, len1 = ref.length; j < len1; j++) {
|
||
c = ref[j];
|
||
powed = Math.pow(xyz[c], 3);
|
||
if (powed > 0.008856) {
|
||
xyz[c] = powed;
|
||
} else {
|
||
xyz[c] = (xyz[c] - 16 / 116) / 7.787;
|
||
}
|
||
// set {x, y, z} options for fallthrough
|
||
options[c] = xyz[c] * white_D50[c];
|
||
}
|
||
}
|
||
// fallthrough
|
||
if ((options.x != null) && (options.y != null) && (options.z != null)) {
|
||
throw new Error("XYZ color space is not currently supported");
|
||
({x, y, z} = options);
|
||
rgb = {
|
||
r: x * 3.2406 + y * -1.5372 + z * -0.4986,
|
||
g: x * -0.9689 + y * 1.8758 + z * 0.0415,
|
||
b: x * 0.0557 + y * -0.2040 + z * 1.0570
|
||
};
|
||
ref1 = "rgb";
|
||
|
||
// r = 3.2404542*x - 1.5371385*y - 0.4985314*z
|
||
// g = -0.9692660*x + 1.8760108*y + 0.0415560*z
|
||
// b = 0.0556434*x - 0.2040259*y + 1.0572252*z
|
||
for (l = 0, len2 = ref1.length; l < len2; l++) {
|
||
c = ref1[l];
|
||
if (rgb[c] < 0) {
|
||
rgb[c] = 0;
|
||
}
|
||
if (rgb[c] > 0.0031308) {
|
||
rgb[c] = 1.055 * Math.pow(rgb[c], 1 / 2.4) - 0.055;
|
||
} else {
|
||
rgb[c] *= 12.92;
|
||
}
|
||
}
|
||
this.red = rgb.r;
|
||
this.green = rgb.g;
|
||
this.blue = rgb.b;
|
||
} else {
|
||
reject_args();
|
||
}
|
||
}
|
||
}
|
||
|
||
toString() {
|
||
if (this.hue != null) {
|
||
// Hue Saturation Lightness
|
||
if (this.alpha != null) {
|
||
return `hsla(${this.hue * 360}, ${this.saturation * 100}%, ${this.lightness * 100}%, ${this.alpha})`;
|
||
} else {
|
||
return `hsl(${this.hue * 360}, ${this.saturation * 100}%, ${this.lightness * 100}%)`;
|
||
}
|
||
} else if (this.red != null) {
|
||
// Red Green Blue
|
||
if (this.alpha != null) {
|
||
return `rgba(${this.red * 255}, ${this.green * 255}, ${this.blue * 255}, ${this.alpha})`;
|
||
} else {
|
||
return `rgb(${this.red * 255}, ${this.green * 255}, ${this.blue * 255})`;
|
||
}
|
||
}
|
||
}
|
||
|
||
static is(colorA, colorB, epsilon = 0.0001) {
|
||
var ref, ref1;
|
||
return Math.abs(colorA.red - colorB.red) < epsilon && Math.abs(colorA.green - colorB.green) < epsilon && Math.abs(colorA.blue - colorB.blue) < epsilon && Math.abs(((ref = colorA.alpha) != null ? ref : 1) - ((ref1 = colorB.alpha) != null ? ref1 : 1)) < epsilon;
|
||
}
|
||
|
||
};
|
||
|
||
|
||
},{}],37:[function(require,module,exports){
|
||
var Color, Palette, component_names;
|
||
|
||
Color = require("./Color");
|
||
|
||
component_names = ["r", "g", "b", "h", "s", "l", "v", "x", "y", "z", "a", "b", "c", "m", "y", "k", "red", "green", "blue", "hue", "saturation", "lightness", "value", "cyan", "magenta", "yellow", "key", "alpha"];
|
||
|
||
module.exports = Palette = class Palette extends Array {
|
||
constructor(...args) {
|
||
super(...args);
|
||
this.name = void 0;
|
||
this.description = void 0;
|
||
this.numberOfColumns = void 0;
|
||
this.geometrySpecifiedByFile = void 0;
|
||
}
|
||
|
||
add(o) {
|
||
var component_name, i, len, new_color;
|
||
for (i = 0, len = component_names.length; i < len; i++) {
|
||
component_name = component_names[i];
|
||
if (!(o[component_name] != null)) {
|
||
continue;
|
||
}
|
||
if ((!isFinite(o[component_name])) || (typeof o[component_name] !== "number")) {
|
||
throw new TypeError(`palette.add() component option ${component_name} is not a finite number: ${JSON.stringify(o[component_name])}`);
|
||
}
|
||
if (o[component_name] < 0 || o[component_name] > 1) {
|
||
throw new TypeError(`palette.add() component option ${component_name} outside range of [0,1]: ${o[component_name]}`);
|
||
}
|
||
}
|
||
new_color = o instanceof Color ? o : new Color(o);
|
||
return this.push(new_color);
|
||
}
|
||
|
||
};
|
||
|
||
/*
|
||
guess_dimensions: ->
|
||
* TODO: get this working properly and enable
|
||
|
||
len = @length
|
||
candidate_dimensions = []
|
||
for numberOfColumns in [0..len]
|
||
n_rows = len / numberOfColumns
|
||
if n_rows is Math.round n_rows
|
||
candidate_dimensions.push [n_rows, numberOfColumns]
|
||
|
||
squarest = [0, 3495093]
|
||
for cd in candidate_dimensions
|
||
if Math.abs(cd[0] - cd[1]) < Math.abs(squarest[0] - squarest[1])
|
||
squarest = cd
|
||
|
||
@numberOfColumns = squarest[1]
|
||
*/
|
||
|
||
|
||
},{"./Color":36}],38:[function(require,module,exports){
|
||
var MAX_UINT16, MAX_UINT32, Palette, PhotoshopColorSpace, get_utf_16_string, jDataView;
|
||
|
||
jDataView = require("jdataview");
|
||
|
||
Palette = require("../Palette");
|
||
|
||
MAX_UINT16 = 2 ** 16 - 1;
|
||
|
||
MAX_UINT32 = 2 ** 32 - 1;
|
||
|
||
PhotoshopColorSpace = Object.freeze({
|
||
RGB: 0,
|
||
HSB: 1, // also known as HSV
|
||
CMYK: 2,
|
||
PANTONE: 3, // brand name
|
||
FOCOLTONE: 4, // brand name
|
||
TRUMATCH: 5, // brand name
|
||
TOYO: 6, // brand name
|
||
LAB: 7, // CIELAB D50
|
||
GRAYSCALE: 8,
|
||
WIDE_CMYK: 9,
|
||
HKS: 10, // brand name
|
||
DIC: 11, // brand name
|
||
TOTAL_INK: 12, // brand name
|
||
MONITOR_RGB: 13,
|
||
DUOTONE: 14,
|
||
OPACITY: 15,
|
||
WEB: 16,
|
||
GRAY_FLOAT: 17,
|
||
RGB_FLOAT: 18,
|
||
OPACITY_FLOAT: 19,
|
||
0: "RGB",
|
||
1: "HSB", // also known as HSV
|
||
2: "CMYK",
|
||
3: "PANTONE", // brand name
|
||
4: "FOCOLTONE", // brand name
|
||
5: "TRUMATCH", // brand name
|
||
6: "TOYO", // brand name
|
||
7: "LAB", // CIELAB D50
|
||
8: "GRAYSCALE",
|
||
9: "WIDE_CMYK",
|
||
10: "HKS", // brand name
|
||
11: "DIC", // brand name
|
||
12: "TOTAL_INK", // brand name
|
||
13: "MONITOR_RGB",
|
||
14: "DUOTONE",
|
||
15: "OPACITY",
|
||
16: "WEB",
|
||
17: "GRAY_FLOAT",
|
||
18: "RGB_FLOAT",
|
||
19: "OPACITY_FLOAT"
|
||
});
|
||
|
||
get_utf_16_string = function(view, length, including_terminator) {
|
||
var i, ref, string;
|
||
if (including_terminator) {
|
||
length -= 1;
|
||
}
|
||
string = "";
|
||
for (i = 0, ref = length; (0 <= ref ? i < ref : i > ref); 0 <= ref ? i++ : i--) {
|
||
string += String.fromCharCode(view.getUint16());
|
||
}
|
||
if (including_terminator) {
|
||
view.getUint16(); // should be 0x0000
|
||
}
|
||
return string;
|
||
};
|
||
|
||
module.exports.read_adobe_color_swatch = function({data}) {
|
||
var aco_v1_version, aco_v2_colors_offset, aco_v2_number_of_colors, aco_v2_offset, aco_v2_version, header_size, i, j, number_of_colors, palette, read_color, ref, ref1, view;
|
||
// ACO (Adobe Color Swatch)
|
||
palette = new Palette();
|
||
view = new jDataView(data);
|
||
read_color = function(aco_v2) {
|
||
var color_space, length_including_terminator, name, w, x, y, z;
|
||
color_space = view.getUint16();
|
||
w = view.getUint16() / MAX_UINT16;
|
||
x = view.getUint16() / MAX_UINT16;
|
||
y = view.getUint16() / MAX_UINT16;
|
||
z = view.getUint16() / MAX_UINT16;
|
||
if (aco_v2) {
|
||
view.getUint16(); // should be 0x0000
|
||
length_including_terminator = view.getUint16();
|
||
name = get_utf_16_string(view, length_including_terminator, true);
|
||
} else {
|
||
name = void 0;
|
||
}
|
||
switch (color_space) {
|
||
case PhotoshopColorSpace.RGB:
|
||
return palette.add({
|
||
red: w,
|
||
green: x,
|
||
blue: y,
|
||
name: name
|
||
});
|
||
case PhotoshopColorSpace.HSB:
|
||
return palette.add({
|
||
hue: w,
|
||
saturation: x,
|
||
value: y,
|
||
name: name
|
||
});
|
||
case PhotoshopColorSpace.CMYK:
|
||
case PhotoshopColorSpace.WIDE_CMYK:
|
||
return palette.add({
|
||
cyan: w,
|
||
magenta: x,
|
||
yellow: y,
|
||
key: z,
|
||
name: name
|
||
});
|
||
case PhotoshopColorSpace.LAB:
|
||
return palette.add({
|
||
l: w,
|
||
a: x,
|
||
b: y,
|
||
name: name
|
||
});
|
||
case PhotoshopColorSpace.GRAYSCALE:
|
||
return palette.add({
|
||
red: w,
|
||
green: w,
|
||
blue: w,
|
||
name: name
|
||
});
|
||
}
|
||
};
|
||
aco_v1_version = view.getUint16();
|
||
number_of_colors = view.getUint16();
|
||
if (aco_v1_version !== 1) {
|
||
throw new Error("Not an Adobe Color Swatch file");
|
||
}
|
||
header_size = 4; // ACO v1 or v2 header, same size
|
||
aco_v2_offset = header_size + number_of_colors * (5 * 2);
|
||
aco_v2_colors_offset = aco_v2_offset + header_size;
|
||
if (view.byteLength <= aco_v2_offset) {
|
||
// ACO v1 only file
|
||
for (i = 0, ref = number_of_colors; (0 <= ref ? i < ref : i > ref); 0 <= ref ? i++ : i--) {
|
||
read_color(false);
|
||
}
|
||
return palette;
|
||
}
|
||
view.seek(aco_v2_offset);
|
||
aco_v2_version = view.getUint16();
|
||
aco_v2_number_of_colors = view.getUint16();
|
||
// view.seek(aco_v2_colors_offset)
|
||
if (aco_v2_version !== 2) {
|
||
throw new Error("Not an Adobe Color Swatch file v2");
|
||
}
|
||
if (aco_v2_number_of_colors !== number_of_colors) {
|
||
throw new Error("Number of colors mismatch between ACO v1 and v2 sections");
|
||
}
|
||
for (j = 0, ref1 = number_of_colors; (0 <= ref1 ? j < ref1 : j > ref1); 0 <= ref1 ? j++ : j--) {
|
||
read_color(true);
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write_adobe_color_swatch = function(palette) {
|
||
var aco_v1_colors, aco_v2_colors, array_buffer, color, file_size, file_view, i, j, len, len1, write_color;
|
||
// ACO (Adobe Color Swatch)
|
||
write_color = function(color, aco_v2) {
|
||
var char, color_space, color_view, component, components, i, j, len, len1, name, ref, size;
|
||
name = (ref = color.name) != null ? ref : color.toString();
|
||
color_space = PhotoshopColorSpace.RGB;
|
||
components = [
|
||
color.red,
|
||
color.green,
|
||
color.blue,
|
||
0 // always 4 long
|
||
];
|
||
size = 2 + components.length * 2; // color space // components
|
||
if (aco_v2) {
|
||
size += 2 + 2 + (name.length + 1) * 2; // padding/reserved // name length // name + terminator
|
||
}
|
||
color_view = new jDataView(size);
|
||
color_view.writeUint16(color_space);
|
||
for (i = 0, len = components.length; i < len; i++) {
|
||
component = components[i];
|
||
color_view.writeUint16(component * MAX_UINT16);
|
||
}
|
||
if (aco_v2) {
|
||
color_view.writeUint16(0); // padding/reserved
|
||
color_view.writeUint16(name.length + 1);
|
||
for (j = 0, len1 = name.length; j < len1; j++) {
|
||
char = name[j];
|
||
color_view.writeUint16(char.charCodeAt(0));
|
||
}
|
||
color_view.writeUint16(0); // terminator
|
||
}
|
||
return color_view.buffer;
|
||
};
|
||
aco_v1_colors = (function() {
|
||
var i, len, results;
|
||
results = [];
|
||
for (i = 0, len = palette.length; i < len; i++) {
|
||
color = palette[i];
|
||
results.push(write_color(color, false));
|
||
}
|
||
return results;
|
||
})();
|
||
aco_v2_colors = (function() {
|
||
var i, len, results;
|
||
results = [];
|
||
for (i = 0, len = palette.length; i < len; i++) {
|
||
color = palette[i];
|
||
results.push(write_color(color, true));
|
||
}
|
||
return results;
|
||
})();
|
||
// aco v1
|
||
file_size = 2 + 2 + aco_v1_colors.reduce((function(size_sum, array_buffer) { // version number // number of colors
|
||
return size_sum + array_buffer.byteLength;
|
||
// # aco v2
|
||
}), 0) + 2 + 2 + aco_v2_colors.reduce((function(size_sum, array_buffer) { // version number // number of colors
|
||
return size_sum + array_buffer.byteLength;
|
||
}), 0);
|
||
file_view = new jDataView(file_size);
|
||
// aco v1
|
||
file_view.writeUint16(1); // version number for aco v1 section
|
||
file_view.writeUint16(palette.length); // number of colors
|
||
for (i = 0, len = aco_v1_colors.length; i < len; i++) {
|
||
array_buffer = aco_v1_colors[i];
|
||
file_view.writeBytes(new Uint8Array(array_buffer));
|
||
}
|
||
// aco v2
|
||
file_view.writeUint16(2); // version number for aco v2 section
|
||
file_view.writeUint16(palette.length); // number of colors
|
||
for (j = 0, len1 = aco_v2_colors.length; j < len1; j++) {
|
||
array_buffer = aco_v2_colors[j];
|
||
file_view.writeBytes(new Uint8Array(array_buffer));
|
||
}
|
||
return file_view.buffer;
|
||
};
|
||
|
||
module.exports.read_adobe_swatch_exchange = function({data}) {
|
||
var BLOCK_TYPE_COLOR, BLOCK_TYPE_GROUP_END, BLOCK_TYPE_GROUP_START, COLOR_MODE_GLOBAL, COLOR_MODE_NORMAL, COLOR_MODE_SPOT, COLOR_SPACE_CMYK, COLOR_SPACE_GRAYSCALE, COLOR_SPACE_RGB, block_end_pos, block_length, block_type, color_mode, color_space, gray, i, name, name_length_including_terminator, number_of_blocks, palette, ref, version, view, within_groups;
|
||
// ASE (Adobe Swatch Exchange)
|
||
palette = new Palette();
|
||
view = new jDataView(data);
|
||
if (view.getString(4) !== "ASEF") {
|
||
throw new Error("Not an Adobe Swatch Exchange file");
|
||
}
|
||
version = view.getUint32();
|
||
// if version isnt 1
|
||
// throw new Error "Unknown Adobe Swatch Exchange format version #{version}"
|
||
number_of_blocks = view.getUint32();
|
||
BLOCK_TYPE_GROUP_START = 0xc001;
|
||
BLOCK_TYPE_GROUP_END = 0xc002;
|
||
BLOCK_TYPE_COLOR = 0x0001;
|
||
COLOR_SPACE_CMYK = "CMYK";
|
||
COLOR_SPACE_RGB = "RGB ";
|
||
COLOR_SPACE_GRAYSCALE = "GRAY";
|
||
COLOR_MODE_GLOBAL = 0;
|
||
COLOR_MODE_SPOT = 1;
|
||
COLOR_MODE_NORMAL = 2;
|
||
within_groups = [];
|
||
for (i = 0, ref = number_of_blocks; (0 <= ref ? i < ref : i > ref); 0 <= ref ? i++ : i--) {
|
||
block_type = view.getUint16();
|
||
block_length = view.getUint32();
|
||
block_end_pos = view.tell() + block_length;
|
||
switch (block_type) {
|
||
case BLOCK_TYPE_GROUP_START:
|
||
name_length_including_terminator = view.getUint16();
|
||
name = get_utf_16_string(view, name_length_including_terminator, true);
|
||
within_groups.push({name});
|
||
break;
|
||
case BLOCK_TYPE_GROUP_END:
|
||
within_groups.pop();
|
||
break;
|
||
case BLOCK_TYPE_COLOR:
|
||
name_length_including_terminator = view.getUint16();
|
||
name = get_utf_16_string(view, name_length_including_terminator, true);
|
||
color_space = view.getString(4);
|
||
switch (color_space) {
|
||
case COLOR_SPACE_CMYK:
|
||
palette.add({
|
||
cyan: view.getFloat32(),
|
||
magenta: view.getFloat32(),
|
||
yellow: view.getFloat32(),
|
||
key: view.getFloat32(),
|
||
name: name
|
||
});
|
||
break;
|
||
case COLOR_SPACE_RGB:
|
||
palette.add({
|
||
red: view.getFloat32(),
|
||
green: view.getFloat32(),
|
||
blue: view.getFloat32(),
|
||
name: name
|
||
});
|
||
break;
|
||
case COLOR_SPACE_GRAYSCALE:
|
||
gray = view.getFloat32();
|
||
palette.add({
|
||
red: gray,
|
||
green: gray,
|
||
blue: gray,
|
||
name: name
|
||
});
|
||
}
|
||
color_mode = view.getUint16();
|
||
}
|
||
view.seek(block_end_pos);
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write_adobe_swatch_exchange = function(palette) {
|
||
var BLOCK_TYPE_COLOR, BLOCK_TYPE_GROUP_END, BLOCK_TYPE_GROUP_START, COLOR_MODE_GLOBAL, COLOR_MODE_NORMAL, COLOR_MODE_SPOT, COLOR_SPACE_CMYK, COLOR_SPACE_GRAYSCALE, COLOR_SPACE_RGB, block, block_size, block_type, block_view, blocks, body_size, char, color, file_size, i, j, k, len, len1, len2, name, ref, size_of_all_blocks, version, view;
|
||
// ASE (Adobe Swatch Exchange)
|
||
|
||
// TODO: DRY
|
||
BLOCK_TYPE_GROUP_START = 0xc001;
|
||
BLOCK_TYPE_GROUP_END = 0xc002;
|
||
BLOCK_TYPE_COLOR = 0x0001;
|
||
COLOR_SPACE_CMYK = "CMYK";
|
||
COLOR_SPACE_RGB = "RGB ";
|
||
COLOR_SPACE_GRAYSCALE = "GRAY";
|
||
COLOR_MODE_GLOBAL = 0;
|
||
COLOR_MODE_SPOT = 1;
|
||
COLOR_MODE_NORMAL = 2;
|
||
blocks = [];
|
||
size_of_all_blocks = 0;
|
||
for (i = 0, len = palette.length; i < len; i++) {
|
||
color = palette[i];
|
||
name = (ref = color.name) != null ? ref : color.toString();
|
||
block_type = BLOCK_TYPE_COLOR;
|
||
body_size = 2 + (name.length + 1) * 2 + 4 + (3 * 4) + 2; // name length + + + // name (zero codepoint terminated and 2 bytes per codepoint) // color space ID // color components (3 float32 values) // color type
|
||
block_size = 2 + 4 + body_size; // block type // body size // body
|
||
block_view = jDataView(block_size);
|
||
block_view.writeUint16(block_type);
|
||
block_view.writeUint32(body_size);
|
||
block_view.writeUint16(name.length + 1);
|
||
for (j = 0, len1 = name.length; j < len1; j++) {
|
||
char = name[j];
|
||
block_view.writeUint16(char.charCodeAt(0));
|
||
}
|
||
block_view.writeUint16(0); // terminator
|
||
block_view.writeString(COLOR_SPACE_RGB);
|
||
block_view.writeFloat32(color.red);
|
||
block_view.writeFloat32(color.green);
|
||
block_view.writeFloat32(color.blue);
|
||
block_view.writeUint16(COLOR_MODE_GLOBAL); // TODO: which to use?
|
||
blocks.push(block_view.buffer);
|
||
size_of_all_blocks += block_size;
|
||
}
|
||
file_size = 4 + 4 + 4 + size_of_all_blocks; // magic number ("ASEF") // version number // number of blocks
|
||
view = new jDataView(file_size);
|
||
view.writeString("ASEF");
|
||
version = 1;
|
||
view.writeUint32(version);
|
||
view.writeUint32(blocks.length);
|
||
for (k = 0, len2 = blocks.length; k < len2; k++) {
|
||
block = blocks[k];
|
||
view.writeBytes(new Uint8Array(block));
|
||
}
|
||
return view.buffer;
|
||
};
|
||
|
||
module.exports.read_adobe_color_book = function({data}) {
|
||
var add, bad, book_description, book_id, book_title, color_code, color_count, color_name, color_name_prefix, color_name_suffix, color_space, extract_value, i, page_selector_offset, page_size, palette, pos, ref, sig, ver, view;
|
||
// ACB (Adobe Color Book)
|
||
|
||
// References:
|
||
// https://magnetiq.ca/pages/acb-spec/
|
||
// https://github.com/jacobbubu/acb/blob/177e3acc9549d6f7802f9d039410f218942b1610/decoder.coffee
|
||
palette = new Palette();
|
||
view = new jDataView(data);
|
||
sig = view.getString(4);
|
||
if (sig !== "8BCB") {
|
||
throw new Error("Not an Adobe Color Book");
|
||
}
|
||
ver = view.getUint16();
|
||
if (ver !== 1) {
|
||
throw new Error(`Unknown Adobe Color Book version: ${ver}?`);
|
||
}
|
||
extract_value = function(str) {
|
||
var value;
|
||
// remove wrapper double quote
|
||
value = str.replace(/^"(.*)"$/, '$1');
|
||
// e.g. $$$/acb/Pantone/ProcessYellow=Process Yellow CP
|
||
if (value.startsWith('$$$')) {
|
||
value = value.split('=')[1];
|
||
}
|
||
value = value.replace('^R', '®');
|
||
value = value.replace('^C', '©');
|
||
return value;
|
||
};
|
||
book_id = view.getUint16();
|
||
book_title = extract_value(get_utf_16_string(view, view.getUint32(), false));
|
||
color_name_prefix = extract_value(get_utf_16_string(view, view.getUint32(), false));
|
||
color_name_suffix = extract_value(get_utf_16_string(view, view.getUint32(), false));
|
||
book_description = extract_value(get_utf_16_string(view, view.getUint32()));
|
||
color_count = view.getUint16();
|
||
page_size = view.getUint16();
|
||
page_selector_offset = view.getUint16();
|
||
color_space = view.getUint16();
|
||
for (i = 0, ref = color_count; (0 <= ref ? i < ref : i > ref); 0 <= ref ? i++ : i--) {
|
||
color_name = extract_value(get_utf_16_string(view, view.getUint32(), false));
|
||
color_code = view.getString(6).trim();
|
||
color_code = color_code.replace(/^0*(\d+)$/, '$1');
|
||
color_code = color_code.replace('X', '-');
|
||
|
||
// just in case? I've not seen an example of this
|
||
if (color_code && !color_name) {
|
||
pos = color_code.lastIndexOf(color_name_suffix.trim());
|
||
color_name = pos >= 0 ? color_code.slice(0, pos) : color_code;
|
||
}
|
||
// console.log color_code, pos, color_name
|
||
add = function(o) {
|
||
if (!color_name.trim() && !color_code.trim()) {
|
||
return;
|
||
}
|
||
// This is just a dummy record used for padding
|
||
o.name = color_name_prefix + color_name + color_name_suffix;
|
||
// o.code = color_code
|
||
return palette.add(o);
|
||
};
|
||
bad = function() {
|
||
throw new Error(`Color space #${color_space} (${PhotoshopColorSpace[color_space]}) not supported.`);
|
||
};
|
||
switch (color_space) {
|
||
case 0: // RGB
|
||
add({
|
||
red: view.getUint8() / 255,
|
||
green: view.getUint8() / 255,
|
||
blue: view.getUint8() / 255
|
||
});
|
||
break;
|
||
case 1: // HSB
|
||
add({
|
||
hue: view.getUint8() / 255,
|
||
saturation: view.getUint8() / 255,
|
||
value: view.getUint8() / 255
|
||
});
|
||
break;
|
||
case 2: // CMYK
|
||
add({
|
||
cyan: 1 - (view.getUint8() / 255),
|
||
magenta: 1 - (view.getUint8() / 255),
|
||
yellow: 1 - (view.getUint8() / 255),
|
||
key: 1 - (view.getUint8() / 255)
|
||
});
|
||
break;
|
||
case 3: // Pantone
|
||
bad();
|
||
break;
|
||
case 4: // Focoltone
|
||
bad();
|
||
break;
|
||
case 5: // Trumatch
|
||
bad();
|
||
break;
|
||
case 6: // Toyo
|
||
bad();
|
||
break;
|
||
case 7: // Lab (CIELAB D50)
|
||
add({
|
||
l: view.getUint8() / 255,
|
||
a: view.getUint8() / 255,
|
||
b: view.getUint8() / 255
|
||
});
|
||
break;
|
||
case 8: // Grayscale
|
||
bad();
|
||
break;
|
||
case 9: // Wide CMYK
|
||
bad();
|
||
break;
|
||
case 10: // HKS
|
||
bad();
|
||
break;
|
||
default:
|
||
bad();
|
||
}
|
||
}
|
||
|
||
// There's an optional field defining whether the color book is for spot or process colors.
|
||
// Would need to check for EOF to read this field.
|
||
// isSpot = view.getString(8) is "spflspot"
|
||
palette.name = book_title;
|
||
palette.description = book_description;
|
||
return palette;
|
||
};
|
||
|
||
|
||
},{"../Palette":37,"jdataview":7}],39:[function(require,module,exports){
|
||
// Read/write Adobe Color Table file (.act)
|
||
/*
|
||
"There is no version number written in the file.
|
||
The file is 768 or 772 bytes long and contains 256 RGB colors.
|
||
The first color in the table is index zero.
|
||
There are three bytes per color in the order red, green, blue.
|
||
If the file is 772 bytes long there are 4 additional bytes remaining.
|
||
Two bytes for the number of colors to use.
|
||
Two bytes for the color index with the transparency color to use."
|
||
|
||
https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577411_pgfId-1070626
|
||
*/
|
||
var Palette, jDataView, read_adobe_color_table, write_adobe_color_table;
|
||
|
||
jDataView = require("jdataview");
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = read_adobe_color_table = function({data, fileExt}) {
|
||
var j, palette, ref, view;
|
||
palette = new Palette();
|
||
view = new jDataView(data);
|
||
if (!(((ref = view.byteLength) === 768 || ref === 772) || fileExt === "act")) { // because "Fireworks can read ACT files bigger than 768 bytes"
|
||
throw new Error(`file size must be 768 or 772 bytes (saw ${view.byteLength}), OR file extension must be '.act' (saw '.${fileExt}')`);
|
||
}
|
||
for (var j = 0; j < 256; j++) {
|
||
palette.add({
|
||
red: view.getUint8() / 255,
|
||
green: view.getUint8() / 255,
|
||
blue: view.getUint8() / 255
|
||
});
|
||
}
|
||
palette.numberOfColumns = 16; // configurable in Photoshop, but this is the default view, and for instance Visibone and the default swatches rely on this layout
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = write_adobe_color_table = function(palette) {
|
||
var i, j, view;
|
||
view = new jDataView(256 * 3);
|
||
for (i = j = 0; j < 256; i = ++j) {
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].red * 255) : 0);
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].green * 255) : 0);
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].blue * 255) : 0);
|
||
}
|
||
return view.buffer;
|
||
};
|
||
|
||
|
||
},{"../Palette":37,"jdataview":7}],40:[function(require,module,exports){
|
||
// Detect CSS colors (except named colors), and write .css/.less/.scss/.sass/.styl files
|
||
var Palette, css_escape;
|
||
|
||
css_escape = require("css.escape");
|
||
|
||
Palette = require("../Palette");
|
||
|
||
// TODO: detect names via structures like CSS variables, JSON object keys/values, comments
|
||
// TODO: use all colors regardless of format, within a detected structure, or maybe always
|
||
module.exports = function({fileContentString}) {
|
||
var char, hex, i, j, len, len1, most_colors, n, n_control_characters, palette, palette_hex_long, palette_hex_short, palette_hsl, palette_hsla, palette_rgb, palette_rgba, palettes;
|
||
n_control_characters = 0;
|
||
for (i = 0, len = fileContentString.length; i < len; i++) {
|
||
char = fileContentString[i];
|
||
if (char === "\x00" || char === "\x01" || char === "\x02" || char === "\x03" || char === "\x04" || char === "\x05" || char === "\x06" || char === "\x07" || char === "\x08" || char === "\x0B" || char === "\x0C" || char === "\x0E" || char === "\x0F" || char === "\x10" || char === "\x11" || char === "\x12" || char === "\x13" || char === "\x14" || char === "\x15" || char === "\x16" || char === "\x17" || char === "\x18" || char === "\x19" || char === "\x1A" || char === "\x1B" || char === "\x1C" || char === "\x1D" || char === "\x1E" || char === "\x1F" || char === "\x7F") {
|
||
n_control_characters++;
|
||
}
|
||
}
|
||
if (n_control_characters > 5) {
|
||
throw new Error("looks like a binary file");
|
||
}
|
||
palettes = [palette_hex_long = new Palette(), palette_hex_short = new Palette(), palette_rgb = new Palette(), palette_hsl = new Palette(), palette_hsla = new Palette(), palette_rgba = new Palette()];
|
||
hex = function(x) {
|
||
return parseInt(x, 16);
|
||
};
|
||
fileContentString.replace(/\#([0-9A-F]{3}|[0-9A-F]{6}|[0-9A-F]{4}|[0-9A-F]{8})(?![0-9A-F])/gim, function(m, $1) { // hashtag # #/
|
||
// three hex-digits (#A0C)
|
||
// six hex-digits (#AA00CC)
|
||
// with alpha, four hex-digits (#A0CF)
|
||
// with alpha, eight hex-digits (#AA00CCFF)
|
||
// (and no more!)
|
||
if ($1.length > 4) {
|
||
return palette_hex_long.add({
|
||
red: hex($1[0] + $1[1]) / 255,
|
||
green: hex($1[2] + $1[3]) / 255,
|
||
blue: hex($1[4] + $1[5]) / 255,
|
||
alpha: $1.length === 8 ? hex($1[6] + $1[7]) / 255 : void 0
|
||
});
|
||
} else {
|
||
return palette_hex_short.add({
|
||
red: hex($1[0] + $1[0]) / 255,
|
||
green: hex($1[1] + $1[1]) / 255,
|
||
blue: hex($1[2] + $1[2]) / 255,
|
||
alpha: $1.length === 4 ? hex($1[3] + $1[3]) / 255 : void 0
|
||
});
|
||
}
|
||
});
|
||
  fileContentString.replace(/rgb\(\s*([0-9]*\.?[0-9]+)(%?)\s*(?:,|\s)\s*([0-9]*\.?[0-9]+)(%?)\s*(?:,|\s)\s*([0-9]*\.?[0-9]+)(%?)\s*\)/gim, function(_m, r_val, r_unit, g_val, g_unit, b_val, b_unit) { // red
    // green
    // blue
    return palette_rgb.add({
      red: Number(r_val) / (r_unit === "%" ? 100 : 255),
      green: Number(g_val) / (g_unit === "%" ? 100 : 255),
      blue: Number(b_val) / (b_unit === "%" ? 100 : 255)
    });
  });
  fileContentString.replace(/rgba?\(\s*([0-9]*\.?[0-9]+)(%?)\s*(?:,|\s)\s*([0-9]*\.?[0-9]+)(%?)\s*(?:,|\s)\s*([0-9]*\.?[0-9]+)(%?)\s*(?:,|\/)\s*([0-9]*\.?[0-9]+)(%?)\s*\)/gim, function(_m, r_val, r_unit, g_val, g_unit, b_val, b_unit, a_val, a_unit) { // red
    // green
    // blue
    // alpha
    return palette_rgba.add({
      red: Number(r_val) / (r_unit === "%" ? 100 : 255),
      green: Number(g_val) / (g_unit === "%" ? 100 : 255),
      blue: Number(b_val) / (b_unit === "%" ? 100 : 255),
      alpha: Number(a_val) / (a_unit === "%" ? 100 : 1)
    });
  });
  fileContentString.replace(/hsl\(\s*([0-9]*\.?[0-9]+)(deg|rad|turn|)\s*(?:,|\s)\s*([0-9]*\.?[0-9]+)(%?)\s*(?:,|\s)\s*([0-9]*\.?[0-9]+)(%?)\s*\)/gim, function(_m, h_val, h_unit, s_val, s_unit, l_val, l_unit) { // hue
    // saturation
    // lightness
    return palette_hsl.add({
      hue: Number(h_val) / (h_unit === "rad" ? 2 * Math.PI : h_unit === "turn" ? 1 : 360),
      saturation: Number(s_val) / (s_unit === "%" ? 100 : 1),
      lightness: Number(l_val) / (l_unit === "%" ? 100 : 1)
    });
  });
  fileContentString.replace(/hsla?\(\s*([0-9]*\.?[0-9]+)(deg|rad|turn|)\s*(?:,|\s)\s*([0-9]*\.?[0-9]+)(%?)\s*(?:,|\s)\s*([0-9]*\.?[0-9]+)(%?)\s*(?:,|\/)\s*([0-9]*\.?[0-9]+)(%?)\s*\)/gim, function(_m, h_val, h_unit, s_val, s_unit, l_val, l_unit, a_val, a_unit) { // hue
    // saturation
    // lightness
    // alpha
    return palette_hsla.add({
      hue: Number(h_val) / (h_unit === "rad" ? 2 * Math.PI : h_unit === "turn" ? 1 : 360),
      saturation: Number(s_val) / (s_unit === "%" ? 100 : 1),
      lightness: Number(l_val) / (l_unit === "%" ? 100 : 1),
      alpha: Number(a_val) / (a_unit === "%" ? 100 : 1)
    });
  });
  most_colors = [];
  for (j = 0, len1 = palettes.length; j < len1; j++) {
    palette = palettes[j];
    if (palette.length >= most_colors.length) {
      most_colors = palette;
    }
  }
  n = most_colors.length;
  if (n < 4) {
    throw new Error(["No colors found", "Only one color found", "Only a couple colors found", "Only a few colors found"][n] + ` (${n})`);
  }
  return most_colors;
};

module.exports.write_css = function(palette) {
  return `:root {
	${palette.map(function(color, index) {
    return `--${color.name ? css_escape(color.name.replace(/\s/g, "-")) : `color-${index + 1}`}: ${color};`;
  }).join("\n\t")}
}`;
};

module.exports.write_styl = function(palette) {
  return palette.map(function(color, index) {
    return `${color.name ? css_escape(color.name.replace(/\s/g, "-")) : `color-${index + 1}`} = ${color};`;
  }).join("\n");
};

module.exports.write_less = function(palette) {
  return palette.map(function(color, index) {
    return `@${color.name ? css_escape(color.name.replace(/\s/g, "-")) : `color-${index + 1}`}: ${color};`;
  }).join("\n");
};

module.exports.write_scss = function(palette) {
  return palette.map(function(color, index) {
    return `$${color.name ? css_escape(color.name.replace(/\s/g, "-")) : `color-${index + 1}`}: ${color};`;
  }).join("\n");
};

module.exports.write_sass = function(palette) {
  return palette.map(function(color, index) {
    return `$${color.name ? css_escape(color.name.replace(/\s/g, "-")) : `color-${index + 1}`}: ${color}`;
  }).join("\n");
};

},{"../Palette":37,"css.escape":4}],41:[function(require,module,exports){
|
||
// Load a ColorSchemer palette (.cs)
|
||
var Palette, jDataView;
|
||
|
||
jDataView = require("jdataview");
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({data, fileExt}) {
|
||
var color_count, i, j, littleEndian, palette, ref, version, view;
|
||
if (fileExt !== "cs") {
|
||
throw new Error(`ColorSchemer loader is only enabled when file extension is '.cs' (saw '.${fileExt}' instead)`);
|
||
}
|
||
palette = new Palette();
|
||
littleEndian = true;
|
||
view = new jDataView(data, 0, void 0, littleEndian);
|
||
version = view.getUint16(); // or something
|
||
color_count = view.getUint16();
|
||
for (i = j = 0, ref = color_count; (0 <= ref ? j < ref : j > ref); i = 0 <= ref ? ++j : --j) {
|
||
view.seek(8 + i * 26);
|
||
palette.add({
|
||
red: view.getUint8() / 255,
|
||
green: view.getUint8() / 255,
|
||
blue: view.getUint8() / 255
|
||
});
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
|
||
},{"../Palette":37,"jdataview":7}],42:[function(require,module,exports){
|
||
// Read/write GIMP palette (.gpl), also used or supported by many other programs, such as Inkscape and Krita.
|
||
var Palette, parse_gimp_or_kde_rgb_palette, write_gimp_or_kde_rgb_palette;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
parse_gimp_or_kde_rgb_palette = function(fileContentString, format_name) {
|
||
var line, line_index, lines, m, palette, r_g_b_name;
|
||
lines = fileContentString.split(/\r?\n/);
|
||
if (lines[0] !== format_name) {
|
||
throw new Error(`Not a ${format_name}`);
|
||
}
|
||
palette = new Palette();
|
||
line_index = 0;
|
||
// on the first iteration, line_index = 1 because the increment happens at the start of the loop
|
||
while ((line_index += 1) < lines.length) {
|
||
line = lines[line_index];
|
||
if (line[0] === "#" || line === "") {
|
||
continue;
|
||
}
|
||
// TODO: handle non-start-of-line comments? where's the spec?
|
||
m = line.match(/Name:\s*(.*)/);
|
||
if (m) {
|
||
palette.name = m[1];
|
||
continue;
|
||
}
|
||
m = line.match(/Columns:\s*(.*)/);
|
||
if (m) {
|
||
palette.numberOfColumns = Number(m[1]);
|
||
// TODO: handle 0 as not specified? where's the spec at, yo?
|
||
palette.geometrySpecifiedByFile = true;
|
||
continue;
|
||
}
|
||
|
||
// TODO: replace \s with [\ \t] (spaces or tabs)
|
||
// it can't match \n because it's already split on that, but still
|
||
// TODO: handle line with no name but space on the end
|
||
r_g_b_name = line.match(/^\s*([0-9]+)\s+([0-9]+)\s+([0-9]+)(?:\s+(.*))?$/); // "at the beginning of the line,"
|
||
// "give or take some spaces,"
|
||
// match 3 groups of numbers separated by spaces
|
||
// red
|
||
// green
|
||
// blue
|
||
// optionally a name
|
||
// "and that should be the end of the line"
|
||
if (!r_g_b_name) {
|
||
throw new Error(`Line ${line_index + 1} doesn't match pattern of red green blue name`);
|
||
}
|
||
palette.add({
|
||
red: Number(r_g_b_name[1]) / 255,
|
||
green: Number(r_g_b_name[2]) / 255,
|
||
blue: Number(r_g_b_name[3]) / 255,
|
||
name: r_g_b_name[4]
|
||
});
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports = function({fileContentString}) {
|
||
return parse_gimp_or_kde_rgb_palette(fileContentString, "GIMP Palette");
|
||
};
|
||
|
||
write_gimp_or_kde_rgb_palette = function(palette, format_name) {
|
||
return `${format_name || "GIMP Palette"}
|
||
Name: ${palette.name || "Saved Colors"}
|
||
Columns: ${palette.numberOfColumns || 8}
|
||
#
|
||
${palette.map((color) => {
|
||
var blue, green, red;
|
||
({red, green, blue} = color);
|
||
return `${[red, green, blue].map((component) => {
|
||
return `${Math.round(component * 255)}`.padEnd(3, " ");
|
||
}).join(" ")} ${color.name || color}`;
|
||
}).join("\n")}`;
|
||
};
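// For reference, write() produces text roughly like the following (illustrative values, not from any real palette):
//
//   GIMP Palette
//   Name: Saved Colors
//   Columns: 8
//   #
//   255 0   0   Red
//   0   255 0   Green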
|
||
|
||
module.exports.write = function(palette) {
|
||
return write_gimp_or_kde_rgb_palette(palette, "GIMP Palette");
|
||
};
|
||
|
||
module.exports.extension = "gpl";
|
||
|
||
module.exports.write_gimp_or_kde_rgb_palette = write_gimp_or_kde_rgb_palette;
|
||
|
||
module.exports.parse_gimp_or_kde_rgb_palette = parse_gimp_or_kde_rgb_palette;
|
||
|
||
|
||
},{"../Palette":37}],43:[function(require,module,exports){
|
||
// Read/write Allaire Homesite / Macromedia ColdFusion palette (.hpl)
|
||
var Palette;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({fileContentString}) {
|
||
var i, len, line, lines, match, palette;
|
||
lines = fileContentString.split(/\r?\n/);
|
||
if (lines[0] !== "Palette") {
|
||
throw new Error("Not a Homesite palette");
|
||
}
|
||
if (!lines[1].match(/Version [34]\.0/)) {
|
||
throw new Error("Unsupported Homesite palette version");
|
||
}
|
||
palette = new Palette();
|
||
for (i = 0, len = lines.length; i < len; i++) {
|
||
line = lines[i];
|
||
match = line.match(/(\d+)\s+(\d+)\s+(\d+)/);
|
||
if (match) {
|
||
palette.add({
|
||
red: Number(match[1]) / 255,
|
||
green: Number(match[2]) / 255,
|
||
blue: Number(match[3]) / 255
|
||
});
|
||
}
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
return `Palette
|
||
Version 4.0
|
||
-----------
|
||
${palette.map(function(color) {
|
||
return `${Math.round(color.red * 255)} ${Math.round(color.green * 255)} ${Math.round(color.blue * 255)}`;
|
||
}).join("\n")}`;
|
||
};
|
||
|
||
|
||
},{"../Palette":37}],44:[function(require,module,exports){
|
||
// Read/write KDE RGB Palette / KolourPaint / KOffice palette (.colors)
|
||
var parse_gimp_or_kde_rgb_palette, write_gimp_or_kde_rgb_palette;
|
||
|
||
({parse_gimp_or_kde_rgb_palette, write_gimp_or_kde_rgb_palette} = require("./GIMP"));
|
||
|
||
module.exports = function({fileContentString}) {
|
||
return parse_gimp_or_kde_rgb_palette(fileContentString, "KDE RGB Palette");
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
return write_gimp_or_kde_rgb_palette(palette, "KDE RGB Palette");
|
||
};
|
||
|
||
module.exports.extension = "colors";
|
||
|
||
|
||
},{"./GIMP":42}],45:[function(require,module,exports){
|
||
// Read/write Paint.NET palette format (.txt)
|
||
var Palette;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({fileContentString}) {
|
||
var hex, i, len, line, m, palette, ref;
|
||
palette = new Palette();
|
||
hex = function(x) {
|
||
return parseInt(x, 16);
|
||
};
|
||
ref = fileContentString.split(/\r?\n/);
|
||
for (i = 0, len = ref.length; i < len; i++) {
|
||
line = ref[i];
|
||
m = line.match(/^([0-9A-F]{2})([0-9A-F]{2})([0-9A-F]{2})([0-9A-F]{2})$/i);
|
||
if (m) {
|
||
palette.add({
|
||
alpha: hex(m[1]) / 255,
|
||
red: hex(m[2]) / 255,
|
||
green: hex(m[3]) / 255,
|
||
blue: hex(m[4]) / 255
|
||
});
|
||
}
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
var comments, component_to_hex, stringify_color;
|
||
component_to_hex = function(component) {
|
||
var hex;
|
||
hex = Math.round(component * 255).toString(16);
|
||
if (hex.length === 1) {
|
||
return `0${hex}`;
|
||
} else {
|
||
return hex;
|
||
}
|
||
};
|
||
stringify_color = function(color) {
|
||
var alpha, blue, green, red;
|
||
({alpha, red, green, blue} = color);
|
||
if (alpha == null) {
|
||
alpha = 1;
|
||
}
|
||
return [alpha, red, green, blue].map(component_to_hex).join("");
|
||
};
|
||
comments = `Paint.NET Palette File
|
||
Lines that start with a semicolon are comments
|
||
Colors are written as 8-digit hexadecimal numbers: aarrggbb
|
||
For example, this would specify green: FF00FF00
|
||
The alpha ('aa') value specifies how transparent a color is. FF is fully opaque, 00 is fully transparent.
|
||
A palette must consist of ninety six (96) colors. If there are less than this, the remaining color
|
||
slots will be set to white (FFFFFFFF). If there are more, then the remaining colors will be ignored.
|
||
|
||
`;
|
||
if (palette.name) {
|
||
comments += `Palette Name: ${palette.name}\n`;
|
||
}
|
||
if (palette.description) {
|
||
comments += `Description: ${palette.description}\n`;
|
||
}
|
||
comments += `Colors: ${palette.length}\n`;
|
||
if (palette.numberOfColumns) {
|
||
comments += `Columns: ${palette.numberOfColumns}\n`;
|
||
}
|
||
comments = `; ${comments}`.replace(/\n/g, "\n; ").replace(/\s*\n/g, "\n");
|
||
return `${comments}
|
||
${palette.map(stringify_color).join("\n")}`;
|
||
};
|
||
|
||
|
||
},{"../Palette":37}],46:[function(require,module,exports){
|
||
// Read/write JASC PAL file (Paint Shop Pro palette file) (.pal)
|
||
var Palette;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({fileContentString}) {
|
||
var i, j, len, line, lines, palette, rgb;
|
||
lines = fileContentString.split(/[\n\r]+/m);
|
||
if (lines[0] !== "JASC-PAL") {
|
||
throw new Error("Not a JASC-PAL");
|
||
}
|
||
if (lines[1] !== "0100") {
|
||
throw new Error("Unknown JASC-PAL version");
|
||
}
|
||
// if lines[2] isnt "256"
|
||
// "that's ok"
|
||
palette = new Palette();
|
||
//n_colors = Number(lines[2])
|
||
for (i = j = 0, len = lines.length; j < len; i = ++j) {
|
||
line = lines[i];
|
||
if (line !== "" && i > 2) {
|
||
rgb = line.split(" ");
|
||
palette.add({
|
||
red: Number(rgb[0]) / 255,
|
||
green: Number(rgb[1]) / 255,
|
||
blue: Number(rgb[2]) / 255
|
||
});
|
||
}
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
return `JASC-PAL
|
||
0100
|
||
${palette.length}
|
||
${palette.map(function(color) {
|
||
return `${Math.round(color.red * 255)} ${Math.round(color.green * 255)} ${Math.round(color.blue * 255)}`;
|
||
}).join("\n")}`;
|
||
};
|
||
|
||
|
||
},{"../Palette":37}],47:[function(require,module,exports){
|
||
// Read/write Resource Interchange File Format (RIFF) palette file (.pal)
|
||
|
||
// ported from C# code at https://worms2d.info/Palette_file
|
||
var Palette, jDataView;
|
||
|
||
jDataView = require("jdataview");
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({data}) {
|
||
var chunkSize, chunkType, colorCount, dataSize, i, littleEndian, palVersion, palette, ref, riff, type, view;
|
||
littleEndian = true;
|
||
view = new jDataView(data, 0, void 0, littleEndian);
|
||
|
||
// RIFF header
|
||
riff = view.getString(4); // "RIFF"
|
||
dataSize = view.getUint32();
|
||
type = view.getString(4); // "PAL "
|
||
if (riff !== "RIFF") {
|
||
throw new Error("RIFF header not found; not a RIFF PAL file");
|
||
}
|
||
if (type !== "PAL ") {
|
||
throw new Error(`RIFF header says this isn't a PAL file,
|
||
more of a sort of ${(type + "").trim()} file`);
|
||
}
|
||
|
||
// Data chunk
|
||
chunkType = view.getString(4); // "data"
|
||
chunkSize = view.getUint32();
|
||
palVersion = view.getUint16(); // 0x0300
|
||
colorCount = view.getUint16();
|
||
if (chunkType !== "data") {
|
||
throw new Error(`Data chunk not found (...'${chunkType}'?)`);
|
||
}
|
||
if (palVersion !== 0x0300) {
|
||
throw new Error(`Unsupported PAL file format version: 0x${palVersion.toString(16)}`);
|
||
}
|
||
|
||
// Colors
|
||
palette = new Palette();
|
||
for (i = 0, ref = colorCount; (0 <= ref ? i < ref : i > ref); 0 <= ref ? i++ : i--) {
|
||
palette.add({
|
||
red: view.getUint8() / 255,
|
||
green: view.getUint8() / 255,
|
||
blue: view.getUint8() / 255
|
||
});
|
||
view.getUint8(); // "flags", always 0x00
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
var color, data_chunk_body_size, data_chunk_total_size, file_size, file_view, i, len, littleEndian;
|
||
data_chunk_body_size = 2 + 2 + 4 * palette.length; // for the version number (Uint16) // for the color count (Uint16) // for the colors (4x Uint8)
|
||
data_chunk_total_size = 4 + 4 + data_chunk_body_size; // for "data" // for the chunk size (Uint32) // for the data chunk body
|
||
file_size = 4 + 4 + 4 + data_chunk_total_size; // for "RIFF" // for the document size (Uint32) // for "PAL " // for the data chunk
|
||
littleEndian = true;
|
||
file_view = new jDataView(file_size, 0, void 0, littleEndian);
|
||
file_view.writeString("RIFF");
|
||
file_view.writeUint32(data_chunk_total_size + 4);
|
||
file_view.writeString("PAL ");
|
||
file_view.writeString("data");
|
||
file_view.writeUint32(data_chunk_body_size);
|
||
file_view.writeUint16(0x0300); // version number
|
||
file_view.writeUint16(palette.length); // number of colors
|
||
for (i = 0, len = palette.length; i < len; i++) {
|
||
color = palette[i];
|
||
file_view.writeUint8(Math.round(color.red * 255));
|
||
file_view.writeUint8(Math.round(color.green * 255));
|
||
file_view.writeUint8(Math.round(color.blue * 255));
|
||
file_view.writeUint8(0); // "flags"
|
||
}
|
||
return file_view.buffer;
|
||
};
|
||
|
||
|
||
},{"../Palette":37,"jdataview":7}],48:[function(require,module,exports){
|
||
// Read/write sK1 palettes (.skp)
|
||
|
||
// These files are actually sort of python source code,
|
||
// but let's just try to parse them as if it's a declarative language.
|
||
|
||
// Normally the format is handled line by line but compiling python code for each line:
|
||
// https://github.com/sk1project/uniconvertor/blob/751a4f559bac48ded59028d9b2cf6778d1267a8f/src/uc2/formats/skp/skp_filters.py#L43-L44
|
||
var Palette, parse_css_hex_color;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
({parse_css_hex_color} = require("../helpers"));
|
||
|
||
module.exports = function({fileContentString}) {
|
||
var _, args, args_str, fn, fn_name, fns, i, len, line, line_index, lines, match, n, palette, parse_args;
|
||
lines = fileContentString.split(/[\n\r]+/m);
|
||
palette = new Palette();
|
||
fns = {
|
||
set_name: function(name) {
|
||
return palette.name = name;
|
||
},
|
||
// set_source: (source)-> palette.source = source
|
||
add_comments: function(line) {
|
||
if (palette.description == null) {
|
||
palette.description = "";
|
||
}
|
||
if (palette.description.length > 0) {
|
||
palette.description += "\n";
|
||
}
|
||
return palette.description += line;
|
||
},
|
||
set_columns: function(columns) {
|
||
palette.numberOfColumns = columns;
|
||
return palette.geometrySpecifiedByFile = true;
|
||
},
|
||
hexcolor: function(hexcolor, name) {
|
||
var color;
|
||
// TODO: find example palettes with hexcolor()
|
||
// I think adding # is unnecessary
|
||
color = parse_css_hex_color("#" + hexcolor);
|
||
color.name = name;
|
||
return palette.add(color);
|
||
},
|
||
rgbcolor: function(red, green, blue, name) {
|
||
// TODO: find example palettes with rgbcolor()
|
||
return palette.add({
|
||
red: red / 255,
|
||
green: green / 255,
|
||
blue: blue / 255,
|
||
name: name
|
||
});
|
||
},
|
||
color: function([color_type, components, alpha, name]) {
|
||
switch (color_type) {
|
||
case "RGB":
|
||
return palette.add({
|
||
red: components[0],
|
||
green: components[1],
|
||
blue: components[2],
|
||
alpha: alpha,
|
||
name: name
|
||
});
|
||
case "Grayscale":
|
||
return palette.add({
|
||
red: components[0],
|
||
green: components[0],
|
||
blue: components[0],
|
||
alpha: alpha,
|
||
name: name
|
||
});
|
||
case "CMYK":
|
||
return palette.add({
|
||
cyan: components[0],
|
||
magenta: components[1],
|
||
yellow: components[2],
|
||
key: components[3],
|
||
alpha: alpha,
|
||
name: name
|
||
});
|
||
case "HSL":
|
||
return palette.add({
|
||
hue: components[0],
|
||
saturation: components[1],
|
||
lightness: components[2],
|
||
alpha: alpha,
|
||
name: name
|
||
});
|
||
}
|
||
}
|
||
};
|
||
parse_args = function(args_str, line_number) {
|
||
var args, index, parse_array, parse_number, parse_string, ref, ref1;
|
||
// JSON.parse("[#{args_str.replace(/\bu(['"])/g, "$1").replace(/"/g, '\\"').replace(/'/g, '"')}]")
|
||
// TODO: proper parsing that handles u"You've got mail!" etc.
|
||
args = [];
|
||
index = 0;
|
||
parse_string = function() {
|
||
var quote_char, str;
|
||
str = "";
|
||
quote_char = args_str[index];
|
||
if (quote_char !== "'" && quote_char !== '"') {
|
||
throw new Error("Expected to start parsing string on a quote character");
|
||
}
|
||
index += 1;
|
||
while (index < args_str.length) {
|
||
if (args_str[index] === "\\") {
|
||
index += 1;
|
||
if (args_str[index] === "\\") {
|
||
str += "\\";
|
||
} else if (args_str[index] === "r") {
|
||
str += "\r";
|
||
} else if (args_str[index] === "n") {
|
||
str += "\n";
|
||
} else if (args_str[index] === "t") {
|
||
str += "\t";
|
||
} else if (args_str[index] === "v") {
|
||
str += "\v";
|
||
} else if (args_str[index] === "a") {
|
||
str += "\a";
|
||
} else if (args_str[index] === "b") {
|
||
str += "\b";
|
||
} else if (args_str[index] === "'") {
|
||
str += "'";
|
||
} else if (args_str[index] === '"') {
|
||
str += '"';
|
||
} else if (args_str[index].match(/\d/)) {
|
||
|
||
// TODO: handle octal escape
|
||
} else if (args_str[index] === "x") {
|
||
|
||
// TODO: handle hex escape
|
||
} else if (args_str[index] === "N") {
|
||
|
||
// TODO: character by Unicode name
|
||
} else if (args_str[index] === "u") {
|
||
|
||
// TODO: character with 16-bit hex value (four hexadecimal digits).
|
||
} else if (args_str[index] === "U") {
|
||
|
||
} else {
|
||
// TODO: character with 32-bit hex value (eight hexadecimal digits).
|
||
console.log(`Warning: unnecessary escape in python string: \\${args_str[index]}`);
|
||
str += args_str[index];
|
||
}
|
||
} else if (args_str[index] === quote_char) {
|
||
return str;
|
||
} else {
|
||
str += args_str[index];
|
||
}
|
||
index += 1;
|
||
}
|
||
throw new SyntaxError(`Expected end of string on line ${line_number}`);
|
||
};
|
||
parse_number = function() {
|
||
var num_str;
|
||
// not super robust - 127.0.0.1 is just a number, right?
|
||
// could be done a lot simpler too with a single regexp match
|
||
num_str = "";
|
||
while (index < args_str.length) {
|
||
if (args_str[index].match(/[\d\.]/)) {
|
||
num_str += args_str[index];
|
||
} else {
|
||
break;
|
||
}
|
||
index += 1;
|
||
}
|
||
index -= 1;
|
||
return parseFloat(num_str);
|
||
};
|
||
parse_array = function() {
|
||
var ref, ref1, values;
|
||
index += 1;
|
||
values = [];
|
||
while (index < args_str.length) {
|
||
if (args_str[index] === "u") {
|
||
index += 1;
|
||
if ((ref = args_str[index]) === "'" || ref === '"') {
|
||
values.push(parse_string());
|
||
} else {
|
||
throw new SyntaxError(`Unexpected 'u${args_str.slice(index)}' on line ${line_number}`);
|
||
}
|
||
} else if ((ref1 = args_str[index]) === "'" || ref1 === '"') {
|
||
values.push(parse_string());
|
||
} else if (args_str[index] === "[") {
|
||
values.push(parse_array());
|
||
} else if (args_str[index].match(/\d/)) {
|
||
values.push(parse_number());
|
||
} else if (args_str[index] === "]") {
|
||
return values;
|
||
} else if (args_str[index] === ",") {
|
||
|
||
// not keeping track of commas, you could duplicate or omit them and we'd still parse
|
||
} else if (args_str[index].match(/\S/)) {
|
||
throw new SyntaxError(`Unexpected '${args_str.slice(index)}' on line ${line_number}`);
|
||
}
|
||
index += 1;
|
||
}
|
||
throw new SyntaxError(`Expected end of array on line ${line_number}`);
|
||
};
|
||
while (index < args_str.length) {
|
||
if (args_str[index] === "u") {
|
||
index += 1;
|
||
if ((ref = args_str[index]) === "'" || ref === '"') {
|
||
args.push(parse_string());
|
||
} else {
|
||
throw new SyntaxError(`Unexpected 'u${args_str.slice(index)}' on line ${line_number}`);
|
||
}
|
||
} else if ((ref1 = args_str[index]) === "'" || ref1 === '"') {
|
||
args.push(parse_string());
|
||
} else if (args_str[index] === "[") {
|
||
args.push(parse_array());
|
||
} else if (args_str[index].match(/\d/)) {
|
||
args.push(parse_number());
|
||
} else if (args_str[index] === ",") {
|
||
|
||
// not keeping track of commas, you could duplicate or omit them and we'd still parse
|
||
} else if (args_str[index].match(/\S/)) {
|
||
throw new SyntaxError(`Unexpected '${args_str.slice(index)}' on line ${line_number}`);
|
||
}
|
||
index += 1;
|
||
}
|
||
return args;
|
||
};
|
||
for (line_index = i = 0, len = lines.length; i < len; line_index = ++i) {
|
||
line = lines[line_index];
|
||
match = line.match(/([\w_]+)\((.*)\)/);
|
||
if (match) {
|
||
[_, fn_name, args_str] = match;
|
||
fn = fns[fn_name];
|
||
if (fn) {
|
||
args = parse_args(args_str, line_index + 1);
|
||
fn(...args);
|
||
}
|
||
}
|
||
}
|
||
n = palette.length;
|
||
if (n < 2) {
|
||
throw new Error(["No colors found", "Only one color found"][n] + ` (${n})`);
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
var alpha, color, color_type, components, i, item, j, len, len1, name, ref, ref1, ref2, serialize_str, str;
|
||
serialize_str = function(str) {
|
||
return `'${str.replace(/[\r\n]+/g, " ").replace(/'/g, "\\'")}'`;
|
||
};
|
||
str = "##sK1 palette\n";
|
||
str += "palette()\n";
|
||
if (palette.name) {
|
||
str += `set_name(${serialize_str(palette.name)})\n`;
|
||
}
|
||
if (palette.source) {
|
||
str += `set_source(${serialize_str(palette.source)})\n`;
|
||
}
|
||
if (palette.description) {
|
||
ref = palette.description.split(/\r?\n/g);
|
||
for (i = 0, len = ref.length; i < len; i++) {
|
||
item = ref[i];
|
||
str += `add_comments(${serialize_str(item)})\n`;
|
||
}
|
||
}
|
||
if (palette.numberOfColumns) {
|
||
str += `set_columns(${palette.numberOfColumns})\n`;
|
||
}
|
||
for (j = 0, len1 = palette.length; j < len1; j++) {
|
||
color = palette[j];
|
||
if (color.hue != null) {
|
||
color_type = "HSL";
|
||
components = [color.hue, color.saturation, color.lightness];
|
||
} else {
|
||
color_type = "RGB";
|
||
components = [color.red, color.green, color.blue];
|
||
}
|
||
alpha = (ref1 = color.alpha) != null ? ref1 : 1;
|
||
name = (ref2 = color.name) != null ? ref2 : color.toString();
|
||
str += `color([${serialize_str(color_type)}, [${components.join(", ")}], ${alpha}, ${serialize_str(name)}])\n`;
|
||
}
|
||
str += "palette_end()\n";
|
||
return str;
|
||
};
|
||
|
||
|
||
},{"../Palette":37,"../helpers":56}],49:[function(require,module,exports){
|
||
// Read/write Skencil palette (.spl) ("Sketch RGBPalette")
|
||
// Skencil was formerly called Sketch, but this is not related to the .sketchpalette format.
|
||
var Palette;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({fileContentString}) {
|
||
var i, len, line, line_index, lines, palette, r_g_b_name;
|
||
lines = fileContentString.split(/[\n\r]+/m);
|
||
if (lines[0] !== "##Sketch RGBPalette 0") {
|
||
throw new Error("Not a Skencil palette");
|
||
}
|
||
palette = new Palette();
|
||
for (line_index = i = 0, len = lines.length; i < len; line_index = ++i) {
|
||
line = lines[line_index];
|
||
if (line[0] === "#" || line === "") {
|
||
continue;
|
||
}
|
||
// TODO: handle non-start-of-line comments? where's the spec?
|
||
|
||
// TODO: replace \s with [\ \t] (spaces or tabs)
|
||
// it can't match \n because it's already split on that, but still
|
||
// TODO: handle line with no name but space on the end
|
||
r_g_b_name = line.match(/^\s*([0-9]*\.?[0-9]+)\s+([0-9]*\.?[0-9]+)\s+([0-9]*\.?[0-9]+)(?:\s+(.*))?$/); // at the beginning of the line,
|
||
// perhaps with some leading spaces
|
||
// match 3 groups of numbers separated by spaces
|
||
// red
|
||
// green
|
||
// blue
|
||
// optionally a name
|
||
// "and that should be the end of the line"
|
||
if (!r_g_b_name) {
|
||
throw new Error(`Line ${line_index + 1} doesn't match pattern of red green blue name`);
|
||
}
|
||
palette.add({
|
||
red: Number(r_g_b_name[1]),
|
||
green: Number(r_g_b_name[2]),
|
||
blue: Number(r_g_b_name[3]),
|
||
name: r_g_b_name[4]
|
||
});
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
return `##Sketch RGBPalette 0
|
||
${palette.map(function(color) {
|
||
var ref;
|
||
return `${color.red.toFixed(6)} ${color.green.toFixed(6)} ${color.blue.toFixed(6)} ${(ref = color.name) != null ? ref : color}`;
|
||
}).join("\n")}`;
|
||
};
|
||
|
||
|
||
},{"../Palette":37}],50:[function(require,module,exports){
|
||
// Read/write StarCraft raw palette (.pal)
|
||
var Palette, jDataView;
|
||
|
||
jDataView = require("jdataview");
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({data}) {
|
||
var j, palette, view;
|
||
palette = new Palette();
|
||
view = new jDataView(data);
|
||
if (view.byteLength !== 768) {
|
||
throw new Error(`Wrong file size, must be ${768} bytes long (not ${view.byteLength})`);
|
||
}
|
||
for (var j = 0; j < 256; j++) {
|
||
palette.add({
|
||
red: view.getUint8() / 255,
|
||
green: view.getUint8() / 255,
|
||
blue: view.getUint8() / 255
|
||
});
|
||
}
|
||
// no padding
|
||
|
||
//? palette.numberOfColumns = 16
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
var i, j, view;
|
||
view = new jDataView(256 * 3);
|
||
for (i = j = 0; j < 256; i = ++j) {
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].red * 255) : 0);
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].green * 255) : 0);
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].blue * 255) : 0);
|
||
}
|
||
return view.buffer;
|
||
};
|
||
|
||
|
||
},{"../Palette":37,"jdataview":7}],51:[function(require,module,exports){
|
||
// Read/write StarCraft padded raw palette (.wpe)
|
||
var Palette, jDataView;
|
||
|
||
jDataView = require("jdataview");
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({data}) {
|
||
var j, palette, view;
|
||
palette = new Palette();
|
||
view = new jDataView(data);
|
||
if (view.byteLength !== 1024) {
|
||
throw new Error(`Wrong file size, must be ${1024} bytes long (not ${view.byteLength})`);
|
||
}
|
||
for (var j = 0; j < 256; j++) {
|
||
palette.add({
|
||
red: view.getUint8() / 255,
|
||
green: view.getUint8() / 255,
|
||
blue: view.getUint8() / 255
|
||
});
|
||
view.getUint8(); // padding
|
||
}
|
||
palette.numberOfColumns = 16;
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
var i, j, view;
|
||
view = new jDataView(256 * 4);
|
||
for (i = j = 0; j < 256; i = ++j) {
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].red * 255) : 0);
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].green * 255) : 0);
|
||
view.writeUint8(palette[i] ? Math.round(palette[i].blue * 255) : 0);
|
||
view.writeUint8(0); // padding
|
||
}
|
||
return view.buffer;
|
||
};
|
||
|
||
|
||
},{"../Palette":37,"jdataview":7}],52:[function(require,module,exports){
|
||
// Read/write StarOffice / OpenOffice / LibreOffice palette (.soc)
|
||
var Palette, convert, parse_css_hex_color, write_soc;
|
||
|
||
convert = require("xml-js");
|
||
|
||
Palette = require("../Palette");
|
||
|
||
({parse_css_hex_color} = require("../helpers"));
|
||
|
||
module.exports.read_soc = function({fileContentString}) {
|
||
var child, color_options, element, i, j, len, len1, palette, parsed, ref, ref1, ref2, ref3, ref4;
|
||
if (!fileContentString.match(/^\s*<\?\s*xml/i)) { // I doubt space is actually allowed between <? and xml
|
||
throw new Error("not a StarOffice palette (no <?xml...?> declaration)");
|
||
}
|
||
palette = new Palette();
|
||
parsed = convert.xml2js(fileContentString, {
|
||
compact: false
|
||
});
|
||
if (!((ref = parsed.elements) != null ? ref.length : void 0)) {
|
||
throw new Error("No XML elements found");
|
||
}
|
||
ref1 = parsed.elements;
|
||
for (i = 0, len = ref1.length; i < len; i++) {
|
||
element = ref1[i];
|
||
if (((ref2 = element.name) != null ? ref2.match(/:color-table$/) : void 0) && element.elements) {
|
||
ref3 = element.elements;
|
||
for (j = 0, len1 = ref3.length; j < len1; j++) {
|
||
child = ref3[j];
|
||
if (!(child.name === "draw:color" && ((ref4 = child.attributes["draw:color"]) != null ? ref4.match(/#/) : void 0))) {
|
||
continue;
|
||
}
|
||
// TODO: probably can be any CSS color
|
||
color_options = parse_css_hex_color(child.attributes["draw:color"]);
|
||
color_options.name = child.attributes["draw:name"];
|
||
palette.add(color_options);
|
||
}
|
||
}
|
||
}
|
||
return palette;
|
||
};
|
||
|
||
write_soc = function(palette, modern) {
|
||
var color, component_to_hex, to_css_hex_color;
|
||
component_to_hex = function(component) {
|
||
var hex;
|
||
hex = Math.round(component * 255).toString(16);
|
||
if (hex.length === 1) {
|
||
return `0${hex}`;
|
||
} else {
|
||
return hex;
|
||
}
|
||
};
|
||
to_css_hex_color = function(color) {
|
||
var blue, green, red;
|
||
({red, green, blue} = color);
|
||
return "#" + [red, green, blue].map(component_to_hex).join("");
|
||
};
|
||
return convert.js2xml({
|
||
declaration: {
|
||
attributes: {
|
||
version: "1.0",
|
||
encoding: "UTF-8"
|
||
}
|
||
},
|
||
elements: [
|
||
{
|
||
// {
|
||
// type: "comment"
|
||
// comment: ""
|
||
// }
|
||
type: "element",
|
||
name: modern ? "ooo:color-table" : "office:color-table",
|
||
attributes: modern ? {
|
||
"xmlns:office": "urn:oasis:names:tc:opendocument:xmlns:office:1.0",
|
||
"xmlns:draw": "urn:oasis:names:tc:opendocument:xmlns:drawing:1.0",
|
||
"xmlns:xlink": "http://www.w3.org/1999/xlink",
|
||
"xmlns:svg": "http://www.w3.org/2000/svg",
|
||
"xmlns:ooo": "http://openoffice.org/2004/office"
|
||
} : {
|
||
"xmlns:form": "http://openoffice.org/2000/form",
|
||
"xmlns:number": "http://openoffice.org/2000/datastyle",
|
||
"xmlns:xlink": "http://www.w3.org/1999/xlink",
|
||
"xmlns:office": "http://openoffice.org/2000/office",
|
||
"xmlns:meta": "http://openoffice.org/2000/meta",
|
||
"xmlns:math": "http://www.w3.org/1998/Math/MathML",
|
||
"xmlns:svg": "http://www.w3.org/2000/svg",
|
||
"xmlns:dr3d": "http://openoffice.org/2000/dr3d",
|
||
"xmlns:text": "http://openoffice.org/2000/text",
|
||
"xmlns:style": "http://openoffice.org/2000/style",
|
||
"xmlns:script": "http://openoffice.org/2000/script",
|
||
"xmlns:chart": "http://openoffice.org/2000/chart",
|
||
"xmlns:draw": "http://openoffice.org/2000/drawing",
|
||
"xmlns:table": "http://openoffice.org/2000/table",
|
||
"xmlns:fo": "http://www.w3.org/1999/XSL/Format",
|
||
"xmlns:config": "http://openoffice.org/2001/config",
|
||
"xmlns:dc": "http://purl.org/dc/elements/1.1/"
|
||
},
|
||
elements: (function() {
|
||
var i,
|
||
len,
|
||
results;
|
||
results = [];
|
||
for (i = 0, len = palette.length; i < len; i++) {
|
||
color = palette[i];
|
||
results.push({
|
||
type: "element",
|
||
name: "draw:color",
|
||
attributes: {
|
||
"draw:name": color.name || color.toString(),
|
||
"draw:color": to_css_hex_color(color)
|
||
}
|
||
});
|
||
}
|
||
return results;
|
||
})()
|
||
}
|
||
]
|
||
}, {
|
||
spaces: "\t"
|
||
});
|
||
};
|
||
|
||
module.exports.write_staroffice_soc = function(palette) {
|
||
return write_soc(palette, false);
|
||
};
|
||
|
||
// I assume this is the format used by OpenOffice.org as well, given the ooo namespace
|
||
module.exports.write_libreoffice_soc = function(palette) {
|
||
return write_soc(palette, true);
|
||
};
|
||
|
||
|
||
},{"../Palette":37,"../helpers":56,"xml-js":30}],53:[function(require,module,exports){
|
||
// Read/write Sketch App JSON palette (.sketchpalette)
|
||
// (not related to .spl Sketch RGB Palette format)
|
||
|
||
// based on https://github.com/andrewfiorillo/sketch-palettes/blob/5b6bfa6eb25cb3244a9e6a226df259e8fb31fc2c/Sketch%20Palettes.sketchplugin/Contents/Sketch/sketchPalettes.js
|
||
var Palette, parse_css_hex_color, version;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
({parse_css_hex_color} = require("../helpers"));
|
||
|
||
version = 1.4;
|
||
|
||
module.exports = function({fileContentString}) {
|
||
var colorAssets, colorDefinitions, color_definition, compatibleVersion, gradientAssets, hex_color, i, images, j, len, len1, palette, paletteContents, ref;
|
||
if (!fileContentString.match(/^\s*{/)) {
|
||
throw new Error("not sketchpalette JSON");
|
||
}
|
||
paletteContents = JSON.parse(fileContentString);
|
||
compatibleVersion = paletteContents.compatibleVersion;
|
||
// Check for presets in file, else set to empty array
|
||
colorDefinitions = (ref = paletteContents.colors) != null ? ref : [];
|
||
// gradientDefinitions = paletteContents.gradients ? []
|
||
// imageDefinitions = paletteContents.images ? []
|
||
colorAssets = [];
|
||
gradientAssets = [];
|
||
images = [];
|
||
palette = new Palette();
|
||
// Check if plugin is out of date and incompatible with a newer palette version
|
||
if (compatibleVersion && compatibleVersion > version) {
|
||
throw new Error(`Can't handle compatibleVersion of ${compatibleVersion}.`);
|
||
}
|
||
// Check for older hex code palette version
|
||
if (!compatibleVersion || compatibleVersion < 1.4) {
|
||
// Convert hex colors
|
||
for (i = 0, len = colorDefinitions.length; i < len; i++) {
|
||
hex_color = colorDefinitions[i];
|
||
palette.add(parse_css_hex_color(hex_color));
|
||
}
|
||
} else {
|
||
// Color Fills: convert rgba colors
|
||
if (colorDefinitions.length > 0) {
|
||
for (j = 0, len1 = colorDefinitions.length; j < len1; j++) {
|
||
color_definition = colorDefinitions[j];
|
||
palette.add(color_definition);
|
||
}
|
||
}
|
||
}
|
||
// # Pattern Fills: convert base64 strings to MSImageData objects
|
||
// if imageDefinitions.length > 0
|
||
// for imageDefinition in imageDefinitions
|
||
// nsdata = NSData.alloc().initWithBase64EncodedString_options(imageDefinition.data, 0)
|
||
// nsimage = NSImage.alloc().initWithData(nsdata)
|
||
// # msimage = MSImageData.alloc().initWithImageConvertingColorSpace(nsimage)
|
||
// msimage = MSImageData.alloc().initWithImage(nsimage)
|
||
// images.push(msimage)
|
||
|
||
// # Gradient Fills: build MSGradientStop and MSGradient objects
|
||
// if gradientDefinitions.length > 0
|
||
// for gradient in gradientDefinitions
|
||
// # Create gradient stops
|
||
// stops = []
|
||
// for stop in gradient.stops
|
||
// color = MSColor.colorWithRed_green_blue_alpha(
|
||
// stop.color.red,
|
||
// stop.color.green,
|
||
// stop.color.blue,
|
||
// stop.color.alpha
|
||
// )
|
||
// stops.push(MSGradientStop.stopWithPosition_color_(stop.position, color))
|
||
|
||
// # Create gradient object and set basic properties
|
||
// msgradient = MSGradient.new()
|
||
// msgradient.setGradientType(gradient.gradientType)
|
||
// # msgradient.shouldSmoothenOpacity = gradient.shouldSmoothenOpacity
|
||
// msgradient.elipseLength = gradient.elipseLength
|
||
// msgradient.setStops(stops)
|
||
|
||
// # Parse From and To values into arrays e.g.: from: "{0.1,-0.43}" => fromValue = [0.1, -0.43]
|
||
// fromValue = gradient.from.slice(1,-1).split(",")
|
||
// toValue = gradient.to.slice(1,-1).split(",")
|
||
|
||
// # Set CGPoint objects as From and To values
|
||
// msgradient.setFrom({ x: fromValue[0], y: fromValue[1] })
|
||
// msgradient.setTo({ x: toValue[0], y: toValue[1] })
|
||
|
||
// gradientName = gradient.name ? null
|
||
// gradientAssets.push(MSGradientAsset.alloc().initWithAsset_name(msgradient, gradientName))
|
||
return palette;
|
||
};
|
||
|
||
module.exports.write = function(palette) {
|
||
return JSON.stringify({
|
||
"compatibleVersion": "1.4",
|
||
"pluginVersion": "1.4",
|
||
"colors": palette.map(function(color) {
|
||
var alpha, blue, green, red;
|
||
({red, green, blue, alpha} = color);
|
||
if (alpha == null) {
|
||
alpha = 1;
|
||
}
|
||
return {red, green, blue, alpha};
|
||
})
|
||
}, null, "\t");
|
||
};
|
||
|
||
module.exports.extension = "sketchpalette";
|
||
|
||
|
||
},{"../Palette":37,"../helpers":56}],54:[function(require,module,exports){
|
||
// Load tabular RGB values
|
||
var Palette;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
module.exports = function({fileContentString}) {
|
||
var csv_palette, i, j, len, len1, line, lines, most_colors, n, palette, palettes, ssv_palette, try_parse_line;
|
||
lines = fileContentString.split(/[\n\r]+/m);
|
||
palettes = [csv_palette = new Palette(), ssv_palette = new Palette()];
|
||
try_parse_line = function(line, palette, regexp) {
|
||
var match;
|
||
match = line.match(regexp);
|
||
if (match) {
|
||
return palette.add({
|
||
red: Number(match[1]) / 255,
|
||
green: Number(match[2]) / 255,
|
||
blue: Number(match[3]) / 255
|
||
});
|
||
}
|
||
};
|
||
for (i = 0, len = lines.length; i < len; i++) {
|
||
line = lines[i];
|
||
try_parse_line(line, csv_palette, /([0-9]*\.?[0-9]+),\s*([0-9]*\.?[0-9]+),\s*([0-9]*\.?[0-9]+)/);
|
||
try_parse_line(line, ssv_palette, /([0-9]*\.?[0-9]+)\s+([0-9]*\.?[0-9]+)\s+([0-9]*\.?[0-9]+)/);
|
||
}
|
||
most_colors = [];
|
||
for (j = 0, len1 = palettes.length; j < len1; j++) {
|
||
palette = palettes[j];
|
||
if (palette.length >= most_colors.length) {
|
||
most_colors = palette;
|
||
}
|
||
}
|
||
n = most_colors.length;
|
||
if (n < 4) {
|
||
throw new Error(["No colors found", "Only one color found", "Only a couple colors found", "Only a few colors found"][n] + ` (${n})`);
|
||
}
|
||
if (most_colors.every(function(color) {
|
||
return color.red <= 1 / 255 && color.green <= 1 / 255 && color.blue <= 1 / 255;
|
||
})) {
|
||
most_colors.forEach(function(color) {
|
||
color.red *= 255;
|
||
color.green *= 255;
|
||
return color.blue *= 255;
|
||
});
|
||
}
|
||
return most_colors;
|
||
};
|
||
|
||
|
||
},{"../Palette":37}],55:[function(require,module,exports){
|
||
// Load Windows .theme and .themepack files
|
||
var Palette, parseINIString, parseThemeFileString;
|
||
|
||
Palette = require("../Palette");
|
||
|
||
parseINIString = function(fileContentString) {
|
||
var lines, regex, section, value;
|
||
regex = {
|
||
section: /^\s*\[\s*([^\]]*)\s*\]\s*$/,
|
||
param: /^\s*([^=]+?)\s*=\s*(.*?)\s*$/,
|
||
comment: /^\s*;.*$/
|
||
};
|
||
value = {};
|
||
lines = fileContentString.split(/[\r\n]+/);
|
||
section = null;
|
||
lines.forEach(function(line) {
|
||
var match;
|
||
if (regex.comment.test(line)) {
|
||
return;
|
||
} else if (regex.param.test(line)) {
|
||
match = line.match(regex.param);
|
||
if (section) {
|
||
value[section][match[1]] = match[2];
|
||
} else {
|
||
value[match[1]] = match[2];
|
||
}
|
||
} else if (regex.section.test(line)) {
|
||
match = line.match(regex.section);
|
||
value[match[1]] = {};
|
||
section = match[1];
|
||
} else if (line.length === 0 && section) {
|
||
section = null;
|
||
}
|
||
});
|
||
return value;
|
||
};
|
||
|
||
parseThemeFileString = function(themeIni) {
|
||
var colors, component, components, i, j, key, len, palette, ref, theme;
|
||
// .theme is a renamed .ini text file
|
||
// .themepack is a renamed .cab file, and parsing it as .ini seems to work well enough for the most part, as the .ini data appears in plain,
|
||
// but it may not if compression is enabled for the .cab file
|
||
theme = parseINIString(themeIni);
|
||
colors = theme["Control Panel\\Colors"];
|
||
if (!colors) {
|
||
throw new Error("Invalid theme file, no [Control Panel\\Colors] section");
|
||
}
|
||
palette = new Palette();
|
||
for (key in colors) {
|
||
// for .themepack file support, just ignore bad keys that were parsed
|
||
if (!key.match(/\W/)) {
|
||
components = colors[key].split(" ");
|
||
if (components.length === 3) {
|
||
for (i = j = 0, len = components.length; j < len; i = ++j) {
|
||
component = components[i];
|
||
components[i] = parseInt(component, 10);
|
||
}
|
||
if (components.every(function(component) {
|
||
return isFinite(component);
|
||
})) {
|
||
palette.add({
|
||
red: components[0] / 255,
|
||
green: components[1] / 255,
|
||
blue: components[2] / 255,
|
||
name: key
|
||
});
|
||
}
|
||
}
|
||
}
|
||
}
|
||
palette.name = (ref = theme["Theme"]) != null ? ref.DisplayName : void 0; // or theme["General"]?.Name for KDE .colors
|
||
return palette;
|
||
};
|
||
|
||
module.exports = function({fileContentString}) {
|
||
return parseThemeFileString(fileContentString);
|
||
};
|
||
|
||
|
||
},{"../Palette":37}],56:[function(require,module,exports){
|
||
// TODO: DRY with CSS.coffee
|
||
module.exports.parse_css_hex_color = function(hex_color) {
|
||
var $0, $1, hex, match;
|
||
hex = function(x) {
|
||
return parseInt(x, 16);
|
||
};
|
||
match = hex_color.match(/\#([0-9A-F]{3}|[0-9A-F]{6}|[0-9A-F]{4}|[0-9A-F]{8})(?![0-9A-F])/im); // hashtag # #/
|
||
// three hex-digits (#A0C)
|
||
// six hex-digits (#AA00CC)
|
||
// with alpha, four hex-digits (#A0CF)
|
||
// with alpha, eight hex-digits (#AA00CCFF)
|
||
// (and no more!)
|
||
[$0, $1] = match;
|
||
if ($1.length > 4) {
|
||
return {
|
||
red: hex($1[0] + $1[1]) / 255,
|
||
green: hex($1[2] + $1[3]) / 255,
|
||
blue: hex($1[4] + $1[5]) / 255,
|
||
alpha: $1.length === 8 ? hex($1[6] + $1[7]) / 255 : void 0
|
||
};
|
||
} else {
|
||
return {
|
||
red: hex($1[0] + $1[0]) / 255,
|
||
green: hex($1[1] + $1[1]) / 255,
|
||
blue: hex($1[2] + $1[2]) / 255,
|
||
alpha: $1.length === 4 ? hex($1[3] + $1[3]) / 255 : void 0
|
||
};
|
||
}
|
||
};
|
||
|
||
|
||
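// For example (illustrative): parse_css_hex_color("#A0C") and parse_css_hex_color("#AA00CC") both return
// {red: 170/255, green: 0, blue: 204/255, alpha: undefined}; components are normalized to the 0..1 range.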
},{}],57:[function(require,module,exports){
var AnyPalette, Color, LoadingErrors, Palette, format, format_id, formats, k, len, normalize_options, read_palette, ref,
  splice = [].splice;

Palette = require("./Palette");

Color = require("./Color");

LoadingErrors = class LoadingErrors extends Error {
  constructor(errors1) {
    var error;
    super();
    this.errors = errors1;
    this.message = "Some errors were encountered when loading:" + (function() {
      var k, len, ref, results;
      ref = this.errors;
      results = [];
      for (k = 0, len = ref.length; k < len; k++) {
        error = ref[k];
        results.push("\n\t" + error.message);
      }
      return results;
    }).call(this);
  }

};
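// Note: this bundle is patched so that each entry of `.errors` also carries the format object it was
// attempting to load as, under `__PATCHED_LIB_TO_ADD_THIS__format` (see read_palette below), so callers
// can report which formats were tried and why each one failed.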
|
||
// Formats are sorted by file extension if available,
|
||
// but it's not always available, and some formats use the same extensions.
|
||
// More generic formats should go at the bottom.
|
||
formats = {
|
||
PAINT_SHOP_PRO_PALETTE: {
|
||
name: "Paint Shop Pro palette",
|
||
fileExtensions: ["psppalette", "pal"],
|
||
readFromText: require("./formats/PaintShopPro"),
|
||
write: (require("./formats/PaintShopPro")).write
|
||
},
|
||
RIFF_PALETTE: {
|
||
name: "RIFF PAL",
|
||
fileExtensions: ["pal"],
|
||
read: require("./formats/RIFF"),
|
||
write: (require("./formats/RIFF")).write
|
||
},
|
||
COLORSCHEMER_PALETTE: {
|
||
name: "ColorSchemer palette",
|
||
fileExtensions: ["cs"],
|
||
read: require("./formats/ColorSchemer")
|
||
},
|
||
PAINTDOTNET_PALETTE: {
|
||
name: "Paint.NET palette",
|
||
fileExtensions: ["txt"],
|
||
readFromText: require("./formats/Paint.NET"),
|
||
write: (require("./formats/Paint.NET")).write
|
||
},
|
||
GIMP_PALETTE: {
|
||
name: "GIMP palette",
|
||
fileExtensions: ["gpl", "gimp", "colors"],
|
||
readFromText: require("./formats/GIMP"),
|
||
write: (require("./formats/GIMP")).write
|
||
},
|
||
KDE_RGB_PALETTE: {
|
||
name: "KolourPaint palette",
|
||
fileExtensions: ["colors"],
|
||
readFromText: require("./formats/KolourPaint"),
|
||
write: (require("./formats/KolourPaint")).write
|
||
},
|
||
SKENCIL_PALETTE: {
|
||
name: "Skencil palette",
|
||
fileExtensions: ["spl"],
|
||
readFromText: require("./formats/SPL"),
|
||
write: (require("./formats/SPL")).write
|
||
},
|
||
SKETCH_JSON_PALETTE: {
|
||
name: "Sketch palette",
|
||
fileExtensions: ["sketchpalette"],
|
||
readFromText: require("./formats/sketchpalette"),
|
||
write: (require("./formats/sketchpalette")).write
|
||
},
|
||
SK1_PALETTE: {
|
||
name: "sK1 palette",
|
||
fileExtensions: ["skp"],
|
||
readFromText: require("./formats/SKP"),
|
||
write: (require("./formats/SKP")).write
|
||
},
|
||
WINDOWS_THEME_COLORS: {
|
||
name: "Windows desktop theme",
|
||
fileExtensions: ["theme", "themepack"],
|
||
readFromText: require("./formats/theme")
|
||
},
|
||
ADOBE_SWATCH_EXCHANGE_PALETTE: {
|
||
name: "Adobe Swatch Exchange",
|
||
fileExtensions: ["ase"],
|
||
read: (require("./formats/Adobe")).read_adobe_swatch_exchange,
|
||
write: (require("./formats/Adobe")).write_adobe_swatch_exchange
|
||
},
|
||
ADOBE_COLOR_BOOK_PALETTE: {
|
||
name: "Adobe Color Book",
|
||
fileExtensions: ["acb"],
|
||
read: (require("./formats/Adobe")).read_adobe_color_book
|
||
},
|
||
STAROFFICE_PALETTE: {
|
||
name: "StarOffice Colors",
|
||
fileExtensions: ["soc"],
|
||
readFromText: (require("./formats/StarOffice")).read_soc,
|
||
write: (require("./formats/StarOffice")).write_libreoffice_soc
|
||
},
|
||
// KDE_THEME_COLORS: {
|
||
// name: "KDE desktop theme"
|
||
// fileExtensions: ["colors"]
|
||
// read: require "./formats/theme"
|
||
// }
|
||
CSS_VARIABLES: {
|
||
name: "CSS variables",
|
||
fileExtensions: ["css"],
|
||
write: (require("./formats/CSS")).write_css
|
||
},
|
||
SCSS_VARIABLES: {
|
||
name: "SCSS variables",
|
||
fileExtensions: ["scss"],
|
||
write: (require("./formats/CSS")).write_scss
|
||
},
|
||
SASS_VARIABLES: {
|
||
name: "SASS variables",
|
||
fileExtensions: ["sass"],
|
||
write: (require("./formats/CSS")).write_sass
|
||
},
|
||
LESS_VARIABLES: {
|
||
name: "LESS variables",
|
||
fileExtensions: ["less"],
|
||
write: (require("./formats/CSS")).write_less
|
||
},
|
||
STYLUS_VARIABLES: {
|
||
name: "Stylus variables",
|
||
fileExtensions: ["styl"],
|
||
write: (require("./formats/CSS")).write_styl
|
||
},
|
||
CSS_COLORS: {
|
||
name: "CSS colors",
|
||
fileExtensions: ["css", "scss", "sass", "less", "styl", "html", "htm", "svg", "js", "ts", "xml", "txt"],
|
||
readFromText: require("./formats/CSS")
|
||
},
|
||
HOMESITE_PALETTE: {
|
||
name: "Homesite palette",
|
||
fileExtensions: ["hpl"],
|
||
readFromText: require("./formats/Homesite"),
|
||
write: (require("./formats/Homesite")).write
|
||
},
|
||
ADOBE_COLOR_SWATCH_PALETTE: {
|
||
name: "Adobe Color Swatch",
|
||
fileExtensions: ["aco"],
|
||
read: (require("./formats/Adobe")).read_adobe_color_swatch,
|
||
write: (require("./formats/Adobe")).write_adobe_color_swatch
|
||
},
|
||
ADOBE_COLOR_TABLE_PALETTE: {
|
||
name: "Adobe Color Table",
|
||
fileExtensions: ["act"],
|
||
read: require("./formats/AdobeColorTable"),
|
||
write: (require("./formats/AdobeColorTable")).write
|
||
},
|
||
STARCRAFT_PALETTE: {
|
||
name: "StarCraft palette",
|
||
fileExtensions: ["pal"],
|
||
read: require("./formats/StarCraft"),
|
||
write: (require("./formats/StarCraft")).write
|
||
},
|
||
STARCRAFT_PADDED: {
|
||
name: "StarCraft terrain palette",
|
||
fileExtensions: ["wpe"],
|
||
read: require("./formats/StarCraftPadded"),
|
||
write: (require("./formats/StarCraftPadded")).write
|
||
},
|
||
// AUTOCAD_COLOR_BOOK_PALETTE: {
|
||
// name: "AutoCAD Color Book"
|
||
// fileExtensions: ["acb"]
|
||
// readFromText?: require "./formats/AutoCADColorBook"
|
||
// }
|
||
|
||
// CORELDRAW_PALETTE: {
|
||
// # (same as Paint Shop Pro palette?)
|
||
// name: "CorelDRAW palette"
|
||
// fileExtensions: ["pal", "cpl"]
|
||
// readFromText?: require "./formats/CorelDRAW"
|
||
// }
|
||
TABULAR: {
|
||
name: "tabular colors",
|
||
fileExtensions: ["csv", "tsv", "txt"],
|
||
readFromText: require("./formats/tabular")
|
||
}
|
||
};
|
||
|
||
ref = Object.keys(formats);
|
||
for (k = 0, len = ref.length; k < len; k++) {
|
||
format_id = ref[k];
|
||
format = formats[format_id];
|
||
format.fileExtensionsPretty = `.${format.fileExtensions.join(", .")}`;
|
||
}
|
||
|
||
read_palette = function(o, callback) {
|
||
var e, err, errors, format_ids, l, len1, len2, m, matching_ext, msg, palette, ref1;
|
||
o.fileContentString = typeof o.data === "string" ? o.data : new TextDecoder().decode(o.data);
|
||
// find formats that use this file extension
|
||
matching_ext = {};
|
||
ref1 = Object.keys(formats);
|
||
for (l = 0, len1 = ref1.length; l < len1; l++) {
|
||
format_id = ref1[l];
|
||
if (formats[format_id].fileExtensions.indexOf(o.fileExt) > -1) {
|
||
matching_ext[format_id] = true;
|
||
}
|
||
}
|
||
|
||
// sort formats to the beginning that use this file extension
|
||
format_ids = Object.keys(formats);
|
||
format_ids.sort(function(format_id_1, format_id_2) {
|
||
return (matching_ext[format_id_2] != null) - (matching_ext[format_id_1] != null);
|
||
});
|
||
|
||
// try loading stuff
|
||
errors = [];
|
||
for (m = 0, len2 = format_ids.length; m < len2; m++) {
|
||
format_id = format_ids[m];
|
||
format = formats[format_id];
|
||
if (!(format.read || format.readFromText)) {
|
||
continue; // skip this format
|
||
}
|
||
try {
|
||
if (format.readFromText) {
|
||
palette = format.readFromText(o);
|
||
} else {
|
||
palette = format.read(o);
|
||
}
|
||
if (palette.length === 0) {
|
||
palette = null;
|
||
throw new Error("no colors returned");
|
||
}
|
||
} catch (error1) {
|
||
e = error1;
|
||
// TODO: should this be "failed to read"?
|
||
msg = `failed to load ${o.fileName} as ${format.name}: ${e.message}`;
|
||
// msg = "failed to load #{o.fileName} as #{format.name}: #{if format_id.match(/staroffice/i) then e.stack else e.message}"
|
||
// if matching_ext[format_id]? #and not e.message.match(/not a/i) # meant to avoid "Not a <FORMAT> Palette", overly broad
|
||
// console?.error? msg
|
||
// else
|
||
// console?.warn? msg
|
||
|
||
// TODO: maybe this shouldn't be an Error object, just a {message, error} object
|
||
// or {friendlyMessage, error}
|
||
err = new Error(msg);
|
||
err.error = e;
|
||
err.__PATCHED_LIB_TO_ADD_THIS__format = format;
|
||
errors.push(err);
|
||
}
|
||
if (palette) {
|
||
// console?.info? "loaded #{o.fileName} as #{format.name}"
|
||
palette.confidence = matching_ext[format_id] != null ? 0.9 : 0.01;
|
||
callback(null, palette, format, matching_ext[format_id] != null, {
|
||
__errors_before_success: errors
|
||
});
|
||
return;
|
||
}
|
||
}
|
||
callback(new LoadingErrors(errors));
|
||
};
|
||
|
||
normalize_options = function(o = {}) {
|
||
var ref1, ref2;
|
||
if (typeof o === "string" || o instanceof String) {
|
||
o = {
|
||
filePath: o
|
||
};
|
||
}
|
||
if ((typeof File !== "undefined" && File !== null) && o instanceof File) {
|
||
o = {
|
||
file: o
|
||
};
|
||
}
|
||
|
||
// o.minColors ?= 2
|
||
// o.maxColors ?= 256
|
||
if (o.fileName == null) {
|
||
o.fileName = (ref1 = (ref2 = o.file) != null ? ref2.name : void 0) != null ? ref1 : (o.filePath ? require("path").basename(o.filePath) : void 0);
|
||
}
|
||
if (o.fileExt == null) {
|
||
o.fileExt = `${o.fileName}`.split(".").pop();
|
||
}
|
||
o.fileExt = `${o.fileExt}`.toLowerCase();
|
||
return o;
|
||
};
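// For example (illustrative): normalize_options("palettes/Sunset.GPL") returns
// {filePath: "palettes/Sunset.GPL", fileName: "Sunset.GPL", fileExt: "gpl"}.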
|
||
|
||
// LoadingErrors
|
||
AnyPalette = {Color, Palette, formats};
|
||
|
||
// Get palette from a file
AnyPalette.loadPalette = function(o, callback) {
  var fr, fs;
  if (!o) {
    throw new TypeError("parameters required: AnyPalette.loadPalette(options, function callback(error, palette){})");
  }
  if (!callback) {
    throw new TypeError("callback required: AnyPalette.loadPalette(options, function callback(error, palette){})");
  }
  o = normalize_options(o);
  if (o.data) {
    return read_palette(o, callback);
  } else if (o.file) {
    if (!(o.file instanceof File)) {
      throw new TypeError("options.file was passed but it is not a File");
    }
    fr = new FileReader();
    fr.onerror = function() {
      return callback(fr.error);
    };
    fr.onload = function() {
      o.data = fr.result;
      return read_palette(o, callback);
    };
    return fr.readAsArrayBuffer(o.file);
  } else if (o.filePath != null) {
    fs = require("fs");
    return fs.readFile(o.filePath, function(error, data) {
      if (error) {
        return callback(error);
      } else {
        o.data = data;
        return read_palette(o, callback);
      }
    });
  } else {
    throw new TypeError("either options.data or options.file or options.filePath must be passed");
  }
};

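// Usage sketch (illustrative; the callback arguments follow read_palette above):
//   AnyPalette.loadPalette({data: fileContentStringOrArrayBuffer, fileName: "example.gpl"}, function(error, palette, format) {
//     if (error) { /* possibly a LoadingErrors aggregate, see above */ return; }
//     console.log("Loaded as " + format.name + " with " + palette.length + " colors");
//   });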
AnyPalette.writePalette = function(palette, format) {
  if (format == null) {
    format = AnyPalette.formats.GIMP_PALETTE;
  }
  return format.write(palette);
};
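// For example, AnyPalette.writePalette(palette, AnyPalette.formats.CSS_VARIABLES) returns ":root { ... }" text,
// and with no format argument it falls back to the GIMP .gpl format.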
|
||
// file = new File([palette_content_string], (palette.name ? "Saved Colors") + ".#{format.fileExtensions[0]}")
|
||
// return [file, format.fileExtensions[0]]
|
||
AnyPalette.uniqueColors = function(palette, epsilon) {
|
||
var i, i_color, j, j_color, new_palette, ref1;
|
||
new_palette = new Palette();
|
||
  new_palette.name = palette.name;
  new_palette.description = palette.description;
|
||
// These aren't super meaningful if some colors are removed:
|
||
// new_palette.numberOfColumns = palette.numberOfColumns
|
||
// new_palette.geometrySpecifiedByFile = palette.geometrySpecifiedByFile
|
||
splice.apply(new_palette, [0, 9e9].concat(ref1 = palette.slice(0))), ref1;
|
||
// In-place uniquify
|
||
// (Can't simply use `new_palette[..] = [...new Set(palette)]` because it's Color objects, not strings)
|
||
i = 0;
|
||
while (i < new_palette.length) {
|
||
i_color = new_palette[i];
|
||
j = i + 1;
|
||
while (j < new_palette.length) {
|
||
j_color = new_palette[j];
|
||
if (Color.is(i_color, j_color, epsilon)) {
|
||
new_palette.splice(j, 1);
|
||
j -= 1;
|
||
}
|
||
j += 1;
|
||
}
|
||
i += 1;
|
||
}
|
||
return new_palette;
|
||
};
|
||
|
||
// Exports
|
||
module.exports = AnyPalette;
|
||
|
||
|
||
},{"./Color":36,"./Palette":37,"./formats/Adobe":38,"./formats/AdobeColorTable":39,"./formats/CSS":40,"./formats/ColorSchemer":41,"./formats/GIMP":42,"./formats/Homesite":43,"./formats/KolourPaint":44,"./formats/Paint.NET":45,"./formats/PaintShopPro":46,"./formats/RIFF":47,"./formats/SKP":48,"./formats/SPL":49,"./formats/StarCraft":50,"./formats/StarCraftPadded":51,"./formats/StarOffice":52,"./formats/sketchpalette":53,"./formats/tabular":54,"./formats/theme":55,"fs":"fs","path":"path"}]},{},[57])(57)
|
||
});
|
||
|