// PouchDB localStorage plugin 7.3.0
// Based on localstorage-down: https://github.com/No9/localstorage-down
//
// (c) 2012-2022 Dale Harvey and the PouchDB team
// PouchDB may be freely distributed under the Apache license, version 2.0.
// For all details and documentation:
// http://pouchdb.com
(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(_dereq_,module,exports){
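// Note: everything below is standard browserify bundle output. The IIFE above
// is the bundle prelude: it is handed a map of numeric module ids to
// [factory, dependencyMap] pairs, and `_dereq_(id)` (browserify's renamed
// `require`) resolves ids through that map, caching each module's exports.
// Rough shape of the call, for orientation only (not part of the source):
//
//   prelude({ 1: [function (_dereq_, module, exports) { /* module 1 */ }, { 73: 73 }],
//             2: [/* ... */] } /* , cache and entry ids follow where the bundle closes */)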
(function (process){(function (){
/* Copyright (c) 2013 Rod Vagg, MIT License */

function AbstractChainedBatch (db) {
  this._db         = db
  this._operations = []
  this._written    = false
}

AbstractChainedBatch.prototype._checkWritten = function () {
  if (this._written)
    throw new Error('write() already called on this batch')
}

AbstractChainedBatch.prototype.put = function (key, value) {
  this._checkWritten()

  var err = this._db._checkKeyValue(key, 'key', this._db._isBuffer)
  if (err) throw err
  err = this._db._checkKeyValue(value, 'value', this._db._isBuffer)
  if (err) throw err

  if (!this._db._isBuffer(key)) key = String(key)
  if (!this._db._isBuffer(value)) value = String(value)

  if (typeof this._put == 'function' )
    this._put(key, value)
  else
    this._operations.push({ type: 'put', key: key, value: value })

  return this
}

AbstractChainedBatch.prototype.del = function (key) {
  this._checkWritten()

  var err = this._db._checkKeyValue(key, 'key', this._db._isBuffer)
  if (err) throw err

  if (!this._db._isBuffer(key)) key = String(key)

  if (typeof this._del == 'function' )
    this._del(key)
  else
    this._operations.push({ type: 'del', key: key })

  return this
}

AbstractChainedBatch.prototype.clear = function () {
  this._checkWritten()

  this._operations = []

  if (typeof this._clear == 'function' )
    this._clear()

  return this
}

AbstractChainedBatch.prototype.write = function (options, callback) {
  this._checkWritten()

  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('write() requires a callback argument')
  if (typeof options != 'object')
    options = {}

  this._written = true

  if (typeof this._write == 'function' )
    return this._write(callback)

  if (typeof this._db._batch == 'function')
    return this._db._batch(this._operations, options, callback)

  process.nextTick(callback)
}

module.exports = AbstractChainedBatch
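// Usage sketch for the chained batch above (illustrative only; `db` stands for
// any concrete store built on this abstract-leveldown base, not an API defined
// in this file):
//
//   db.batch()
//     .put('key1', 'value1')
//     .del('key2')
//     .write(function (err) { /* all queued operations applied, or err */ })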
}).call(this)}).call(this,_dereq_(73))
},{"73":73}],2:[function(_dereq_,module,exports){
(function (process){(function (){
/* Copyright (c) 2013 Rod Vagg, MIT License */

function AbstractIterator (db) {
  this.db = db
  this._ended = false
  this._nexting = false
}

AbstractIterator.prototype.next = function (callback) {
  var self = this

  if (typeof callback != 'function')
    throw new Error('next() requires a callback argument')

  if (self._ended)
    return callback(new Error('cannot call next() after end()'))
  if (self._nexting)
    return callback(new Error('cannot call next() before previous next() has completed'))

  self._nexting = true
  if (typeof self._next == 'function') {
    return self._next(function () {
      self._nexting = false
      callback.apply(null, arguments)
    })
  }

  process.nextTick(function () {
    self._nexting = false
    callback()
  })
}

AbstractIterator.prototype.end = function (callback) {
  if (typeof callback != 'function')
    throw new Error('end() requires a callback argument')

  if (this._ended)
    return callback(new Error('end() already called on iterator'))

  this._ended = true

  if (typeof this._end == 'function')
    return this._end(callback)

  process.nextTick(callback)
}

module.exports = AbstractIterator

}).call(this)}).call(this,_dereq_(73))
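// Usage sketch for the iterator protocol above (illustrative only; `iter`
// would come from db.iterator(options) on a concrete store, which overrides
// _next/_end):
//
//   (function pump () {
//     iter.next(function (err, key, value) {
//       if (err || key === undefined)        // exhausted or failed
//         return iter.end(function () {})
//       // consume key/value, then keep pumping
//       pump()
//     })
//   })()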
},{"73":73}],3:[function(_dereq_,module,exports){
(function (Buffer,process){(function (){
/* Copyright (c) 2013 Rod Vagg, MIT License */

var xtend                = _dereq_(4)
  , AbstractIterator     = _dereq_(2)
  , AbstractChainedBatch = _dereq_(1)

function AbstractLevelDOWN (location) {
  if (!arguments.length || location === undefined)
    throw new Error('constructor requires at least a location argument')

  if (typeof location != 'string')
    throw new Error('constructor requires a location string argument')

  this.location = location
}

AbstractLevelDOWN.prototype.open = function (options, callback) {
  if (typeof options == 'function')
    callback = options

  if (typeof callback != 'function')
    throw new Error('open() requires a callback argument')

  if (typeof options != 'object')
    options = {}

  if (typeof this._open == 'function')
    return this._open(options, callback)

  process.nextTick(callback)
}

AbstractLevelDOWN.prototype.close = function (callback) {
  if (typeof callback != 'function')
    throw new Error('close() requires a callback argument')

  if (typeof this._close == 'function')
    return this._close(callback)

  process.nextTick(callback)
}

AbstractLevelDOWN.prototype.get = function (key, options, callback) {
  var err

  if (typeof options == 'function')
    callback = options

  if (typeof callback != 'function')
    throw new Error('get() requires a callback argument')

  if (err = this._checkKeyValue(key, 'key', this._isBuffer))
    return callback(err)

  if (!this._isBuffer(key))
    key = String(key)

  if (typeof options != 'object')
    options = {}

  if (typeof this._get == 'function')
    return this._get(key, options, callback)

  process.nextTick(function () { callback(new Error('NotFound')) })
}

AbstractLevelDOWN.prototype.put = function (key, value, options, callback) {
  var err

  if (typeof options == 'function')
    callback = options

  if (typeof callback != 'function')
    throw new Error('put() requires a callback argument')

  if (err = this._checkKeyValue(key, 'key', this._isBuffer))
    return callback(err)

  if (err = this._checkKeyValue(value, 'value', this._isBuffer))
    return callback(err)

  if (!this._isBuffer(key))
    key = String(key)

  // coerce value to string in node, don't touch it in browser
  // (indexeddb can store any JS type)
  if (!this._isBuffer(value) && !process.browser)
    value = String(value)

  if (typeof options != 'object')
    options = {}

  if (typeof this._put == 'function')
    return this._put(key, value, options, callback)

  process.nextTick(callback)
}

AbstractLevelDOWN.prototype.del = function (key, options, callback) {
  var err

  if (typeof options == 'function')
    callback = options

  if (typeof callback != 'function')
    throw new Error('del() requires a callback argument')

  if (err = this._checkKeyValue(key, 'key', this._isBuffer))
    return callback(err)

  if (!this._isBuffer(key))
    key = String(key)

  if (typeof options != 'object')
    options = {}

  if (typeof this._del == 'function')
    return this._del(key, options, callback)

  process.nextTick(callback)
}

AbstractLevelDOWN.prototype.batch = function (array, options, callback) {
  if (!arguments.length)
    return this._chainedBatch()

  if (typeof options == 'function')
    callback = options

  if (typeof callback != 'function')
    throw new Error('batch(array) requires a callback argument')

  if (!Array.isArray(array))
    return callback(new Error('batch(array) requires an array argument'))

  if (typeof options != 'object')
    options = {}

  var i = 0
    , l = array.length
    , e
    , err

  for (; i < l; i++) {
    e = array[i]
    if (typeof e != 'object')
      continue

    if (err = this._checkKeyValue(e.type, 'type', this._isBuffer))
      return callback(err)

    if (err = this._checkKeyValue(e.key, 'key', this._isBuffer))
      return callback(err)

    if (e.type == 'put') {
      if (err = this._checkKeyValue(e.value, 'value', this._isBuffer))
        return callback(err)
    }
  }

  if (typeof this._batch == 'function')
    return this._batch(array, options, callback)

  process.nextTick(callback)
}
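// The array form of batch() expects operation descriptors shaped like this
// (illustrative keys/values, not part of the original source):
//
//   db.batch([
//     { type: 'put', key: 'a', value: '1' },
//     { type: 'del', key: 'b' }
//   ], function (err) { /* done */ })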

//TODO: remove from here, not a necessary primitive
AbstractLevelDOWN.prototype.approximateSize = function (start, end, callback) {
  if (   start == null
      || end == null
      || typeof start == 'function'
      || typeof end == 'function') {
    throw new Error('approximateSize() requires valid `start`, `end` and `callback` arguments')
  }

  if (typeof callback != 'function')
    throw new Error('approximateSize() requires a callback argument')

  if (!this._isBuffer(start))
    start = String(start)

  if (!this._isBuffer(end))
    end = String(end)

  if (typeof this._approximateSize == 'function')
    return this._approximateSize(start, end, callback)

  process.nextTick(function () {
    callback(null, 0)
  })
}

AbstractLevelDOWN.prototype._setupIteratorOptions = function (options) {
  var self = this

  options = xtend(options)

  ;[ 'start', 'end', 'gt', 'gte', 'lt', 'lte' ].forEach(function (o) {
    if (options[o] && self._isBuffer(options[o]) && options[o].length === 0)
      delete options[o]
  })

  options.reverse = !!options.reverse

  // fix `start` so it takes into account gt, gte, lt, lte as appropriate
  if (options.reverse && options.lt)
    options.start = options.lt
  if (options.reverse && options.lte)
    options.start = options.lte
  if (!options.reverse && options.gt)
    options.start = options.gt
  if (!options.reverse && options.gte)
    options.start = options.gte

  if ((options.reverse && options.lt && !options.lte)
    || (!options.reverse && options.gt && !options.gte))
    options.exclusiveStart = true // start should *not* include matching key

  return options
}
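// Example of the normalization above (illustrative): a forward iterator asked
// for { gt: 'a' } ends up with { start: 'a', exclusiveStart: true }, while
// { gte: 'a' } yields { start: 'a' } and leaves exclusiveStart unset, so the
// matching key itself is included.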

AbstractLevelDOWN.prototype.iterator = function (options) {
  if (typeof options != 'object')
    options = {}

  options = this._setupIteratorOptions(options)

  if (typeof this._iterator == 'function')
    return this._iterator(options)

  return new AbstractIterator(this)
}

AbstractLevelDOWN.prototype._chainedBatch = function () {
  return new AbstractChainedBatch(this)
}

AbstractLevelDOWN.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj)
}

AbstractLevelDOWN.prototype._checkKeyValue = function (obj, type) {
  if (obj === null || obj === undefined)
    return new Error(type + ' cannot be `null` or `undefined`')

  if (obj === null || obj === undefined)
    return new Error(type + ' cannot be `null` or `undefined`')

  if (this._isBuffer(obj)) {
    if (obj.length === 0)
      return new Error(type + ' cannot be an empty Buffer')
  } else if (String(obj) === '')
    return new Error(type + ' cannot be an empty String')
}

module.exports.AbstractLevelDOWN    = AbstractLevelDOWN
module.exports.AbstractIterator     = AbstractIterator
module.exports.AbstractChainedBatch = AbstractChainedBatch

}).call(this)}).call(this,{"isBuffer":_dereq_(38)},_dereq_(73))
},{"1":1,"2":2,"38":38,"4":4,"73":73}],4:[function(_dereq_,module,exports){
module.exports = extend

function extend() {
    var target = {}

    for (var i = 0; i < arguments.length; i++) {
        var source = arguments[i]

        for (var key in source) {
            if (source.hasOwnProperty(key)) {
                target[key] = source[key]
            }
        }
    }

    return target
}

},{}],5:[function(_dereq_,module,exports){
'use strict';

module.exports = argsArray;

function argsArray(fun) {
  return function () {
    var len = arguments.length;
    if (len) {
      var args = [];
      var i = -1;
      while (++i < len) {
        args[i] = arguments[i];
      }
      return fun.call(this, args);
    } else {
      return fun.call(this, []);
    }
  };
}
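// argsArray turns a variadic function into one that receives a single real
// array of its arguments. Illustrative sketch (names are examples only):
//
//   var sum = argsArray(function (args) {
//     return args.reduce(function (a, b) { return a + b }, 0);
//   });
//   sum(1, 2, 3);   // -> 6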
},{}],6:[function(_dereq_,module,exports){
(function (global){(function (){
'use strict';

var objectAssign = _dereq_(71);

// compare and isBuffer taken from https://github.com/feross/buffer/blob/680e9e5e488f22aac27599a57dc844a6315928dd/index.js
// original notice:

/*!
 * The buffer module from node.js, for the browser.
 *
 * @author   Feross Aboukhadijeh <feross@feross.org> <http://feross.org>
 * @license  MIT
 */
function compare(a, b) {
  if (a === b) {
    return 0;
  }

  var x = a.length;
  var y = b.length;

  for (var i = 0, len = Math.min(x, y); i < len; ++i) {
    if (a[i] !== b[i]) {
      x = a[i];
      y = b[i];
      break;
    }
  }

  if (x < y) {
    return -1;
  }
  if (y < x) {
    return 1;
  }
  return 0;
}
function isBuffer(b) {
  if (global.Buffer && typeof global.Buffer.isBuffer === 'function') {
    return global.Buffer.isBuffer(b);
  }
  return !!(b != null && b._isBuffer);
}

// based on node assert, original notice:
// NB: The URL to the CommonJS spec is kept just for tradition.
//     node-assert has evolved a lot since then, both in API and behavior.

// http://wiki.commonjs.org/wiki/Unit_Testing/1.0
//
// THIS IS NOT TESTED NOR LIKELY TO WORK OUTSIDE V8!
//
// Originally from narwhal.js (http://narwhaljs.org)
// Copyright (c) 2009 Thomas Robinson <280north.com>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the 'Software'), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

var util = _dereq_(9);
var hasOwn = Object.prototype.hasOwnProperty;
var pSlice = Array.prototype.slice;
var functionsHaveNames = (function () {
  return function foo() {}.name === 'foo';
}());
function pToString (obj) {
  return Object.prototype.toString.call(obj);
}
function isView(arrbuf) {
  if (isBuffer(arrbuf)) {
    return false;
  }
  if (typeof global.ArrayBuffer !== 'function') {
    return false;
  }
  if (typeof ArrayBuffer.isView === 'function') {
    return ArrayBuffer.isView(arrbuf);
  }
  if (!arrbuf) {
    return false;
  }
  if (arrbuf instanceof DataView) {
    return true;
  }
  if (arrbuf.buffer && arrbuf.buffer instanceof ArrayBuffer) {
    return true;
  }
  return false;
}
// 1. The assert module provides functions that throw
// AssertionError's when particular conditions are not met. The
// assert module must conform to the following interface.

var assert = module.exports = ok;

// 2. The AssertionError is defined in assert.
// new assert.AssertionError({ message: message,
//                             actual: actual,
//                             expected: expected })

var regex = /\s*function\s+([^\(\s]*)\s*/;
// based on https://github.com/ljharb/function.prototype.name/blob/adeeeec8bfcc6068b187d7d9fb3d5bb1d3a30899/implementation.js
function getName(func) {
  if (!util.isFunction(func)) {
    return;
  }
  if (functionsHaveNames) {
    return func.name;
  }
  var str = func.toString();
  var match = str.match(regex);
  return match && match[1];
}
assert.AssertionError = function AssertionError(options) {
  this.name = 'AssertionError';
  this.actual = options.actual;
  this.expected = options.expected;
  this.operator = options.operator;
  if (options.message) {
    this.message = options.message;
    this.generatedMessage = false;
  } else {
    this.message = getMessage(this);
    this.generatedMessage = true;
  }
  var stackStartFunction = options.stackStartFunction || fail;
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this, stackStartFunction);
  } else {
    // non v8 browsers so we can have a stacktrace
    var err = new Error();
    if (err.stack) {
      var out = err.stack;

      // try to strip useless frames
      var fn_name = getName(stackStartFunction);
      var idx = out.indexOf('\n' + fn_name);
      if (idx >= 0) {
        // once we have located the function frame
        // we need to strip out everything before it (and its line)
        var next_line = out.indexOf('\n', idx + 1);
        out = out.substring(next_line + 1);
      }

      this.stack = out;
    }
  }
};

// assert.AssertionError instanceof Error
util.inherits(assert.AssertionError, Error);

function truncate(s, n) {
  if (typeof s === 'string') {
    return s.length < n ? s : s.slice(0, n);
  } else {
    return s;
  }
}
function inspect(something) {
  if (functionsHaveNames || !util.isFunction(something)) {
    return util.inspect(something);
  }
  var rawname = getName(something);
  var name = rawname ? ': ' + rawname : '';
  return '[Function' +  name + ']';
}
function getMessage(self) {
  return truncate(inspect(self.actual), 128) + ' ' +
         self.operator + ' ' +
         truncate(inspect(self.expected), 128);
}

// At present only the three keys mentioned above are used and
// understood by the spec. Implementations or sub modules can pass
// other keys to the AssertionError's constructor - they will be
// ignored.

// 3. All of the following functions must throw an AssertionError
// when a corresponding condition is not met, with a message that
// may be undefined if not provided.  All assertion methods provide
// both the actual and expected values to the assertion error for
// display purposes.

function fail(actual, expected, message, operator, stackStartFunction) {
  throw new assert.AssertionError({
    message: message,
    actual: actual,
    expected: expected,
    operator: operator,
    stackStartFunction: stackStartFunction
  });
}

// EXTENSION! allows for well behaved errors defined elsewhere.
assert.fail = fail;

// 4. Pure assertion tests whether a value is truthy, as determined
// by !!guard.
// assert.ok(guard, message_opt);
// This statement is equivalent to assert.equal(true, !!guard,
// message_opt);. To test strictly for the value true, use
// assert.strictEqual(true, guard, message_opt);.

function ok(value, message) {
  if (!value) fail(value, true, message, '==', assert.ok);
}
assert.ok = ok;
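// This module is the browserified `assert` shim; inside the bundle it is
// loaded through _dereq_ rather than require('assert'). Illustrative calls:
//
//   assert.ok(1 + 1 === 2, 'arithmetic still works')
//   assert.equal('2', 2)        // coercive ==, passes
//   assert.strictEqual(2, 2)    // ===, passes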

// 5. The equality assertion tests shallow, coercive equality with
// ==.
// assert.equal(actual, expected, message_opt);

assert.equal = function equal(actual, expected, message) {
  if (actual != expected) fail(actual, expected, message, '==', assert.equal);
};

// 6. The non-equality assertion tests for whether two objects are not equal
// with != assert.notEqual(actual, expected, message_opt);

assert.notEqual = function notEqual(actual, expected, message) {
  if (actual == expected) {
    fail(actual, expected, message, '!=', assert.notEqual);
  }
};

// 7. The equivalence assertion tests a deep equality relation.
// assert.deepEqual(actual, expected, message_opt);

assert.deepEqual = function deepEqual(actual, expected, message) {
  if (!_deepEqual(actual, expected, false)) {
    fail(actual, expected, message, 'deepEqual', assert.deepEqual);
  }
};

assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) {
  if (!_deepEqual(actual, expected, true)) {
    fail(actual, expected, message, 'deepStrictEqual', assert.deepStrictEqual);
  }
};

function _deepEqual(actual, expected, strict, memos) {
  // 7.1. All identical values are equivalent, as determined by ===.
  if (actual === expected) {
    return true;
  } else if (isBuffer(actual) && isBuffer(expected)) {
    return compare(actual, expected) === 0;

  // 7.2. If the expected value is a Date object, the actual value is
  // equivalent if it is also a Date object that refers to the same time.
  } else if (util.isDate(actual) && util.isDate(expected)) {
    return actual.getTime() === expected.getTime();

  // 7.3 If the expected value is a RegExp object, the actual value is
  // equivalent if it is also a RegExp object with the same source and
  // properties (`global`, `multiline`, `lastIndex`, `ignoreCase`).
  } else if (util.isRegExp(actual) && util.isRegExp(expected)) {
    return actual.source === expected.source &&
           actual.global === expected.global &&
           actual.multiline === expected.multiline &&
           actual.lastIndex === expected.lastIndex &&
           actual.ignoreCase === expected.ignoreCase;

  // 7.4. Other pairs that do not both pass typeof value == 'object',
  // equivalence is determined by ==.
  } else if ((actual === null || typeof actual !== 'object') &&
             (expected === null || typeof expected !== 'object')) {
    return strict ? actual === expected : actual == expected;

  // If both values are instances of typed arrays, wrap their underlying
  // ArrayBuffers in a Buffer each to increase performance
  // This optimization requires the arrays to have the same type as checked by
  // Object.prototype.toString (aka pToString). Never perform binary
  // comparisons for Float*Arrays, though, since e.g. +0 === -0 but their
  // bit patterns are not identical.
  } else if (isView(actual) && isView(expected) &&
             pToString(actual) === pToString(expected) &&
             !(actual instanceof Float32Array ||
               actual instanceof Float64Array)) {
    return compare(new Uint8Array(actual.buffer),
                   new Uint8Array(expected.buffer)) === 0;

  // 7.5 For all other Object pairs, including Array objects, equivalence is
  // determined by having the same number of owned properties (as verified
  // with Object.prototype.hasOwnProperty.call), the same set of keys
  // (although not necessarily the same order), equivalent values for every
  // corresponding key, and an identical 'prototype' property. Note: this
  // accounts for both named and indexed properties on Arrays.
  } else if (isBuffer(actual) !== isBuffer(expected)) {
    return false;
  } else {
    memos = memos || {actual: [], expected: []};

    var actualIndex = memos.actual.indexOf(actual);
    if (actualIndex !== -1) {
      if (actualIndex === memos.expected.indexOf(expected)) {
        return true;
      }
    }

    memos.actual.push(actual);
    memos.expected.push(expected);

    return objEquiv(actual, expected, strict, memos);
  }
}
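// _deepEqual dispatches on value kind (buffers, dates, regexps, primitives,
// typed-array views, then plain objects via objEquiv), using the memos arrays
// to tolerate circular references. Illustrative behaviour:
//
//   _deepEqual([1, 2], [1, 2], false)        // true
//   _deepEqual({ a: 1 }, { a: '1' }, false)  // true  (coercive ==)
//   _deepEqual({ a: 1 }, { a: '1' }, true)   // false (strict ===)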

function isArguments(object) {
  return Object.prototype.toString.call(object) == '[object Arguments]';
}

function objEquiv(a, b, strict, actualVisitedObjects) {
  if (a === null || a === undefined || b === null || b === undefined)
    return false;
  // if one is a primitive, the other must be same
  if (util.isPrimitive(a) || util.isPrimitive(b))
    return a === b;
  if (strict && Object.getPrototypeOf(a) !== Object.getPrototypeOf(b))
    return false;
  var aIsArgs = isArguments(a);
  var bIsArgs = isArguments(b);
  if ((aIsArgs && !bIsArgs) || (!aIsArgs && bIsArgs))
    return false;
  if (aIsArgs) {
    a = pSlice.call(a);
    b = pSlice.call(b);
    return _deepEqual(a, b, strict);
  }
  var ka = objectKeys(a);
  var kb = objectKeys(b);
  var key, i;
  // having the same number of owned properties (keys incorporates
  // hasOwnProperty)
  if (ka.length !== kb.length)
    return false;
  //the same set of keys (although not necessarily the same order),
  ka.sort();
  kb.sort();
  //~~~cheap key test
  for (i = ka.length - 1; i >= 0; i--) {
    if (ka[i] !== kb[i])
      return false;
  }
  //equivalent values for every corresponding key, and
  //~~~possibly expensive deep test
  for (i = ka.length - 1; i >= 0; i--) {
    key = ka[i];
    if (!_deepEqual(a[key], b[key], strict, actualVisitedObjects))
      return false;
  }
  return true;
}

// 8. The non-equivalence assertion tests for any deep inequality.
// assert.notDeepEqual(actual, expected, message_opt);

assert.notDeepEqual = function notDeepEqual(actual, expected, message) {
  if (_deepEqual(actual, expected, false)) {
    fail(actual, expected, message, 'notDeepEqual', assert.notDeepEqual);
  }
};

assert.notDeepStrictEqual = notDeepStrictEqual;
function notDeepStrictEqual(actual, expected, message) {
  if (_deepEqual(actual, expected, true)) {
    fail(actual, expected, message, 'notDeepStrictEqual', notDeepStrictEqual);
  }
}


// 9. The strict equality assertion tests strict equality, as determined by ===.
// assert.strictEqual(actual, expected, message_opt);

assert.strictEqual = function strictEqual(actual, expected, message) {
  if (actual !== expected) {
    fail(actual, expected, message, '===', assert.strictEqual);
  }
};

// 10. The strict non-equality assertion tests for strict inequality, as
// determined by !==.  assert.notStrictEqual(actual, expected, message_opt);

assert.notStrictEqual = function notStrictEqual(actual, expected, message) {
  if (actual === expected) {
    fail(actual, expected, message, '!==', assert.notStrictEqual);
  }
};

function expectedException(actual, expected) {
  if (!actual || !expected) {
    return false;
  }

  if (Object.prototype.toString.call(expected) == '[object RegExp]') {
    return expected.test(actual);
  }

  try {
    if (actual instanceof expected) {
      return true;
    }
  } catch (e) {
    // Ignore.  The instanceof check doesn't work for arrow functions.
  }

  if (Error.isPrototypeOf(expected)) {
    return false;
  }

  return expected.call({}, actual) === true;
}

function _tryBlock(block) {
  var error;
  try {
    block();
  } catch (e) {
    error = e;
  }
  return error;
}

function _throws(shouldThrow, block, expected, message) {
  var actual;

  if (typeof block !== 'function') {
    throw new TypeError('"block" argument must be a function');
  }

  if (typeof expected === 'string') {
    message = expected;
    expected = null;
  }

  actual = _tryBlock(block);

  message = (expected && expected.name ? ' (' + expected.name + ').' : '.') +
            (message ? ' ' + message : '.');

  if (shouldThrow && !actual) {
    fail(actual, expected, 'Missing expected exception' + message);
  }

  var userProvidedMessage = typeof message === 'string';
  var isUnwantedException = !shouldThrow && util.isError(actual);
  var isUnexpectedException = !shouldThrow && actual && !expected;

  if ((isUnwantedException &&
      userProvidedMessage &&
      expectedException(actual, expected)) ||
      isUnexpectedException) {
    fail(actual, expected, 'Got unwanted exception' + message);
  }

  if ((shouldThrow && actual && expected &&
      !expectedException(actual, expected)) || (!shouldThrow && actual)) {
    throw actual;
  }
}

// 11. Expected to throw an error:
// assert.throws(block, Error_opt, message_opt);

assert.throws = function(block, /*optional*/error, /*optional*/message) {
  _throws(true, block, error, message);
};

// EXTENSION! This is annoying to write outside this module.
assert.doesNotThrow = function(block, /*optional*/error, /*optional*/message) {
  _throws(false, block, error, message);
};

assert.ifError = function(err) { if (err) throw err; };

// Expose a strict only variant of assert
function strict(value, message) {
  if (!value) fail(value, true, message, '==', strict);
}
assert.strict = objectAssign(strict, assert, {
  equal: assert.strictEqual,
  deepEqual: assert.deepStrictEqual,
  notEqual: assert.notStrictEqual,
  notDeepEqual: assert.notDeepStrictEqual
});
assert.strict.strict = assert.strict;

var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) {
    if (hasOwn.call(obj, key)) keys.push(key);
  }
  return keys;
};

}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"71":71,"9":9}],7:[function(_dereq_,module,exports){
if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    ctor.prototype = Object.create(superCtor.prototype, {
      constructor: {
        value: ctor,
        enumerable: false,
        writable: true,
        configurable: true
      }
    });
  };
} else {
  // old school shim for old browsers
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    var TempCtor = function () {}
    TempCtor.prototype = superCtor.prototype
    ctor.prototype = new TempCtor()
    ctor.prototype.constructor = ctor
  }
}
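// Usage sketch for this `inherits` shim (names illustrative; within the bundle
// the function is this module's export):
//
//   function Animal () {}
//   function Dog () { Animal.call(this) }
//   inherits(Dog, Animal)
//   // Dog.prototype now delegates to Animal.prototype; Dog.super_ === Animal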

},{}],8:[function(_dereq_,module,exports){
module.exports = function isBuffer(arg) {
  return arg && typeof arg === 'object'
    && typeof arg.copy === 'function'
    && typeof arg.fill === 'function'
    && typeof arg.readUInt8 === 'function';
}
},{}],9:[function(_dereq_,module,exports){
(function (process,global){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

var formatRegExp = /%[sdj%]/g;
exports.format = function(f) {
  if (!isString(f)) {
    var objects = [];
    for (var i = 0; i < arguments.length; i++) {
      objects.push(inspect(arguments[i]));
    }
    return objects.join(' ');
  }

  var i = 1;
  var args = arguments;
  var len = args.length;
  var str = String(f).replace(formatRegExp, function(x) {
    if (x === '%%') return '%';
    if (i >= len) return x;
    switch (x) {
      case '%s': return String(args[i++]);
      case '%d': return Number(args[i++]);
      case '%j':
        try {
          return JSON.stringify(args[i++]);
        } catch (_) {
          return '[Circular]';
        }
      default:
        return x;
    }
  });
  for (var x = args[i]; i < len; x = args[++i]) {
    if (isNull(x) || !isObject(x)) {
      str += ' ' + x;
    } else {
      str += ' ' + inspect(x);
    }
  }
  return str;
};


// Mark that a method should not be used.
// Returns a modified function which warns once by default.
// If --no-deprecation is set, then it is a no-op.
exports.deprecate = function(fn, msg) {
  // Allow for deprecating things in the process of starting up.
  if (isUndefined(global.process)) {
    return function() {
      return exports.deprecate(fn, msg).apply(this, arguments);
    };
  }

  if (process.noDeprecation === true) {
    return fn;
  }

  var warned = false;
  function deprecated() {
    if (!warned) {
      if (process.throwDeprecation) {
        throw new Error(msg);
      } else if (process.traceDeprecation) {
        console.trace(msg);
      } else {
        console.error(msg);
      }
      warned = true;
    }
    return fn.apply(this, arguments);
  }

  return deprecated;
};


var debugs = {};
var debugEnviron;
exports.debuglog = function(set) {
  if (isUndefined(debugEnviron))
    debugEnviron = process.env.NODE_DEBUG || '';
  set = set.toUpperCase();
  if (!debugs[set]) {
    if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) {
      var pid = process.pid;
      debugs[set] = function() {
        var msg = exports.format.apply(exports, arguments);
        console.error('%s %d: %s', set, pid, msg);
      };
    } else {
      debugs[set] = function() {};
    }
  }
  return debugs[set];
};


/**
 * Echoes the value of a value. Tries to print the value out
 * in the best way possible given the different types.
 *
 * @param {Object} obj The object to print out.
 * @param {Object} opts Optional options object that alters the output.
 */
/* legacy: obj, showHidden, depth, colors*/
function inspect(obj, opts) {
  // default options
  var ctx = {
    seen: [],
    stylize: stylizeNoColor
  };
  // legacy...
  if (arguments.length >= 3) ctx.depth = arguments[2];
  if (arguments.length >= 4) ctx.colors = arguments[3];
  if (isBoolean(opts)) {
    // legacy...
    ctx.showHidden = opts;
  } else if (opts) {
    // got an "options" object
    exports._extend(ctx, opts);
  }
  // set default options
  if (isUndefined(ctx.showHidden)) ctx.showHidden = false;
  if (isUndefined(ctx.depth)) ctx.depth = 2;
  if (isUndefined(ctx.colors)) ctx.colors = false;
  if (isUndefined(ctx.customInspect)) ctx.customInspect = true;
  if (ctx.colors) ctx.stylize = stylizeWithColor;
  return formatValue(ctx, obj, ctx.depth);
}
exports.inspect = inspect;
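// inspect() here is the browserified node util.inspect. Illustrative calls
// (output shown is approximate):
//
//   inspect({ a: 1, b: [2, 3] })                 // roughly "{ a: 1, b: [ 2, 3 ] }"
//   inspect(obj, { depth: null, colors: true })  // unlimited depth, ANSI colors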


// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
inspect.colors = {
  'bold' : [1, 22],
  'italic' : [3, 23],
  'underline' : [4, 24],
  'inverse' : [7, 27],
  'white' : [37, 39],
  'grey' : [90, 39],
  'black' : [30, 39],
  'blue' : [34, 39],
  'cyan' : [36, 39],
  'green' : [32, 39],
  'magenta' : [35, 39],
  'red' : [31, 39],
  'yellow' : [33, 39]
};

// Don't use 'blue' not visible on cmd.exe
inspect.styles = {
  'special': 'cyan',
  'number': 'yellow',
  'boolean': 'yellow',
  'undefined': 'grey',
  'null': 'bold',
  'string': 'green',
  'date': 'magenta',
  // "name": intentionally not styling
  'regexp': 'red'
};


function stylizeWithColor(str, styleType) {
  var style = inspect.styles[styleType];

  if (style) {
    return '\u001b[' + inspect.colors[style][0] + 'm' + str +
           '\u001b[' + inspect.colors[style][1] + 'm';
  } else {
    return str;
  }
}


function stylizeNoColor(str, styleType) {
  return str;
}


function arrayToHash(array) {
  var hash = {};

  array.forEach(function(val, idx) {
    hash[val] = true;
  });

  return hash;
}


function formatValue(ctx, value, recurseTimes) {
  // Provide a hook for user-specified inspect functions.
  // Check that value is an object with an inspect function on it
  if (ctx.customInspect &&
      value &&
      isFunction(value.inspect) &&
      // Filter out the util module, its inspect function is special
      value.inspect !== exports.inspect &&
      // Also filter out any prototype objects using the circular check.
      !(value.constructor && value.constructor.prototype === value)) {
    var ret = value.inspect(recurseTimes, ctx);
    if (!isString(ret)) {
      ret = formatValue(ctx, ret, recurseTimes);
    }
    return ret;
  }

  // Primitive types cannot have properties
  var primitive = formatPrimitive(ctx, value);
  if (primitive) {
    return primitive;
  }

  // Look up the keys of the object.
  var keys = Object.keys(value);
  var visibleKeys = arrayToHash(keys);

  if (ctx.showHidden) {
    keys = Object.getOwnPropertyNames(value);
  }

  // IE doesn't make error fields non-enumerable
  // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx
  if (isError(value)
      && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) {
    return formatError(value);
  }

  // Some type of object without properties can be shortcutted.
  if (keys.length === 0) {
    if (isFunction(value)) {
      var name = value.name ? ': ' + value.name : '';
      return ctx.stylize('[Function' + name + ']', 'special');
    }
    if (isRegExp(value)) {
      return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
    }
    if (isDate(value)) {
      return ctx.stylize(Date.prototype.toString.call(value), 'date');
    }
    if (isError(value)) {
      return formatError(value);
    }
  }

  var base = '', array = false, braces = ['{', '}'];

  // Make Array say that they are Array
  if (isArray(value)) {
    array = true;
    braces = ['[', ']'];
  }

  // Make functions say that they are functions
  if (isFunction(value)) {
    var n = value.name ? ': ' + value.name : '';
    base = ' [Function' + n + ']';
  }

  // Make RegExps say that they are RegExps
  if (isRegExp(value)) {
    base = ' ' + RegExp.prototype.toString.call(value);
  }

  // Make dates with properties first say the date
  if (isDate(value)) {
    base = ' ' + Date.prototype.toUTCString.call(value);
  }

  // Make error with message first say the error
  if (isError(value)) {
    base = ' ' + formatError(value);
  }

  if (keys.length === 0 && (!array || value.length == 0)) {
    return braces[0] + base + braces[1];
  }

  if (recurseTimes < 0) {
    if (isRegExp(value)) {
      return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
    } else {
      return ctx.stylize('[Object]', 'special');
    }
  }

  ctx.seen.push(value);

  var output;
  if (array) {
    output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
  } else {
    output = keys.map(function(key) {
      return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
    });
  }

  ctx.seen.pop();

  return reduceToSingleString(output, base, braces);
}


function formatPrimitive(ctx, value) {
  if (isUndefined(value))
    return ctx.stylize('undefined', 'undefined');
  if (isString(value)) {
    var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
                                             .replace(/'/g, "\\'")
                                             .replace(/\\"/g, '"') + '\'';
    return ctx.stylize(simple, 'string');
  }
  if (isNumber(value))
    return ctx.stylize('' + value, 'number');
  if (isBoolean(value))
    return ctx.stylize('' + value, 'boolean');
  // For some reason typeof null is "object", so special case here.
  if (isNull(value))
    return ctx.stylize('null', 'null');
}


function formatError(value) {
  return '[' + Error.prototype.toString.call(value) + ']';
}


function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
  var output = [];
  for (var i = 0, l = value.length; i < l; ++i) {
    if (hasOwnProperty(value, String(i))) {
      output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
          String(i), true));
    } else {
      output.push('');
    }
  }
  keys.forEach(function(key) {
    if (!key.match(/^\d+$/)) {
      output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
          key, true));
    }
  });
  return output;
}


function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
  var name, str, desc;
  desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };
  if (desc.get) {
    if (desc.set) {
      str = ctx.stylize('[Getter/Setter]', 'special');
    } else {
      str = ctx.stylize('[Getter]', 'special');
    }
  } else {
    if (desc.set) {
      str = ctx.stylize('[Setter]', 'special');
    }
  }
  if (!hasOwnProperty(visibleKeys, key)) {
    name = '[' + key + ']';
  }
  if (!str) {
    if (ctx.seen.indexOf(desc.value) < 0) {
      if (isNull(recurseTimes)) {
        str = formatValue(ctx, desc.value, null);
      } else {
        str = formatValue(ctx, desc.value, recurseTimes - 1);
      }
      if (str.indexOf('\n') > -1) {
        if (array) {
          str = str.split('\n').map(function(line) {
            return '  ' + line;
          }).join('\n').substr(2);
        } else {
          str = '\n' + str.split('\n').map(function(line) {
            return '   ' + line;
          }).join('\n');
        }
      }
    } else {
      str = ctx.stylize('[Circular]', 'special');
    }
  }
  if (isUndefined(name)) {
    if (array && key.match(/^\d+$/)) {
      return str;
    }
    name = JSON.stringify('' + key);
    if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
      name = name.substr(1, name.length - 2);
      name = ctx.stylize(name, 'name');
    } else {
      name = name.replace(/'/g, "\\'")
                 .replace(/\\"/g, '"')
                 .replace(/(^"|"$)/g, "'");
      name = ctx.stylize(name, 'string');
    }
  }

  return name + ': ' + str;
}


function reduceToSingleString(output, base, braces) {
  var numLinesEst = 0;
  var length = output.reduce(function(prev, cur) {
    numLinesEst++;
    if (cur.indexOf('\n') >= 0) numLinesEst++;
    return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1;
  }, 0);

  if (length > 60) {
    return braces[0] +
           (base === '' ? '' : base + '\n ') +
           ' ' +
           output.join(',\n  ') +
           ' ' +
           braces[1];
  }

  return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
}


// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
function isArray(ar) {
  return Array.isArray(ar);
}
exports.isArray = isArray;

function isBoolean(arg) {
  return typeof arg === 'boolean';
}
exports.isBoolean = isBoolean;

function isNull(arg) {
  return arg === null;
}
exports.isNull = isNull;

function isNullOrUndefined(arg) {
  return arg == null;
}
exports.isNullOrUndefined = isNullOrUndefined;

function isNumber(arg) {
  return typeof arg === 'number';
}
exports.isNumber = isNumber;

function isString(arg) {
  return typeof arg === 'string';
}
exports.isString = isString;

function isSymbol(arg) {
  return typeof arg === 'symbol';
}
exports.isSymbol = isSymbol;

function isUndefined(arg) {
  return arg === void 0;
}
exports.isUndefined = isUndefined;

function isRegExp(re) {
  return isObject(re) && objectToString(re) === '[object RegExp]';
}
exports.isRegExp = isRegExp;

function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}
exports.isObject = isObject;

function isDate(d) {
  return isObject(d) && objectToString(d) === '[object Date]';
}
exports.isDate = isDate;

function isError(e) {
  return isObject(e) &&
      (objectToString(e) === '[object Error]' || e instanceof Error);
}
exports.isError = isError;

function isFunction(arg) {
  return typeof arg === 'function';
}
exports.isFunction = isFunction;

function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' ||  // ES6 symbol
         typeof arg === 'undefined';
}
exports.isPrimitive = isPrimitive;

exports.isBuffer = _dereq_(8);

function objectToString(o) {
  return Object.prototype.toString.call(o);
}


function pad(n) {
  return n < 10 ? '0' + n.toString(10) : n.toString(10);
}


var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
              'Oct', 'Nov', 'Dec'];

// 26 Feb 16:19:34
function timestamp() {
  var d = new Date();
  var time = [pad(d.getHours()),
              pad(d.getMinutes()),
              pad(d.getSeconds())].join(':');
  return [d.getDate(), months[d.getMonth()], time].join(' ');
}


// log is just a thin wrapper to console.log that prepends a timestamp
exports.log = function() {
  console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments));
};


/**
 * Inherit the prototype methods from one constructor into another.
 *
 * The Function.prototype.inherits from lang.js rewritten as a standalone
 * function (not on Function.prototype). NOTE: If this file is to be loaded
 * during bootstrapping this function needs to be rewritten using some native
 * functions as prototype setup using normal JavaScript does not work as
 * expected during bootstrapping (see mirror.js in r114903).
 *
 * @param {function} ctor Constructor function which needs to inherit the
|  |  *     prototype. | ||
|  |  * @param {function} superCtor Constructor function to inherit prototype from. | ||
|  |  */ | ||
|  | exports.inherits = _dereq_(7); | ||
|  | 
 | ||
|  | exports._extend = function(origin, add) { | ||
|  |   // Don't do anything if add isn't an object
 | ||
|  |   if (!add || !isObject(add)) return origin; | ||
|  | 
 | ||
|  |   var keys = Object.keys(add); | ||
|  |   var i = keys.length; | ||
|  |   while (i--) { | ||
|  |     origin[keys[i]] = add[keys[i]]; | ||
|  |   } | ||
|  |   return origin; | ||
|  | }; | ||
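|  | 
 | ||
|  | // Illustrative sketch, added for documentation (the `exampleExtend` helper is | ||
|  | // hypothetical and never invoked by the bundle): _extend is a shallow merge, | ||
|  | // so keys from `add` overwrite those on `origin` and nested objects are | ||
|  | // shared, not cloned. | ||
|  | function exampleExtend() { | ||
|  |   return exports._extend({ a: 1, b: 2 }, { b: 3 }); // => { a: 1, b: 3 } | ||
|  | } | ||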
|  | 
 | ||
|  | function hasOwnProperty(obj, prop) { | ||
|  |   return Object.prototype.hasOwnProperty.call(obj, prop); | ||
|  | } | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{"7":7,"73":73,"8":8}],10:[function(_dereq_,module,exports){ | ||
|  | 'use strict' | ||
|  | 
 | ||
|  | exports.byteLength = byteLength | ||
|  | exports.toByteArray = toByteArray | ||
|  | exports.fromByteArray = fromByteArray | ||
|  | 
 | ||
|  | var lookup = [] | ||
|  | var revLookup = [] | ||
|  | var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array | ||
|  | 
 | ||
|  | var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' | ||
|  | for (var i = 0, len = code.length; i < len; ++i) { | ||
|  |   lookup[i] = code[i] | ||
|  |   revLookup[code.charCodeAt(i)] = i | ||
|  | } | ||
|  | 
 | ||
|  | // Support decoding URL-safe base64 strings, as Node.js does.
 | ||
|  | // See: https://en.wikipedia.org/wiki/Base64#URL_applications
 | ||
|  | revLookup['-'.charCodeAt(0)] = 62 | ||
|  | revLookup['_'.charCodeAt(0)] = 63 | ||
|  | 
 | ||
|  | function getLens (b64) { | ||
|  |   var len = b64.length | ||
|  | 
 | ||
|  |   if (len % 4 > 0) { | ||
|  |     throw new Error('Invalid string. Length must be a multiple of 4') | ||
|  |   } | ||
|  | 
 | ||
|  |   // Trim off extra bytes after placeholder bytes are found
 | ||
|  |   // See: https://github.com/beatgammit/base64-js/issues/42
 | ||
|  |   var validLen = b64.indexOf('=') | ||
|  |   if (validLen === -1) validLen = len | ||
|  | 
 | ||
|  |   var placeHoldersLen = validLen === len | ||
|  |     ? 0 | ||
|  |     : 4 - (validLen % 4) | ||
|  | 
 | ||
|  |   return [validLen, placeHoldersLen] | ||
|  | } | ||
|  | 
 | ||
|  | // base64 is 4/3 + up to two characters of the original data
 | ||
|  | function byteLength (b64) { | ||
|  |   var lens = getLens(b64) | ||
|  |   var validLen = lens[0] | ||
|  |   var placeHoldersLen = lens[1] | ||
|  |   return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen | ||
|  | } | ||
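|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): a worked instance of the length math above. For the | ||
|  | // 8-char string 'aGVsbG8=' ("hello" encoded), validLen is 7 and | ||
|  | // placeHoldersLen is 1, so the result is ((7 + 1) * 3 / 4) - 1 = 5 bytes. | ||
|  | function exampleByteLengthOfHello () { | ||
|  |   return byteLength('aGVsbG8=') // => 5 | ||
|  | } | ||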
|  | 
 | ||
|  | function _byteLength (b64, validLen, placeHoldersLen) { | ||
|  |   return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen | ||
|  | } | ||
|  | 
 | ||
|  | function toByteArray (b64) { | ||
|  |   var tmp | ||
|  |   var lens = getLens(b64) | ||
|  |   var validLen = lens[0] | ||
|  |   var placeHoldersLen = lens[1] | ||
|  | 
 | ||
|  |   var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) | ||
|  | 
 | ||
|  |   var curByte = 0 | ||
|  | 
 | ||
|  |   // if there are placeholders, only get up to the last complete 4 chars
 | ||
|  |   var len = placeHoldersLen > 0 | ||
|  |     ? validLen - 4 | ||
|  |     : validLen | ||
|  | 
 | ||
|  |   var i | ||
|  |   for (i = 0; i < len; i += 4) { | ||
|  |     tmp = | ||
|  |       (revLookup[b64.charCodeAt(i)] << 18) | | ||
|  |       (revLookup[b64.charCodeAt(i + 1)] << 12) | | ||
|  |       (revLookup[b64.charCodeAt(i + 2)] << 6) | | ||
|  |       revLookup[b64.charCodeAt(i + 3)] | ||
|  |     arr[curByte++] = (tmp >> 16) & 0xFF | ||
|  |     arr[curByte++] = (tmp >> 8) & 0xFF | ||
|  |     arr[curByte++] = tmp & 0xFF | ||
|  |   } | ||
|  | 
 | ||
|  |   if (placeHoldersLen === 2) { | ||
|  |     tmp = | ||
|  |       (revLookup[b64.charCodeAt(i)] << 2) | | ||
|  |       (revLookup[b64.charCodeAt(i + 1)] >> 4) | ||
|  |     arr[curByte++] = tmp & 0xFF | ||
|  |   } | ||
|  | 
 | ||
|  |   if (placeHoldersLen === 1) { | ||
|  |     tmp = | ||
|  |       (revLookup[b64.charCodeAt(i)] << 10) | | ||
|  |       (revLookup[b64.charCodeAt(i + 1)] << 4) | | ||
|  |       (revLookup[b64.charCodeAt(i + 2)] >> 2) | ||
|  |     arr[curByte++] = (tmp >> 8) & 0xFF | ||
|  |     arr[curByte++] = tmp & 0xFF | ||
|  |   } | ||
|  | 
 | ||
|  |   return arr | ||
|  | } | ||
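|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): with one '=' placeholder the main 4-char loop above | ||
|  | // is skipped entirely and the placeHoldersLen === 1 branch emits the final | ||
|  | // two bytes. | ||
|  | function exampleDecodeHi () { | ||
|  |   return toByteArray('aGk=') // => Uint8Array [ 104, 105 ], i.e. "hi" | ||
|  | } | ||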
|  | 
 | ||
|  | function tripletToBase64 (num) { | ||
|  |   return lookup[num >> 18 & 0x3F] + | ||
|  |     lookup[num >> 12 & 0x3F] + | ||
|  |     lookup[num >> 6 & 0x3F] + | ||
|  |     lookup[num & 0x3F] | ||
|  | } | ||
|  | 
 | ||
|  | function encodeChunk (uint8, start, end) { | ||
|  |   var tmp | ||
|  |   var output = [] | ||
|  |   for (var i = start; i < end; i += 3) { | ||
|  |     tmp = | ||
|  |       ((uint8[i] << 16) & 0xFF0000) + | ||
|  |       ((uint8[i + 1] << 8) & 0xFF00) + | ||
|  |       (uint8[i + 2] & 0xFF) | ||
|  |     output.push(tripletToBase64(tmp)) | ||
|  |   } | ||
|  |   return output.join('') | ||
|  | } | ||
|  | 
 | ||
|  | function fromByteArray (uint8) { | ||
|  |   var tmp | ||
|  |   var len = uint8.length | ||
|  |   var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
 | ||
|  |   var parts = [] | ||
|  |   var maxChunkLength = 16383 // must be multiple of 3
 | ||
|  | 
 | ||
|  |   // go through the array every three bytes, we'll deal with trailing stuff later
 | ||
|  |   for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { | ||
|  |     parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) | ||
|  |   } | ||
|  | 
 | ||
|  |   // pad the end with zeros, but make sure to not forget the extra bytes
 | ||
|  |   if (extraBytes === 1) { | ||
|  |     tmp = uint8[len - 1] | ||
|  |     parts.push( | ||
|  |       lookup[tmp >> 2] + | ||
|  |       lookup[(tmp << 4) & 0x3F] + | ||
|  |       '==' | ||
|  |     ) | ||
|  |   } else if (extraBytes === 2) { | ||
|  |     tmp = (uint8[len - 2] << 8) + uint8[len - 1] | ||
|  |     parts.push( | ||
|  |       lookup[tmp >> 10] + | ||
|  |       lookup[(tmp >> 4) & 0x3F] + | ||
|  |       lookup[(tmp << 2) & 0x3F] + | ||
|  |       '=' | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   return parts.join('') | ||
|  | } | ||
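|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): encoding is the inverse of toByteArray above -- two | ||
|  | // trailing bytes land in the extraBytes === 2 branch and come back out padded | ||
|  | // with a single '='. | ||
|  | function exampleEncodeHi () { | ||
|  |   return fromByteArray([104, 105]) // => 'aGk=' | ||
|  | } | ||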
|  | 
 | ||
|  | },{}],11:[function(_dereq_,module,exports){ | ||
|  | 
 | ||
|  | },{}],12:[function(_dereq_,module,exports){ | ||
|  | (function (Buffer){(function (){ | ||
|  | /* eslint-disable node/no-deprecated-api */ | ||
|  | 
 | ||
|  | var toString = Object.prototype.toString | ||
|  | 
 | ||
|  | var isModern = ( | ||
|  |   typeof Buffer !== 'undefined' && | ||
|  |   typeof Buffer.alloc === 'function' && | ||
|  |   typeof Buffer.allocUnsafe === 'function' && | ||
|  |   typeof Buffer.from === 'function' | ||
|  | ) | ||
|  | 
 | ||
|  | function isArrayBuffer (input) { | ||
|  |   return toString.call(input).slice(8, -1) === 'ArrayBuffer' | ||
|  | } | ||
|  | 
 | ||
|  | function fromArrayBuffer (obj, byteOffset, length) { | ||
|  |   byteOffset >>>= 0 | ||
|  | 
 | ||
|  |   var maxLength = obj.byteLength - byteOffset | ||
|  | 
 | ||
|  |   if (maxLength < 0) { | ||
|  |     throw new RangeError("'offset' is out of bounds") | ||
|  |   } | ||
|  | 
 | ||
|  |   if (length === undefined) { | ||
|  |     length = maxLength | ||
|  |   } else { | ||
|  |     length >>>= 0 | ||
|  | 
 | ||
|  |     if (length > maxLength) { | ||
|  |       throw new RangeError("'length' is out of bounds") | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return isModern | ||
|  |     ? Buffer.from(obj.slice(byteOffset, byteOffset + length)) | ||
|  |     : new Buffer(new Uint8Array(obj.slice(byteOffset, byteOffset + length))) | ||
|  | } | ||
|  | 
 | ||
|  | function fromString (string, encoding) { | ||
|  |   if (typeof encoding !== 'string' || encoding === '') { | ||
|  |     encoding = 'utf8' | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!Buffer.isEncoding(encoding)) { | ||
|  |     throw new TypeError('"encoding" must be a valid string encoding') | ||
|  |   } | ||
|  | 
 | ||
|  |   return isModern | ||
|  |     ? Buffer.from(string, encoding) | ||
|  |     : new Buffer(string, encoding) | ||
|  | } | ||
|  | 
 | ||
|  | function bufferFrom (value, encodingOrOffset, length) { | ||
|  |   if (typeof value === 'number') { | ||
|  |     throw new TypeError('"value" argument must not be a number') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (isArrayBuffer(value)) { | ||
|  |     return fromArrayBuffer(value, encodingOrOffset, length) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof value === 'string') { | ||
|  |     return fromString(value, encodingOrOffset) | ||
|  |   } | ||
|  | 
 | ||
|  |   return isModern | ||
|  |     ? Buffer.from(value) | ||
|  |     : new Buffer(value) | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = bufferFrom | ||
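|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): the shim accepts strings, ArrayBuffers and | ||
|  | // array-likes but, like Buffer.from, rejects plain numbers so it cannot | ||
|  | // allocate uninitialized memory by accident. | ||
|  | function exampleBufferFrom () { | ||
|  |   return bufferFrom('abc', 'utf8') // => <Buffer 61 62 63> | ||
|  | } | ||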
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(13).Buffer) | ||
|  | },{"13":13}],13:[function(_dereq_,module,exports){ | ||
|  | (function (Buffer){(function (){ | ||
|  | /*! | ||
|  |  * The buffer module from node.js, for the browser. | ||
|  |  * | ||
|  |  * @author   Feross Aboukhadijeh <https://feross.org>
 | ||
|  |  * @license  MIT | ||
|  |  */ | ||
|  | /* eslint-disable no-proto */ | ||
|  | 
 | ||
|  | 'use strict' | ||
|  | 
 | ||
|  | var base64 = _dereq_(10) | ||
|  | var ieee754 = _dereq_(30) | ||
|  | var customInspectSymbol = | ||
|  |   (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation
 | ||
|  |     ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation
 | ||
|  |     : null | ||
|  | 
 | ||
|  | exports.Buffer = Buffer | ||
|  | exports.SlowBuffer = SlowBuffer | ||
|  | exports.INSPECT_MAX_BYTES = 50 | ||
|  | 
 | ||
|  | var K_MAX_LENGTH = 0x7fffffff | ||
|  | exports.kMaxLength = K_MAX_LENGTH | ||
|  | 
 | ||
|  | /** | ||
|  |  * If `Buffer.TYPED_ARRAY_SUPPORT`: | ||
|  |  *   === true    Use Uint8Array implementation (fastest) | ||
|  |  *   === false   Print warning and recommend using `buffer` v4.x which has an Object | ||
|  |  *               implementation (most compatible, even IE6) | ||
|  |  * | ||
|  |  * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, | ||
|  |  * Opera 11.6+, iOS 4.2+. | ||
|  |  * | ||
|  |  * We report that the browser does not support typed arrays if they are not subclassable | ||
|  |  * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array` | ||
|  |  * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support
 | ||
|  |  * for __proto__ and has a buggy typed array implementation. | ||
|  |  */ | ||
|  | Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport() | ||
|  | 
 | ||
|  | if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' && | ||
|  |     typeof console.error === 'function') { | ||
|  |   console.error( | ||
|  |     'This browser lacks typed array (Uint8Array) support which is required by ' + | ||
|  |     '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.' | ||
|  |   ) | ||
|  | } | ||
|  | 
 | ||
|  | function typedArraySupport () { | ||
|  |   // Can typed array instances be augmented? | ||
|  |   try { | ||
|  |     var arr = new Uint8Array(1) | ||
|  |     var proto = { foo: function () { return 42 } } | ||
|  |     Object.setPrototypeOf(proto, Uint8Array.prototype) | ||
|  |     Object.setPrototypeOf(arr, proto) | ||
|  |     return arr.foo() === 42 | ||
|  |   } catch (e) { | ||
|  |     return false | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Buffer.prototype, 'parent', { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     if (!Buffer.isBuffer(this)) return undefined | ||
|  |     return this.buffer | ||
|  |   } | ||
|  | }) | ||
|  | 
 | ||
|  | Object.defineProperty(Buffer.prototype, 'offset', { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     if (!Buffer.isBuffer(this)) return undefined | ||
|  |     return this.byteOffset | ||
|  |   } | ||
|  | }) | ||
|  | 
 | ||
|  | function createBuffer (length) { | ||
|  |   if (length > K_MAX_LENGTH) { | ||
|  |     throw new RangeError('The value "' + length + '" is invalid for option "size"') | ||
|  |   } | ||
|  |   // Return an augmented `Uint8Array` instance
 | ||
|  |   var buf = new Uint8Array(length) | ||
|  |   Object.setPrototypeOf(buf, Buffer.prototype) | ||
|  |   return buf | ||
|  | } | ||
|  | 
 | ||
|  | /** | ||
|  |  * The Buffer constructor returns instances of `Uint8Array` that have their | ||
|  |  * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of | ||
|  |  * `Uint8Array`, so the returned instances will have all the node `Buffer` methods | ||
|  |  * and the `Uint8Array` methods. Square bracket notation works as expected -- it | ||
|  |  * returns a single octet. | ||
|  |  * | ||
|  |  * The `Uint8Array` prototype remains unmodified. | ||
|  |  */ | ||
|  | 
 | ||
|  | function Buffer (arg, encodingOrOffset, length) { | ||
|  |   // Common case.
 | ||
|  |   if (typeof arg === 'number') { | ||
|  |     if (typeof encodingOrOffset === 'string') { | ||
|  |       throw new TypeError( | ||
|  |         'The "string" argument must be of type string. Received type number' | ||
|  |       ) | ||
|  |     } | ||
|  |     return allocUnsafe(arg) | ||
|  |   } | ||
|  |   return from(arg, encodingOrOffset, length) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.poolSize = 8192 // not used by this implementation
 | ||
|  | 
 | ||
|  | function from (value, encodingOrOffset, length) { | ||
|  |   if (typeof value === 'string') { | ||
|  |     return fromString(value, encodingOrOffset) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (ArrayBuffer.isView(value)) { | ||
|  |     return fromArrayView(value) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (value == null) { | ||
|  |     throw new TypeError( | ||
|  |       'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + | ||
|  |       'or Array-like Object. Received type ' + (typeof value) | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (isInstance(value, ArrayBuffer) || | ||
|  |       (value && isInstance(value.buffer, ArrayBuffer))) { | ||
|  |     return fromArrayBuffer(value, encodingOrOffset, length) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof SharedArrayBuffer !== 'undefined' && | ||
|  |       (isInstance(value, SharedArrayBuffer) || | ||
|  |       (value && isInstance(value.buffer, SharedArrayBuffer)))) { | ||
|  |     return fromArrayBuffer(value, encodingOrOffset, length) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof value === 'number') { | ||
|  |     throw new TypeError( | ||
|  |       'The "value" argument must not be of type number. Received type number' | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   var valueOf = value.valueOf && value.valueOf() | ||
|  |   if (valueOf != null && valueOf !== value) { | ||
|  |     return Buffer.from(valueOf, encodingOrOffset, length) | ||
|  |   } | ||
|  | 
 | ||
|  |   var b = fromObject(value) | ||
|  |   if (b) return b | ||
|  | 
 | ||
|  |   if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null && | ||
|  |       typeof value[Symbol.toPrimitive] === 'function') { | ||
|  |     return Buffer.from( | ||
|  |       value[Symbol.toPrimitive]('string'), encodingOrOffset, length | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   throw new TypeError( | ||
|  |     'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + | ||
|  |     'or Array-like Object. Received type ' + (typeof value) | ||
|  |   ) | ||
|  | } | ||
|  | 
 | ||
|  | /** | ||
|  |  * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError | ||
|  |  * if value is a number. | ||
|  |  * Buffer.from(str[, encoding]) | ||
|  |  * Buffer.from(array) | ||
|  |  * Buffer.from(buffer) | ||
|  |  * Buffer.from(arrayBuffer[, byteOffset[, length]]) | ||
|  |  **/ | ||
|  | Buffer.from = function (value, encodingOrOffset, length) { | ||
|  |   return from(value, encodingOrOffset, length) | ||
|  | } | ||
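|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): the accepted input shapes, mirroring the JSDoc | ||
|  | // above. The string and array forms copy their input; the ArrayBuffer form | ||
|  | // returns a view that shares the underlying memory. | ||
|  | function exampleFromShapes () { | ||
|  |   var fromStr = Buffer.from('abc')               // <Buffer 61 62 63> | ||
|  |   var fromArr = Buffer.from([1, 2, 3])           // <Buffer 01 02 03> | ||
|  |   var fromView = Buffer.from(new ArrayBuffer(2)) // <Buffer 00 00>, shared memory | ||
|  |   return [fromStr, fromArr, fromView] | ||
|  | } | ||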
|  | 
 | ||
|  | // Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:
 | ||
|  | // https://github.com/feross/buffer/pull/148
 | ||
|  | Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype) | ||
|  | Object.setPrototypeOf(Buffer, Uint8Array) | ||
|  | 
 | ||
|  | function assertSize (size) { | ||
|  |   if (typeof size !== 'number') { | ||
|  |     throw new TypeError('"size" argument must be of type number') | ||
|  |   } else if (size < 0) { | ||
|  |     throw new RangeError('The value "' + size + '" is invalid for option "size"') | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function alloc (size, fill, encoding) { | ||
|  |   assertSize(size) | ||
|  |   if (size <= 0) { | ||
|  |     return createBuffer(size) | ||
|  |   } | ||
|  |   if (fill !== undefined) { | ||
|  |     // Only pay attention to encoding if it's a string. This
 | ||
|  |     // prevents accidentally sending in a number that would
 | ||
|  |     // be interpreted as a start offset.
 | ||
|  |     return typeof encoding === 'string' | ||
|  |       ? createBuffer(size).fill(fill, encoding) | ||
|  |       : createBuffer(size).fill(fill) | ||
|  |   } | ||
|  |   return createBuffer(size) | ||
|  | } | ||
|  | 
 | ||
|  | /** | ||
|  |  * Creates a new filled Buffer instance. | ||
|  |  * alloc(size[, fill[, encoding]]) | ||
|  |  **/ | ||
|  | Buffer.alloc = function (size, fill, encoding) { | ||
|  |   return alloc(size, fill, encoding) | ||
|  | } | ||
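|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): alloc() zero-fills by default; a fill value, | ||
|  | // optionally with an encoding, pre-populates the new Buffer instead. | ||
|  | function exampleAlloc () { | ||
|  |   var zeroed = Buffer.alloc(4)                 // <Buffer 00 00 00 00> | ||
|  |   var filled = Buffer.alloc(4, 0x61)           // <Buffer 61 61 61 61> | ||
|  |   var hexFill = Buffer.alloc(4, 'abcd', 'hex') // <Buffer ab cd ab cd> | ||
|  |   return [zeroed, filled, hexFill] | ||
|  | } | ||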
|  | 
 | ||
|  | function allocUnsafe (size) { | ||
|  |   assertSize(size) | ||
|  |   return createBuffer(size < 0 ? 0 : checked(size) | 0) | ||
|  | } | ||
|  | 
 | ||
|  | /** | ||
|  |  * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. | ||
|  |  * */ | ||
|  | Buffer.allocUnsafe = function (size) { | ||
|  |   return allocUnsafe(size) | ||
|  | } | ||
|  | /** | ||
|  |  * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. | ||
|  |  */ | ||
|  | Buffer.allocUnsafeSlow = function (size) { | ||
|  |   return allocUnsafe(size) | ||
|  | } | ||
|  | 
 | ||
|  | function fromString (string, encoding) { | ||
|  |   if (typeof encoding !== 'string' || encoding === '') { | ||
|  |     encoding = 'utf8' | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!Buffer.isEncoding(encoding)) { | ||
|  |     throw new TypeError('Unknown encoding: ' + encoding) | ||
|  |   } | ||
|  | 
 | ||
|  |   var length = byteLength(string, encoding) | 0 | ||
|  |   var buf = createBuffer(length) | ||
|  | 
 | ||
|  |   var actual = buf.write(string, encoding) | ||
|  | 
 | ||
|  |   if (actual !== length) { | ||
|  |     // Writing a hex string, for example, that contains invalid characters will
 | ||
|  |     // cause everything after the first invalid character to be ignored. (e.g.
 | ||
|  |     // 'abxxcd' will be treated as 'ab')
 | ||
|  |     buf = buf.slice(0, actual) | ||
|  |   } | ||
|  | 
 | ||
|  |   return buf | ||
|  | } | ||
|  | 
 | ||
|  | function fromArrayLike (array) { | ||
|  |   var length = array.length < 0 ? 0 : checked(array.length) | 0 | ||
|  |   var buf = createBuffer(length) | ||
|  |   for (var i = 0; i < length; i += 1) { | ||
|  |     buf[i] = array[i] & 255 | ||
|  |   } | ||
|  |   return buf | ||
|  | } | ||
|  | 
 | ||
|  | function fromArrayView (arrayView) { | ||
|  |   if (isInstance(arrayView, Uint8Array)) { | ||
|  |     var copy = new Uint8Array(arrayView) | ||
|  |     return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength) | ||
|  |   } | ||
|  |   return fromArrayLike(arrayView) | ||
|  | } | ||
|  | 
 | ||
|  | function fromArrayBuffer (array, byteOffset, length) { | ||
|  |   if (byteOffset < 0 || array.byteLength < byteOffset) { | ||
|  |     throw new RangeError('"offset" is outside of buffer bounds') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (array.byteLength < byteOffset + (length || 0)) { | ||
|  |     throw new RangeError('"length" is outside of buffer bounds') | ||
|  |   } | ||
|  | 
 | ||
|  |   var buf | ||
|  |   if (byteOffset === undefined && length === undefined) { | ||
|  |     buf = new Uint8Array(array) | ||
|  |   } else if (length === undefined) { | ||
|  |     buf = new Uint8Array(array, byteOffset) | ||
|  |   } else { | ||
|  |     buf = new Uint8Array(array, byteOffset, length) | ||
|  |   } | ||
|  | 
 | ||
|  |   // Return an augmented `Uint8Array` instance
 | ||
|  |   Object.setPrototypeOf(buf, Buffer.prototype) | ||
|  | 
 | ||
|  |   return buf | ||
|  | } | ||
|  | 
 | ||
|  | function fromObject (obj) { | ||
|  |   if (Buffer.isBuffer(obj)) { | ||
|  |     var len = checked(obj.length) | 0 | ||
|  |     var buf = createBuffer(len) | ||
|  | 
 | ||
|  |     if (buf.length === 0) { | ||
|  |       return buf | ||
|  |     } | ||
|  | 
 | ||
|  |     obj.copy(buf, 0, 0, len) | ||
|  |     return buf | ||
|  |   } | ||
|  | 
 | ||
|  |   if (obj.length !== undefined) { | ||
|  |     if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) { | ||
|  |       return createBuffer(0) | ||
|  |     } | ||
|  |     return fromArrayLike(obj) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (obj.type === 'Buffer' && Array.isArray(obj.data)) { | ||
|  |     return fromArrayLike(obj.data) | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function checked (length) { | ||
|  |   // Note: cannot use `length < K_MAX_LENGTH` here because that fails when
 | ||
|  |   // length is NaN (which is otherwise coerced to zero.)
 | ||
|  |   if (length >= K_MAX_LENGTH) { | ||
|  |     throw new RangeError('Attempt to allocate Buffer larger than maximum ' + | ||
|  |                          'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes') | ||
|  |   } | ||
|  |   return length | 0 | ||
|  | } | ||
|  | 
 | ||
|  | function SlowBuffer (length) { | ||
|  |   if (+length != length) { // eslint-disable-line eqeqeq
 | ||
|  |     length = 0 | ||
|  |   } | ||
|  |   return Buffer.alloc(+length) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.isBuffer = function isBuffer (b) { | ||
|  |   return b != null && b._isBuffer === true && | ||
|  |     b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false
 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.compare = function compare (a, b) { | ||
|  |   if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength) | ||
|  |   if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength) | ||
|  |   if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { | ||
|  |     throw new TypeError( | ||
|  |       'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array' | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (a === b) return 0 | ||
|  | 
 | ||
|  |   var x = a.length | ||
|  |   var y = b.length | ||
|  | 
 | ||
|  |   for (var i = 0, len = Math.min(x, y); i < len; ++i) { | ||
|  |     if (a[i] !== b[i]) { | ||
|  |       x = a[i] | ||
|  |       y = b[i] | ||
|  |       break | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   if (x < y) return -1 | ||
|  |   if (y < x) return 1 | ||
|  |   return 0 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.isEncoding = function isEncoding (encoding) { | ||
|  |   switch (String(encoding).toLowerCase()) { | ||
|  |     case 'hex': | ||
|  |     case 'utf8': | ||
|  |     case 'utf-8': | ||
|  |     case 'ascii': | ||
|  |     case 'latin1': | ||
|  |     case 'binary': | ||
|  |     case 'base64': | ||
|  |     case 'ucs2': | ||
|  |     case 'ucs-2': | ||
|  |     case 'utf16le': | ||
|  |     case 'utf-16le': | ||
|  |       return true | ||
|  |     default: | ||
|  |       return false | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.concat = function concat (list, length) { | ||
|  |   if (!Array.isArray(list)) { | ||
|  |     throw new TypeError('"list" argument must be an Array of Buffers') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (list.length === 0) { | ||
|  |     return Buffer.alloc(0) | ||
|  |   } | ||
|  | 
 | ||
|  |   var i | ||
|  |   if (length === undefined) { | ||
|  |     length = 0 | ||
|  |     for (i = 0; i < list.length; ++i) { | ||
|  |       length += list[i].length | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   var buffer = Buffer.allocUnsafe(length) | ||
|  |   var pos = 0 | ||
|  |   for (i = 0; i < list.length; ++i) { | ||
|  |     var buf = list[i] | ||
|  |     if (isInstance(buf, Uint8Array)) { | ||
|  |       if (pos + buf.length > buffer.length) { | ||
|  |         Buffer.from(buf).copy(buffer, pos) | ||
|  |       } else { | ||
|  |         Uint8Array.prototype.set.call( | ||
|  |           buffer, | ||
|  |           buf, | ||
|  |           pos | ||
|  |         ) | ||
|  |       } | ||
|  |     } else if (!Buffer.isBuffer(buf)) { | ||
|  |       throw new TypeError('"list" argument must be an Array of Buffers') | ||
|  |     } else { | ||
|  |       buf.copy(buffer, pos) | ||
|  |     } | ||
|  |     pos += buf.length | ||
|  |   } | ||
|  |   return buffer | ||
|  | } | ||
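|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): concat() copies every chunk into one freshly | ||
|  | // allocated Buffer; passing the total length up front skips the extra | ||
|  | // summing pass. | ||
|  | function exampleConcat () { | ||
|  |   var joined = Buffer.concat([Buffer.from('ab'), Buffer.from('cd')], 4) | ||
|  |   return joined.toString() // => 'abcd' | ||
|  | } | ||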
|  | 
 | ||
|  | function byteLength (string, encoding) { | ||
|  |   if (Buffer.isBuffer(string)) { | ||
|  |     return string.length | ||
|  |   } | ||
|  |   if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) { | ||
|  |     return string.byteLength | ||
|  |   } | ||
|  |   if (typeof string !== 'string') { | ||
|  |     throw new TypeError( | ||
|  |       'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' + | ||
|  |       'Received type ' + typeof string | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   var len = string.length | ||
|  |   var mustMatch = (arguments.length > 2 && arguments[2] === true) | ||
|  |   if (!mustMatch && len === 0) return 0 | ||
|  | 
 | ||
|  |   // Use a for loop to avoid recursion
 | ||
|  |   var loweredCase = false | ||
|  |   for (;;) { | ||
|  |     switch (encoding) { | ||
|  |       case 'ascii': | ||
|  |       case 'latin1': | ||
|  |       case 'binary': | ||
|  |         return len | ||
|  |       case 'utf8': | ||
|  |       case 'utf-8': | ||
|  |         return utf8ToBytes(string).length | ||
|  |       case 'ucs2': | ||
|  |       case 'ucs-2': | ||
|  |       case 'utf16le': | ||
|  |       case 'utf-16le': | ||
|  |         return len * 2 | ||
|  |       case 'hex': | ||
|  |         return len >>> 1 | ||
|  |       case 'base64': | ||
|  |         return base64ToBytes(string).length | ||
|  |       default: | ||
|  |         if (loweredCase) { | ||
|  |           return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8
 | ||
|  |         } | ||
|  |         encoding = ('' + encoding).toLowerCase() | ||
|  |         loweredCase = true | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | Buffer.byteLength = byteLength | ||
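|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): the byte length depends on the encoding of the | ||
|  | // source string, not on its character count. | ||
|  | function exampleByteLengths () { | ||
|  |   var utf8Len = Buffer.byteLength('caf\u00e9')            // 5 (the accented char takes two bytes) | ||
|  |   var ucs2Len = Buffer.byteLength('caf\u00e9', 'utf16le') // 8 (two bytes per code unit) | ||
|  |   var hexLen = Buffer.byteLength('deadbeef', 'hex')       // 4 (two hex chars per byte) | ||
|  |   return [utf8Len, ucs2Len, hexLen] | ||
|  | } | ||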
|  | 
 | ||
|  | function slowToString (encoding, start, end) { | ||
|  |   var loweredCase = false | ||
|  | 
 | ||
|  |   // No need to verify that "this.length <= MAX_UINT32" since it's a read-only
 | ||
|  |   // property of a typed array.
 | ||
|  | 
 | ||
|  |   // This behaves neither like String nor Uint8Array in that we set start/end
 | ||
|  |   // to their upper/lower bounds if the value passed is out of range.
 | ||
|  |   // undefined is handled specially as per ECMA-262 6th Edition,
 | ||
|  |   // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
 | ||
|  |   if (start === undefined || start < 0) { | ||
|  |     start = 0 | ||
|  |   } | ||
|  |   // Return early if start > this.length. Done here to prevent potential uint32
 | ||
|  |   // coercion fail below.
 | ||
|  |   if (start > this.length) { | ||
|  |     return '' | ||
|  |   } | ||
|  | 
 | ||
|  |   if (end === undefined || end > this.length) { | ||
|  |     end = this.length | ||
|  |   } | ||
|  | 
 | ||
|  |   if (end <= 0) { | ||
|  |     return '' | ||
|  |   } | ||
|  | 
 | ||
|  |   // Force coercion to uint32. This will also coerce falsey/NaN values to 0.
 | ||
|  |   end >>>= 0 | ||
|  |   start >>>= 0 | ||
|  | 
 | ||
|  |   if (end <= start) { | ||
|  |     return '' | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!encoding) encoding = 'utf8' | ||
|  | 
 | ||
|  |   while (true) { | ||
|  |     switch (encoding) { | ||
|  |       case 'hex': | ||
|  |         return hexSlice(this, start, end) | ||
|  | 
 | ||
|  |       case 'utf8': | ||
|  |       case 'utf-8': | ||
|  |         return utf8Slice(this, start, end) | ||
|  | 
 | ||
|  |       case 'ascii': | ||
|  |         return asciiSlice(this, start, end) | ||
|  | 
 | ||
|  |       case 'latin1': | ||
|  |       case 'binary': | ||
|  |         return latin1Slice(this, start, end) | ||
|  | 
 | ||
|  |       case 'base64': | ||
|  |         return base64Slice(this, start, end) | ||
|  | 
 | ||
|  |       case 'ucs2': | ||
|  |       case 'ucs-2': | ||
|  |       case 'utf16le': | ||
|  |       case 'utf-16le': | ||
|  |         return utf16leSlice(this, start, end) | ||
|  | 
 | ||
|  |       default: | ||
|  |         if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) | ||
|  |         encoding = (encoding + '').toLowerCase() | ||
|  |         loweredCase = true | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package)
 | ||
|  | // to detect a Buffer instance. It's not possible to use `instanceof Buffer`
 | ||
|  | // reliably in a browserify context because there could be multiple different
 | ||
|  | // copies of the 'buffer' package in use. This method works even for Buffer
 | ||
|  | // instances that were created from another copy of the `buffer` package.
 | ||
|  | // See: https://github.com/feross/buffer/issues/154
 | ||
|  | Buffer.prototype._isBuffer = true | ||
|  | 
 | ||
|  | function swap (b, n, m) { | ||
|  |   var i = b[n] | ||
|  |   b[n] = b[m] | ||
|  |   b[m] = i | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.swap16 = function swap16 () { | ||
|  |   var len = this.length | ||
|  |   if (len % 2 !== 0) { | ||
|  |     throw new RangeError('Buffer size must be a multiple of 16-bits') | ||
|  |   } | ||
|  |   for (var i = 0; i < len; i += 2) { | ||
|  |     swap(this, i, i + 1) | ||
|  |   } | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.swap32 = function swap32 () { | ||
|  |   var len = this.length | ||
|  |   if (len % 4 !== 0) { | ||
|  |     throw new RangeError('Buffer size must be a multiple of 32-bits') | ||
|  |   } | ||
|  |   for (var i = 0; i < len; i += 4) { | ||
|  |     swap(this, i, i + 3) | ||
|  |     swap(this, i + 1, i + 2) | ||
|  |   } | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.swap64 = function swap64 () { | ||
|  |   var len = this.length | ||
|  |   if (len % 8 !== 0) { | ||
|  |     throw new RangeError('Buffer size must be a multiple of 64-bits') | ||
|  |   } | ||
|  |   for (var i = 0; i < len; i += 8) { | ||
|  |     swap(this, i, i + 7) | ||
|  |     swap(this, i + 1, i + 6) | ||
|  |     swap(this, i + 2, i + 5) | ||
|  |     swap(this, i + 3, i + 4) | ||
|  |   } | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.toString = function toString () { | ||
|  |   var length = this.length | ||
|  |   if (length === 0) return '' | ||
|  |   if (arguments.length === 0) return utf8Slice(this, 0, length) | ||
|  |   return slowToString.apply(this, arguments) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.toLocaleString = Buffer.prototype.toString | ||
|  | 
 | ||
|  | Buffer.prototype.equals = function equals (b) { | ||
|  |   if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') | ||
|  |   if (this === b) return true | ||
|  |   return Buffer.compare(this, b) === 0 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.inspect = function inspect () { | ||
|  |   var str = '' | ||
|  |   var max = exports.INSPECT_MAX_BYTES | ||
|  |   str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim() | ||
|  |   if (this.length > max) str += ' ... ' | ||
|  |   return '<Buffer ' + str + '>' | ||
|  | } | ||
|  | if (customInspectSymbol) { | ||
|  |   Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { | ||
|  |   if (isInstance(target, Uint8Array)) { | ||
|  |     target = Buffer.from(target, target.offset, target.byteLength) | ||
|  |   } | ||
|  |   if (!Buffer.isBuffer(target)) { | ||
|  |     throw new TypeError( | ||
|  |       'The "target" argument must be one of type Buffer or Uint8Array. ' + | ||
|  |       'Received type ' + (typeof target) | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (start === undefined) { | ||
|  |     start = 0 | ||
|  |   } | ||
|  |   if (end === undefined) { | ||
|  |     end = target ? target.length : 0 | ||
|  |   } | ||
|  |   if (thisStart === undefined) { | ||
|  |     thisStart = 0 | ||
|  |   } | ||
|  |   if (thisEnd === undefined) { | ||
|  |     thisEnd = this.length | ||
|  |   } | ||
|  | 
 | ||
|  |   if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { | ||
|  |     throw new RangeError('out of range index') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (thisStart >= thisEnd && start >= end) { | ||
|  |     return 0 | ||
|  |   } | ||
|  |   if (thisStart >= thisEnd) { | ||
|  |     return -1 | ||
|  |   } | ||
|  |   if (start >= end) { | ||
|  |     return 1 | ||
|  |   } | ||
|  | 
 | ||
|  |   start >>>= 0 | ||
|  |   end >>>= 0 | ||
|  |   thisStart >>>= 0 | ||
|  |   thisEnd >>>= 0 | ||
|  | 
 | ||
|  |   if (this === target) return 0 | ||
|  | 
 | ||
|  |   var x = thisEnd - thisStart | ||
|  |   var y = end - start | ||
|  |   var len = Math.min(x, y) | ||
|  | 
 | ||
|  |   var thisCopy = this.slice(thisStart, thisEnd) | ||
|  |   var targetCopy = target.slice(start, end) | ||
|  | 
 | ||
|  |   for (var i = 0; i < len; ++i) { | ||
|  |     if (thisCopy[i] !== targetCopy[i]) { | ||
|  |       x = thisCopy[i] | ||
|  |       y = targetCopy[i] | ||
|  |       break | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   if (x < y) return -1 | ||
|  |   if (y < x) return 1 | ||
|  |   return 0 | ||
|  | } | ||
|  | 
 | ||
|  | // Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
 | ||
|  | // OR the last index of `val` in `buffer` at offset <= `byteOffset`.
 | ||
|  | //
 | ||
|  | // Arguments:
 | ||
|  | // - buffer - a Buffer to search
 | ||
|  | // - val - a string, Buffer, or number
 | ||
|  | // - byteOffset - an index into `buffer`; will be clamped to an int32
 | ||
|  | // - encoding - an optional encoding, relevant if val is a string | ||
|  | // - dir - true for indexOf, false for lastIndexOf
 | ||
|  | function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { | ||
|  |   // Empty buffer means no match
 | ||
|  |   if (buffer.length === 0) return -1 | ||
|  | 
 | ||
|  |   // Normalize byteOffset
 | ||
|  |   if (typeof byteOffset === 'string') { | ||
|  |     encoding = byteOffset | ||
|  |     byteOffset = 0 | ||
|  |   } else if (byteOffset > 0x7fffffff) { | ||
|  |     byteOffset = 0x7fffffff | ||
|  |   } else if (byteOffset < -0x80000000) { | ||
|  |     byteOffset = -0x80000000 | ||
|  |   } | ||
|  |   byteOffset = +byteOffset // Coerce to Number.
 | ||
|  |   if (numberIsNaN(byteOffset)) { | ||
|  |     // byteOffset: if it's undefined, null, NaN, "foo", etc, search whole buffer | ||
|  |     byteOffset = dir ? 0 : (buffer.length - 1) | ||
|  |   } | ||
|  | 
 | ||
|  |   // Normalize byteOffset: negative offsets start from the end of the buffer
 | ||
|  |   if (byteOffset < 0) byteOffset = buffer.length + byteOffset | ||
|  |   if (byteOffset >= buffer.length) { | ||
|  |     if (dir) return -1 | ||
|  |     else byteOffset = buffer.length - 1 | ||
|  |   } else if (byteOffset < 0) { | ||
|  |     if (dir) byteOffset = 0 | ||
|  |     else return -1 | ||
|  |   } | ||
|  | 
 | ||
|  |   // Normalize val
 | ||
|  |   if (typeof val === 'string') { | ||
|  |     val = Buffer.from(val, encoding) | ||
|  |   } | ||
|  | 
 | ||
|  |   // Finally, search either indexOf (if dir is true) or lastIndexOf
 | ||
|  |   if (Buffer.isBuffer(val)) { | ||
|  |     // Special case: looking for empty string/buffer always fails
 | ||
|  |     if (val.length === 0) { | ||
|  |       return -1 | ||
|  |     } | ||
|  |     return arrayIndexOf(buffer, val, byteOffset, encoding, dir) | ||
|  |   } else if (typeof val === 'number') { | ||
|  |     val = val & 0xFF // Search for a byte value [0-255]
 | ||
|  |     if (typeof Uint8Array.prototype.indexOf === 'function') { | ||
|  |       if (dir) { | ||
|  |         return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) | ||
|  |       } else { | ||
|  |         return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) | ||
|  |       } | ||
|  |     } | ||
|  |     return arrayIndexOf(buffer, [val], byteOffset, encoding, dir) | ||
|  |   } | ||
|  | 
 | ||
|  |   throw new TypeError('val must be string, number or Buffer') | ||
|  | } | ||
|  | 
 | ||
|  | function arrayIndexOf (arr, val, byteOffset, encoding, dir) { | ||
|  |   var indexSize = 1 | ||
|  |   var arrLength = arr.length | ||
|  |   var valLength = val.length | ||
|  | 
 | ||
|  |   if (encoding !== undefined) { | ||
|  |     encoding = String(encoding).toLowerCase() | ||
|  |     if (encoding === 'ucs2' || encoding === 'ucs-2' || | ||
|  |         encoding === 'utf16le' || encoding === 'utf-16le') { | ||
|  |       if (arr.length < 2 || val.length < 2) { | ||
|  |         return -1 | ||
|  |       } | ||
|  |       indexSize = 2 | ||
|  |       arrLength /= 2 | ||
|  |       valLength /= 2 | ||
|  |       byteOffset /= 2 | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   function read (buf, i) { | ||
|  |     if (indexSize === 1) { | ||
|  |       return buf[i] | ||
|  |     } else { | ||
|  |       return buf.readUInt16BE(i * indexSize) | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   var i | ||
|  |   if (dir) { | ||
|  |     var foundIndex = -1 | ||
|  |     for (i = byteOffset; i < arrLength; i++) { | ||
|  |       if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) { | ||
|  |         if (foundIndex === -1) foundIndex = i | ||
|  |         if (i - foundIndex + 1 === valLength) return foundIndex * indexSize | ||
|  |       } else { | ||
|  |         if (foundIndex !== -1) i -= i - foundIndex | ||
|  |         foundIndex = -1 | ||
|  |       } | ||
|  |     } | ||
|  |   } else { | ||
|  |     if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength | ||
|  |     for (i = byteOffset; i >= 0; i--) { | ||
|  |       var found = true | ||
|  |       for (var j = 0; j < valLength; j++) { | ||
|  |         if (read(arr, i + j) !== read(val, j)) { | ||
|  |           found = false | ||
|  |           break | ||
|  |         } | ||
|  |       } | ||
|  |       if (found) return i | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return -1 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.includes = function includes (val, byteOffset, encoding) { | ||
|  |   return this.indexOf(val, byteOffset, encoding) !== -1 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { | ||
|  |   return bidirectionalIndexOf(this, val, byteOffset, encoding, true) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { | ||
|  |   return bidirectionalIndexOf(this, val, byteOffset, encoding, false) | ||
|  | } | ||
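|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): includes, indexOf and lastIndexOf all funnel | ||
|  | // through bidirectionalIndexOf above, so each accepts a string, a Buffer or | ||
|  | // a single byte value plus an optional starting offset. | ||
|  | function exampleSearch () { | ||
|  |   var buf = Buffer.from('abcabc') | ||
|  |   var first = buf.indexOf('bc')    // 1 | ||
|  |   var last = buf.lastIndexOf(0x62) // 4 (byte value of 'b') | ||
|  |   var has = buf.includes('zz')     // false | ||
|  |   return [first, last, has] | ||
|  | } | ||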
|  | 
 | ||
|  | function hexWrite (buf, string, offset, length) { | ||
|  |   offset = Number(offset) || 0 | ||
|  |   var remaining = buf.length - offset | ||
|  |   if (!length) { | ||
|  |     length = remaining | ||
|  |   } else { | ||
|  |     length = Number(length) | ||
|  |     if (length > remaining) { | ||
|  |       length = remaining | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   var strLen = string.length | ||
|  | 
 | ||
|  |   if (length > strLen / 2) { | ||
|  |     length = strLen / 2 | ||
|  |   } | ||
|  |   for (var i = 0; i < length; ++i) { | ||
|  |     var parsed = parseInt(string.substr(i * 2, 2), 16) | ||
|  |     if (numberIsNaN(parsed)) return i | ||
|  |     buf[offset + i] = parsed | ||
|  |   } | ||
|  |   return i | ||
|  | } | ||
|  | 
 | ||
|  | function utf8Write (buf, string, offset, length) { | ||
|  |   return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) | ||
|  | } | ||
|  | 
 | ||
|  | function asciiWrite (buf, string, offset, length) { | ||
|  |   return blitBuffer(asciiToBytes(string), buf, offset, length) | ||
|  | } | ||
|  | 
 | ||
|  | function base64Write (buf, string, offset, length) { | ||
|  |   return blitBuffer(base64ToBytes(string), buf, offset, length) | ||
|  | } | ||
|  | 
 | ||
|  | function ucs2Write (buf, string, offset, length) { | ||
|  |   return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.write = function write (string, offset, length, encoding) { | ||
|  |   // Buffer#write(string)
 | ||
|  |   if (offset === undefined) { | ||
|  |     encoding = 'utf8' | ||
|  |     length = this.length | ||
|  |     offset = 0 | ||
|  |   // Buffer#write(string, encoding)
 | ||
|  |   } else if (length === undefined && typeof offset === 'string') { | ||
|  |     encoding = offset | ||
|  |     length = this.length | ||
|  |     offset = 0 | ||
|  |   // Buffer#write(string, offset[, length][, encoding])
 | ||
|  |   } else if (isFinite(offset)) { | ||
|  |     offset = offset >>> 0 | ||
|  |     if (isFinite(length)) { | ||
|  |       length = length >>> 0 | ||
|  |       if (encoding === undefined) encoding = 'utf8' | ||
|  |     } else { | ||
|  |       encoding = length | ||
|  |       length = undefined | ||
|  |     } | ||
|  |   } else { | ||
|  |     throw new Error( | ||
|  |       'Buffer.write(string, encoding, offset[, length]) is no longer supported' | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   var remaining = this.length - offset | ||
|  |   if (length === undefined || length > remaining) length = remaining | ||
|  | 
 | ||
|  |   if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { | ||
|  |     throw new RangeError('Attempt to write outside buffer bounds') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!encoding) encoding = 'utf8' | ||
|  | 
 | ||
|  |   var loweredCase = false | ||
|  |   for (;;) { | ||
|  |     switch (encoding) { | ||
|  |       case 'hex': | ||
|  |         return hexWrite(this, string, offset, length) | ||
|  | 
 | ||
|  |       case 'utf8': | ||
|  |       case 'utf-8': | ||
|  |         return utf8Write(this, string, offset, length) | ||
|  | 
 | ||
|  |       case 'ascii': | ||
|  |       case 'latin1': | ||
|  |       case 'binary': | ||
|  |         return asciiWrite(this, string, offset, length) | ||
|  | 
 | ||
|  |       case 'base64': | ||
|  |         // Warning: maxLength not taken into account in base64Write
 | ||
|  |         return base64Write(this, string, offset, length) | ||
|  | 
 | ||
|  |       case 'ucs2': | ||
|  |       case 'ucs-2': | ||
|  |       case 'utf16le': | ||
|  |       case 'utf-16le': | ||
|  |         return ucs2Write(this, string, offset, length) | ||
|  | 
 | ||
|  |       default: | ||
|  |         if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) | ||
|  |         encoding = ('' + encoding).toLowerCase() | ||
|  |         loweredCase = true | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
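|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): the supported call shapes, matching the argument | ||
|  | // juggling above. write() returns the number of bytes written and never | ||
|  | // grows the Buffer. | ||
|  | function exampleWrite () { | ||
|  |   var buf = Buffer.alloc(8) | ||
|  |   var a = buf.write('hi')             // 2 bytes of utf8 at offset 0 | ||
|  |   var b = buf.write('6869', 4, 'hex') // 2 bytes at offset 4 | ||
|  |   return [a, b] | ||
|  | } | ||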
|  | 
 | ||
|  | Buffer.prototype.toJSON = function toJSON () { | ||
|  |   return { | ||
|  |     type: 'Buffer', | ||
|  |     data: Array.prototype.slice.call(this._arr || this, 0) | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function base64Slice (buf, start, end) { | ||
|  |   if (start === 0 && end === buf.length) { | ||
|  |     return base64.fromByteArray(buf) | ||
|  |   } else { | ||
|  |     return base64.fromByteArray(buf.slice(start, end)) | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function utf8Slice (buf, start, end) { | ||
|  |   end = Math.min(buf.length, end) | ||
|  |   var res = [] | ||
|  | 
 | ||
|  |   var i = start | ||
|  |   while (i < end) { | ||
|  |     var firstByte = buf[i] | ||
|  |     var codePoint = null | ||
|  |     var bytesPerSequence = (firstByte > 0xEF) | ||
|  |       ? 4 | ||
|  |       : (firstByte > 0xDF) | ||
|  |           ? 3 | ||
|  |           : (firstByte > 0xBF) | ||
|  |               ? 2 | ||
|  |               : 1 | ||
|  | 
 | ||
|  |     if (i + bytesPerSequence <= end) { | ||
|  |       var secondByte, thirdByte, fourthByte, tempCodePoint | ||
|  | 
 | ||
|  |       switch (bytesPerSequence) { | ||
|  |         case 1: | ||
|  |           if (firstByte < 0x80) { | ||
|  |             codePoint = firstByte | ||
|  |           } | ||
|  |           break | ||
|  |         case 2: | ||
|  |           secondByte = buf[i + 1] | ||
|  |           if ((secondByte & 0xC0) === 0x80) { | ||
|  |             tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) | ||
|  |             if (tempCodePoint > 0x7F) { | ||
|  |               codePoint = tempCodePoint | ||
|  |             } | ||
|  |           } | ||
|  |           break | ||
|  |         case 3: | ||
|  |           secondByte = buf[i + 1] | ||
|  |           thirdByte = buf[i + 2] | ||
|  |           if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { | ||
|  |             tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) | ||
|  |             if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { | ||
|  |               codePoint = tempCodePoint | ||
|  |             } | ||
|  |           } | ||
|  |           break | ||
|  |         case 4: | ||
|  |           secondByte = buf[i + 1] | ||
|  |           thirdByte = buf[i + 2] | ||
|  |           fourthByte = buf[i + 3] | ||
|  |           if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { | ||
|  |             tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) | ||
|  |             if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { | ||
|  |               codePoint = tempCodePoint | ||
|  |             } | ||
|  |           } | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     if (codePoint === null) { | ||
|  |       // we did not generate a valid codePoint so insert a
 | ||
|  |       // replacement char (U+FFFD) and advance only 1 byte
 | ||
|  |       codePoint = 0xFFFD | ||
|  |       bytesPerSequence = 1 | ||
|  |     } else if (codePoint > 0xFFFF) { | ||
|  |       // encode to utf16 (surrogate pair dance)
 | ||
|  |       codePoint -= 0x10000 | ||
|  |       res.push(codePoint >>> 10 & 0x3FF | 0xD800) | ||
|  |       codePoint = 0xDC00 | codePoint & 0x3FF | ||
|  |     } | ||
|  | 
 | ||
|  |     res.push(codePoint) | ||
|  |     i += bytesPerSequence | ||
|  |   } | ||
|  | 
 | ||
|  |   return decodeCodePointsArray(res) | ||
|  | } | ||
|  | 
 | ||
|  | // Based on http://stackoverflow.com/a/22747272/680742, the browser with
 | ||
|  | // the lowest limit is Chrome, with 0x10000 args.
 | ||
|  | // We go 1 magnitude less, for safety
 | ||
|  | var MAX_ARGUMENTS_LENGTH = 0x1000 | ||
|  | 
 | ||
|  | function decodeCodePointsArray (codePoints) { | ||
|  |   var len = codePoints.length | ||
|  |   if (len <= MAX_ARGUMENTS_LENGTH) { | ||
|  |     return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
 | ||
|  |   } | ||
|  | 
 | ||
|  |   // Decode in chunks to avoid "call stack size exceeded".
 | ||
|  |   var res = '' | ||
|  |   var i = 0 | ||
|  |   while (i < len) { | ||
|  |     res += String.fromCharCode.apply( | ||
|  |       String, | ||
|  |       codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) | ||
|  |     ) | ||
|  |   } | ||
|  |   return res | ||
|  | } | ||
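|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): utf8Slice above rebuilds code points from | ||
|  | // multi-byte sequences, splits anything over U+FFFF into a surrogate pair, | ||
|  | // and turns invalid bytes into U+FFFD. | ||
|  | function exampleUtf8Decode () { | ||
|  |   // F0 9F 98 80 is the 4-byte UTF-8 encoding of U+1F600 (grinning face). | ||
|  |   var smiley = Buffer.from([0xF0, 0x9F, 0x98, 0x80]).toString() // '\uD83D\uDE00' | ||
|  |   var broken = Buffer.from([0x80]).toString()                   // '\uFFFD' | ||
|  |   return [smiley, broken] | ||
|  | } | ||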
|  | 
 | ||
|  | function asciiSlice (buf, start, end) { | ||
|  |   var ret = '' | ||
|  |   end = Math.min(buf.length, end) | ||
|  | 
 | ||
|  |   for (var i = start; i < end; ++i) { | ||
|  |     ret += String.fromCharCode(buf[i] & 0x7F) | ||
|  |   } | ||
|  |   return ret | ||
|  | } | ||
|  | 
 | ||
|  | function latin1Slice (buf, start, end) { | ||
|  |   var ret = '' | ||
|  |   end = Math.min(buf.length, end) | ||
|  | 
 | ||
|  |   for (var i = start; i < end; ++i) { | ||
|  |     ret += String.fromCharCode(buf[i]) | ||
|  |   } | ||
|  |   return ret | ||
|  | } | ||
|  | 
 | ||
|  | function hexSlice (buf, start, end) { | ||
|  |   var len = buf.length | ||
|  | 
 | ||
|  |   if (!start || start < 0) start = 0 | ||
|  |   if (!end || end < 0 || end > len) end = len | ||
|  | 
 | ||
|  |   var out = '' | ||
|  |   for (var i = start; i < end; ++i) { | ||
|  |     out += hexSliceLookupTable[buf[i]] | ||
|  |   } | ||
|  |   return out | ||
|  | } | ||
|  | 
 | ||
|  | function utf16leSlice (buf, start, end) { | ||
|  |   var bytes = buf.slice(start, end) | ||
|  |   var res = '' | ||
|  |   // If bytes.length is odd, the last 8 bits must be ignored (same as node.js)
 | ||
|  |   for (var i = 0; i < bytes.length - 1; i += 2) { | ||
|  |     res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256)) | ||
|  |   } | ||
|  |   return res | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.slice = function slice (start, end) { | ||
|  |   var len = this.length | ||
|  |   start = ~~start | ||
|  |   end = end === undefined ? len : ~~end | ||
|  | 
 | ||
|  |   if (start < 0) { | ||
|  |     start += len | ||
|  |     if (start < 0) start = 0 | ||
|  |   } else if (start > len) { | ||
|  |     start = len | ||
|  |   } | ||
|  | 
 | ||
|  |   if (end < 0) { | ||
|  |     end += len | ||
|  |     if (end < 0) end = 0 | ||
|  |   } else if (end > len) { | ||
|  |     end = len | ||
|  |   } | ||
|  | 
 | ||
|  |   if (end < start) end = start | ||
|  | 
 | ||
|  |   var newBuf = this.subarray(start, end) | ||
|  |   // Return an augmented `Uint8Array` instance
 | ||
|  |   Object.setPrototypeOf(newBuf, Buffer.prototype) | ||
|  | 
 | ||
|  |   return newBuf | ||
|  | } | ||
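|  | 
 | ||
|  | // Illustrative sketch, added for documentation (hypothetical helper, never | ||
|  | // invoked by the bundle): slice() clamps its arguments and returns a view | ||
|  | // over the same memory, so writing through the slice also mutates the parent | ||
|  | // Buffer. | ||
|  | function exampleSlice () { | ||
|  |   var buf = Buffer.from('abcdef') | ||
|  |   var tail = buf.slice(-2) // <Buffer 65 66> ('ef') | ||
|  |   tail[0] = 0x45           // parent now reads 'abcdEf' | ||
|  |   return buf.toString() | ||
|  | } | ||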
|  | 
 | ||
|  | /* | ||
|  |  * Need to make sure that buffer isn't trying to write out of bounds. | ||
|  |  */ | ||
|  | function checkOffset (offset, ext, length) { | ||
|  |   if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') | ||
|  |   if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readUintLE = | ||
|  | Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   byteLength = byteLength >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, byteLength, this.length) | ||
|  | 
 | ||
|  |   var val = this[offset] | ||
|  |   var mul = 1 | ||
|  |   var i = 0 | ||
|  |   while (++i < byteLength && (mul *= 0x100)) { | ||
|  |     val += this[offset + i] * mul | ||
|  |   } | ||
|  | 
 | ||
|  |   return val | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readUintBE = | ||
|  | Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   byteLength = byteLength >>> 0 | ||
|  |   if (!noAssert) { | ||
|  |     checkOffset(offset, byteLength, this.length) | ||
|  |   } | ||
|  | 
 | ||
|  |   var val = this[offset + --byteLength] | ||
|  |   var mul = 1 | ||
|  |   while (byteLength > 0 && (mul *= 0x100)) { | ||
|  |     val += this[offset + --byteLength] * mul | ||
|  |   } | ||
|  | 
 | ||
|  |   return val | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readUint8 = | ||
|  | Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 1, this.length) | ||
|  |   return this[offset] | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readUint16LE = | ||
|  | Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 2, this.length) | ||
|  |   return this[offset] | (this[offset + 1] << 8) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readUint16BE = | ||
|  | Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 2, this.length) | ||
|  |   return (this[offset] << 8) | this[offset + 1] | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readUint32LE = | ||
|  | Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 4, this.length) | ||
|  | 
 | ||
|  |   return ((this[offset]) | | ||
|  |       (this[offset + 1] << 8) | | ||
|  |       (this[offset + 2] << 16)) + | ||
|  |       (this[offset + 3] * 0x1000000) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readUint32BE = | ||
|  | Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 4, this.length) | ||
|  | 
 | ||
|  |   return (this[offset] * 0x1000000) + | ||
|  |     ((this[offset + 1] << 16) | | ||
|  |     (this[offset + 2] << 8) | | ||
|  |     this[offset + 3]) | ||
|  | } | ||
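// Editor's note: clarifying remark, not part of the original bundle.
// In readUInt32LE/readUInt32BE above, the most significant byte is combined
// via `* 0x1000000` instead of `<< 24`: a 32-bit shift would produce a signed
// (possibly negative) result, while multiplication keeps the value in the
// unsigned 0..0xFFFFFFFF range.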
|  | 
 | ||
|  | Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   byteLength = byteLength >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, byteLength, this.length) | ||
|  | 
 | ||
|  |   var val = this[offset] | ||
|  |   var mul = 1 | ||
|  |   var i = 0 | ||
|  |   while (++i < byteLength && (mul *= 0x100)) { | ||
|  |     val += this[offset + i] * mul | ||
|  |   } | ||
|  |   mul *= 0x80 | ||
|  | 
 | ||
|  |   if (val >= mul) val -= Math.pow(2, 8 * byteLength) | ||
|  | 
 | ||
|  |   return val | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   byteLength = byteLength >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, byteLength, this.length) | ||
|  | 
 | ||
|  |   var i = byteLength | ||
|  |   var mul = 1 | ||
|  |   var val = this[offset + --i] | ||
|  |   while (i > 0 && (mul *= 0x100)) { | ||
|  |     val += this[offset + --i] * mul | ||
|  |   } | ||
|  |   mul *= 0x80 | ||
|  | 
 | ||
|  |   if (val >= mul) val -= Math.pow(2, 8 * byteLength) | ||
|  | 
 | ||
|  |   return val | ||
|  | } | ||
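// Editor's note: illustrative sketch, not part of the original bundle.
// The signed variants apply a two's-complement adjustment when the top bit of
// the most significant byte is set:
//
//   var buf = Buffer.from([0xfe, 0xff])
//   buf.readUIntLE(0, 2)   // 65534
//   buf.readIntLE(0, 2)    // -2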
|  | 
 | ||
|  | Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 1, this.length) | ||
|  |   if (!(this[offset] & 0x80)) return (this[offset]) | ||
|  |   return ((0xff - this[offset] + 1) * -1) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 2, this.length) | ||
|  |   var val = this[offset] | (this[offset + 1] << 8) | ||
|  |   return (val & 0x8000) ? val | 0xFFFF0000 : val | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 2, this.length) | ||
|  |   var val = this[offset + 1] | (this[offset] << 8) | ||
|  |   return (val & 0x8000) ? val | 0xFFFF0000 : val | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 4, this.length) | ||
|  | 
 | ||
|  |   return (this[offset]) | | ||
|  |     (this[offset + 1] << 8) | | ||
|  |     (this[offset + 2] << 16) | | ||
|  |     (this[offset + 3] << 24) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 4, this.length) | ||
|  | 
 | ||
|  |   return (this[offset] << 24) | | ||
|  |     (this[offset + 1] << 16) | | ||
|  |     (this[offset + 2] << 8) | | ||
|  |     (this[offset + 3]) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 4, this.length) | ||
|  |   return ieee754.read(this, offset, true, 23, 4) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 4, this.length) | ||
|  |   return ieee754.read(this, offset, false, 23, 4) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 8, this.length) | ||
|  |   return ieee754.read(this, offset, true, 52, 8) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkOffset(offset, 8, this.length) | ||
|  |   return ieee754.read(this, offset, false, 52, 8) | ||
|  | } | ||
|  | 
 | ||
|  | function checkInt (buf, value, offset, ext, max, min) { | ||
|  |   if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') | ||
|  |   if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') | ||
|  |   if (offset + ext > buf.length) throw new RangeError('Index out of range') | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeUintLE = | ||
|  | Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   byteLength = byteLength >>> 0 | ||
|  |   if (!noAssert) { | ||
|  |     var maxBytes = Math.pow(2, 8 * byteLength) - 1 | ||
|  |     checkInt(this, value, offset, byteLength, maxBytes, 0) | ||
|  |   } | ||
|  | 
 | ||
|  |   var mul = 1 | ||
|  |   var i = 0 | ||
|  |   this[offset] = value & 0xFF | ||
|  |   while (++i < byteLength && (mul *= 0x100)) { | ||
|  |     this[offset + i] = (value / mul) & 0xFF | ||
|  |   } | ||
|  | 
 | ||
|  |   return offset + byteLength | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeUintBE = | ||
|  | Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   byteLength = byteLength >>> 0 | ||
|  |   if (!noAssert) { | ||
|  |     var maxBytes = Math.pow(2, 8 * byteLength) - 1 | ||
|  |     checkInt(this, value, offset, byteLength, maxBytes, 0) | ||
|  |   } | ||
|  | 
 | ||
|  |   var i = byteLength - 1 | ||
|  |   var mul = 1 | ||
|  |   this[offset + i] = value & 0xFF | ||
|  |   while (--i >= 0 && (mul *= 0x100)) { | ||
|  |     this[offset + i] = (value / mul) & 0xFF | ||
|  |   } | ||
|  | 
 | ||
|  |   return offset + byteLength | ||
|  | } | ||
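// Editor's note: illustrative sketch, not part of the original bundle.
// writeUIntLE/writeUIntBE write an unsigned integer of 1..6 bytes and return
// the offset just past the written bytes:
//
//   var buf = Buffer.alloc(3)
//   buf.writeUIntBE(0x123456, 0, 3)   // returns 3
//   buf                               // <Buffer 12 34 56>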
|  | 
 | ||
|  | Buffer.prototype.writeUint8 = | ||
|  | Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) | ||
|  |   this[offset] = (value & 0xff) | ||
|  |   return offset + 1 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeUint16LE = | ||
|  | Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) | ||
|  |   this[offset] = (value & 0xff) | ||
|  |   this[offset + 1] = (value >>> 8) | ||
|  |   return offset + 2 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeUint16BE = | ||
|  | Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) | ||
|  |   this[offset] = (value >>> 8) | ||
|  |   this[offset + 1] = (value & 0xff) | ||
|  |   return offset + 2 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeUint32LE = | ||
|  | Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) | ||
|  |   this[offset + 3] = (value >>> 24) | ||
|  |   this[offset + 2] = (value >>> 16) | ||
|  |   this[offset + 1] = (value >>> 8) | ||
|  |   this[offset] = (value & 0xff) | ||
|  |   return offset + 4 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeUint32BE = | ||
|  | Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) | ||
|  |   this[offset] = (value >>> 24) | ||
|  |   this[offset + 1] = (value >>> 16) | ||
|  |   this[offset + 2] = (value >>> 8) | ||
|  |   this[offset + 3] = (value & 0xff) | ||
|  |   return offset + 4 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) { | ||
|  |     var limit = Math.pow(2, (8 * byteLength) - 1) | ||
|  | 
 | ||
|  |     checkInt(this, value, offset, byteLength, limit - 1, -limit) | ||
|  |   } | ||
|  | 
 | ||
|  |   var i = 0 | ||
|  |   var mul = 1 | ||
|  |   var sub = 0 | ||
|  |   this[offset] = value & 0xFF | ||
|  |   while (++i < byteLength && (mul *= 0x100)) { | ||
|  |     if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { | ||
|  |       sub = 1 | ||
|  |     } | ||
|  |     this[offset + i] = ((value / mul) >> 0) - sub & 0xFF | ||
|  |   } | ||
|  | 
 | ||
|  |   return offset + byteLength | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) { | ||
|  |     var limit = Math.pow(2, (8 * byteLength) - 1) | ||
|  | 
 | ||
|  |     checkInt(this, value, offset, byteLength, limit - 1, -limit) | ||
|  |   } | ||
|  | 
 | ||
|  |   var i = byteLength - 1 | ||
|  |   var mul = 1 | ||
|  |   var sub = 0 | ||
|  |   this[offset + i] = value & 0xFF | ||
|  |   while (--i >= 0 && (mul *= 0x100)) { | ||
|  |     if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { | ||
|  |       sub = 1 | ||
|  |     } | ||
|  |     this[offset + i] = ((value / mul) >> 0) - sub & 0xFF | ||
|  |   } | ||
|  | 
 | ||
|  |   return offset + byteLength | ||
|  | } | ||
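// Editor's note: illustrative sketch, not part of the original bundle.
// Negative values are stored in two's complement; the `sub` flag above borrows
// 1 from the higher bytes once a non-zero low byte has been written:
//
//   var buf = Buffer.alloc(2)
//   buf.writeIntLE(-2, 0, 2)
//   buf                       // <Buffer fe ff>  (0xfffe is -2 in 16 bits)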
|  | 
 | ||
|  | Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) | ||
|  |   if (value < 0) value = 0xff + value + 1 | ||
|  |   this[offset] = (value & 0xff) | ||
|  |   return offset + 1 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) | ||
|  |   this[offset] = (value & 0xff) | ||
|  |   this[offset + 1] = (value >>> 8) | ||
|  |   return offset + 2 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) | ||
|  |   this[offset] = (value >>> 8) | ||
|  |   this[offset + 1] = (value & 0xff) | ||
|  |   return offset + 2 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) | ||
|  |   this[offset] = (value & 0xff) | ||
|  |   this[offset + 1] = (value >>> 8) | ||
|  |   this[offset + 2] = (value >>> 16) | ||
|  |   this[offset + 3] = (value >>> 24) | ||
|  |   return offset + 4 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) | ||
|  |   if (value < 0) value = 0xffffffff + value + 1 | ||
|  |   this[offset] = (value >>> 24) | ||
|  |   this[offset + 1] = (value >>> 16) | ||
|  |   this[offset + 2] = (value >>> 8) | ||
|  |   this[offset + 3] = (value & 0xff) | ||
|  |   return offset + 4 | ||
|  | } | ||
|  | 
 | ||
|  | function checkIEEE754 (buf, value, offset, ext, max, min) { | ||
|  |   if (offset + ext > buf.length) throw new RangeError('Index out of range') | ||
|  |   if (offset < 0) throw new RangeError('Index out of range') | ||
|  | } | ||
|  | 
 | ||
|  | function writeFloat (buf, value, offset, littleEndian, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) { | ||
|  |     checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) | ||
|  |   } | ||
|  |   ieee754.write(buf, value, offset, littleEndian, 23, 4) | ||
|  |   return offset + 4 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { | ||
|  |   return writeFloat(this, value, offset, true, noAssert) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { | ||
|  |   return writeFloat(this, value, offset, false, noAssert) | ||
|  | } | ||
|  | 
 | ||
|  | function writeDouble (buf, value, offset, littleEndian, noAssert) { | ||
|  |   value = +value | ||
|  |   offset = offset >>> 0 | ||
|  |   if (!noAssert) { | ||
|  |     checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) | ||
|  |   } | ||
|  |   ieee754.write(buf, value, offset, littleEndian, 52, 8) | ||
|  |   return offset + 8 | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { | ||
|  |   return writeDouble(this, value, offset, true, noAssert) | ||
|  | } | ||
|  | 
 | ||
|  | Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { | ||
|  |   return writeDouble(this, value, offset, false, noAssert) | ||
|  | } | ||
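// Editor's note: illustrative sketch, not part of the original bundle.
// The float and double accessors delegate to the bundled ieee754 module, so a
// write/read round trip of an exactly representable value is lossless:
//
//   var buf = Buffer.alloc(4)
//   buf.writeFloatLE(1.5, 0)
//   buf.readFloatLE(0)        // 1.5 (exactly representable in float32)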
|  | 
 | ||
|  | // copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
 | ||
|  | Buffer.prototype.copy = function copy (target, targetStart, start, end) { | ||
|  |   if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer') | ||
|  |   if (!start) start = 0 | ||
|  |   if (!end && end !== 0) end = this.length | ||
|  |   if (targetStart >= target.length) targetStart = target.length | ||
|  |   if (!targetStart) targetStart = 0 | ||
|  |   if (end > 0 && end < start) end = start | ||
|  | 
 | ||
|  |   // Copy 0 bytes; we're done
 | ||
|  |   if (end === start) return 0 | ||
|  |   if (target.length === 0 || this.length === 0) return 0 | ||
|  | 
 | ||
|  |   // Fatal error conditions
 | ||
|  |   if (targetStart < 0) { | ||
|  |     throw new RangeError('targetStart out of bounds') | ||
|  |   } | ||
|  |   if (start < 0 || start >= this.length) throw new RangeError('Index out of range') | ||
|  |   if (end < 0) throw new RangeError('sourceEnd out of bounds') | ||
|  | 
 | ||
|  |   // Are we oob?
 | ||
|  |   if (end > this.length) end = this.length | ||
|  |   if (target.length - targetStart < end - start) { | ||
|  |     end = target.length - targetStart + start | ||
|  |   } | ||
|  | 
 | ||
|  |   var len = end - start | ||
|  | 
 | ||
|  |   if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') { | ||
|  |     // Use built-in when available, missing from IE11
 | ||
|  |     this.copyWithin(targetStart, start, end) | ||
|  |   } else { | ||
|  |     Uint8Array.prototype.set.call( | ||
|  |       target, | ||
|  |       this.subarray(start, end), | ||
|  |       targetStart | ||
|  |     ) | ||
|  |   } | ||
|  | 
 | ||
|  |   return len | ||
|  | } | ||
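// Editor's note: illustrative sketch, not part of the original bundle.
// copy() returns the number of bytes copied and clamps the range to whichever
// buffer is shorter:
//
//   var src = Buffer.from('abcdef')
//   var dst = Buffer.alloc(3)
//   src.copy(dst, 0, 2, 5)    // returns 3
//   dst.toString()            // 'cde'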
|  | 
 | ||
|  | // Usage:
 | ||
|  | //    buffer.fill(number[, offset[, end]])
 | ||
|  | //    buffer.fill(buffer[, offset[, end]])
 | ||
|  | //    buffer.fill(string[, offset[, end]][, encoding])
 | ||
|  | Buffer.prototype.fill = function fill (val, start, end, encoding) { | ||
|  |   // Handle string cases:
 | ||
|  |   if (typeof val === 'string') { | ||
|  |     if (typeof start === 'string') { | ||
|  |       encoding = start | ||
|  |       start = 0 | ||
|  |       end = this.length | ||
|  |     } else if (typeof end === 'string') { | ||
|  |       encoding = end | ||
|  |       end = this.length | ||
|  |     } | ||
|  |     if (encoding !== undefined && typeof encoding !== 'string') { | ||
|  |       throw new TypeError('encoding must be a string') | ||
|  |     } | ||
|  |     if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { | ||
|  |       throw new TypeError('Unknown encoding: ' + encoding) | ||
|  |     } | ||
|  |     if (val.length === 1) { | ||
|  |       var code = val.charCodeAt(0) | ||
|  |       if ((encoding === 'utf8' && code < 128) || | ||
|  |           encoding === 'latin1') { | ||
|  |         // Fast path: If `val` fits into a single byte, use that numeric value.
 | ||
|  |         val = code | ||
|  |       } | ||
|  |     } | ||
|  |   } else if (typeof val === 'number') { | ||
|  |     val = val & 255 | ||
|  |   } else if (typeof val === 'boolean') { | ||
|  |     val = Number(val) | ||
|  |   } | ||
|  | 
 | ||
|  |   // Invalid ranges are not set to a default, so can range check early.
 | ||
|  |   if (start < 0 || this.length < start || this.length < end) { | ||
|  |     throw new RangeError('Out of range index') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (end <= start) { | ||
|  |     return this | ||
|  |   } | ||
|  | 
 | ||
|  |   start = start >>> 0 | ||
|  |   end = end === undefined ? this.length : end >>> 0 | ||
|  | 
 | ||
|  |   if (!val) val = 0 | ||
|  | 
 | ||
|  |   var i | ||
|  |   if (typeof val === 'number') { | ||
|  |     for (i = start; i < end; ++i) { | ||
|  |       this[i] = val | ||
|  |     } | ||
|  |   } else { | ||
|  |     var bytes = Buffer.isBuffer(val) | ||
|  |       ? val | ||
|  |       : Buffer.from(val, encoding) | ||
|  |     var len = bytes.length | ||
|  |     if (len === 0) { | ||
|  |       throw new TypeError('The value "' + val + | ||
|  |         '" is invalid for argument "value"') | ||
|  |     } | ||
|  |     for (i = 0; i < end - start; ++i) { | ||
|  |       this[i + start] = bytes[i % len] | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return this | ||
|  | } | ||
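// Editor's note: illustrative sketch, not part of the original bundle.
// When filled with a multi-byte string or buffer, the pattern repeats until
// the range is full:
//
//   Buffer.alloc(5).fill('ab').toString()   // 'ababa'
//   Buffer.alloc(4).fill(0x61).toString()   // 'aaaa'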
|  | 
 | ||
|  | // HELPER FUNCTIONS
 | ||
|  | // ================
 | ||
|  | 
 | ||
|  | var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g | ||
|  | 
 | ||
|  | function base64clean (str) { | ||
|  |   // Node takes equal signs as end of the Base64 encoding
 | ||
|  |   str = str.split('=')[0] | ||
|  |   // Node strips out invalid characters like \n and \t from the string, base64-js does not
 | ||
|  |   str = str.trim().replace(INVALID_BASE64_RE, '') | ||
|  |   // Node converts strings with length < 2 to ''
 | ||
|  |   if (str.length < 2) return '' | ||
|  |   // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
 | ||
|  |   while (str.length % 4 !== 0) { | ||
|  |     str = str + '=' | ||
|  |   } | ||
|  |   return str | ||
|  | } | ||
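// Editor's note: illustrative sketch, not part of the original bundle.
// base64clean() normalises input the way Node does before handing it to
// base64-js, e.g.:
//
//   base64clean('aGVsbG8\n=')   // 'aGVsbG8=' -- whitespace stripped, padding re-added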
|  | 
 | ||
|  | function utf8ToBytes (string, units) { | ||
|  |   units = units || Infinity | ||
|  |   var codePoint | ||
|  |   var length = string.length | ||
|  |   var leadSurrogate = null | ||
|  |   var bytes = [] | ||
|  | 
 | ||
|  |   for (var i = 0; i < length; ++i) { | ||
|  |     codePoint = string.charCodeAt(i) | ||
|  | 
 | ||
|  |     // is surrogate component
 | ||
|  |     if (codePoint > 0xD7FF && codePoint < 0xE000) { | ||
|  |       // last char was a lead
 | ||
|  |       if (!leadSurrogate) { | ||
|  |         // no lead yet
 | ||
|  |         if (codePoint > 0xDBFF) { | ||
|  |           // unexpected trail
 | ||
|  |           if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) | ||
|  |           continue | ||
|  |         } else if (i + 1 === length) { | ||
|  |           // unpaired lead
 | ||
|  |           if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) | ||
|  |           continue | ||
|  |         } | ||
|  | 
 | ||
|  |         // valid lead
 | ||
|  |         leadSurrogate = codePoint | ||
|  | 
 | ||
|  |         continue | ||
|  |       } | ||
|  | 
 | ||
|  |       // 2 leads in a row
 | ||
|  |       if (codePoint < 0xDC00) { | ||
|  |         if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) | ||
|  |         leadSurrogate = codePoint | ||
|  |         continue | ||
|  |       } | ||
|  | 
 | ||
|  |       // valid surrogate pair
 | ||
|  |       codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 | ||
|  |     } else if (leadSurrogate) { | ||
|  |       // valid bmp char, but last char was a lead
 | ||
|  |       if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) | ||
|  |     } | ||
|  | 
 | ||
|  |     leadSurrogate = null | ||
|  | 
 | ||
|  |     // encode utf8
 | ||
|  |     if (codePoint < 0x80) { | ||
|  |       if ((units -= 1) < 0) break | ||
|  |       bytes.push(codePoint) | ||
|  |     } else if (codePoint < 0x800) { | ||
|  |       if ((units -= 2) < 0) break | ||
|  |       bytes.push( | ||
|  |         codePoint >> 0x6 | 0xC0, | ||
|  |         codePoint & 0x3F | 0x80 | ||
|  |       ) | ||
|  |     } else if (codePoint < 0x10000) { | ||
|  |       if ((units -= 3) < 0) break | ||
|  |       bytes.push( | ||
|  |         codePoint >> 0xC | 0xE0, | ||
|  |         codePoint >> 0x6 & 0x3F | 0x80, | ||
|  |         codePoint & 0x3F | 0x80 | ||
|  |       ) | ||
|  |     } else if (codePoint < 0x110000) { | ||
|  |       if ((units -= 4) < 0) break | ||
|  |       bytes.push( | ||
|  |         codePoint >> 0x12 | 0xF0, | ||
|  |         codePoint >> 0xC & 0x3F | 0x80, | ||
|  |         codePoint >> 0x6 & 0x3F | 0x80, | ||
|  |         codePoint & 0x3F | 0x80 | ||
|  |       ) | ||
|  |     } else { | ||
|  |       throw new Error('Invalid code point') | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return bytes | ||
|  | } | ||
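// Editor's note: illustrative sketch, not part of the original bundle.
// utf8ToBytes() emits standard UTF-8 and substitutes U+FFFD (EF BF BD) for
// unpaired surrogates:
//
//   utf8ToBytes('\u00e9')   // [0xC3, 0xA9]
//   utf8ToBytes('\u20ac')   // [0xE2, 0x82, 0xAC]
//   utf8ToBytes('\ud800')   // [0xEF, 0xBF, 0xBD]  (lone lead surrogate)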
|  | 
 | ||
|  | function asciiToBytes (str) { | ||
|  |   var byteArray = [] | ||
|  |   for (var i = 0; i < str.length; ++i) { | ||
|  |     // Node's code seems to be doing this and not & 0x7F..
 | ||
|  |     byteArray.push(str.charCodeAt(i) & 0xFF) | ||
|  |   } | ||
|  |   return byteArray | ||
|  | } | ||
|  | 
 | ||
|  | function utf16leToBytes (str, units) { | ||
|  |   var c, hi, lo | ||
|  |   var byteArray = [] | ||
|  |   for (var i = 0; i < str.length; ++i) { | ||
|  |     if ((units -= 2) < 0) break | ||
|  | 
 | ||
|  |     c = str.charCodeAt(i) | ||
|  |     hi = c >> 8 | ||
|  |     lo = c % 256 | ||
|  |     byteArray.push(lo) | ||
|  |     byteArray.push(hi) | ||
|  |   } | ||
|  | 
 | ||
|  |   return byteArray | ||
|  | } | ||
|  | 
 | ||
|  | function base64ToBytes (str) { | ||
|  |   return base64.toByteArray(base64clean(str)) | ||
|  | } | ||
|  | 
 | ||
|  | function blitBuffer (src, dst, offset, length) { | ||
|  |   for (var i = 0; i < length; ++i) { | ||
|  |     if ((i + offset >= dst.length) || (i >= src.length)) break | ||
|  |     dst[i + offset] = src[i] | ||
|  |   } | ||
|  |   return i | ||
|  | } | ||
|  | 
 | ||
|  | // ArrayBuffer or Uint8Array objects from other contexts (i.e. iframes) do not pass
 | ||
|  | // the `instanceof` check but they should be treated as of that type.
 | ||
|  | // See: https://github.com/feross/buffer/issues/166
 | ||
|  | function isInstance (obj, type) { | ||
|  |   return obj instanceof type || | ||
|  |     (obj != null && obj.constructor != null && obj.constructor.name != null && | ||
|  |       obj.constructor.name === type.name) | ||
|  | } | ||
|  | function numberIsNaN (obj) { | ||
|  |   // For IE11 support
 | ||
|  |   return obj !== obj // eslint-disable-line no-self-compare
 | ||
|  | } | ||
|  | 
 | ||
|  | // Create lookup table for `toString('hex')`
 | ||
|  | // See: https://github.com/feross/buffer/issues/219
 | ||
|  | var hexSliceLookupTable = (function () { | ||
|  |   var alphabet = '0123456789abcdef' | ||
|  |   var table = new Array(256) | ||
|  |   for (var i = 0; i < 16; ++i) { | ||
|  |     var i16 = i * 16 | ||
|  |     for (var j = 0; j < 16; ++j) { | ||
|  |       table[i16 + j] = alphabet[i] + alphabet[j] | ||
|  |     } | ||
|  |   } | ||
|  |   return table | ||
|  | })() | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(13).Buffer) | ||
|  | },{"10":10,"13":13,"30":30}],14:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | // NOTE: These type checking functions intentionally don't use `instanceof`
 | ||
|  | // because it is fragile and can be easily faked with `Object.create()`.
 | ||
|  | 
 | ||
|  | function isArray(arg) { | ||
|  |   if (Array.isArray) { | ||
|  |     return Array.isArray(arg); | ||
|  |   } | ||
|  |   return objectToString(arg) === '[object Array]'; | ||
|  | } | ||
|  | exports.isArray = isArray; | ||
|  | 
 | ||
|  | function isBoolean(arg) { | ||
|  |   return typeof arg === 'boolean'; | ||
|  | } | ||
|  | exports.isBoolean = isBoolean; | ||
|  | 
 | ||
|  | function isNull(arg) { | ||
|  |   return arg === null; | ||
|  | } | ||
|  | exports.isNull = isNull; | ||
|  | 
 | ||
|  | function isNullOrUndefined(arg) { | ||
|  |   return arg == null; | ||
|  | } | ||
|  | exports.isNullOrUndefined = isNullOrUndefined; | ||
|  | 
 | ||
|  | function isNumber(arg) { | ||
|  |   return typeof arg === 'number'; | ||
|  | } | ||
|  | exports.isNumber = isNumber; | ||
|  | 
 | ||
|  | function isString(arg) { | ||
|  |   return typeof arg === 'string'; | ||
|  | } | ||
|  | exports.isString = isString; | ||
|  | 
 | ||
|  | function isSymbol(arg) { | ||
|  |   return typeof arg === 'symbol'; | ||
|  | } | ||
|  | exports.isSymbol = isSymbol; | ||
|  | 
 | ||
|  | function isUndefined(arg) { | ||
|  |   return arg === void 0; | ||
|  | } | ||
|  | exports.isUndefined = isUndefined; | ||
|  | 
 | ||
|  | function isRegExp(re) { | ||
|  |   return objectToString(re) === '[object RegExp]'; | ||
|  | } | ||
|  | exports.isRegExp = isRegExp; | ||
|  | 
 | ||
|  | function isObject(arg) { | ||
|  |   return typeof arg === 'object' && arg !== null; | ||
|  | } | ||
|  | exports.isObject = isObject; | ||
|  | 
 | ||
|  | function isDate(d) { | ||
|  |   return objectToString(d) === '[object Date]'; | ||
|  | } | ||
|  | exports.isDate = isDate; | ||
|  | 
 | ||
|  | function isError(e) { | ||
|  |   return (objectToString(e) === '[object Error]' || e instanceof Error); | ||
|  | } | ||
|  | exports.isError = isError; | ||
|  | 
 | ||
|  | function isFunction(arg) { | ||
|  |   return typeof arg === 'function'; | ||
|  | } | ||
|  | exports.isFunction = isFunction; | ||
|  | 
 | ||
|  | function isPrimitive(arg) { | ||
|  |   return arg === null || | ||
|  |          typeof arg === 'boolean' || | ||
|  |          typeof arg === 'number' || | ||
|  |          typeof arg === 'string' || | ||
|  |          typeof arg === 'symbol' ||  // ES6 symbol
 | ||
|  |          typeof arg === 'undefined'; | ||
|  | } | ||
|  | exports.isPrimitive = isPrimitive; | ||
|  | 
 | ||
|  | exports.isBuffer = _dereq_(13).Buffer.isBuffer; | ||
|  | 
 | ||
|  | function objectToString(o) { | ||
|  |   return Object.prototype.toString.call(o); | ||
|  | } | ||
|  | 
 | ||
|  | },{"13":13}],15:[function(_dereq_,module,exports){ | ||
|  | var Buffer = _dereq_(13).Buffer | ||
|  | 
 | ||
|  | var CHARS = '.PYFGCRLAOEUIDHTNSQJKXBMWVZ_pyfgcrlaoeuidhtnsqjkxbmwvz1234567890' | ||
|  |   .split('').sort().join('') | ||
|  | 
 | ||
|  | module.exports = function (chars, exports) { | ||
|  |   chars = chars || CHARS | ||
|  |   exports = exports || {} | ||
|  |   if(chars.length !== 64) throw new Error('a base 64 encoding requires 64 chars') | ||
|  | 
 | ||
|  |   var codeToIndex = new Buffer(128) | ||
|  |   codeToIndex.fill() | ||
|  | 
 | ||
|  |   for(var i = 0; i < 64; i++) { | ||
|  |     var code = chars.charCodeAt(i) | ||
|  |     codeToIndex[code] = i | ||
|  |   } | ||
|  | 
 | ||
|  |   exports.encode = function (data) { | ||
|  |       var s = '', l = data.length, hang = 0 | ||
|  |       for(var i = 0; i < l; i++) { | ||
|  |         var v = data[i] | ||
|  | 
 | ||
|  |         switch (i % 3) { | ||
|  |           case 0: | ||
|  |             s += chars[v >> 2] | ||
|  |             hang = (v & 3) << 4 | ||
|  |           break; | ||
|  |           case 1: | ||
|  |             s += chars[hang | v >> 4] | ||
|  |             hang = (v & 0xf) << 2 | ||
|  |           break; | ||
|  |           case 2: | ||
|  |             s += chars[hang | v >> 6] | ||
|  |             s += chars[v & 0x3f] | ||
|  |             hang = 0 | ||
|  |           break; | ||
|  |         } | ||
|  | 
 | ||
|  |       } | ||
|  |       if(l%3) s += chars[hang] | ||
|  |       return s | ||
|  |     } | ||
|  |   exports.decode = function (str) { | ||
|  |       var l = str.length, j = 0 | ||
|  |       var b = new Buffer(~~((l/4)*3)), hang = 0 | ||
|  | 
 | ||
|  |       for(var i = 0; i < l; i++) { | ||
|  |         var v = codeToIndex[str.charCodeAt(i)] | ||
|  | 
 | ||
|  |         switch (i % 4) { | ||
|  |           case 0: | ||
|  |             hang = v << 2; | ||
|  |           break; | ||
|  |           case 1: | ||
|  |             b[j++] = hang | v >> 4 | ||
|  |             hang = (v << 4) & 0xff | ||
|  |           break; | ||
|  |           case 2: | ||
|  |             b[j++] = hang | v >> 2 | ||
|  |             hang = (v << 6) & 0xff | ||
|  |           break; | ||
|  |           case 3: | ||
|  |             b[j++] = hang | v | ||
|  |           break; | ||
|  |         } | ||
|  | 
 | ||
|  |       } | ||
|  |       return b | ||
|  |     } | ||
|  |   return exports | ||
|  | } | ||
|  | 
 | ||
|  | module.exports(CHARS, module.exports) | ||
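// Editor's note: illustrative sketch, not part of the original bundle.
// This module is a small custom base-64 codec over a sorted, filename-friendly
// alphabet; encode() and decode() round-trip a byte array:
//
//   var b64 = module.exports
//   var out = b64.encode(new Buffer([0, 1, 2]))   // 4-character string
//   b64.decode(out)                               // <Buffer 00 01 02>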
|  | 
 | ||
|  | 
 | ||
|  | },{"13":13}],16:[function(_dereq_,module,exports){ | ||
|  | var AbstractIterator = _dereq_(21).AbstractIterator | ||
|  | var inherits = _dereq_(37) | ||
|  | 
 | ||
|  | function DeferredIterator (db, options) { | ||
|  |   AbstractIterator.call(this, db) | ||
|  | 
 | ||
|  |   this._options = options | ||
|  |   this._iterator = null | ||
|  |   this._operations = [] | ||
|  | } | ||
|  | 
 | ||
|  | inherits(DeferredIterator, AbstractIterator) | ||
|  | 
 | ||
|  | DeferredIterator.prototype.setDb = function (db) { | ||
|  |   var it = this._iterator = db.iterator(this._options) | ||
|  |   this._operations.forEach(function (op) { | ||
|  |     it[op.method].apply(it, op.args) | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | DeferredIterator.prototype._operation = function (method, args) { | ||
|  |   if (this._iterator) return this._iterator[method].apply(this._iterator, args) | ||
|  |   this._operations.push({ method: method, args: args }) | ||
|  | } | ||
|  | 
 | ||
|  | 'next end'.split(' ').forEach(function (m) { | ||
|  |   DeferredIterator.prototype['_' + m] = function () { | ||
|  |     this._operation(m, arguments) | ||
|  |   } | ||
|  | }) | ||
|  | 
 | ||
|  | // Must defer seek() rather than _seek() because it requires db._serializeKey to be available
 | ||
|  | DeferredIterator.prototype.seek = function () { | ||
|  |   this._operation('seek', arguments) | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = DeferredIterator | ||
|  | 
 | ||
|  | },{"21":21,"37":37}],17:[function(_dereq_,module,exports){ | ||
|  | var AbstractLevelDOWN = _dereq_(21).AbstractLevelDOWN | ||
|  | var inherits = _dereq_(37) | ||
|  | var DeferredIterator = _dereq_(16) | ||
|  | var deferrables = 'put get del batch clear'.split(' ') | ||
|  | var optionalDeferrables = 'approximateSize compactRange'.split(' ') | ||
|  | 
 | ||
|  | function DeferredLevelDOWN (db) { | ||
|  |   AbstractLevelDOWN.call(this, db.supports || {}) | ||
|  | 
 | ||
|  |   // TODO (future major): remove this fallback; db must have manifest that
 | ||
|  |   // declares approximateSize and compactRange in additionalMethods.
 | ||
|  |   optionalDeferrables.forEach(function (m) { | ||
|  |     if (typeof db[m] === 'function' && !this.supports.additionalMethods[m]) { | ||
|  |       this.supports.additionalMethods[m] = true | ||
|  |     } | ||
|  |   }, this) | ||
|  | 
 | ||
|  |   this._db = db | ||
|  |   this._operations = [] | ||
|  |   closed(this) | ||
|  | } | ||
|  | 
 | ||
|  | inherits(DeferredLevelDOWN, AbstractLevelDOWN) | ||
|  | 
 | ||
|  | DeferredLevelDOWN.prototype.type = 'deferred-leveldown' | ||
|  | 
 | ||
|  | DeferredLevelDOWN.prototype._open = function (options, callback) { | ||
|  |   var self = this | ||
|  | 
 | ||
|  |   this._db.open(options, function (err) { | ||
|  |     if (err) return callback(err) | ||
|  | 
 | ||
|  |     self._operations.forEach(function (op) { | ||
|  |       if (op.iterator) { | ||
|  |         op.iterator.setDb(self._db) | ||
|  |       } else { | ||
|  |         self._db[op.method].apply(self._db, op.args) | ||
|  |       } | ||
|  |     }) | ||
|  |     self._operations = [] | ||
|  | 
 | ||
|  |     open(self) | ||
|  |     callback() | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | DeferredLevelDOWN.prototype._close = function (callback) { | ||
|  |   var self = this | ||
|  | 
 | ||
|  |   this._db.close(function (err) { | ||
|  |     if (err) return callback(err) | ||
|  |     closed(self) | ||
|  |     callback() | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | function open (self) { | ||
|  |   deferrables.concat('iterator').forEach(function (m) { | ||
|  |     self['_' + m] = function () { | ||
|  |       return this._db[m].apply(this._db, arguments) | ||
|  |     } | ||
|  |   }) | ||
|  |   Object.keys(self.supports.additionalMethods).forEach(function (m) { | ||
|  |     self[m] = function () { | ||
|  |       return this._db[m].apply(this._db, arguments) | ||
|  |     } | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | function closed (self) { | ||
|  |   deferrables.forEach(function (m) { | ||
|  |     self['_' + m] = function () { | ||
|  |       this._operations.push({ method: m, args: arguments }) | ||
|  |     } | ||
|  |   }) | ||
|  |   Object.keys(self.supports.additionalMethods).forEach(function (m) { | ||
|  |     self[m] = function () { | ||
|  |       this._operations.push({ method: m, args: arguments }) | ||
|  |     } | ||
|  |   }) | ||
|  |   self._iterator = function (options) { | ||
|  |     var it = new DeferredIterator(self, options) | ||
|  |     this._operations.push({ iterator: it }) | ||
|  |     return it | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | DeferredLevelDOWN.prototype._serializeKey = function (key) { | ||
|  |   return key | ||
|  | } | ||
|  | 
 | ||
|  | DeferredLevelDOWN.prototype._serializeValue = function (value) { | ||
|  |   return value | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = DeferredLevelDOWN | ||
|  | module.exports.DeferredIterator = DeferredIterator | ||
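// Editor's note: illustrative sketch, not part of the original bundle;
// `realDown` stands for any abstract-leveldown backend (hypothetical name).
// DeferredLevelDOWN queues operations issued before the underlying store has
// finished opening and replays them, in order, once open() succeeds:
//
//   var deferred = new DeferredLevelDOWN(realDown)
//   deferred.put('key', 'value', function (err) { /* runs after open */ })
//   deferred.open({}, function (err) { /* queued put is flushed here */ })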
|  | 
 | ||
|  | },{"16":16,"21":21,"37":37}],18:[function(_dereq_,module,exports){ | ||
|  | var nextTick = _dereq_(22) | ||
|  | 
 | ||
|  | function AbstractChainedBatch (db) { | ||
|  |   if (typeof db !== 'object' || db === null) { | ||
|  |     throw new TypeError('First argument must be an abstract-leveldown compliant store') | ||
|  |   } | ||
|  | 
 | ||
|  |   this.db = db | ||
|  |   this._operations = [] | ||
|  |   this._written = false | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype._checkWritten = function () { | ||
|  |   if (this._written) { | ||
|  |     throw new Error('write() already called on this batch') | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype.put = function (key, value) { | ||
|  |   this._checkWritten() | ||
|  | 
 | ||
|  |   var err = this.db._checkKey(key) || this.db._checkValue(value) | ||
|  |   if (err) throw err | ||
|  | 
 | ||
|  |   key = this.db._serializeKey(key) | ||
|  |   value = this.db._serializeValue(value) | ||
|  | 
 | ||
|  |   this._put(key, value) | ||
|  | 
 | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype._put = function (key, value) { | ||
|  |   this._operations.push({ type: 'put', key: key, value: value }) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype.del = function (key) { | ||
|  |   this._checkWritten() | ||
|  | 
 | ||
|  |   var err = this.db._checkKey(key) | ||
|  |   if (err) throw err | ||
|  | 
 | ||
|  |   key = this.db._serializeKey(key) | ||
|  |   this._del(key) | ||
|  | 
 | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype._del = function (key) { | ||
|  |   this._operations.push({ type: 'del', key: key }) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype.clear = function () { | ||
|  |   this._checkWritten() | ||
|  |   this._clear() | ||
|  | 
 | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype._clear = function () { | ||
|  |   this._operations = [] | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype.write = function (options, callback) { | ||
|  |   this._checkWritten() | ||
|  | 
 | ||
|  |   if (typeof options === 'function') { callback = options } | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('write() requires a callback argument') | ||
|  |   } | ||
|  |   if (typeof options !== 'object' || options === null) { | ||
|  |     options = {} | ||
|  |   } | ||
|  | 
 | ||
|  |   this._written = true | ||
|  |   this._write(options, callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractChainedBatch.prototype._write = function (options, callback) { | ||
|  |   this.db._batch(this._operations, options, callback) | ||
|  | } | ||
|  | 
 | ||
|  | // Expose browser-compatible nextTick for dependents
 | ||
|  | AbstractChainedBatch.prototype._nextTick = nextTick | ||
|  | 
 | ||
|  | module.exports = AbstractChainedBatch | ||
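// Editor's note: illustrative sketch, not part of the original bundle;
// `db` stands for any abstract-leveldown store (hypothetical name).
// A chained batch accumulates puts/dels and hands them to the backend's
// _batch() in a single write() call:
//
//   db.batch()
//     .put('name', 'jane')
//     .del('old-key')
//     .write(function (err) { /* batch has been applied */ })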
|  | 
 | ||
|  | },{"22":22}],19:[function(_dereq_,module,exports){ | ||
|  | var nextTick = _dereq_(22) | ||
|  | 
 | ||
|  | function AbstractIterator (db) { | ||
|  |   if (typeof db !== 'object' || db === null) { | ||
|  |     throw new TypeError('First argument must be an abstract-leveldown compliant store') | ||
|  |   } | ||
|  | 
 | ||
|  |   this.db = db | ||
|  |   this._ended = false | ||
|  |   this._nexting = false | ||
|  | } | ||
|  | 
 | ||
|  | AbstractIterator.prototype.next = function (callback) { | ||
|  |   var self = this | ||
|  | 
 | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('next() requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (self._ended) { | ||
|  |     nextTick(callback, new Error('cannot call next() after end()')) | ||
|  |     return self | ||
|  |   } | ||
|  | 
 | ||
|  |   if (self._nexting) { | ||
|  |     nextTick(callback, new Error('cannot call next() before previous next() has completed')) | ||
|  |     return self | ||
|  |   } | ||
|  | 
 | ||
|  |   self._nexting = true | ||
|  |   self._next(function () { | ||
|  |     self._nexting = false | ||
|  |     callback.apply(null, arguments) | ||
|  |   }) | ||
|  | 
 | ||
|  |   return self | ||
|  | } | ||
|  | 
 | ||
|  | AbstractIterator.prototype._next = function (callback) { | ||
|  |   nextTick(callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractIterator.prototype.seek = function (target) { | ||
|  |   if (this._ended) { | ||
|  |     throw new Error('cannot call seek() after end()') | ||
|  |   } | ||
|  |   if (this._nexting) { | ||
|  |     throw new Error('cannot call seek() before next() has completed') | ||
|  |   } | ||
|  | 
 | ||
|  |   target = this.db._serializeKey(target) | ||
|  |   this._seek(target) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractIterator.prototype._seek = function (target) {} | ||
|  | 
 | ||
|  | AbstractIterator.prototype.end = function (callback) { | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('end() requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (this._ended) { | ||
|  |     return nextTick(callback, new Error('end() already called on iterator')) | ||
|  |   } | ||
|  | 
 | ||
|  |   this._ended = true | ||
|  |   this._end(callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractIterator.prototype._end = function (callback) { | ||
|  |   nextTick(callback) | ||
|  | } | ||
|  | 
 | ||
|  | // Expose browser-compatible nextTick for dependents
 | ||
|  | AbstractIterator.prototype._nextTick = nextTick | ||
|  | 
 | ||
|  | module.exports = AbstractIterator | ||
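// Editor's note: illustrative sketch, not part of the original bundle;
// `db` stands for any opened abstract-leveldown store (hypothetical name).
// Iterators are pull-based: each next() yields one entry (or no arguments at
// the end of the range) and end() releases resources:
//
//   var it = db.iterator({ gte: 'a', lte: 'z' })
//   it.next(function (err, key, value) {
//     if (key === undefined) return it.end(function () {})   // range exhausted
//     // use key/value, then call it.next(...) again
//   })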
|  | 
 | ||
|  | },{"22":22}],20:[function(_dereq_,module,exports){ | ||
|  | var xtend = _dereq_(141) | ||
|  | var supports = _dereq_(58) | ||
|  | var Buffer = _dereq_(13).Buffer | ||
|  | var AbstractIterator = _dereq_(19) | ||
|  | var AbstractChainedBatch = _dereq_(18) | ||
|  | var nextTick = _dereq_(22) | ||
|  | var hasOwnProperty = Object.prototype.hasOwnProperty | ||
|  | var rangeOptions = 'start end gt gte lt lte'.split(' ') | ||
|  | 
 | ||
|  | function AbstractLevelDOWN (manifest) { | ||
|  |   this.status = 'new' | ||
|  | 
 | ||
|  |   // TODO (next major): make this mandatory
 | ||
|  |   this.supports = supports(manifest, { | ||
|  |     status: true | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype.open = function (options, callback) { | ||
|  |   var self = this | ||
|  |   var oldStatus = this.status | ||
|  | 
 | ||
|  |   if (typeof options === 'function') callback = options | ||
|  | 
 | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('open() requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof options !== 'object' || options === null) options = {} | ||
|  | 
 | ||
|  |   options.createIfMissing = options.createIfMissing !== false | ||
|  |   options.errorIfExists = !!options.errorIfExists | ||
|  | 
 | ||
|  |   this.status = 'opening' | ||
|  |   this._open(options, function (err) { | ||
|  |     if (err) { | ||
|  |       self.status = oldStatus | ||
|  |       return callback(err) | ||
|  |     } | ||
|  |     self.status = 'open' | ||
|  |     callback() | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._open = function (options, callback) { | ||
|  |   nextTick(callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype.close = function (callback) { | ||
|  |   var self = this | ||
|  |   var oldStatus = this.status | ||
|  | 
 | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('close() requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   this.status = 'closing' | ||
|  |   this._close(function (err) { | ||
|  |     if (err) { | ||
|  |       self.status = oldStatus | ||
|  |       return callback(err) | ||
|  |     } | ||
|  |     self.status = 'closed' | ||
|  |     callback() | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._close = function (callback) { | ||
|  |   nextTick(callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype.get = function (key, options, callback) { | ||
|  |   if (typeof options === 'function') callback = options | ||
|  | 
 | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('get() requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   var err = this._checkKey(key) | ||
|  |   if (err) return nextTick(callback, err) | ||
|  | 
 | ||
|  |   key = this._serializeKey(key) | ||
|  | 
 | ||
|  |   if (typeof options !== 'object' || options === null) options = {} | ||
|  | 
 | ||
|  |   options.asBuffer = options.asBuffer !== false | ||
|  | 
 | ||
|  |   this._get(key, options, callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._get = function (key, options, callback) { | ||
|  |   nextTick(function () { callback(new Error('NotFound')) }) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype.put = function (key, value, options, callback) { | ||
|  |   if (typeof options === 'function') callback = options | ||
|  | 
 | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('put() requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   var err = this._checkKey(key) || this._checkValue(value) | ||
|  |   if (err) return nextTick(callback, err) | ||
|  | 
 | ||
|  |   key = this._serializeKey(key) | ||
|  |   value = this._serializeValue(value) | ||
|  | 
 | ||
|  |   if (typeof options !== 'object' || options === null) options = {} | ||
|  | 
 | ||
|  |   this._put(key, value, options, callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._put = function (key, value, options, callback) { | ||
|  |   nextTick(callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype.del = function (key, options, callback) { | ||
|  |   if (typeof options === 'function') callback = options | ||
|  | 
 | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('del() requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   var err = this._checkKey(key) | ||
|  |   if (err) return nextTick(callback, err) | ||
|  | 
 | ||
|  |   key = this._serializeKey(key) | ||
|  | 
 | ||
|  |   if (typeof options !== 'object' || options === null) options = {} | ||
|  | 
 | ||
|  |   this._del(key, options, callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._del = function (key, options, callback) { | ||
|  |   nextTick(callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype.batch = function (array, options, callback) { | ||
|  |   if (!arguments.length) return this._chainedBatch() | ||
|  | 
 | ||
|  |   if (typeof options === 'function') callback = options | ||
|  | 
 | ||
|  |   if (typeof array === 'function') callback = array | ||
|  | 
 | ||
|  |   if (typeof callback !== 'function') { | ||
|  |     throw new Error('batch(array) requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!Array.isArray(array)) { | ||
|  |     return nextTick(callback, new Error('batch(array) requires an array argument')) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (array.length === 0) { | ||
|  |     return nextTick(callback) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof options !== 'object' || options === null) options = {} | ||
|  | 
 | ||
|  |   var serialized = new Array(array.length) | ||
|  | 
 | ||
|  |   for (var i = 0; i < array.length; i++) { | ||
|  |     if (typeof array[i] !== 'object' || array[i] === null) { | ||
|  |       return nextTick(callback, new Error('batch(array) element must be an object and not `null`')) | ||
|  |     } | ||
|  | 
 | ||
|  |     var e = xtend(array[i]) | ||
|  | 
 | ||
|  |     if (e.type !== 'put' && e.type !== 'del') { | ||
|  |       return nextTick(callback, new Error("`type` must be 'put' or 'del'")) | ||
|  |     } | ||
|  | 
 | ||
|  |     var err = this._checkKey(e.key) | ||
|  |     if (err) return nextTick(callback, err) | ||
|  | 
 | ||
|  |     e.key = this._serializeKey(e.key) | ||
|  | 
 | ||
|  |     if (e.type === 'put') { | ||
|  |       var valueErr = this._checkValue(e.value) | ||
|  |       if (valueErr) return nextTick(callback, valueErr) | ||
|  | 
 | ||
|  |       e.value = this._serializeValue(e.value) | ||
|  |     } | ||
|  | 
 | ||
|  |     serialized[i] = e | ||
|  |   } | ||
|  | 
 | ||
|  |   this._batch(serialized, options, callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._batch = function (array, options, callback) { | ||
|  |   nextTick(callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype.clear = function (options, callback) { | ||
|  |   if (typeof options === 'function') { | ||
|  |     callback = options | ||
|  |   } else if (typeof callback !== 'function') { | ||
|  |     throw new Error('clear() requires a callback argument') | ||
|  |   } | ||
|  | 
 | ||
|  |   options = cleanRangeOptions(this, options) | ||
|  |   options.reverse = !!options.reverse | ||
|  |   options.limit = 'limit' in options ? options.limit : -1 | ||
|  | 
 | ||
|  |   this._clear(options, callback) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._clear = function (options, callback) { | ||
|  |   // Avoid _setupIteratorOptions(); it would serialize range options a second time.
 | ||
|  |   options.keys = true | ||
|  |   options.values = false | ||
|  |   options.keyAsBuffer = true | ||
|  |   options.valueAsBuffer = true | ||
|  | 
 | ||
|  |   var iterator = this._iterator(options) | ||
|  |   var emptyOptions = {} | ||
|  |   var self = this | ||
|  | 
 | ||
|  |   var next = function (err) { | ||
|  |     if (err) { | ||
|  |       return iterator.end(function () { | ||
|  |         callback(err) | ||
|  |       }) | ||
|  |     } | ||
|  | 
 | ||
|  |     iterator.next(function (err, key) { | ||
|  |       if (err) return next(err) | ||
|  |       if (key === undefined) return iterator.end(callback) | ||
|  | 
 | ||
|  |       // This could be optimized by using a batch, but the default _clear
 | ||
|  |       // is not meant to be fast. Implementations have more room to optimize
 | ||
|  |       // if they override _clear. Note: using _del bypasses key serialization.
 | ||
|  |       self._del(key, emptyOptions, next) | ||
|  |     }) | ||
|  |   } | ||
|  | 
 | ||
|  |   next() | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._setupIteratorOptions = function (options) { | ||
|  |   options = cleanRangeOptions(this, options) | ||
|  | 
 | ||
|  |   options.reverse = !!options.reverse | ||
|  |   options.keys = options.keys !== false | ||
|  |   options.values = options.values !== false | ||
|  |   options.limit = 'limit' in options ? options.limit : -1 | ||
|  |   options.keyAsBuffer = options.keyAsBuffer !== false | ||
|  |   options.valueAsBuffer = options.valueAsBuffer !== false | ||
|  | 
 | ||
|  |   return options | ||
|  | } | ||
|  | 
 | ||
|  | function cleanRangeOptions (db, options) { | ||
|  |   var result = {} | ||
|  | 
 | ||
|  |   for (var k in options) { | ||
|  |     if (!hasOwnProperty.call(options, k)) continue | ||
|  | 
 | ||
|  |     var opt = options[k] | ||
|  | 
 | ||
|  |     if (isRangeOption(k)) { | ||
|  |       // Note that we don't reject nullish and empty options here. While
 | ||
|  |       // those types are invalid as keys, they are valid as range options.
 | ||
|  |       opt = db._serializeKey(opt) | ||
|  |     } | ||
|  | 
 | ||
|  |     result[k] = opt | ||
|  |   } | ||
|  | 
 | ||
|  |   return result | ||
|  | } | ||
|  | 
 | ||
|  | function isRangeOption (k) { | ||
|  |   return rangeOptions.indexOf(k) !== -1 | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype.iterator = function (options) { | ||
|  |   if (typeof options !== 'object' || options === null) options = {} | ||
|  |   options = this._setupIteratorOptions(options) | ||
|  |   return this._iterator(options) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._iterator = function (options) { | ||
|  |   return new AbstractIterator(this) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._chainedBatch = function () { | ||
|  |   return new AbstractChainedBatch(this) | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._serializeKey = function (key) { | ||
|  |   return key | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._serializeValue = function (value) { | ||
|  |   return value | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._checkKey = function (key) { | ||
|  |   if (key === null || key === undefined) { | ||
|  |     return new Error('key cannot be `null` or `undefined`') | ||
|  |   } else if (Buffer.isBuffer(key) && key.length === 0) { | ||
|  |     return new Error('key cannot be an empty Buffer') | ||
|  |   } else if (key === '') { | ||
|  |     return new Error('key cannot be an empty String') | ||
|  |   } else if (Array.isArray(key) && key.length === 0) { | ||
|  |     return new Error('key cannot be an empty Array') | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | AbstractLevelDOWN.prototype._checkValue = function (value) { | ||
|  |   if (value === null || value === undefined) { | ||
|  |     return new Error('value cannot be `null` or `undefined`') | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // Expose browser-compatible nextTick for dependents
 | ||
|  | AbstractLevelDOWN.prototype._nextTick = nextTick | ||
|  | 
 | ||
|  | module.exports = AbstractLevelDOWN | ||
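// Editor's note: illustrative sketch, not part of the original bundle;
// `MyDOWN` is a hypothetical name. Concrete backends (such as the
// localstorage-down core elsewhere in this bundle) extend AbstractLevelDOWN
// and override the underscored hooks:
//
//   function MyDOWN () { AbstractLevelDOWN.call(this) }
//   inherits(MyDOWN, AbstractLevelDOWN)
//   MyDOWN.prototype._put = function (key, value, options, callback) {
//     // store key/value somewhere, then report completion asynchronously
//     this._nextTick(callback)
//   }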
|  | 
 | ||
|  | },{"13":13,"141":141,"18":18,"19":19,"22":22,"58":58}],21:[function(_dereq_,module,exports){ | ||
|  | exports.AbstractLevelDOWN = _dereq_(20) | ||
|  | exports.AbstractIterator = _dereq_(19) | ||
|  | exports.AbstractChainedBatch = _dereq_(18) | ||
|  | 
 | ||
|  | },{"18":18,"19":19,"20":20}],22:[function(_dereq_,module,exports){ | ||
|  | module.exports = _dereq_(31) | ||
|  | 
 | ||
|  | },{"31":31}],23:[function(_dereq_,module,exports){ | ||
|  | /** | ||
|  |  * Copyright (c) 2013 Petka Antonov | ||
|  |  *  | ||
|  |  * Permission is hereby granted, free of charge, to any person obtaining a copy | ||
|  |  * of this software and associated documentation files (the "Software"), to deal | ||
|  |  * in the Software without restriction, including without limitation the rights | ||
|  |  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
|  |  * copies of the Software, and to permit persons to whom the Software is | ||
|  |  * furnished to do so, subject to the following conditions: | ||
|  |  *  | ||
|  |  * The above copyright notice and this permission notice shall be included in | ||
|  |  * all copies or substantial portions of the Software. | ||
|  |  *  | ||
|  |  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
|  |  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
|  |  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE | ||
|  |  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
|  |  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||
|  |  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||
|  |  * THE SOFTWARE. | ||
|  |  */ | ||
|  | "use strict"; | ||
|  | function Deque(capacity) { | ||
|  |     this._capacity = getCapacity(capacity); | ||
|  |     this._length = 0; | ||
|  |     this._front = 0; | ||
|  |     if (isArray(capacity)) { | ||
|  |         var len = capacity.length; | ||
|  |         for (var i = 0; i < len; ++i) { | ||
|  |             this[i] = capacity[i]; | ||
|  |         } | ||
|  |         this._length = len; | ||
|  |     } | ||
|  | } | ||
|  | 
 | ||
|  | Deque.prototype.toArray = function Deque$toArray() { | ||
|  |     var len = this._length; | ||
|  |     var ret = new Array(len); | ||
|  |     var front = this._front; | ||
|  |     var capacity = this._capacity; | ||
|  |     for (var j = 0; j < len; ++j) { | ||
|  |         ret[j] = this[(front + j) & (capacity - 1)]; | ||
|  |     } | ||
|  |     return ret; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.push = function Deque$push(item) { | ||
|  |     var argsLength = arguments.length; | ||
|  |     var length = this._length; | ||
|  |     if (argsLength > 1) { | ||
|  |         var capacity = this._capacity; | ||
|  |         if (length + argsLength > capacity) { | ||
|  |             for (var i = 0; i < argsLength; ++i) { | ||
|  |                 this._checkCapacity(length + 1); | ||
|  |                 var j = (this._front + length) & (this._capacity - 1); | ||
|  |                 this[j] = arguments[i]; | ||
|  |                 length++; | ||
|  |                 this._length = length; | ||
|  |             } | ||
|  |             return length; | ||
|  |         } | ||
|  |         else { | ||
|  |             var j = this._front; | ||
|  |             for (var i = 0; i < argsLength; ++i) { | ||
|  |                 this[(j + length) & (capacity - 1)] = arguments[i]; | ||
|  |                 j++; | ||
|  |             } | ||
|  |             this._length = length + argsLength; | ||
|  |             return length + argsLength; | ||
|  |         } | ||
|  | 
 | ||
|  |     } | ||
|  | 
 | ||
|  |     if (argsLength === 0) return length; | ||
|  | 
 | ||
|  |     this._checkCapacity(length + 1); | ||
|  |     var i = (this._front + length) & (this._capacity - 1); | ||
|  |     this[i] = item; | ||
|  |     this._length = length + 1; | ||
|  |     return length + 1; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.pop = function Deque$pop() { | ||
|  |     var length = this._length; | ||
|  |     if (length === 0) { | ||
|  |         return void 0; | ||
|  |     } | ||
|  |     var i = (this._front + length - 1) & (this._capacity - 1); | ||
|  |     var ret = this[i]; | ||
|  |     this[i] = void 0; | ||
|  |     this._length = length - 1; | ||
|  |     return ret; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.shift = function Deque$shift() { | ||
|  |     var length = this._length; | ||
|  |     if (length === 0) { | ||
|  |         return void 0; | ||
|  |     } | ||
|  |     var front = this._front; | ||
|  |     var ret = this[front]; | ||
|  |     this[front] = void 0; | ||
|  |     this._front = (front + 1) & (this._capacity - 1); | ||
|  |     this._length = length - 1; | ||
|  |     return ret; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.unshift = function Deque$unshift(item) { | ||
|  |     var length = this._length; | ||
|  |     var argsLength = arguments.length; | ||
|  | 
 | ||
|  | 
 | ||
|  |     if (argsLength > 1) { | ||
|  |         var capacity = this._capacity; | ||
|  |         if (length + argsLength > capacity) { | ||
|  |             for (var i = argsLength - 1; i >= 0; i--) { | ||
|  |                 this._checkCapacity(length + 1); | ||
|  |                 var capacity = this._capacity; | ||
|  |                 var j = (((( this._front - 1 ) & | ||
|  |                     ( capacity - 1) ) ^ capacity ) - capacity ); | ||
|  |                 this[j] = arguments[i]; | ||
|  |                 length++; | ||
|  |                 this._length = length; | ||
|  |                 this._front = j; | ||
|  |             } | ||
|  |             return length; | ||
|  |         } | ||
|  |         else { | ||
|  |             var front = this._front; | ||
|  |             for (var i = argsLength - 1; i >= 0; i--) { | ||
|  |                 var j = (((( front - 1 ) & | ||
|  |                     ( capacity - 1) ) ^ capacity ) - capacity ); | ||
|  |                 this[j] = arguments[i]; | ||
|  |                 front = j; | ||
|  |             } | ||
|  |             this._front = front; | ||
|  |             this._length = length + argsLength; | ||
|  |             return length + argsLength; | ||
|  |         } | ||
|  |     } | ||
|  | 
 | ||
|  |     if (argsLength === 0) return length; | ||
|  | 
 | ||
|  |     this._checkCapacity(length + 1); | ||
|  |     var capacity = this._capacity; | ||
|  |     var i = (((( this._front - 1 ) & | ||
|  |         ( capacity - 1) ) ^ capacity ) - capacity ); | ||
|  |     this[i] = item; | ||
|  |     this._length = length + 1; | ||
|  |     this._front = i; | ||
|  |     return length + 1; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.peekBack = function Deque$peekBack() { | ||
|  |     var length = this._length; | ||
|  |     if (length === 0) { | ||
|  |         return void 0; | ||
|  |     } | ||
|  |     var index = (this._front + length - 1) & (this._capacity - 1); | ||
|  |     return this[index]; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.peekFront = function Deque$peekFront() { | ||
|  |     if (this._length === 0) { | ||
|  |         return void 0; | ||
|  |     } | ||
|  |     return this[this._front]; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.get = function Deque$get(index) { | ||
|  |     var i = index; | ||
|  |     if ((i !== (i | 0))) { | ||
|  |         return void 0; | ||
|  |     } | ||
|  |     var len = this._length; | ||
|  |     if (i < 0) { | ||
|  |         i = i + len; | ||
|  |     } | ||
|  |     if (i < 0 || i >= len) { | ||
|  |         return void 0; | ||
|  |     } | ||
|  |     return this[(this._front + i) & (this._capacity - 1)]; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.isEmpty = function Deque$isEmpty() { | ||
|  |     return this._length === 0; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.clear = function Deque$clear() { | ||
|  |     var len = this._length; | ||
|  |     var front = this._front; | ||
|  |     var capacity = this._capacity; | ||
|  |     for (var j = 0; j < len; ++j) { | ||
|  |         this[(front + j) & (capacity - 1)] = void 0; | ||
|  |     } | ||
|  |     this._length = 0; | ||
|  |     this._front = 0; | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.toString = function Deque$toString() { | ||
|  |     return this.toArray().toString(); | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype.valueOf = Deque.prototype.toString; | ||
|  | Deque.prototype.removeFront = Deque.prototype.shift; | ||
|  | Deque.prototype.removeBack = Deque.prototype.pop; | ||
|  | Deque.prototype.insertFront = Deque.prototype.unshift; | ||
|  | Deque.prototype.insertBack = Deque.prototype.push; | ||
|  | Deque.prototype.enqueue = Deque.prototype.push; | ||
|  | Deque.prototype.dequeue = Deque.prototype.shift; | ||
|  | Deque.prototype.toJSON = Deque.prototype.toArray; | ||
|  | 
 | ||
|  | Object.defineProperty(Deque.prototype, "length", { | ||
|  |     get: function() { | ||
|  |         return this._length; | ||
|  |     }, | ||
|  |     set: function() { | ||
|  |         throw new RangeError(""); | ||
|  |     } | ||
|  | }); | ||
|  | 
 | ||
|  | Deque.prototype._checkCapacity = function Deque$_checkCapacity(size) { | ||
|  |     if (this._capacity < size) { | ||
|  |         this._resizeTo(getCapacity(this._capacity * 1.5 + 16)); | ||
|  |     } | ||
|  | }; | ||
|  | 
 | ||
|  | Deque.prototype._resizeTo = function Deque$_resizeTo(capacity) { | ||
|  |     var oldCapacity = this._capacity; | ||
|  |     this._capacity = capacity; | ||
|  |     var front = this._front; | ||
|  |     var length = this._length; | ||
|  |     if (front + length > oldCapacity) { | ||
|  |         var moveItemsCount = (front + length) & (oldCapacity - 1); | ||
|  |         arrayMove(this, 0, this, oldCapacity, moveItemsCount); | ||
|  |     } | ||
|  | }; | ||
|  | 
 | ||
|  | 
 | ||
|  | var isArray = Array.isArray; | ||
|  | 
 | ||
|  | function arrayMove(src, srcIndex, dst, dstIndex, len) { | ||
|  |     for (var j = 0; j < len; ++j) { | ||
|  |         dst[j + dstIndex] = src[j + srcIndex]; | ||
|  |         src[j + srcIndex] = void 0; | ||
|  |     } | ||
|  | } | ||
|  | 
 | ||
|  | function pow2AtLeast(n) { | ||
|  |     n = n >>> 0; | ||
|  |     n = n - 1; | ||
|  |     n = n | (n >> 1); | ||
|  |     n = n | (n >> 2); | ||
|  |     n = n | (n >> 4); | ||
|  |     n = n | (n >> 8); | ||
|  |     n = n | (n >> 16); | ||
|  |     return n + 1; | ||
|  | } | ||
|  | 
 | ||
|  | function getCapacity(capacity) { | ||
|  |     if (typeof capacity !== "number") { | ||
|  |         if (isArray(capacity)) { | ||
|  |             capacity = capacity.length; | ||
|  |         } | ||
|  |         else { | ||
|  |             return 16; | ||
|  |         } | ||
|  |     } | ||
|  |     return pow2AtLeast( | ||
|  |         Math.min( | ||
|  |             Math.max(16, capacity), 1073741824) | ||
|  |     ); | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = Deque; | ||
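|  | // Illustrative usage sketch (comment only, not executed by the bundle): the
|  | // Deque above is a circular-buffer double-ended queue whose capacity is always
|  | // rounded up to a power of two, which is why indices are wrapped with
|  | // `& (capacity - 1)` instead of a modulo.
|  | //
|  | //   var q = new Deque([1, 2, 3]);  // seeding from an array is supported
|  | //   q.push(4);                     // back:  [1, 2, 3, 4]
|  | //   q.unshift(0);                  // front: [0, 1, 2, 3, 4]
|  | //   q.shift();                     // -> 0   (aliases: dequeue, removeFront)
|  | //   q.pop();                       // -> 4   (alias: removeBack)
|  | //   q.toArray();                   // -> [1, 2, 3]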
|  | 
 | ||
|  | },{}],24:[function(_dereq_,module,exports){ | ||
|  | var prr = _dereq_(74) | ||
|  | 
 | ||
|  | function init (type, message, cause) { | ||
|  |   if (!!message && typeof message != 'string') { | ||
|  |     message = message.message || message.name | ||
|  |   } | ||
|  |   prr(this, { | ||
|  |       type    : type | ||
|  |     , name    : type | ||
|  |       // can be passed just a 'cause'
 | ||
|  |     , cause   : typeof message != 'string' ? message : cause | ||
|  |     , message : message | ||
|  |   }, 'ewr') | ||
|  | } | ||
|  | 
 | ||
|  | // generic prototype, not intended to be actually used - helpful for `instanceof`
 | ||
|  | function CustomError (message, cause) { | ||
|  |   Error.call(this) | ||
|  |   if (Error.captureStackTrace) | ||
|  |     Error.captureStackTrace(this, this.constructor) | ||
|  |   init.call(this, 'CustomError', message, cause) | ||
|  | } | ||
|  | 
 | ||
|  | CustomError.prototype = new Error() | ||
|  | 
 | ||
|  | function createError (errno, type, proto) { | ||
|  |   var err = function (message, cause) { | ||
|  |     init.call(this, type, message, cause) | ||
|  |     //TODO: the specificity here is stupid, errno should be available everywhere
 | ||
|  |     if (type == 'FilesystemError') { | ||
|  |       this.code    = this.cause.code | ||
|  |       this.path    = this.cause.path | ||
|  |       this.errno   = this.cause.errno | ||
|  |       this.message = | ||
|  |         (errno.errno[this.cause.errno] | ||
|  |           ? errno.errno[this.cause.errno].description | ||
|  |           : this.cause.message) | ||
|  |         + (this.cause.path ? ' [' + this.cause.path + ']' : '') | ||
|  |     } | ||
|  |     Error.call(this) | ||
|  |     if (Error.captureStackTrace) | ||
|  |       Error.captureStackTrace(this, err) | ||
|  |   } | ||
|  |   err.prototype = !!proto ? new proto() : new CustomError() | ||
|  |   return err | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = function (errno) { | ||
|  |   var ce = function (type, proto) { | ||
|  |     return createError(errno, type, proto) | ||
|  |   } | ||
|  |   return { | ||
|  |       CustomError     : CustomError | ||
|  |     , FilesystemError : ce('FilesystemError') | ||
|  |     , createError     : ce | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | },{"74":74}],25:[function(_dereq_,module,exports){ | ||
|  | var all = module.exports.all = [ | ||
|  |   { | ||
|  |     errno: -2, | ||
|  |     code: 'ENOENT', | ||
|  |     description: 'no such file or directory' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: -1, | ||
|  |     code: 'UNKNOWN', | ||
|  |     description: 'unknown error' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 0, | ||
|  |     code: 'OK', | ||
|  |     description: 'success' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 1, | ||
|  |     code: 'EOF', | ||
|  |     description: 'end of file' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 2, | ||
|  |     code: 'EADDRINFO', | ||
|  |     description: 'getaddrinfo error' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 3, | ||
|  |     code: 'EACCES', | ||
|  |     description: 'permission denied' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 4, | ||
|  |     code: 'EAGAIN', | ||
|  |     description: 'resource temporarily unavailable' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 5, | ||
|  |     code: 'EADDRINUSE', | ||
|  |     description: 'address already in use' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 6, | ||
|  |     code: 'EADDRNOTAVAIL', | ||
|  |     description: 'address not available' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 7, | ||
|  |     code: 'EAFNOSUPPORT', | ||
|  |     description: 'address family not supported' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 8, | ||
|  |     code: 'EALREADY', | ||
|  |     description: 'connection already in progress' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 9, | ||
|  |     code: 'EBADF', | ||
|  |     description: 'bad file descriptor' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 10, | ||
|  |     code: 'EBUSY', | ||
|  |     description: 'resource busy or locked' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 11, | ||
|  |     code: 'ECONNABORTED', | ||
|  |     description: 'software caused connection abort' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 12, | ||
|  |     code: 'ECONNREFUSED', | ||
|  |     description: 'connection refused' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 13, | ||
|  |     code: 'ECONNRESET', | ||
|  |     description: 'connection reset by peer' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 14, | ||
|  |     code: 'EDESTADDRREQ', | ||
|  |     description: 'destination address required' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 15, | ||
|  |     code: 'EFAULT', | ||
|  |     description: 'bad address in system call argument' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 16, | ||
|  |     code: 'EHOSTUNREACH', | ||
|  |     description: 'host is unreachable' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 17, | ||
|  |     code: 'EINTR', | ||
|  |     description: 'interrupted system call' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 18, | ||
|  |     code: 'EINVAL', | ||
|  |     description: 'invalid argument' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 19, | ||
|  |     code: 'EISCONN', | ||
|  |     description: 'socket is already connected' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 20, | ||
|  |     code: 'EMFILE', | ||
|  |     description: 'too many open files' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 21, | ||
|  |     code: 'EMSGSIZE', | ||
|  |     description: 'message too long' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 22, | ||
|  |     code: 'ENETDOWN', | ||
|  |     description: 'network is down' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 23, | ||
|  |     code: 'ENETUNREACH', | ||
|  |     description: 'network is unreachable' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 24, | ||
|  |     code: 'ENFILE', | ||
|  |     description: 'file table overflow' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 25, | ||
|  |     code: 'ENOBUFS', | ||
|  |     description: 'no buffer space available' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 26, | ||
|  |     code: 'ENOMEM', | ||
|  |     description: 'not enough memory' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 27, | ||
|  |     code: 'ENOTDIR', | ||
|  |     description: 'not a directory' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 28, | ||
|  |     code: 'EISDIR', | ||
|  |     description: 'illegal operation on a directory' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 29, | ||
|  |     code: 'ENONET', | ||
|  |     description: 'machine is not on the network' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 31, | ||
|  |     code: 'ENOTCONN', | ||
|  |     description: 'socket is not connected' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 32, | ||
|  |     code: 'ENOTSOCK', | ||
|  |     description: 'socket operation on non-socket' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 33, | ||
|  |     code: 'ENOTSUP', | ||
|  |     description: 'operation not supported on socket' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 34, | ||
|  |     code: 'ENOENT', | ||
|  |     description: 'no such file or directory' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 35, | ||
|  |     code: 'ENOSYS', | ||
|  |     description: 'function not implemented' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 36, | ||
|  |     code: 'EPIPE', | ||
|  |     description: 'broken pipe' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 37, | ||
|  |     code: 'EPROTO', | ||
|  |     description: 'protocol error' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 38, | ||
|  |     code: 'EPROTONOSUPPORT', | ||
|  |     description: 'protocol not supported' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 39, | ||
|  |     code: 'EPROTOTYPE', | ||
|  |     description: 'protocol wrong type for socket' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 40, | ||
|  |     code: 'ETIMEDOUT', | ||
|  |     description: 'connection timed out' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 41, | ||
|  |     code: 'ECHARSET', | ||
|  |     description: 'invalid Unicode character' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 42, | ||
|  |     code: 'EAIFAMNOSUPPORT', | ||
|  |     description: 'address family for hostname not supported' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 44, | ||
|  |     code: 'EAISERVICE', | ||
|  |     description: 'servname not supported for ai_socktype' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 45, | ||
|  |     code: 'EAISOCKTYPE', | ||
|  |     description: 'ai_socktype not supported' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 46, | ||
|  |     code: 'ESHUTDOWN', | ||
|  |     description: 'cannot send after transport endpoint shutdown' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 47, | ||
|  |     code: 'EEXIST', | ||
|  |     description: 'file already exists' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 48, | ||
|  |     code: 'ESRCH', | ||
|  |     description: 'no such process' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 49, | ||
|  |     code: 'ENAMETOOLONG', | ||
|  |     description: 'name too long' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 50, | ||
|  |     code: 'EPERM', | ||
|  |     description: 'operation not permitted' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 51, | ||
|  |     code: 'ELOOP', | ||
|  |     description: 'too many symbolic links encountered' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 52, | ||
|  |     code: 'EXDEV', | ||
|  |     description: 'cross-device link not permitted' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 53, | ||
|  |     code: 'ENOTEMPTY', | ||
|  |     description: 'directory not empty' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 54, | ||
|  |     code: 'ENOSPC', | ||
|  |     description: 'no space left on device' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 55, | ||
|  |     code: 'EIO', | ||
|  |     description: 'i/o error' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 56, | ||
|  |     code: 'EROFS', | ||
|  |     description: 'read-only file system' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 57, | ||
|  |     code: 'ENODEV', | ||
|  |     description: 'no such device' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 58, | ||
|  |     code: 'ESPIPE', | ||
|  |     description: 'invalid seek' | ||
|  |   }, | ||
|  |   { | ||
|  |     errno: 59, | ||
|  |     code: 'ECANCELED', | ||
|  |     description: 'operation canceled' | ||
|  |   } | ||
|  | ] | ||
|  | 
 | ||
|  | module.exports.errno = {} | ||
|  | module.exports.code = {} | ||
|  | 
 | ||
|  | all.forEach(function (error) { | ||
|  |   module.exports.errno[error.errno] = error | ||
|  |   module.exports.code[error.code] = error | ||
|  | }) | ||
|  | 
 | ||
|  | module.exports.custom = _dereq_(24)(module.exports) | ||
|  | module.exports.create = module.exports.custom.createError | ||
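|  | // Usage sketch (comment only, not executed by the bundle): the table above is
|  | // folded into two lookup maps, so an error can be resolved by numeric errno or
|  | // by POSIX-style code, and `create` builds named custom error constructors.
|  | // `errnoMod` below is an illustrative local name for this module's exports.
|  | //
|  | //   var errnoMod = module.exports;
|  | //   errnoMod.code.ENOENT.description;   // -> 'no such file or directory'
|  | //   errnoMod.errno[-2].code;            // -> 'ENOENT'
|  | //   var NotFoundError = errnoMod.create('NotFoundError');
|  | //   new NotFoundError('missing page') instanceof errnoMod.custom.CustomError; // -> true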
|  | 
 | ||
|  | },{"24":24}],26:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | var objectCreate = Object.create || objectCreatePolyfill | ||
|  | var objectKeys = Object.keys || objectKeysPolyfill | ||
|  | var bind = Function.prototype.bind || functionBindPolyfill | ||
|  | 
 | ||
|  | function EventEmitter() { | ||
|  |   if (!this._events || !Object.prototype.hasOwnProperty.call(this, '_events')) { | ||
|  |     this._events = objectCreate(null); | ||
|  |     this._eventsCount = 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   this._maxListeners = this._maxListeners || undefined; | ||
|  | } | ||
|  | module.exports = EventEmitter; | ||
|  | 
 | ||
|  | // Backwards-compat with node 0.10.x
 | ||
|  | EventEmitter.EventEmitter = EventEmitter; | ||
|  | 
 | ||
|  | EventEmitter.prototype._events = undefined; | ||
|  | EventEmitter.prototype._maxListeners = undefined; | ||
|  | 
 | ||
|  | // By default EventEmitters will print a warning if more than 10 listeners are
 | ||
|  | // added to it. This is a useful default that helps find memory leaks.
 | ||
|  | var defaultMaxListeners = 10; | ||
|  | 
 | ||
|  | var hasDefineProperty; | ||
|  | try { | ||
|  |   var o = {}; | ||
|  |   if (Object.defineProperty) Object.defineProperty(o, 'x', { value: 0 }); | ||
|  |   hasDefineProperty = o.x === 0; | ||
|  | } catch (err) { hasDefineProperty = false } | ||
|  | if (hasDefineProperty) { | ||
|  |   Object.defineProperty(EventEmitter, 'defaultMaxListeners', { | ||
|  |     enumerable: true, | ||
|  |     get: function() { | ||
|  |       return defaultMaxListeners; | ||
|  |     }, | ||
|  |     set: function(arg) { | ||
|  |       // check whether the input is a positive number (whose value is zero or
 | ||
|  |       // greater and not a NaN).
 | ||
|  |       if (typeof arg !== 'number' || arg < 0 || arg !== arg) | ||
|  |         throw new TypeError('"defaultMaxListeners" must be a positive number'); | ||
|  |       defaultMaxListeners = arg; | ||
|  |     } | ||
|  |   }); | ||
|  | } else { | ||
|  |   EventEmitter.defaultMaxListeners = defaultMaxListeners; | ||
|  | } | ||
|  | 
 | ||
|  | // Obviously not all Emitters should be limited to 10. This function allows
 | ||
|  | // that to be increased. Set to zero for unlimited.
 | ||
|  | EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { | ||
|  |   if (typeof n !== 'number' || n < 0 || isNaN(n)) | ||
|  |     throw new TypeError('"n" argument must be a positive number'); | ||
|  |   this._maxListeners = n; | ||
|  |   return this; | ||
|  | }; | ||
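|  | // Example (comment only, not executed by the bundle): the leak warning is
|  | // tracked per event name, so an emitter that legitimately needs many handlers
|  | // can raise the threshold or, per the note above, pass zero to remove the
|  | // limit entirely.
|  | //
|  | //   var ee = new EventEmitter();
|  | //   ee.setMaxListeners(25);   // warn only beyond 25 listeners per event name
|  | //   ee.setMaxListeners(0);    // 0 means unlimited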
|  | 
 | ||
|  | function $getMaxListeners(that) { | ||
|  |   if (that._maxListeners === undefined) | ||
|  |     return EventEmitter.defaultMaxListeners; | ||
|  |   return that._maxListeners; | ||
|  | } | ||
|  | 
 | ||
|  | EventEmitter.prototype.getMaxListeners = function getMaxListeners() { | ||
|  |   return $getMaxListeners(this); | ||
|  | }; | ||
|  | 
 | ||
|  | // These standalone emit* functions are used to optimize calling of event
 | ||
|  | // handlers for fast cases because emit() itself often has a variable number of
 | ||
|  | // arguments and can be deoptimized because of that. These functions always have
 | ||
|  | // the same number of arguments and thus do not get deoptimized, so the code
 | ||
|  | // inside them can execute faster.
 | ||
|  | function emitNone(handler, isFn, self) { | ||
|  |   if (isFn) | ||
|  |     handler.call(self); | ||
|  |   else { | ||
|  |     var len = handler.length; | ||
|  |     var listeners = arrayClone(handler, len); | ||
|  |     for (var i = 0; i < len; ++i) | ||
|  |       listeners[i].call(self); | ||
|  |   } | ||
|  | } | ||
|  | function emitOne(handler, isFn, self, arg1) { | ||
|  |   if (isFn) | ||
|  |     handler.call(self, arg1); | ||
|  |   else { | ||
|  |     var len = handler.length; | ||
|  |     var listeners = arrayClone(handler, len); | ||
|  |     for (var i = 0; i < len; ++i) | ||
|  |       listeners[i].call(self, arg1); | ||
|  |   } | ||
|  | } | ||
|  | function emitTwo(handler, isFn, self, arg1, arg2) { | ||
|  |   if (isFn) | ||
|  |     handler.call(self, arg1, arg2); | ||
|  |   else { | ||
|  |     var len = handler.length; | ||
|  |     var listeners = arrayClone(handler, len); | ||
|  |     for (var i = 0; i < len; ++i) | ||
|  |       listeners[i].call(self, arg1, arg2); | ||
|  |   } | ||
|  | } | ||
|  | function emitThree(handler, isFn, self, arg1, arg2, arg3) { | ||
|  |   if (isFn) | ||
|  |     handler.call(self, arg1, arg2, arg3); | ||
|  |   else { | ||
|  |     var len = handler.length; | ||
|  |     var listeners = arrayClone(handler, len); | ||
|  |     for (var i = 0; i < len; ++i) | ||
|  |       listeners[i].call(self, arg1, arg2, arg3); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function emitMany(handler, isFn, self, args) { | ||
|  |   if (isFn) | ||
|  |     handler.apply(self, args); | ||
|  |   else { | ||
|  |     var len = handler.length; | ||
|  |     var listeners = arrayClone(handler, len); | ||
|  |     for (var i = 0; i < len; ++i) | ||
|  |       listeners[i].apply(self, args); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | EventEmitter.prototype.emit = function emit(type) { | ||
|  |   var er, handler, len, args, i, events; | ||
|  |   var doError = (type === 'error'); | ||
|  | 
 | ||
|  |   events = this._events; | ||
|  |   if (events) | ||
|  |     doError = (doError && events.error == null); | ||
|  |   else if (!doError) | ||
|  |     return false; | ||
|  | 
 | ||
|  |   // If there is no 'error' event listener then throw.
 | ||
|  |   if (doError) { | ||
|  |     if (arguments.length > 1) | ||
|  |       er = arguments[1]; | ||
|  |     if (er instanceof Error) { | ||
|  |       throw er; // Unhandled 'error' event
 | ||
|  |     } else { | ||
|  |       // At least give some kind of context to the user
 | ||
|  |       var err = new Error('Unhandled "error" event. (' + er + ')'); | ||
|  |       err.context = er; | ||
|  |       throw err; | ||
|  |     } | ||
|  |     return false; | ||
|  |   } | ||
|  | 
 | ||
|  |   handler = events[type]; | ||
|  | 
 | ||
|  |   if (!handler) | ||
|  |     return false; | ||
|  | 
 | ||
|  |   var isFn = typeof handler === 'function'; | ||
|  |   len = arguments.length; | ||
|  |   switch (len) { | ||
|  |       // fast cases
 | ||
|  |     case 1: | ||
|  |       emitNone(handler, isFn, this); | ||
|  |       break; | ||
|  |     case 2: | ||
|  |       emitOne(handler, isFn, this, arguments[1]); | ||
|  |       break; | ||
|  |     case 3: | ||
|  |       emitTwo(handler, isFn, this, arguments[1], arguments[2]); | ||
|  |       break; | ||
|  |     case 4: | ||
|  |       emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]); | ||
|  |       break; | ||
|  |       // slower
 | ||
|  |     default: | ||
|  |       args = new Array(len - 1); | ||
|  |       for (i = 1; i < len; i++) | ||
|  |         args[i - 1] = arguments[i]; | ||
|  |       emitMany(handler, isFn, this, args); | ||
|  |   } | ||
|  | 
 | ||
|  |   return true; | ||
|  | }; | ||
|  | 
 | ||
|  | function _addListener(target, type, listener, prepend) { | ||
|  |   var m; | ||
|  |   var events; | ||
|  |   var existing; | ||
|  | 
 | ||
|  |   if (typeof listener !== 'function') | ||
|  |     throw new TypeError('"listener" argument must be a function'); | ||
|  | 
 | ||
|  |   events = target._events; | ||
|  |   if (!events) { | ||
|  |     events = target._events = objectCreate(null); | ||
|  |     target._eventsCount = 0; | ||
|  |   } else { | ||
|  |     // To avoid recursion in the case that type === "newListener"! Before
 | ||
|  |     // adding it to the listeners, first emit "newListener".
 | ||
|  |     if (events.newListener) { | ||
|  |       target.emit('newListener', type, | ||
|  |           listener.listener ? listener.listener : listener); | ||
|  | 
 | ||
|  |       // Re-assign `events` because a newListener handler could have caused the
 | ||
|  |       // this._events to be assigned to a new object
 | ||
|  |       events = target._events; | ||
|  |     } | ||
|  |     existing = events[type]; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!existing) { | ||
|  |     // Optimize the case of one listener. Don't need the extra array object.
 | ||
|  |     existing = events[type] = listener; | ||
|  |     ++target._eventsCount; | ||
|  |   } else { | ||
|  |     if (typeof existing === 'function') { | ||
|  |       // Adding the second element, need to change to array.
 | ||
|  |       existing = events[type] = | ||
|  |           prepend ? [listener, existing] : [existing, listener]; | ||
|  |     } else { | ||
|  |       // If we've already got an array, just append.
 | ||
|  |       if (prepend) { | ||
|  |         existing.unshift(listener); | ||
|  |       } else { | ||
|  |         existing.push(listener); | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     // Check for listener leak
 | ||
|  |     if (!existing.warned) { | ||
|  |       m = $getMaxListeners(target); | ||
|  |       if (m && m > 0 && existing.length > m) { | ||
|  |         existing.warned = true; | ||
|  |         var w = new Error('Possible EventEmitter memory leak detected. ' + | ||
|  |             existing.length + ' "' + String(type) + '" listeners ' + | ||
|  |             'added. Use emitter.setMaxListeners() to ' + | ||
|  |             'increase limit.'); | ||
|  |         w.name = 'MaxListenersExceededWarning'; | ||
|  |         w.emitter = target; | ||
|  |         w.type = type; | ||
|  |         w.count = existing.length; | ||
|  |         if (typeof console === 'object' && console.warn) { | ||
|  |           console.warn('%s: %s', w.name, w.message); | ||
|  |         } | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return target; | ||
|  | } | ||
|  | 
 | ||
|  | EventEmitter.prototype.addListener = function addListener(type, listener) { | ||
|  |   return _addListener(this, type, listener, false); | ||
|  | }; | ||
|  | 
 | ||
|  | EventEmitter.prototype.on = EventEmitter.prototype.addListener; | ||
|  | 
 | ||
|  | EventEmitter.prototype.prependListener = | ||
|  |     function prependListener(type, listener) { | ||
|  |       return _addListener(this, type, listener, true); | ||
|  |     }; | ||
|  | 
 | ||
|  | function onceWrapper() { | ||
|  |   if (!this.fired) { | ||
|  |     this.target.removeListener(this.type, this.wrapFn); | ||
|  |     this.fired = true; | ||
|  |     switch (arguments.length) { | ||
|  |       case 0: | ||
|  |         return this.listener.call(this.target); | ||
|  |       case 1: | ||
|  |         return this.listener.call(this.target, arguments[0]); | ||
|  |       case 2: | ||
|  |         return this.listener.call(this.target, arguments[0], arguments[1]); | ||
|  |       case 3: | ||
|  |         return this.listener.call(this.target, arguments[0], arguments[1], | ||
|  |             arguments[2]); | ||
|  |       default: | ||
|  |         var args = new Array(arguments.length); | ||
|  |         for (var i = 0; i < args.length; ++i) | ||
|  |           args[i] = arguments[i]; | ||
|  |         this.listener.apply(this.target, args); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function _onceWrap(target, type, listener) { | ||
|  |   var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener }; | ||
|  |   var wrapped = bind.call(onceWrapper, state); | ||
|  |   wrapped.listener = listener; | ||
|  |   state.wrapFn = wrapped; | ||
|  |   return wrapped; | ||
|  | } | ||
|  | 
 | ||
|  | EventEmitter.prototype.once = function once(type, listener) { | ||
|  |   if (typeof listener !== 'function') | ||
|  |     throw new TypeError('"listener" argument must be a function'); | ||
|  |   this.on(type, _onceWrap(this, type, listener)); | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | EventEmitter.prototype.prependOnceListener = | ||
|  |     function prependOnceListener(type, listener) { | ||
|  |       if (typeof listener !== 'function') | ||
|  |         throw new TypeError('"listener" argument must be a function'); | ||
|  |       this.prependListener(type, _onceWrap(this, type, listener)); | ||
|  |       return this; | ||
|  |     }; | ||
|  | 
 | ||
|  | // Emits a 'removeListener' event if and only if the listener was removed.
 | ||
|  | EventEmitter.prototype.removeListener = | ||
|  |     function removeListener(type, listener) { | ||
|  |       var list, events, position, i, originalListener; | ||
|  | 
 | ||
|  |       if (typeof listener !== 'function') | ||
|  |         throw new TypeError('"listener" argument must be a function'); | ||
|  | 
 | ||
|  |       events = this._events; | ||
|  |       if (!events) | ||
|  |         return this; | ||
|  | 
 | ||
|  |       list = events[type]; | ||
|  |       if (!list) | ||
|  |         return this; | ||
|  | 
 | ||
|  |       if (list === listener || list.listener === listener) { | ||
|  |         if (--this._eventsCount === 0) | ||
|  |           this._events = objectCreate(null); | ||
|  |         else { | ||
|  |           delete events[type]; | ||
|  |           if (events.removeListener) | ||
|  |             this.emit('removeListener', type, list.listener || listener); | ||
|  |         } | ||
|  |       } else if (typeof list !== 'function') { | ||
|  |         position = -1; | ||
|  | 
 | ||
|  |         for (i = list.length - 1; i >= 0; i--) { | ||
|  |           if (list[i] === listener || list[i].listener === listener) { | ||
|  |             originalListener = list[i].listener; | ||
|  |             position = i; | ||
|  |             break; | ||
|  |           } | ||
|  |         } | ||
|  | 
 | ||
|  |         if (position < 0) | ||
|  |           return this; | ||
|  | 
 | ||
|  |         if (position === 0) | ||
|  |           list.shift(); | ||
|  |         else | ||
|  |           spliceOne(list, position); | ||
|  | 
 | ||
|  |         if (list.length === 1) | ||
|  |           events[type] = list[0]; | ||
|  | 
 | ||
|  |         if (events.removeListener) | ||
|  |           this.emit('removeListener', type, originalListener || listener); | ||
|  |       } | ||
|  | 
 | ||
|  |       return this; | ||
|  |     }; | ||
|  | 
 | ||
|  | EventEmitter.prototype.removeAllListeners = | ||
|  |     function removeAllListeners(type) { | ||
|  |       var listeners, events, i; | ||
|  | 
 | ||
|  |       events = this._events; | ||
|  |       if (!events) | ||
|  |         return this; | ||
|  | 
 | ||
|  |       // not listening for removeListener, no need to emit
 | ||
|  |       if (!events.removeListener) { | ||
|  |         if (arguments.length === 0) { | ||
|  |           this._events = objectCreate(null); | ||
|  |           this._eventsCount = 0; | ||
|  |         } else if (events[type]) { | ||
|  |           if (--this._eventsCount === 0) | ||
|  |             this._events = objectCreate(null); | ||
|  |           else | ||
|  |             delete events[type]; | ||
|  |         } | ||
|  |         return this; | ||
|  |       } | ||
|  | 
 | ||
|  |       // emit removeListener for all listeners on all events
 | ||
|  |       if (arguments.length === 0) { | ||
|  |         var keys = objectKeys(events); | ||
|  |         var key; | ||
|  |         for (i = 0; i < keys.length; ++i) { | ||
|  |           key = keys[i]; | ||
|  |           if (key === 'removeListener') continue; | ||
|  |           this.removeAllListeners(key); | ||
|  |         } | ||
|  |         this.removeAllListeners('removeListener'); | ||
|  |         this._events = objectCreate(null); | ||
|  |         this._eventsCount = 0; | ||
|  |         return this; | ||
|  |       } | ||
|  | 
 | ||
|  |       listeners = events[type]; | ||
|  | 
 | ||
|  |       if (typeof listeners === 'function') { | ||
|  |         this.removeListener(type, listeners); | ||
|  |       } else if (listeners) { | ||
|  |         // LIFO order
 | ||
|  |         for (i = listeners.length - 1; i >= 0; i--) { | ||
|  |           this.removeListener(type, listeners[i]); | ||
|  |         } | ||
|  |       } | ||
|  | 
 | ||
|  |       return this; | ||
|  |     }; | ||
|  | 
 | ||
|  | function _listeners(target, type, unwrap) { | ||
|  |   var events = target._events; | ||
|  | 
 | ||
|  |   if (!events) | ||
|  |     return []; | ||
|  | 
 | ||
|  |   var evlistener = events[type]; | ||
|  |   if (!evlistener) | ||
|  |     return []; | ||
|  | 
 | ||
|  |   if (typeof evlistener === 'function') | ||
|  |     return unwrap ? [evlistener.listener || evlistener] : [evlistener]; | ||
|  | 
 | ||
|  |   return unwrap ? unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length); | ||
|  | } | ||
|  | 
 | ||
|  | EventEmitter.prototype.listeners = function listeners(type) { | ||
|  |   return _listeners(this, type, true); | ||
|  | }; | ||
|  | 
 | ||
|  | EventEmitter.prototype.rawListeners = function rawListeners(type) { | ||
|  |   return _listeners(this, type, false); | ||
|  | }; | ||
|  | 
 | ||
|  | EventEmitter.listenerCount = function(emitter, type) { | ||
|  |   if (typeof emitter.listenerCount === 'function') { | ||
|  |     return emitter.listenerCount(type); | ||
|  |   } else { | ||
|  |     return listenerCount.call(emitter, type); | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | EventEmitter.prototype.listenerCount = listenerCount; | ||
|  | function listenerCount(type) { | ||
|  |   var events = this._events; | ||
|  | 
 | ||
|  |   if (events) { | ||
|  |     var evlistener = events[type]; | ||
|  | 
 | ||
|  |     if (typeof evlistener === 'function') { | ||
|  |       return 1; | ||
|  |     } else if (evlistener) { | ||
|  |       return evlistener.length; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return 0; | ||
|  | } | ||
|  | 
 | ||
|  | EventEmitter.prototype.eventNames = function eventNames() { | ||
|  |   return this._eventsCount > 0 ? Reflect.ownKeys(this._events) : []; | ||
|  | }; | ||
|  | 
 | ||
|  | // About 1.5x faster than the two-arg version of Array#splice().
 | ||
|  | function spliceOne(list, index) { | ||
|  |   for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1) | ||
|  |     list[i] = list[k]; | ||
|  |   list.pop(); | ||
|  | } | ||
|  | 
 | ||
|  | function arrayClone(arr, n) { | ||
|  |   var copy = new Array(n); | ||
|  |   for (var i = 0; i < n; ++i) | ||
|  |     copy[i] = arr[i]; | ||
|  |   return copy; | ||
|  | } | ||
|  | 
 | ||
|  | function unwrapListeners(arr) { | ||
|  |   var ret = new Array(arr.length); | ||
|  |   for (var i = 0; i < ret.length; ++i) { | ||
|  |     ret[i] = arr[i].listener || arr[i]; | ||
|  |   } | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | function objectCreatePolyfill(proto) { | ||
|  |   var F = function() {}; | ||
|  |   F.prototype = proto; | ||
|  |   return new F; | ||
|  | } | ||
|  | function objectKeysPolyfill(obj) { | ||
|  |   var keys = []; | ||
|  |   for (var k in obj) if (Object.prototype.hasOwnProperty.call(obj, k)) { | ||
|  |     keys.push(k); | ||
|  |   } | ||
|  |   return keys; | ||
|  | } | ||
|  | function functionBindPolyfill(context) { | ||
|  |   var fn = this; | ||
|  |   return function () { | ||
|  |     return fn.apply(context, arguments); | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | },{}],27:[function(_dereq_,module,exports){ | ||
|  | /** | ||
|  |  * # hasLocalStorage() | ||
|  |  * | ||
|  |  * returns `true` or `false` depending on whether localStorage is supported or not. | ||
|  |  * Beware that some browsers like Safari do not support localStorage in private mode. | ||
|  |  * | ||
|  |  * inspired by this cappuccino commit | ||
|  |  * https://github.com/cappuccino/cappuccino/commit/063b05d9643c35b303568a28809e4eb3224f71ec
 | ||
|  |  * | ||
|  |  * @returns {Boolean} | ||
|  |  */ | ||
|  | function hasLocalStorage() { | ||
|  |   try { | ||
|  | 
 | ||
|  |     // we have to put this in here. I've seen Firefox throw `Security error: 1000`
 | ||
|  |     // when cookies have been disabled
 | ||
|  |     if (typeof localStorage === 'undefined') { | ||
|  |       return false; | ||
|  |     } | ||
|  | 
 | ||
|  |     // Just because localStorage exists does not mean it works. In particular it might be disabled
 | ||
|  |     // as it is when Safari's private browsing mode is active.
 | ||
|  |     localStorage.setItem('Storage-Test', '1'); | ||
|  | 
 | ||
|  |     // that should not happen ...
 | ||
|  |     if (localStorage.getItem('Storage-Test') !== '1') { | ||
|  |       return false; | ||
|  |     } | ||
|  | 
 | ||
|  |     // okay, let's clean up if we got here.
 | ||
|  |     localStorage.removeItem('Storage-Test'); | ||
|  |   } catch (_error) { | ||
|  | 
 | ||
|  |     // in case of an error, like Safari's Private Mode, return false
 | ||
|  |     return false; | ||
|  |   } | ||
|  | 
 | ||
|  |   // we're good.
 | ||
|  |   return true; | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | if (typeof exports === 'object') { | ||
|  |   module.exports = hasLocalStorage; | ||
|  | } | ||
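|  | // Usage sketch (comment only, not executed by the bundle): callers should
|  | // feature-detect with hasLocalStorage() before touching localStorage directly;
|  | // the try/catch above is what catches Safari's private mode and the
|  | // cookies-disabled security error mentioned in the comments.
|  | //
|  | //   if (hasLocalStorage()) {
|  | //     localStorage.setItem('pref', 'dark');
|  | //   } else {
|  | //     // fall back to an in-memory store, as the next module does
|  | //   }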
|  | 
 | ||
|  | },{}],28:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | var exports = module.exports = {}; | ||
|  | var localStorageMemory = _dereq_(69); | ||
|  | exports.hasLocalStorage = _dereq_(27); | ||
|  | 
 | ||
|  | /** | ||
|  |  * returns localStorage-compatible API, either backed by window.localStorage | ||
|  |  * or memory if it's not available or not persistent. | ||
|  |  * | ||
|  |  * It also adds an object API (`.getObject(key)`, | ||
|  |  * `.setObject(key, properties)`) and an `isPersistent` property | ||
|  |  * | ||
|  |  * @returns {Object} | ||
|  |  */ | ||
|  | exports.create = function () { | ||
|  |   var api; | ||
|  | 
 | ||
|  |   if (!exports.hasLocalStorage()) { | ||
|  |     api = localStorageMemory; | ||
|  |     api.isPersistent = false; | ||
|  |   } else { | ||
|  |     api = { | ||
|  |       get length() { return global.localStorage.length; }, | ||
|  |       getItem: global.localStorage.getItem.bind(global.localStorage), | ||
|  |       setItem: global.localStorage.setItem.bind(global.localStorage), | ||
|  |       removeItem: global.localStorage.removeItem.bind(global.localStorage), | ||
|  |       key: global.localStorage.key.bind(global.localStorage), | ||
|  |       clear: global.localStorage.clear.bind(global.localStorage), | ||
|  |     }; | ||
|  | 
 | ||
|  |     api.isPersistent = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   api.getObject = exports.getObject.bind(null, api); | ||
|  |   api.setObject = exports.setObject.bind(null, api); | ||
|  | 
 | ||
|  |   return api; | ||
|  | }; | ||
|  | 
 | ||
|  | /** | ||
|  |  * sets key to passed Object. | ||
|  |  * | ||
|  |  * @returns undefined | ||
|  |  */ | ||
|  | exports.setObject = function (store, key, object) { | ||
|  |   if (typeof object !== 'object') { | ||
|  |     return store.setItem(key, object); | ||
|  |   } | ||
|  | 
 | ||
|  |   return store.setItem(key, JSON.stringify(object)); | ||
|  | }; | ||
|  | 
 | ||
|  | /** | ||
|  |  * returns Object for key, or null | ||
|  |  * | ||
|  |  * @returns {Object|null} | ||
|  |  */ | ||
|  | exports.getObject = function (store, key) { | ||
|  |   var item = store.getItem(key); | ||
|  | 
 | ||
|  |   if (!item) { | ||
|  |     return null; | ||
|  |   } | ||
|  | 
 | ||
|  |   try { | ||
|  |     return JSON.parse(item); | ||
|  |   } catch (e) { | ||
|  |     return item; | ||
|  |   } | ||
|  | }; | ||
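|  | // Usage sketch (comment only, not executed by the bundle): create() always
|  | // returns the same shape of API, so callers only need to consult the
|  | // isPersistent flag to learn whether writes survive a reload.
|  | //
|  | //   var store = exports.create();
|  | //   store.setObject('config', { theme: 'dark' });  // objects are JSON.stringify'd
|  | //   store.getObject('config');                     // -> { theme: 'dark' }
|  | //   store.getItem('config');                       // -> '{"theme":"dark"}'
|  | //   store.isPersistent;                            // false when memory-backed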
|  | 
 | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{"27":27,"69":69}],29:[function(_dereq_,module,exports){ | ||
|  | var api = _dereq_(28); | ||
|  | module.exports = api.create(); | ||
|  | 
 | ||
|  | },{"28":28}],30:[function(_dereq_,module,exports){ | ||
|  | /*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh <https://feross.org/opensource> */ | ||
|  | exports.read = function (buffer, offset, isLE, mLen, nBytes) { | ||
|  |   var e, m | ||
|  |   var eLen = (nBytes * 8) - mLen - 1 | ||
|  |   var eMax = (1 << eLen) - 1 | ||
|  |   var eBias = eMax >> 1 | ||
|  |   var nBits = -7 | ||
|  |   var i = isLE ? (nBytes - 1) : 0 | ||
|  |   var d = isLE ? -1 : 1 | ||
|  |   var s = buffer[offset + i] | ||
|  | 
 | ||
|  |   i += d | ||
|  | 
 | ||
|  |   e = s & ((1 << (-nBits)) - 1) | ||
|  |   s >>= (-nBits) | ||
|  |   nBits += eLen | ||
|  |   for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {} | ||
|  | 
 | ||
|  |   m = e & ((1 << (-nBits)) - 1) | ||
|  |   e >>= (-nBits) | ||
|  |   nBits += mLen | ||
|  |   for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {} | ||
|  | 
 | ||
|  |   if (e === 0) { | ||
|  |     e = 1 - eBias | ||
|  |   } else if (e === eMax) { | ||
|  |     return m ? NaN : ((s ? -1 : 1) * Infinity) | ||
|  |   } else { | ||
|  |     m = m + Math.pow(2, mLen) | ||
|  |     e = e - eBias | ||
|  |   } | ||
|  |   return (s ? -1 : 1) * m * Math.pow(2, e - mLen) | ||
|  | } | ||
|  | 
 | ||
|  | exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { | ||
|  |   var e, m, c | ||
|  |   var eLen = (nBytes * 8) - mLen - 1 | ||
|  |   var eMax = (1 << eLen) - 1 | ||
|  |   var eBias = eMax >> 1 | ||
|  |   var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) | ||
|  |   var i = isLE ? 0 : (nBytes - 1) | ||
|  |   var d = isLE ? 1 : -1 | ||
|  |   var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 | ||
|  | 
 | ||
|  |   value = Math.abs(value) | ||
|  | 
 | ||
|  |   if (isNaN(value) || value === Infinity) { | ||
|  |     m = isNaN(value) ? 1 : 0 | ||
|  |     e = eMax | ||
|  |   } else { | ||
|  |     e = Math.floor(Math.log(value) / Math.LN2) | ||
|  |     if (value * (c = Math.pow(2, -e)) < 1) { | ||
|  |       e-- | ||
|  |       c *= 2 | ||
|  |     } | ||
|  |     if (e + eBias >= 1) { | ||
|  |       value += rt / c | ||
|  |     } else { | ||
|  |       value += rt * Math.pow(2, 1 - eBias) | ||
|  |     } | ||
|  |     if (value * c >= 2) { | ||
|  |       e++ | ||
|  |       c /= 2 | ||
|  |     } | ||
|  | 
 | ||
|  |     if (e + eBias >= eMax) { | ||
|  |       m = 0 | ||
|  |       e = eMax | ||
|  |     } else if (e + eBias >= 1) { | ||
|  |       m = ((value * c) - 1) * Math.pow(2, mLen) | ||
|  |       e = e + eBias | ||
|  |     } else { | ||
|  |       m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) | ||
|  |       e = 0 | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} | ||
|  | 
 | ||
|  |   e = (e << mLen) | m | ||
|  |   eLen += mLen | ||
|  |   for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} | ||
|  | 
 | ||
|  |   buffer[offset + i - d] |= s * 128 | ||
|  | } | ||
|  | 
 | ||
|  | },{}],31:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | var types = [ | ||
|  |   _dereq_(11), | ||
|  |   _dereq_(34), | ||
|  |   _dereq_(33), | ||
|  |   _dereq_(32), | ||
|  |   _dereq_(35), | ||
|  |   _dereq_(36) | ||
|  | ]; | ||
|  | var draining; | ||
|  | var currentQueue; | ||
|  | var queueIndex = -1; | ||
|  | var queue = []; | ||
|  | var scheduled = false; | ||
|  | function cleanUpNextTick() { | ||
|  |   if (!draining || !currentQueue) { | ||
|  |     return; | ||
|  |   } | ||
|  |   draining = false; | ||
|  |   if (currentQueue.length) { | ||
|  |     queue = currentQueue.concat(queue); | ||
|  |   } else { | ||
|  |     queueIndex = -1; | ||
|  |   } | ||
|  |   if (queue.length) { | ||
|  |     nextTick(); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | //named nextTick for less confusing stack traces
 | ||
|  | function nextTick() { | ||
|  |   if (draining) { | ||
|  |     return; | ||
|  |   } | ||
|  |   scheduled = false; | ||
|  |   draining = true; | ||
|  |   var len = queue.length; | ||
|  |   var timeout = setTimeout(cleanUpNextTick); | ||
|  |   while (len) { | ||
|  |     currentQueue = queue; | ||
|  |     queue = []; | ||
|  |     while (currentQueue && ++queueIndex < len) { | ||
|  |       currentQueue[queueIndex].run(); | ||
|  |     } | ||
|  |     queueIndex = -1; | ||
|  |     len = queue.length; | ||
|  |   } | ||
|  |   currentQueue = null; | ||
|  |   queueIndex = -1; | ||
|  |   draining = false; | ||
|  |   clearTimeout(timeout); | ||
|  | } | ||
|  | var scheduleDrain; | ||
|  | var i = -1; | ||
|  | var len = types.length; | ||
|  | while (++i < len) { | ||
|  |   if (types[i] && types[i].test && types[i].test()) { | ||
|  |     scheduleDrain = types[i].install(nextTick); | ||
|  |     break; | ||
|  |   } | ||
|  | } | ||
|  | // v8 likes predictable objects
 | ||
|  | function Item(fun, array) { | ||
|  |   this.fun = fun; | ||
|  |   this.array = array; | ||
|  | } | ||
|  | Item.prototype.run = function () { | ||
|  |   var fun = this.fun; | ||
|  |   var array = this.array; | ||
|  |   switch (array.length) { | ||
|  |   case 0: | ||
|  |     return fun(); | ||
|  |   case 1: | ||
|  |     return fun(array[0]); | ||
|  |   case 2: | ||
|  |     return fun(array[0], array[1]); | ||
|  |   case 3: | ||
|  |     return fun(array[0], array[1], array[2]); | ||
|  |   default: | ||
|  |     return fun.apply(null, array); | ||
|  |   } | ||
|  | 
 | ||
|  | }; | ||
|  | module.exports = immediate; | ||
|  | function immediate(task) { | ||
|  |   var args = new Array(arguments.length - 1); | ||
|  |   if (arguments.length > 1) { | ||
|  |     for (var i = 1; i < arguments.length; i++) { | ||
|  |       args[i - 1] = arguments[i]; | ||
|  |     } | ||
|  |   } | ||
|  |   queue.push(new Item(task, args)); | ||
|  |   if (!scheduled && !draining) { | ||
|  |     scheduled = true; | ||
|  |     scheduleDrain(); | ||
|  |   } | ||
|  | } | ||
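|  | // Usage sketch (comment only, not executed by the bundle): immediate() queues
|  | // callbacks and drains them on the fastest async primitive detected above
|  | // (queueMicrotask, MutationObserver, MessageChannel or others, falling back to
|  | // setTimeout), so both callbacks below run after the current call stack unwinds:
|  | //
|  | //   immediate(function () { console.log('first'); });
|  | //   immediate(function (a, b) { console.log(a + b); }, 2, 3);  // extra args are forwarded
|  | //   console.log('this logs before both callbacks');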
|  | 
 | ||
|  | },{"11":11,"32":32,"33":33,"34":34,"35":35,"36":36}],32:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | exports.test = function () { | ||
|  |   if (global.setImmediate) { | ||
|  |     // we can only get here in IE10
 | ||
|  |     // which doesn't handle postMessage well
 | ||
|  |     return false; | ||
|  |   } | ||
|  |   return typeof global.MessageChannel !== 'undefined'; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (func) { | ||
|  |   var channel = new global.MessageChannel(); | ||
|  |   channel.port1.onmessage = func; | ||
|  |   return function () { | ||
|  |     channel.port2.postMessage(0); | ||
|  |   }; | ||
|  | }; | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],33:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 'use strict'; | ||
|  | //based off rsvp https://github.com/tildeio/rsvp.js
 | ||
|  | //license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
 | ||
|  | //https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js
 | ||
|  | 
 | ||
|  | var Mutation = global.MutationObserver || global.WebKitMutationObserver; | ||
|  | 
 | ||
|  | exports.test = function () { | ||
|  |   return Mutation; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (handle) { | ||
|  |   var called = 0; | ||
|  |   var observer = new Mutation(handle); | ||
|  |   var element = global.document.createTextNode(''); | ||
|  |   observer.observe(element, { | ||
|  |     characterData: true | ||
|  |   }); | ||
|  |   return function () { | ||
|  |     element.data = (called = ++called % 2); | ||
|  |   }; | ||
|  | }; | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],34:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 'use strict'; | ||
|  | exports.test = function () { | ||
|  |   return typeof global.queueMicrotask === 'function'; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (func) { | ||
|  |   return function () { | ||
|  |     global.queueMicrotask(func); | ||
|  |   }; | ||
|  | }; | ||
|  | 
 | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],35:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | exports.test = function () { | ||
|  |   return 'document' in global && 'onreadystatechange' in global.document.createElement('script'); | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (handle) { | ||
|  |   return function () { | ||
|  | 
 | ||
|  |     // Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
 | ||
|  |     // into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
 | ||
|  |     var scriptEl = global.document.createElement('script'); | ||
|  |     scriptEl.onreadystatechange = function () { | ||
|  |       handle(); | ||
|  | 
 | ||
|  |       scriptEl.onreadystatechange = null; | ||
|  |       scriptEl.parentNode.removeChild(scriptEl); | ||
|  |       scriptEl = null; | ||
|  |     }; | ||
|  |     global.document.documentElement.appendChild(scriptEl); | ||
|  | 
 | ||
|  |     return handle; | ||
|  |   }; | ||
|  | }; | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],36:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | exports.test = function () { | ||
|  |   return true; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (t) { | ||
|  |   return function () { | ||
|  |     setTimeout(t, 0); | ||
|  |   }; | ||
|  | }; | ||
|  | },{}],37:[function(_dereq_,module,exports){ | ||
|  | if (typeof Object.create === 'function') { | ||
|  |   // implementation from standard node.js 'util' module
 | ||
|  |   module.exports = function inherits(ctor, superCtor) { | ||
|  |     if (superCtor) { | ||
|  |       ctor.super_ = superCtor | ||
|  |       ctor.prototype = Object.create(superCtor.prototype, { | ||
|  |         constructor: { | ||
|  |           value: ctor, | ||
|  |           enumerable: false, | ||
|  |           writable: true, | ||
|  |           configurable: true | ||
|  |         } | ||
|  |       }) | ||
|  |     } | ||
|  |   }; | ||
|  | } else { | ||
|  |   // old school shim for old browsers
 | ||
|  |   module.exports = function inherits(ctor, superCtor) { | ||
|  |     if (superCtor) { | ||
|  |       ctor.super_ = superCtor | ||
|  |       var TempCtor = function () {} | ||
|  |       TempCtor.prototype = superCtor.prototype | ||
|  |       ctor.prototype = new TempCtor() | ||
|  |       ctor.prototype.constructor = ctor | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
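|  | // Usage sketch (comment only, not executed by the bundle): both branches above
|  | // wire up the same classic prototype chain, so code written for Node's
|  | // util.inherits works unchanged.
|  | //
|  | //   function Animal(name) { this.name = name; }
|  | //   Animal.prototype.speak = function () { return this.name + ' makes a sound'; };
|  | //
|  | //   function Dog(name) { Animal.call(this, name); }
|  | //   inherits(Dog, Animal);        // also sets Dog.super_ === Animal
|  | //   new Dog('Rex').speak();       // -> 'Rex makes a sound'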
|  | 
 | ||
|  | },{}],38:[function(_dereq_,module,exports){ | ||
|  | /*! | ||
|  |  * Determine if an object is a Buffer | ||
|  |  * | ||
|  |  * @author   Feross Aboukhadijeh <https://feross.org>
 | ||
|  |  * @license  MIT | ||
|  |  */ | ||
|  | 
 | ||
|  | // The _isBuffer check is for Safari 5-7 support, because it's missing
 | ||
|  | // Object.prototype.constructor. Remove this eventually
 | ||
|  | module.exports = function (obj) { | ||
|  |   return obj != null && (isBuffer(obj) || isSlowBuffer(obj) || !!obj._isBuffer) | ||
|  | } | ||
|  | 
 | ||
|  | function isBuffer (obj) { | ||
|  |   return !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj) | ||
|  | } | ||
|  | 
 | ||
|  | // For Node v0.10 support. Remove this eventually.
 | ||
|  | function isSlowBuffer (obj) { | ||
|  |   return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isBuffer(obj.slice(0, 0)) | ||
|  | } | ||
|  | 
 | ||
|  | },{}],39:[function(_dereq_,module,exports){ | ||
|  | var encodings = _dereq_(40) | ||
|  | 
 | ||
|  | module.exports = Codec | ||
|  | 
 | ||
|  | function Codec (opts) { | ||
|  |   if (!(this instanceof Codec)) { | ||
|  |     return new Codec(opts) | ||
|  |   } | ||
|  |   this.opts = opts || {} | ||
|  |   this.encodings = encodings | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype._encoding = function (encoding) { | ||
|  |   if (typeof encoding === 'string') encoding = encodings[encoding] | ||
|  |   if (!encoding) encoding = encodings.id | ||
|  |   return encoding | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype._keyEncoding = function (opts, batchOpts) { | ||
|  |   return this._encoding((batchOpts && batchOpts.keyEncoding) || | ||
|  |                         (opts && opts.keyEncoding) || | ||
|  |                         this.opts.keyEncoding) | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype._valueEncoding = function (opts, batchOpts) { | ||
|  |   return this._encoding((batchOpts && (batchOpts.valueEncoding || batchOpts.encoding)) || | ||
|  |                         (opts && (opts.valueEncoding || opts.encoding)) || | ||
|  |                         (this.opts.valueEncoding || this.opts.encoding)) | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype.encodeKey = function (key, opts, batchOpts) { | ||
|  |   return this._keyEncoding(opts, batchOpts).encode(key) | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype.encodeValue = function (value, opts, batchOpts) { | ||
|  |   return this._valueEncoding(opts, batchOpts).encode(value) | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype.decodeKey = function (key, opts) { | ||
|  |   return this._keyEncoding(opts).decode(key) | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype.decodeValue = function (value, opts) { | ||
|  |   return this._valueEncoding(opts).decode(value) | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype.encodeBatch = function (ops, opts) { | ||
|  |   var self = this | ||
|  | 
 | ||
|  |   return ops.map(function (_op) { | ||
|  |     var op = { | ||
|  |       type: _op.type, | ||
|  |       key: self.encodeKey(_op.key, opts, _op) | ||
|  |     } | ||
|  |     if (self.keyAsBuffer(opts, _op)) op.keyEncoding = 'binary' | ||
|  |     if (_op.prefix) op.prefix = _op.prefix | ||
|  |     if ('value' in _op) { | ||
|  |       op.value = self.encodeValue(_op.value, opts, _op) | ||
|  |       if (self.valueAsBuffer(opts, _op)) op.valueEncoding = 'binary' | ||
|  |     } | ||
|  |     return op | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | var ltgtKeys = ['lt', 'gt', 'lte', 'gte', 'start', 'end'] | ||
|  | 
 | ||
|  | Codec.prototype.encodeLtgt = function (ltgt) { | ||
|  |   var self = this | ||
|  |   var ret = {} | ||
|  |   Object.keys(ltgt).forEach(function (key) { | ||
|  |     ret[key] = ltgtKeys.indexOf(key) > -1 | ||
|  |       ? self.encodeKey(ltgt[key], ltgt) | ||
|  |       : ltgt[key] | ||
|  |   }) | ||
|  |   return ret | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype.createStreamDecoder = function (opts) { | ||
|  |   var self = this | ||
|  | 
 | ||
|  |   if (opts.keys && opts.values) { | ||
|  |     return function (key, value) { | ||
|  |       return { | ||
|  |         key: self.decodeKey(key, opts), | ||
|  |         value: self.decodeValue(value, opts) | ||
|  |       } | ||
|  |     } | ||
|  |   } else if (opts.keys) { | ||
|  |     return function (key) { | ||
|  |       return self.decodeKey(key, opts) | ||
|  |     } | ||
|  |   } else if (opts.values) { | ||
|  |     return function (_, value) { | ||
|  |       return self.decodeValue(value, opts) | ||
|  |     } | ||
|  |   } else { | ||
|  |     return function () {} | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype.keyAsBuffer = function (opts) { | ||
|  |   return this._keyEncoding(opts).buffer | ||
|  | } | ||
|  | 
 | ||
|  | Codec.prototype.valueAsBuffer = function (opts) { | ||
|  |   return this._valueEncoding(opts).buffer | ||
|  | } | ||
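// Illustrative sketch (never invoked): typical Codec usage. The option
// values are examples; with no keyEncoding the key passes through the
// identity ('id') encoding, while valueEncoding 'json' round-trips through
// JSON.stringify/JSON.parse.
function exampleCodecRoundTrip() {
  var codec = Codec({ valueEncoding: 'json' })
  var storedKey = codec.encodeKey('user:1', {})            // 'user:1' (identity: returned as-is)
  var storedVal = codec.encodeValue({ name: 'Ada' }, {})   // '{"name":"Ada"}'
  var doc       = codec.decodeValue(storedVal, {})         // { name: 'Ada' }
  return { storedKey: storedKey, doc: doc }
}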
|  | 
 | ||
|  | },{"40":40}],40:[function(_dereq_,module,exports){ | ||
|  | var Buffer = _dereq_(13).Buffer | ||
|  | 
 | ||
|  | exports.utf8 = exports['utf-8'] = { | ||
|  |   encode: function (data) { | ||
|  |     return isBinary(data) ? data : String(data) | ||
|  |   }, | ||
|  |   decode: identity, | ||
|  |   buffer: false, | ||
|  |   type: 'utf8' | ||
|  | } | ||
|  | 
 | ||
|  | exports.json = { | ||
|  |   encode: JSON.stringify, | ||
|  |   decode: JSON.parse, | ||
|  |   buffer: false, | ||
|  |   type: 'json' | ||
|  | } | ||
|  | 
 | ||
|  | exports.binary = { | ||
|  |   encode: function (data) { | ||
|  |     return isBinary(data) ? data : Buffer.from(data) | ||
|  |   }, | ||
|  |   decode: identity, | ||
|  |   buffer: true, | ||
|  |   type: 'binary' | ||
|  | } | ||
|  | 
 | ||
|  | exports.none = { | ||
|  |   encode: identity, | ||
|  |   decode: identity, | ||
|  |   buffer: false, | ||
|  |   type: 'id' | ||
|  | } | ||
|  | 
 | ||
|  | exports.id = exports.none | ||
|  | 
 | ||
|  | var bufferEncodings = [ | ||
|  |   'hex', | ||
|  |   'ascii', | ||
|  |   'base64', | ||
|  |   'ucs2', | ||
|  |   'ucs-2', | ||
|  |   'utf16le', | ||
|  |   'utf-16le' | ||
|  | ] | ||
|  | 
 | ||
|  | bufferEncodings.forEach(function (type) { | ||
|  |   exports[type] = { | ||
|  |     encode: function (data) { | ||
|  |       return isBinary(data) ? data : Buffer.from(data, type) | ||
|  |     }, | ||
|  |     decode: function (buffer) { | ||
|  |       return buffer.toString(type) | ||
|  |     }, | ||
|  |     buffer: true, | ||
|  |     type: type | ||
|  |   } | ||
|  | }) | ||
|  | 
 | ||
|  | function identity (value) { | ||
|  |   return value | ||
|  | } | ||
|  | 
 | ||
|  | function isBinary (data) { | ||
|  |   return data === undefined || data === null || Buffer.isBuffer(data) | ||
|  | } | ||
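// Illustrative sketch (never invoked): round-tripping values through two of
// the encodings defined above. The Buffer-backed encodings (hex, base64, ...)
// report buffer: true, which level-codec uses to tag batch ops as 'binary'.
function exampleEncodingRoundTrip() {
  var asJson = exports.json.encode({ a: 1 })     // '{"a":1}'
  var back   = exports.json.decode(asJson)       // { a: 1 }
  var asHex  = exports.hex.encode('deadbeef')    // Buffer <de ad be ef>
  var hexStr = exports.hex.decode(asHex)         // 'deadbeef'
  return back.a === 1 && hexStr === 'deadbeef'   // -> true
}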
|  | 
 | ||
|  | },{"13":13}],41:[function(_dereq_,module,exports){ | ||
|  | var createError = _dereq_(25).create | ||
|  | var LevelUPError = createError('LevelUPError') | ||
|  | var NotFoundError = createError('NotFoundError', LevelUPError) | ||
|  | 
 | ||
|  | NotFoundError.prototype.notFound = true | ||
|  | NotFoundError.prototype.status = 404 | ||
|  | 
 | ||
|  | module.exports = { | ||
|  |   LevelUPError: LevelUPError, | ||
|  |   InitializationError: createError('InitializationError', LevelUPError), | ||
|  |   OpenError: createError('OpenError', LevelUPError), | ||
|  |   ReadError: createError('ReadError', LevelUPError), | ||
|  |   WriteError: createError('WriteError', LevelUPError), | ||
|  |   NotFoundError: NotFoundError, | ||
|  |   EncodingError: createError('EncodingError', LevelUPError) | ||
|  | } | ||
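// Illustrative sketch (never invoked): how callers usually tell a missing
// key apart from a real failure, using either instanceof or the notFound /
// status markers installed on the prototype above. `err` stands in for an
// error passed to a get() callback.
function exampleIsMissingKey(err) {
  return err instanceof NotFoundError ||   // direct constructor check
         err.notFound === true ||          // duck-typed marker
         err.status === 404                // HTTP-style status marker
}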
|  | 
 | ||
|  | },{"25":25}],42:[function(_dereq_,module,exports){ | ||
|  | var inherits = _dereq_(37) | ||
|  | var Readable = _dereq_(57).Readable | ||
|  | var extend = _dereq_(141) | ||
|  | 
 | ||
|  | module.exports = ReadStream | ||
|  | inherits(ReadStream, Readable) | ||
|  | 
 | ||
|  | function ReadStream (iterator, options) { | ||
|  |   if (!(this instanceof ReadStream)) return new ReadStream(iterator, options) | ||
|  |   options = options || {} | ||
|  |   Readable.call(this, extend(options, { | ||
|  |     objectMode: true | ||
|  |   })) | ||
|  |   this._iterator = iterator | ||
|  |   this._options = options | ||
|  |   this.on('end', this.destroy.bind(this, null, null)) | ||
|  | } | ||
|  | 
 | ||
|  | ReadStream.prototype._read = function () { | ||
|  |   var self = this | ||
|  |   var options = this._options | ||
|  |   if (this.destroyed) return | ||
|  | 
 | ||
|  |   this._iterator.next(function (err, key, value) { | ||
|  |     if (self.destroyed) return | ||
|  |     if (err) return self.destroy(err) | ||
|  | 
 | ||
|  |     if (key === undefined && value === undefined) { | ||
|  |       self.push(null) | ||
|  |     } else if (options.keys !== false && options.values === false) { | ||
|  |       self.push(key) | ||
|  |     } else if (options.keys === false && options.values !== false) { | ||
|  |       self.push(value) | ||
|  |     } else { | ||
|  |       self.push({ key: key, value: value }) | ||
|  |     } | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | ReadStream.prototype._destroy = function (err, callback) { | ||
|  |   this._iterator.end(function (err2) { | ||
|  |     callback(err || err2) | ||
|  |   }) | ||
|  | } | ||
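// Illustrative sketch (never invoked): ReadStream only needs an object with
// next(cb) and end(cb), i.e. an abstract-leveldown style iterator. The tiny
// in-memory iterator below is an example stand-in for db.iterator(options).
function exampleReadStreamUsage() {
  var rows = [['a', 1], ['b', 2]]
  var fakeIterator = {
    next: function (cb) {
      var row = rows.shift()
      // end-of-data is signalled by calling back with no key and no value
      setTimeout(function () { row ? cb(null, row[0], row[1]) : cb(null) }, 0)
    },
    end: function (cb) { setTimeout(cb, 0) }
  }
  return new ReadStream(fakeIterator, { keys: true, values: true })
    .on('data', function (kv) { /* { key: 'a', value: 1 }, then { key: 'b', value: 2 } */ })
}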
|  | 
 | ||
|  | },{"141":141,"37":37,"57":57}],43:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } | ||
|  | 
 | ||
|  | var codes = {}; | ||
|  | 
 | ||
|  | function createErrorType(code, message, Base) { | ||
|  |   if (!Base) { | ||
|  |     Base = Error; | ||
|  |   } | ||
|  | 
 | ||
|  |   function getMessage(arg1, arg2, arg3) { | ||
|  |     if (typeof message === 'string') { | ||
|  |       return message; | ||
|  |     } else { | ||
|  |       return message(arg1, arg2, arg3); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   var NodeError = | ||
|  |   /*#__PURE__*/ | ||
|  |   function (_Base) { | ||
|  |     _inheritsLoose(NodeError, _Base); | ||
|  | 
 | ||
|  |     function NodeError(arg1, arg2, arg3) { | ||
|  |       return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; | ||
|  |     } | ||
|  | 
 | ||
|  |     return NodeError; | ||
|  |   }(Base); | ||
|  | 
 | ||
|  |   NodeError.prototype.name = Base.name; | ||
|  |   NodeError.prototype.code = code; | ||
|  |   codes[code] = NodeError; | ||
|  | } // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function oneOf(expected, thing) { | ||
|  |   if (Array.isArray(expected)) { | ||
|  |     var len = expected.length; | ||
|  |     expected = expected.map(function (i) { | ||
|  |       return String(i); | ||
|  |     }); | ||
|  | 
 | ||
|  |     if (len > 2) { | ||
|  |       return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; | ||
|  |     } else if (len === 2) { | ||
|  |       return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); | ||
|  |     } else { | ||
|  |       return "of ".concat(thing, " ").concat(expected[0]); | ||
|  |     } | ||
|  |   } else { | ||
|  |     return "of ".concat(thing, " ").concat(String(expected)); | ||
|  |   } | ||
|  | } // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function startsWith(str, search, pos) { | ||
|  |   return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; | ||
|  | } // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function endsWith(str, search, this_len) { | ||
|  |   if (this_len === undefined || this_len > str.length) { | ||
|  |     this_len = str.length; | ||
|  |   } | ||
|  | 
 | ||
|  |   return str.substring(this_len - search.length, this_len) === search; | ||
|  | } // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function includes(str, search, start) { | ||
|  |   if (typeof start !== 'number') { | ||
|  |     start = 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (start + search.length > str.length) { | ||
|  |     return false; | ||
|  |   } else { | ||
|  |     return str.indexOf(search, start) !== -1; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { | ||
|  |   return 'The value "' + value + '" is invalid for option "' + name + '"'; | ||
|  | }, TypeError); | ||
|  | createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { | ||
|  |   // determiner: 'must be' or 'must not be'
 | ||
|  |   var determiner; | ||
|  | 
 | ||
|  |   if (typeof expected === 'string' && startsWith(expected, 'not ')) { | ||
|  |     determiner = 'must not be'; | ||
|  |     expected = expected.replace(/^not /, ''); | ||
|  |   } else { | ||
|  |     determiner = 'must be'; | ||
|  |   } | ||
|  | 
 | ||
|  |   var msg; | ||
|  | 
 | ||
|  |   if (endsWith(name, ' argument')) { | ||
|  |     // For cases like 'first argument'
 | ||
|  |     msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); | ||
|  |   } else { | ||
|  |     var type = includes(name, '.') ? 'property' : 'argument'; | ||
|  |     msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); | ||
|  |   } | ||
|  | 
 | ||
|  |   msg += ". Received type ".concat(typeof actual); | ||
|  |   return msg; | ||
|  | }, TypeError); | ||
|  | createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); | ||
|  | createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { | ||
|  |   return 'The ' + name + ' method is not implemented'; | ||
|  | }); | ||
|  | createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); | ||
|  | createErrorType('ERR_STREAM_DESTROYED', function (name) { | ||
|  |   return 'Cannot call ' + name + ' after a stream was destroyed'; | ||
|  | }); | ||
|  | createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); | ||
|  | createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); | ||
|  | createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); | ||
|  | createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); | ||
|  | createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { | ||
|  |   return 'Unknown encoding: ' + arg; | ||
|  | }, TypeError); | ||
|  | createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); | ||
|  | module.exports.codes = codes; | ||
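// Illustrative sketch (never invoked): each entry in `codes` is an Error
// subclass whose message comes from the string or formatter registered
// above. For example:
function exampleErrorCodes() {
  var err = new codes.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], 42)
  // err.message -> 'The "chunk" argument must be one of type string or Buffer. Received type number'
  // err.code    -> 'ERR_INVALID_ARG_TYPE'
  return err
}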
|  | 
 | ||
|  | },{}],44:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | // a duplex stream is just a stream that is both readable and writable.
 | ||
|  | // Since JS doesn't have multiple prototypal inheritance, this class
 | ||
|  | // prototypally inherits from Readable, and then parasitically from
 | ||
|  | // Writable.
 | ||
|  | 'use strict'; | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var objectKeys = Object.keys || function (obj) { | ||
|  |   var keys = []; | ||
|  | 
 | ||
|  |   for (var key in obj) { | ||
|  |     keys.push(key); | ||
|  |   } | ||
|  | 
 | ||
|  |   return keys; | ||
|  | }; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | module.exports = Duplex; | ||
|  | 
 | ||
|  | var Readable = _dereq_(46); | ||
|  | 
 | ||
|  | var Writable = _dereq_(48); | ||
|  | 
 | ||
|  | _dereq_(37)(Duplex, Readable); | ||
|  | 
 | ||
|  | { | ||
|  |   // Allow the keys array to be GC'ed.
 | ||
|  |   var keys = objectKeys(Writable.prototype); | ||
|  | 
 | ||
|  |   for (var v = 0; v < keys.length; v++) { | ||
|  |     var method = keys[v]; | ||
|  |     if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function Duplex(options) { | ||
|  |   if (!(this instanceof Duplex)) return new Duplex(options); | ||
|  |   Readable.call(this, options); | ||
|  |   Writable.call(this, options); | ||
|  |   this.allowHalfOpen = true; | ||
|  | 
 | ||
|  |   if (options) { | ||
|  |     if (options.readable === false) this.readable = false; | ||
|  |     if (options.writable === false) this.writable = false; | ||
|  | 
 | ||
|  |     if (options.allowHalfOpen === false) { | ||
|  |       this.allowHalfOpen = false; | ||
|  |       this.once('end', onend); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._writableState.highWaterMark; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(Duplex.prototype, 'writableBuffer', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._writableState && this._writableState.getBuffer(); | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(Duplex.prototype, 'writableLength', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._writableState.length; | ||
|  |   } | ||
|  | }); // the no-half-open enforcer
 | ||
|  | 
 | ||
|  | function onend() { | ||
|  |   // If the writable side ended, then we're ok.
 | ||
|  |   if (this._writableState.ended) return; // no more data can be written.
 | ||
|  |   // But allow more writes to happen in this tick.
 | ||
|  | 
 | ||
|  |   process.nextTick(onEndNT, this); | ||
|  | } | ||
|  | 
 | ||
|  | function onEndNT(self) { | ||
|  |   self.end(); | ||
|  | } | ||
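// Illustrative sketch (never invoked): what the no-half-open enforcer above
// does in practice. With allowHalfOpen: false, the writable side is ended
// automatically on the tick after the readable side emits 'end'.
function exampleHalfOpenBehaviour() {
  var d = new Duplex({
    allowHalfOpen: false,
    read: function () { this.push(null) },         // readable side ends straight away
    write: function (chunk, enc, cb) { cb() }
  })
  d.resume()   // let 'end' fire; onend() then calls this.end(), so 'finish' follows
  return d
}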
|  | 
 | ||
|  | Object.defineProperty(Duplex.prototype, 'destroyed', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     if (this._readableState === undefined || this._writableState === undefined) { | ||
|  |       return false; | ||
|  |     } | ||
|  | 
 | ||
|  |     return this._readableState.destroyed && this._writableState.destroyed; | ||
|  |   }, | ||
|  |   set: function set(value) { | ||
|  |     // we ignore the value if the stream
 | ||
|  |     // has not been initialized yet
 | ||
|  |     if (this._readableState === undefined || this._writableState === undefined) { | ||
|  |       return; | ||
|  |     } // backward compatibility, the user is explicitly
 | ||
|  |     // managing destroyed
 | ||
|  | 
 | ||
|  | 
 | ||
|  |     this._readableState.destroyed = value; | ||
|  |     this._writableState.destroyed = value; | ||
|  |   } | ||
|  | }); | ||
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"37":37,"46":46,"48":48,"73":73}],45:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | // a passthrough stream.
 | ||
|  | // basically just the most minimal sort of Transform stream.
 | ||
|  | // Every written chunk gets output as-is.
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | module.exports = PassThrough; | ||
|  | 
 | ||
|  | var Transform = _dereq_(47); | ||
|  | 
 | ||
|  | _dereq_(37)(PassThrough, Transform); | ||
|  | 
 | ||
|  | function PassThrough(options) { | ||
|  |   if (!(this instanceof PassThrough)) return new PassThrough(options); | ||
|  |   Transform.call(this, options); | ||
|  | } | ||
|  | 
 | ||
|  | PassThrough.prototype._transform = function (chunk, encoding, cb) { | ||
|  |   cb(null, chunk); | ||
|  | }; | ||
|  | },{"37":37,"47":47}],46:[function(_dereq_,module,exports){ | ||
|  | (function (process,global){(function (){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | module.exports = Readable; | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var Duplex; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | Readable.ReadableState = ReadableState; | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var EE = _dereq_(26).EventEmitter; | ||
|  | 
 | ||
|  | var EElistenerCount = function EElistenerCount(emitter, type) { | ||
|  |   return emitter.listeners(type).length; | ||
|  | }; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | var Stream = _dereq_(56); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | var Buffer = _dereq_(13).Buffer; | ||
|  | 
 | ||
|  | var OurUint8Array = global.Uint8Array || function () {}; | ||
|  | 
 | ||
|  | function _uint8ArrayToBuffer(chunk) { | ||
|  |   return Buffer.from(chunk); | ||
|  | } | ||
|  | 
 | ||
|  | function _isUint8Array(obj) { | ||
|  |   return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; | ||
|  | } | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | var debugUtil = _dereq_(11); | ||
|  | 
 | ||
|  | var debug; | ||
|  | 
 | ||
|  | if (debugUtil && debugUtil.debuglog) { | ||
|  |   debug = debugUtil.debuglog('stream'); | ||
|  | } else { | ||
|  |   debug = function debug() {}; | ||
|  | } | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | var BufferList = _dereq_(50); | ||
|  | 
 | ||
|  | var destroyImpl = _dereq_(51); | ||
|  | 
 | ||
|  | var _require = _dereq_(55), | ||
|  |     getHighWaterMark = _require.getHighWaterMark; | ||
|  | 
 | ||
|  | var _require$codes = _dereq_(43).codes, | ||
|  |     ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, | ||
|  |     ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, | ||
|  |     ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, | ||
|  |     ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | var StringDecoder; | ||
|  | var createReadableStreamAsyncIterator; | ||
|  | var from; | ||
|  | 
 | ||
|  | _dereq_(37)(Readable, Stream); | ||
|  | 
 | ||
|  | var errorOrDestroy = destroyImpl.errorOrDestroy; | ||
|  | var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; | ||
|  | 
 | ||
|  | function prependListener(emitter, event, fn) { | ||
|  |   // Sadly this is not cacheable as some libraries bundle their own
 | ||
|  |   // event emitter implementation with them.
 | ||
|  |   if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any
 | ||
|  |   // userland ones.  NEVER DO THIS. This is here only because this code needs
 | ||
|  |   // to continue to work with older versions of Node.js that do not include
 | ||
|  |   // the prependListener() method. The goal is to eventually remove this hack.
 | ||
|  | 
 | ||
|  |   if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; | ||
|  | } | ||
|  | 
 | ||
|  | function ReadableState(options, stream, isDuplex) { | ||
|  |   Duplex = Duplex || _dereq_(44); | ||
|  |   options = options || {}; // Duplex streams are both readable and writable, but share
 | ||
|  |   // the same options object.
 | ||
|  |   // However, some cases require setting options to different
 | ||
|  |   // values for the readable and the writable sides of the duplex stream.
 | ||
|  |   // These options can be provided separately as readableXXX and writableXXX.
 | ||
|  | 
 | ||
|  |   if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to
 | ||
|  |   // make all the buffer merging and length checks go away
 | ||
|  | 
 | ||
|  |   this.objectMode = !!options.objectMode; | ||
|  |   if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer
 | ||
|  |   // Note: 0 is a valid value, means "don't call _read preemptively ever"
 | ||
|  | 
 | ||
|  |   this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the
 | ||
|  |   // linked list can remove elements from the beginning faster than
 | ||
|  |   // array.shift()
 | ||
|  | 
 | ||
|  |   this.buffer = new BufferList(); | ||
|  |   this.length = 0; | ||
|  |   this.pipes = null; | ||
|  |   this.pipesCount = 0; | ||
|  |   this.flowing = null; | ||
|  |   this.ended = false; | ||
|  |   this.endEmitted = false; | ||
|  |   this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted
 | ||
|  |   // immediately, or on a later tick.  We set this to true at first, because
 | ||
|  |   // any actions that shouldn't happen until "later" should generally also
 | ||
|  |   // not happen before the first read call.
 | ||
|  | 
 | ||
|  |   this.sync = true; // whenever we return null, then we set a flag to say
 | ||
|  |   // that we're awaiting a 'readable' event emission.
 | ||
|  | 
 | ||
|  |   this.needReadable = false; | ||
|  |   this.emittedReadable = false; | ||
|  |   this.readableListening = false; | ||
|  |   this.resumeScheduled = false; | ||
|  |   this.paused = true; // Should close be emitted on destroy. Defaults to true.
 | ||
|  | 
 | ||
|  |   this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish')
 | ||
|  | 
 | ||
|  |   this.autoDestroy = !!options.autoDestroy; // has it been destroyed
 | ||
|  | 
 | ||
|  |   this.destroyed = false; // Crypto is kind of old and crusty.  Historically, its default string
 | ||
|  |   // encoding is 'binary' so we have to make this configurable.
 | ||
|  |   // Everything else in the universe uses 'utf8', though.
 | ||
|  | 
 | ||
|  |   this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s
 | ||
|  | 
 | ||
|  |   this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled
 | ||
|  | 
 | ||
|  |   this.readingMore = false; | ||
|  |   this.decoder = null; | ||
|  |   this.encoding = null; | ||
|  | 
 | ||
|  |   if (options.encoding) { | ||
|  |     if (!StringDecoder) StringDecoder = _dereq_(102).StringDecoder; | ||
|  |     this.decoder = new StringDecoder(options.encoding); | ||
|  |     this.encoding = options.encoding; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function Readable(options) { | ||
|  |   Duplex = Duplex || _dereq_(44); | ||
|  |   if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside
 | ||
|  |   // the ReadableState constructor, at least with V8 6.5
 | ||
|  | 
 | ||
|  |   var isDuplex = this instanceof Duplex; | ||
|  |   this._readableState = new ReadableState(options, this, isDuplex); // legacy
 | ||
|  | 
 | ||
|  |   this.readable = true; | ||
|  | 
 | ||
|  |   if (options) { | ||
|  |     if (typeof options.read === 'function') this._read = options.read; | ||
|  |     if (typeof options.destroy === 'function') this._destroy = options.destroy; | ||
|  |   } | ||
|  | 
 | ||
|  |   Stream.call(this); | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Readable.prototype, 'destroyed', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     if (this._readableState === undefined) { | ||
|  |       return false; | ||
|  |     } | ||
|  | 
 | ||
|  |     return this._readableState.destroyed; | ||
|  |   }, | ||
|  |   set: function set(value) { | ||
|  |     // we ignore the value if the stream
 | ||
|  |     // has not been initialized yet
 | ||
|  |     if (!this._readableState) { | ||
|  |       return; | ||
|  |     } // backward compatibility, the user is explicitly
 | ||
|  |     // managing destroyed
 | ||
|  | 
 | ||
|  | 
 | ||
|  |     this._readableState.destroyed = value; | ||
|  |   } | ||
|  | }); | ||
|  | Readable.prototype.destroy = destroyImpl.destroy; | ||
|  | Readable.prototype._undestroy = destroyImpl.undestroy; | ||
|  | 
 | ||
|  | Readable.prototype._destroy = function (err, cb) { | ||
|  |   cb(err); | ||
|  | }; // Manually shove something into the read() buffer.
 | ||
|  | // This returns true if the highWaterMark has not been hit yet,
 | ||
|  | // similar to how Writable.write() returns true if you should
 | ||
|  | // write() some more.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype.push = function (chunk, encoding) { | ||
|  |   var state = this._readableState; | ||
|  |   var skipChunkCheck; | ||
|  | 
 | ||
|  |   if (!state.objectMode) { | ||
|  |     if (typeof chunk === 'string') { | ||
|  |       encoding = encoding || state.defaultEncoding; | ||
|  | 
 | ||
|  |       if (encoding !== state.encoding) { | ||
|  |         chunk = Buffer.from(chunk, encoding); | ||
|  |         encoding = ''; | ||
|  |       } | ||
|  | 
 | ||
|  |       skipChunkCheck = true; | ||
|  |     } | ||
|  |   } else { | ||
|  |     skipChunkCheck = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); | ||
|  | }; // Unshift should *always* be something directly out of read()
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype.unshift = function (chunk) { | ||
|  |   return readableAddChunk(this, chunk, null, true, false); | ||
|  | }; | ||
|  | 
 | ||
|  | function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { | ||
|  |   debug('readableAddChunk', chunk); | ||
|  |   var state = stream._readableState; | ||
|  | 
 | ||
|  |   if (chunk === null) { | ||
|  |     state.reading = false; | ||
|  |     onEofChunk(stream, state); | ||
|  |   } else { | ||
|  |     var er; | ||
|  |     if (!skipChunkCheck) er = chunkInvalid(state, chunk); | ||
|  | 
 | ||
|  |     if (er) { | ||
|  |       errorOrDestroy(stream, er); | ||
|  |     } else if (state.objectMode || chunk && chunk.length > 0) { | ||
|  |       if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { | ||
|  |         chunk = _uint8ArrayToBuffer(chunk); | ||
|  |       } | ||
|  | 
 | ||
|  |       if (addToFront) { | ||
|  |         if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); | ||
|  |       } else if (state.ended) { | ||
|  |         errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); | ||
|  |       } else if (state.destroyed) { | ||
|  |         return false; | ||
|  |       } else { | ||
|  |         state.reading = false; | ||
|  | 
 | ||
|  |         if (state.decoder && !encoding) { | ||
|  |           chunk = state.decoder.write(chunk); | ||
|  |           if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); | ||
|  |         } else { | ||
|  |           addChunk(stream, state, chunk, false); | ||
|  |         } | ||
|  |       } | ||
|  |     } else if (!addToFront) { | ||
|  |       state.reading = false; | ||
|  |       maybeReadMore(stream, state); | ||
|  |     } | ||
|  |   } // We can push more data if we are below the highWaterMark.
 | ||
|  |   // Also, if we have no data yet, we can stand some more bytes.
 | ||
|  |   // This is to work around cases where hwm=0, such as the repl.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   return !state.ended && (state.length < state.highWaterMark || state.length === 0); | ||
|  | } | ||
|  | 
 | ||
|  | function addChunk(stream, state, chunk, addToFront) { | ||
|  |   if (state.flowing && state.length === 0 && !state.sync) { | ||
|  |     state.awaitDrain = 0; | ||
|  |     stream.emit('data', chunk); | ||
|  |   } else { | ||
|  |     // update the buffer info.
 | ||
|  |     state.length += state.objectMode ? 1 : chunk.length; | ||
|  |     if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); | ||
|  |     if (state.needReadable) emitReadable(stream); | ||
|  |   } | ||
|  | 
 | ||
|  |   maybeReadMore(stream, state); | ||
|  | } | ||
|  | 
 | ||
|  | function chunkInvalid(state, chunk) { | ||
|  |   var er; | ||
|  | 
 | ||
|  |   if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { | ||
|  |     er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); | ||
|  |   } | ||
|  | 
 | ||
|  |   return er; | ||
|  | } | ||
|  | 
 | ||
|  | Readable.prototype.isPaused = function () { | ||
|  |   return this._readableState.flowing === false; | ||
|  | }; // backwards compatibility.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype.setEncoding = function (enc) { | ||
|  |   if (!StringDecoder) StringDecoder = _dereq_(102).StringDecoder; | ||
|  |   var decoder = new StringDecoder(enc); | ||
|  |   this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8
 | ||
|  | 
 | ||
|  |   this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers:
 | ||
|  | 
 | ||
|  |   var p = this._readableState.buffer.head; | ||
|  |   var content = ''; | ||
|  | 
 | ||
|  |   while (p !== null) { | ||
|  |     content += decoder.write(p.data); | ||
|  |     p = p.next; | ||
|  |   } | ||
|  | 
 | ||
|  |   this._readableState.buffer.clear(); | ||
|  | 
 | ||
|  |   if (content !== '') this._readableState.buffer.push(content); | ||
|  |   this._readableState.length = content.length; | ||
|  |   return this; | ||
|  | }; // Don't raise the hwm > 1GB
 | ||
|  | 
 | ||
|  | 
 | ||
|  | var MAX_HWM = 0x40000000; | ||
|  | 
 | ||
|  | function computeNewHighWaterMark(n) { | ||
|  |   if (n >= MAX_HWM) { | ||
|  |     // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.
 | ||
|  |     n = MAX_HWM; | ||
|  |   } else { | ||
|  |     // Get the next highest power of 2 to prevent increasing hwm excessively in
 | ||
|  |     // tiny amounts
 | ||
|  |     n--; | ||
|  |     n |= n >>> 1; | ||
|  |     n |= n >>> 2; | ||
|  |     n |= n >>> 4; | ||
|  |     n |= n >>> 8; | ||
|  |     n |= n >>> 16; | ||
|  |     n++; | ||
|  |   } | ||
|  | 
 | ||
|  |   return n; | ||
|  | } // This function is designed to be inlinable, so please take care when making
 | ||
|  | // changes to the function body.
 | ||
|  | 
 | ||
|  | 
 | ||
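// Illustrative sketch (never invoked): the shift/or cascade above fills in
// every bit below the highest set bit, so decrement-fill-increment rounds n
// up to the next power of two (and MAX_HWM clamps the result at 1GB).
function exampleHighWaterMarkRounding() {
  return [17, 64, 1000].map(computeNewHighWaterMark)   // -> [32, 64, 1024]
}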
|  | function howMuchToRead(n, state) { | ||
|  |   if (n <= 0 || state.length === 0 && state.ended) return 0; | ||
|  |   if (state.objectMode) return 1; | ||
|  | 
 | ||
|  |   if (n !== n) { | ||
|  |     // Only flow one buffer at a time
 | ||
|  |     if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; | ||
|  |   } // If we're asking for more than the current hwm, then raise the hwm.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); | ||
|  |   if (n <= state.length) return n; // Don't have enough
 | ||
|  | 
 | ||
|  |   if (!state.ended) { | ||
|  |     state.needReadable = true; | ||
|  |     return 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   return state.length; | ||
|  | } // you can override either this method, or the async _read(n) below.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype.read = function (n) { | ||
|  |   debug('read', n); | ||
|  |   n = parseInt(n, 10); | ||
|  |   var state = this._readableState; | ||
|  |   var nOrig = n; | ||
|  |   if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we
 | ||
|  |   // already have a bunch of data in the buffer, then just trigger
 | ||
|  |   // the 'readable' event and move on.
 | ||
|  | 
 | ||
|  |   if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { | ||
|  |     debug('read: emitReadable', state.length, state.ended); | ||
|  |     if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); | ||
|  |     return null; | ||
|  |   } | ||
|  | 
 | ||
|  |   n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.
 | ||
|  | 
 | ||
|  |   if (n === 0 && state.ended) { | ||
|  |     if (state.length === 0) endReadable(this); | ||
|  |     return null; | ||
|  |   } // All the actual chunk generation logic needs to be
 | ||
|  |   // *below* the call to _read.  The reason is that in certain
 | ||
|  |   // synthetic stream cases, such as passthrough streams, _read
 | ||
|  |   // may be a completely synchronous operation which may change
 | ||
|  |   // the state of the read buffer, providing enough data when
 | ||
|  |   // before there was *not* enough.
 | ||
|  |   //
 | ||
|  |   // So, the steps are:
 | ||
|  |   // 1. Figure out what the state of things will be after we do
 | ||
|  |   // a read from the buffer.
 | ||
|  |   //
 | ||
|  |   // 2. If that resulting state will trigger a _read, then call _read.
 | ||
|  |   // Note that this may be asynchronous, or synchronous.  Yes, it is
 | ||
|  |   // deeply ugly to write APIs this way, but that still doesn't mean
 | ||
|  |   // that the Readable class should behave improperly, as streams are
 | ||
|  |   // designed to be sync/async agnostic.
 | ||
|  |   // Take note if the _read call is sync or async (i.e., if the read call
 | ||
|  |   // has returned yet), so that we know whether or not it's safe to emit
 | ||
|  |   // 'readable' etc.
 | ||
|  |   //
 | ||
|  |   // 3. Actually pull the requested chunks out of the buffer and return.
 | ||
|  |   // if we need a readable event, then we need to do some reading.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   var doRead = state.needReadable; | ||
|  |   debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some
 | ||
|  | 
 | ||
|  |   if (state.length === 0 || state.length - n < state.highWaterMark) { | ||
|  |     doRead = true; | ||
|  |     debug('length less than watermark', doRead); | ||
|  |   } // however, if we've ended, then there's no point, and if we're already
 | ||
|  |   // reading, then it's unnecessary.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   if (state.ended || state.reading) { | ||
|  |     doRead = false; | ||
|  |     debug('reading or ended', doRead); | ||
|  |   } else if (doRead) { | ||
|  |     debug('do read'); | ||
|  |     state.reading = true; | ||
|  |     state.sync = true; // if the length is currently zero, then we *need* a readable event.
 | ||
|  | 
 | ||
|  |     if (state.length === 0) state.needReadable = true; // call internal read method
 | ||
|  | 
 | ||
|  |     this._read(state.highWaterMark); | ||
|  | 
 | ||
|  |     state.sync = false; // If _read pushed data synchronously, then `reading` will be false,
 | ||
|  |     // and we need to re-evaluate how much data we can return to the user.
 | ||
|  | 
 | ||
|  |     if (!state.reading) n = howMuchToRead(nOrig, state); | ||
|  |   } | ||
|  | 
 | ||
|  |   var ret; | ||
|  |   if (n > 0) ret = fromList(n, state);else ret = null; | ||
|  | 
 | ||
|  |   if (ret === null) { | ||
|  |     state.needReadable = state.length <= state.highWaterMark; | ||
|  |     n = 0; | ||
|  |   } else { | ||
|  |     state.length -= n; | ||
|  |     state.awaitDrain = 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (state.length === 0) { | ||
|  |     // If we have nothing in the buffer, then we want to know
 | ||
|  |     // as soon as we *do* get something into the buffer.
 | ||
|  |     if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.
 | ||
|  | 
 | ||
|  |     if (nOrig !== n && state.ended) endReadable(this); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (ret !== null) this.emit('data', ret); | ||
|  |   return ret; | ||
|  | }; | ||
|  | 
 | ||
|  | function onEofChunk(stream, state) { | ||
|  |   debug('onEofChunk'); | ||
|  |   if (state.ended) return; | ||
|  | 
 | ||
|  |   if (state.decoder) { | ||
|  |     var chunk = state.decoder.end(); | ||
|  | 
 | ||
|  |     if (chunk && chunk.length) { | ||
|  |       state.buffer.push(chunk); | ||
|  |       state.length += state.objectMode ? 1 : chunk.length; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   state.ended = true; | ||
|  | 
 | ||
|  |   if (state.sync) { | ||
|  |     // if we are sync, wait until next tick to emit the data.
 | ||
|  |     // Otherwise we risk emitting data in the flow()
 | ||
|  |     // the readable code triggers during a read() call
 | ||
|  |     emitReadable(stream); | ||
|  |   } else { | ||
|  |     // emit 'readable' now to make sure it gets picked up.
 | ||
|  |     state.needReadable = false; | ||
|  | 
 | ||
|  |     if (!state.emittedReadable) { | ||
|  |       state.emittedReadable = true; | ||
|  |       emitReadable_(stream); | ||
|  |     } | ||
|  |   } | ||
|  | } // Don't emit readable right away in sync mode, because this can trigger
 | ||
|  | // another read() call => stack overflow.  This way, it might trigger
 | ||
|  | // a nextTick recursion warning, but that's not so bad.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function emitReadable(stream) { | ||
|  |   var state = stream._readableState; | ||
|  |   debug('emitReadable', state.needReadable, state.emittedReadable); | ||
|  |   state.needReadable = false; | ||
|  | 
 | ||
|  |   if (!state.emittedReadable) { | ||
|  |     debug('emitReadable', state.flowing); | ||
|  |     state.emittedReadable = true; | ||
|  |     process.nextTick(emitReadable_, stream); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function emitReadable_(stream) { | ||
|  |   var state = stream._readableState; | ||
|  |   debug('emitReadable_', state.destroyed, state.length, state.ended); | ||
|  | 
 | ||
|  |   if (!state.destroyed && (state.length || state.ended)) { | ||
|  |     stream.emit('readable'); | ||
|  |     state.emittedReadable = false; | ||
|  |   } // The stream needs another readable event if
 | ||
|  |   // 1. It is not flowing, as the flow mechanism will take
 | ||
|  |   //    care of it.
 | ||
|  |   // 2. It is not ended.
 | ||
|  |   // 3. It is below the highWaterMark, so we can schedule
 | ||
|  |   //    another readable later.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; | ||
|  |   flow(stream); | ||
|  | } // at this point, the user has presumably seen the 'readable' event,
 | ||
|  | // and called read() to consume some data.  that may have triggered
 | ||
|  | // in turn another _read(n) call, in which case reading = true if
 | ||
|  | // it's in progress.
 | ||
|  | // However, if we're not ended, or reading, and the length < hwm,
 | ||
|  | // then go ahead and try to read some more preemptively.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function maybeReadMore(stream, state) { | ||
|  |   if (!state.readingMore) { | ||
|  |     state.readingMore = true; | ||
|  |     process.nextTick(maybeReadMore_, stream, state); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function maybeReadMore_(stream, state) { | ||
|  |   // Attempt to read more data if we should.
 | ||
|  |   //
 | ||
|  |   // The conditions for reading more data are (one of):
 | ||
|  |   // - Not enough data buffered (state.length < state.highWaterMark). The loop
 | ||
|  |   //   is responsible for filling the buffer with enough data if such data
 | ||
|  |   //   is available. If highWaterMark is 0 and we are not in the flowing mode
 | ||
|  |   //   we should _not_ attempt to buffer any extra data. We'll get more data
 | ||
|  |   //   when the stream consumer calls read() instead.
 | ||
|  |   // - No data in the buffer, and the stream is in flowing mode. In this mode
 | ||
|  |   //   the loop below is responsible for ensuring read() is called. Failing to
 | ||
|  |   //   call read here would abort the flow and there's no other mechanism for
 | ||
|  |   //   continuing the flow if the stream consumer has just subscribed to the
 | ||
|  |   //   'data' event.
 | ||
|  |   //
 | ||
|  |   // In addition to the above conditions to keep reading data, the following
 | ||
|  |   // conditions prevent the data from being read:
 | ||
|  |   // - The stream has ended (state.ended).
 | ||
|  |   // - There is already a pending 'read' operation (state.reading). This is a
 | ||
|  |   //   case where the stream has called the implementation-defined _read()
 | ||
|  |   //   method, but they are processing the call asynchronously and have _not_
 | ||
|  |   //   called push() with new data. In this case we skip performing more
 | ||
|  |   //   read()s. The execution ends in this method again after the _read() ends
 | ||
|  |   //   up calling push() with more data.
 | ||
|  |   while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { | ||
|  |     var len = state.length; | ||
|  |     debug('maybeReadMore read 0'); | ||
|  |     stream.read(0); | ||
|  |     if (len === state.length) // didn't get any data, stop spinning.
 | ||
|  |       break; | ||
|  |   } | ||
|  | 
 | ||
|  |   state.readingMore = false; | ||
|  | } // abstract method.  to be overridden in specific implementation classes.
 | ||
|  | // call cb(er, data) where data is <= n in length.
 | ||
|  | // for virtual (non-string, non-buffer) streams, "length" is somewhat
 | ||
|  | // arbitrary, and perhaps not very meaningful.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype._read = function (n) { | ||
|  |   errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); | ||
|  | }; | ||
|  | 
 | ||
|  | Readable.prototype.pipe = function (dest, pipeOpts) { | ||
|  |   var src = this; | ||
|  |   var state = this._readableState; | ||
|  | 
 | ||
|  |   switch (state.pipesCount) { | ||
|  |     case 0: | ||
|  |       state.pipes = dest; | ||
|  |       break; | ||
|  | 
 | ||
|  |     case 1: | ||
|  |       state.pipes = [state.pipes, dest]; | ||
|  |       break; | ||
|  | 
 | ||
|  |     default: | ||
|  |       state.pipes.push(dest); | ||
|  |       break; | ||
|  |   } | ||
|  | 
 | ||
|  |   state.pipesCount += 1; | ||
|  |   debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); | ||
|  |   var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; | ||
|  |   var endFn = doEnd ? onend : unpipe; | ||
|  |   if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); | ||
|  |   dest.on('unpipe', onunpipe); | ||
|  | 
 | ||
|  |   function onunpipe(readable, unpipeInfo) { | ||
|  |     debug('onunpipe'); | ||
|  | 
 | ||
|  |     if (readable === src) { | ||
|  |       if (unpipeInfo && unpipeInfo.hasUnpiped === false) { | ||
|  |         unpipeInfo.hasUnpiped = true; | ||
|  |         cleanup(); | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   function onend() { | ||
|  |     debug('onend'); | ||
|  |     dest.end(); | ||
|  |   } // when the dest drains, it reduces the awaitDrain counter
 | ||
|  |   // on the source.  This would be more elegant with a .once()
 | ||
|  |   // handler in flow(), but adding and removing repeatedly is
 | ||
|  |   // too slow.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   var ondrain = pipeOnDrain(src); | ||
|  |   dest.on('drain', ondrain); | ||
|  |   var cleanedUp = false; | ||
|  | 
 | ||
|  |   function cleanup() { | ||
|  |     debug('cleanup'); // cleanup event handlers once the pipe is broken
 | ||
|  | 
 | ||
|  |     dest.removeListener('close', onclose); | ||
|  |     dest.removeListener('finish', onfinish); | ||
|  |     dest.removeListener('drain', ondrain); | ||
|  |     dest.removeListener('error', onerror); | ||
|  |     dest.removeListener('unpipe', onunpipe); | ||
|  |     src.removeListener('end', onend); | ||
|  |     src.removeListener('end', unpipe); | ||
|  |     src.removeListener('data', ondata); | ||
|  |     cleanedUp = true; // if the reader is waiting for a drain event from this
 | ||
|  |     // specific writer, then it would cause it to never start
 | ||
|  |     // flowing again.
 | ||
|  |     // So, if this is awaiting a drain, then we just call it now.
 | ||
|  |     // If we don't know, then assume that we are waiting for one.
 | ||
|  | 
 | ||
|  |     if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); | ||
|  |   } | ||
|  | 
 | ||
|  |   src.on('data', ondata); | ||
|  | 
 | ||
|  |   function ondata(chunk) { | ||
|  |     debug('ondata'); | ||
|  |     var ret = dest.write(chunk); | ||
|  |     debug('dest.write', ret); | ||
|  | 
 | ||
|  |     if (ret === false) { | ||
|  |       // If the user unpiped during `dest.write()`, it is possible
 | ||
|  |       // to get stuck in a permanently paused state if that write
 | ||
|  |       // also returned false.
 | ||
|  |       // => Check whether `dest` is still a piping destination.
 | ||
|  |       if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { | ||
|  |         debug('false write response, pause', state.awaitDrain); | ||
|  |         state.awaitDrain++; | ||
|  |       } | ||
|  | 
 | ||
|  |       src.pause(); | ||
|  |     } | ||
|  |   } // if the dest has an error, then stop piping into it.
 | ||
|  |   // however, don't suppress the throwing behavior for this.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   function onerror(er) { | ||
|  |     debug('onerror', er); | ||
|  |     unpipe(); | ||
|  |     dest.removeListener('error', onerror); | ||
|  |     if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); | ||
|  |   } // Make sure our error handler is attached before userland ones.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.
 | ||
|  | 
 | ||
|  |   function onclose() { | ||
|  |     dest.removeListener('finish', onfinish); | ||
|  |     unpipe(); | ||
|  |   } | ||
|  | 
 | ||
|  |   dest.once('close', onclose); | ||
|  | 
 | ||
|  |   function onfinish() { | ||
|  |     debug('onfinish'); | ||
|  |     dest.removeListener('close', onclose); | ||
|  |     unpipe(); | ||
|  |   } | ||
|  | 
 | ||
|  |   dest.once('finish', onfinish); | ||
|  | 
 | ||
|  |   function unpipe() { | ||
|  |     debug('unpipe'); | ||
|  |     src.unpipe(dest); | ||
|  |   } // tell the dest that it's being piped to
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   dest.emit('pipe', src); // start the flow if it hasn't been started already.
 | ||
|  | 
 | ||
|  |   if (!state.flowing) { | ||
|  |     debug('pipe resume'); | ||
|  |     src.resume(); | ||
|  |   } | ||
|  | 
 | ||
|  |   return dest; | ||
|  | }; | ||
|  | 
 | ||
|  | function pipeOnDrain(src) { | ||
|  |   return function pipeOnDrainFunctionResult() { | ||
|  |     var state = src._readableState; | ||
|  |     debug('pipeOnDrain', state.awaitDrain); | ||
|  |     if (state.awaitDrain) state.awaitDrain--; | ||
|  | 
 | ||
|  |     if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { | ||
|  |       state.flowing = true; | ||
|  |       flow(src); | ||
|  |     } | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | Readable.prototype.unpipe = function (dest) { | ||
|  |   var state = this._readableState; | ||
|  |   var unpipeInfo = { | ||
|  |     hasUnpiped: false | ||
|  |   }; // if we're not piping anywhere, then do nothing.
 | ||
|  | 
 | ||
|  |   if (state.pipesCount === 0) return this; // just one destination.  most common case.
 | ||
|  | 
 | ||
|  |   if (state.pipesCount === 1) { | ||
|  |     // passed in one, but it's not the right one.
 | ||
|  |     if (dest && dest !== state.pipes) return this; | ||
|  |     if (!dest) dest = state.pipes; // got a match.
 | ||
|  | 
 | ||
|  |     state.pipes = null; | ||
|  |     state.pipesCount = 0; | ||
|  |     state.flowing = false; | ||
|  |     if (dest) dest.emit('unpipe', this, unpipeInfo); | ||
|  |     return this; | ||
|  |   } // slow case. multiple pipe destinations.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   if (!dest) { | ||
|  |     // remove all.
 | ||
|  |     var dests = state.pipes; | ||
|  |     var len = state.pipesCount; | ||
|  |     state.pipes = null; | ||
|  |     state.pipesCount = 0; | ||
|  |     state.flowing = false; | ||
|  | 
 | ||
|  |     for (var i = 0; i < len; i++) { | ||
|  |       dests[i].emit('unpipe', this, { | ||
|  |         hasUnpiped: false | ||
|  |       }); | ||
|  |     } | ||
|  | 
 | ||
|  |     return this; | ||
|  |   } // try to find the right one.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   var index = indexOf(state.pipes, dest); | ||
|  |   if (index === -1) return this; | ||
|  |   state.pipes.splice(index, 1); | ||
|  |   state.pipesCount -= 1; | ||
|  |   if (state.pipesCount === 1) state.pipes = state.pipes[0]; | ||
|  |   dest.emit('unpipe', this, unpipeInfo); | ||
|  |   return this; | ||
|  | }; // set up data events if they are asked for
 | ||
|  | // Ensure readable listeners eventually get something
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype.on = function (ev, fn) { | ||
|  |   var res = Stream.prototype.on.call(this, ev, fn); | ||
|  |   var state = this._readableState; | ||
|  | 
 | ||
|  |   if (ev === 'data') { | ||
|  |     // update readableListening so that resume() may be a no-op
 | ||
|  |     // a few lines down. This is needed to support once('readable').
 | ||
|  |     state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused
 | ||
|  | 
 | ||
|  |     if (state.flowing !== false) this.resume(); | ||
|  |   } else if (ev === 'readable') { | ||
|  |     if (!state.endEmitted && !state.readableListening) { | ||
|  |       state.readableListening = state.needReadable = true; | ||
|  |       state.flowing = false; | ||
|  |       state.emittedReadable = false; | ||
|  |       debug('on readable', state.length, state.reading); | ||
|  | 
 | ||
|  |       if (state.length) { | ||
|  |         emitReadable(this); | ||
|  |       } else if (!state.reading) { | ||
|  |         process.nextTick(nReadingNextTick, this); | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return res; | ||
|  | }; | ||
|  | 
 | ||
|  | Readable.prototype.addListener = Readable.prototype.on; | ||
|  | 
 | ||
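// Minimal sketch of the two listener modes handled by on() above (assuming a
// Node-style environment exposing an equivalent Readable via require('stream')):
// a 'data' listener resumes the stream unless it was explicitly paused, while a
// 'readable' listener switches it to paused mode and the consumer pulls with read().
/*
const { Readable } = require('stream');

// Flowing mode: attaching 'data' implicitly calls resume().
Readable.from(['x', 'y']).on('data', (chunk) => console.log('data:', chunk));

// Paused mode: attaching 'readable' sets flowing = false; pull with read().
const paused = Readable.from(['x', 'y']);
paused.on('readable', () => {
  let chunk;
  while ((chunk = paused.read()) !== null) console.log('read:', chunk);
});
*/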
|  | Readable.prototype.removeListener = function (ev, fn) { | ||
|  |   var res = Stream.prototype.removeListener.call(this, ev, fn); | ||
|  | 
 | ||
|  |   if (ev === 'readable') { | ||
|  |     // We need to check if there is someone still listening to
 | ||
|  |     // readable and reset the state. However this needs to happen
 | ||
|  |     // after readable has been emitted but before I/O (nextTick) to
 | ||
|  |     // support once('readable', fn) cycles. This means that calling
 | ||
|  |     // resume within the same tick will have no
 | ||
|  |     // effect.
 | ||
|  |     process.nextTick(updateReadableListening, this); | ||
|  |   } | ||
|  | 
 | ||
|  |   return res; | ||
|  | }; | ||
|  | 
 | ||
|  | Readable.prototype.removeAllListeners = function (ev) { | ||
|  |   var res = Stream.prototype.removeAllListeners.apply(this, arguments); | ||
|  | 
 | ||
|  |   if (ev === 'readable' || ev === undefined) { | ||
|  |     // We need to check if there is someone still listening to
 | ||
|  |     // readable and reset the state. However this needs to happen
 | ||
|  |     // after readable has been emitted but before I/O (nextTick) to
 | ||
|  |     // support once('readable', fn) cycles. This means that calling
 | ||
|  |     // resume within the same tick will have no
 | ||
|  |     // effect.
 | ||
|  |     process.nextTick(updateReadableListening, this); | ||
|  |   } | ||
|  | 
 | ||
|  |   return res; | ||
|  | }; | ||
|  | 
 | ||
|  | function updateReadableListening(self) { | ||
|  |   var state = self._readableState; | ||
|  |   state.readableListening = self.listenerCount('readable') > 0; | ||
|  | 
 | ||
|  |   if (state.resumeScheduled && !state.paused) { | ||
|  |     // flowing needs to be set to true now, otherwise
 | ||
|  |     // the upcoming resume will not flow.
 | ||
|  |     state.flowing = true; // crude way to check if we should resume
 | ||
|  |   } else if (self.listenerCount('data') > 0) { | ||
|  |     self.resume(); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function nReadingNextTick(self) { | ||
|  |   debug('readable nexttick read 0'); | ||
|  |   self.read(0); | ||
|  | } // pause() and resume() are remnants of the legacy readable stream API
 | ||
|  | // If the user uses them, then switch into old mode.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype.resume = function () { | ||
|  |   var state = this._readableState; | ||
|  | 
 | ||
|  |   if (!state.flowing) { | ||
|  |     debug('resume'); // we flow only if there is no one listening
 | ||
|  |     // for readable, but we still have to call
 | ||
|  |     // resume()
 | ||
|  | 
 | ||
|  |     state.flowing = !state.readableListening; | ||
|  |     resume(this, state); | ||
|  |   } | ||
|  | 
 | ||
|  |   state.paused = false; | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | function resume(stream, state) { | ||
|  |   if (!state.resumeScheduled) { | ||
|  |     state.resumeScheduled = true; | ||
|  |     process.nextTick(resume_, stream, state); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function resume_(stream, state) { | ||
|  |   debug('resume', state.reading); | ||
|  | 
 | ||
|  |   if (!state.reading) { | ||
|  |     stream.read(0); | ||
|  |   } | ||
|  | 
 | ||
|  |   state.resumeScheduled = false; | ||
|  |   stream.emit('resume'); | ||
|  |   flow(stream); | ||
|  |   if (state.flowing && !state.reading) stream.read(0); | ||
|  | } | ||
|  | 
 | ||
|  | Readable.prototype.pause = function () { | ||
|  |   debug('call pause flowing=%j', this._readableState.flowing); | ||
|  | 
 | ||
|  |   if (this._readableState.flowing !== false) { | ||
|  |     debug('pause'); | ||
|  |     this._readableState.flowing = false; | ||
|  |     this.emit('pause'); | ||
|  |   } | ||
|  | 
 | ||
|  |   this._readableState.paused = true; | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
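// Minimal pause()/resume() sketch (assuming a Node-style environment exposing an
// equivalent Readable via require('stream')): pause() stops the flow of 'data'
// events, and resume() schedules the flow to start again on a later tick.
/*
const { Readable } = require('stream');

const src = Readable.from(['one', 'two', 'three']);
src.on('data', (chunk) => {
  console.log('data:', chunk);
  src.pause();                          // 'pause' is emitted, flowing becomes false
  setTimeout(() => src.resume(), 100);  // 'resume' is emitted, flow restarts
});
*/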
|  | function flow(stream) { | ||
|  |   var state = stream._readableState; | ||
|  |   debug('flow', state.flowing); | ||
|  | 
 | ||
|  |   while (state.flowing && stream.read() !== null) { | ||
|  |     ; | ||
|  |   } | ||
|  | } // wrap an old-style stream as the async data source.
 | ||
|  | // This is *not* part of the readable stream interface.
 | ||
|  | // It is an ugly unfortunate mess of history.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype.wrap = function (stream) { | ||
|  |   var _this = this; | ||
|  | 
 | ||
|  |   var state = this._readableState; | ||
|  |   var paused = false; | ||
|  |   stream.on('end', function () { | ||
|  |     debug('wrapped end'); | ||
|  | 
 | ||
|  |     if (state.decoder && !state.ended) { | ||
|  |       var chunk = state.decoder.end(); | ||
|  |       if (chunk && chunk.length) _this.push(chunk); | ||
|  |     } | ||
|  | 
 | ||
|  |     _this.push(null); | ||
|  |   }); | ||
|  |   stream.on('data', function (chunk) { | ||
|  |     debug('wrapped data'); | ||
|  |     if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode
 | ||
|  | 
 | ||
|  |     if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; | ||
|  | 
 | ||
|  |     var ret = _this.push(chunk); | ||
|  | 
 | ||
|  |     if (!ret) { | ||
|  |       paused = true; | ||
|  |       stream.pause(); | ||
|  |     } | ||
|  |   }); // proxy all the other methods.
 | ||
|  |   // important when wrapping filters and duplexes.
 | ||
|  | 
 | ||
|  |   for (var i in stream) { | ||
|  |     if (this[i] === undefined && typeof stream[i] === 'function') { | ||
|  |       this[i] = function methodWrap(method) { | ||
|  |         return function methodWrapReturnFunction() { | ||
|  |           return stream[method].apply(stream, arguments); | ||
|  |         }; | ||
|  |       }(i); | ||
|  |     } | ||
|  |   } // proxy certain important events.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   for (var n = 0; n < kProxyEvents.length; n++) { | ||
|  |     stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); | ||
|  |   } // when we try to consume some more bytes, simply unpause the
 | ||
|  |   // underlying stream.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   this._read = function (n) { | ||
|  |     debug('wrapped _read', n); | ||
|  | 
 | ||
|  |     if (paused) { | ||
|  |       paused = false; | ||
|  |       stream.resume(); | ||
|  |     } | ||
|  |   }; | ||
|  | 
 | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
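// Minimal wrap() sketch (assuming a Node-style environment; `oldStream` is a
// hypothetical pre-streams2 source that only speaks the 'data'/'end' protocol):
// wrap(), defined above, adapts such a source into this Readable interface,
// proxying its methods and pausing/resuming it to honour back-pressure.
/*
const { Readable } = require('stream');

const wrapped = new Readable().wrap(oldStream); // oldStream is hypothetical
wrapped.on('data', (chunk) => console.log('chunk:', chunk));
wrapped.on('end', () => console.log('done'));
*/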
|  | if (typeof Symbol === 'function') { | ||
|  |   Readable.prototype[Symbol.asyncIterator] = function () { | ||
|  |     if (createReadableStreamAsyncIterator === undefined) { | ||
|  |       createReadableStreamAsyncIterator = _dereq_(49); | ||
|  |     } | ||
|  | 
 | ||
|  |     return createReadableStreamAsyncIterator(this); | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
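// Minimal async-iteration sketch (assuming a Node-style environment with native
// async iterators): the Symbol.asyncIterator method defined above lets a Readable
// be consumed with for await...of, resolving one chunk per iteration.
/*
const { Readable } = require('stream');

(async () => {
  for await (const chunk of Readable.from(['a', 'b', 'c'])) {
    console.log(chunk); // 'a', then 'b', then 'c'
  }
})();
*/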
|  | Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._readableState.highWaterMark; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(Readable.prototype, 'readableBuffer', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._readableState && this._readableState.buffer; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(Readable.prototype, 'readableFlowing', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._readableState.flowing; | ||
|  |   }, | ||
|  |   set: function set(state) { | ||
|  |     if (this._readableState) { | ||
|  |       this._readableState.flowing = state; | ||
|  |     } | ||
|  |   } | ||
|  | }); // exposed for testing purposes only.
 | ||
|  | 
 | ||
|  | Readable._fromList = fromList; | ||
|  | Object.defineProperty(Readable.prototype, 'readableLength', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._readableState.length; | ||
|  |   } | ||
|  | }); // Pluck off n bytes from an array of buffers.
 | ||
|  | // Length is the combined lengths of all the buffers in the list.
 | ||
|  | // This function is designed to be inlinable, so please take care when making
 | ||
|  | // changes to the function body.
 | ||
|  | 
 | ||
|  | function fromList(n, state) { | ||
|  |   // nothing buffered
 | ||
|  |   if (state.length === 0) return null; | ||
|  |   var ret; | ||
|  |   if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { | ||
|  |     // read it all, truncate the list
 | ||
|  |     if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); | ||
|  |     state.buffer.clear(); | ||
|  |   } else { | ||
|  |     // read part of list
 | ||
|  |     ret = state.buffer.consume(n, state.decoder); | ||
|  |   } | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | function endReadable(stream) { | ||
|  |   var state = stream._readableState; | ||
|  |   debug('endReadable', state.endEmitted); | ||
|  | 
 | ||
|  |   if (!state.endEmitted) { | ||
|  |     state.ended = true; | ||
|  |     process.nextTick(endReadableNT, state, stream); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function endReadableNT(state, stream) { | ||
|  |   debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.
 | ||
|  | 
 | ||
|  |   if (!state.endEmitted && state.length === 0) { | ||
|  |     state.endEmitted = true; | ||
|  |     stream.readable = false; | ||
|  |     stream.emit('end'); | ||
|  | 
 | ||
|  |     if (state.autoDestroy) { | ||
|  |       // In case of duplex streams we need a way to detect
 | ||
|  |       // if the writable side is ready for autoDestroy as well
 | ||
|  |       var wState = stream._writableState; | ||
|  | 
 | ||
|  |       if (!wState || wState.autoDestroy && wState.finished) { | ||
|  |         stream.destroy(); | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | if (typeof Symbol === 'function') { | ||
|  |   Readable.from = function (iterable, opts) { | ||
|  |     if (from === undefined) { | ||
|  |       from = _dereq_(53); | ||
|  |     } | ||
|  | 
 | ||
|  |     return from(Readable, iterable, opts); | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
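// Minimal Readable.from() sketch (assuming a Node-style environment): the factory
// defined above builds a Readable from any iterable or async iterable, such as a
// generator.
/*
const { Readable } = require('stream');

async function* generate() {
  yield 'hello';
  yield 'world';
}

Readable.from(generate()).on('data', (chunk) => console.log(chunk)); // 'hello', 'world'
*/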
|  | function indexOf(xs, x) { | ||
|  |   for (var i = 0, l = xs.length; i < l; i++) { | ||
|  |     if (xs[i] === x) return i; | ||
|  |   } | ||
|  | 
 | ||
|  |   return -1; | ||
|  | } | ||
|  | }).call(this)}).call(this,_dereq_(73),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{"102":102,"11":11,"13":13,"26":26,"37":37,"43":43,"44":44,"49":49,"50":50,"51":51,"53":53,"55":55,"56":56,"73":73}],47:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | // a transform stream is a readable/writable stream where you do
 | ||
|  | // something with the data.  Sometimes it's called a "filter",
 | ||
|  | // but that's not a great name for it, since that implies a thing where
 | ||
|  | // some bits pass through, and others are simply ignored.  (That would
 | ||
|  | // be a valid example of a transform, of course.)
 | ||
|  | //
 | ||
|  | // While the output is causally related to the input, it's not a
 | ||
|  | // necessarily symmetric or synchronous transformation.  For example,
 | ||
|  | // a zlib stream might take multiple plain-text writes(), and then
 | ||
|  | // emit a single compressed chunk some time in the future.
 | ||
|  | //
 | ||
|  | // Here's how this works:
 | ||
|  | //
 | ||
|  | // The Transform stream has all the aspects of the readable and writable
 | ||
|  | // stream classes.  When you write(chunk), that calls _write(chunk,cb)
 | ||
|  | // internally, and returns false if there's a lot of pending writes
 | ||
|  | // buffered up.  When you call read(), that calls _read(n) until
 | ||
|  | // there's enough pending readable data buffered up.
 | ||
|  | //
 | ||
|  | // In a transform stream, the written data is placed in a buffer.  When
 | ||
|  | // _read(n) is called, it transforms the queued up data, calling the
 | ||
|  | // buffered _write cb's as it consumes chunks.  If consuming a single
 | ||
|  | // written chunk would result in multiple output chunks, then the first
 | ||
|  | // outputted bit calls the readcb, and subsequent chunks just go into
 | ||
|  | // the read buffer, and will cause it to emit 'readable' if necessary.
 | ||
|  | //
 | ||
|  | // This way, back-pressure is actually determined by the reading side,
 | ||
|  | // since _read has to be called to start processing a new chunk.  However,
 | ||
|  | // a pathological inflate type of transform can cause excessive buffering
 | ||
|  | // here.  For example, imagine a stream where every byte of input is
 | ||
|  | // interpreted as an integer from 0-255, and then results in that many
 | ||
|  | // bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
 | ||
|  | // 1kb of data being output.  In this case, you could write a very small
 | ||
|  | // amount of input, and end up with a very large amount of output.  In
 | ||
|  | // such a pathological inflating mechanism, there'd be no way to tell
 | ||
|  | // the system to stop doing the transform.  A single 4MB write could
 | ||
|  | // cause the system to run out of memory.
 | ||
|  | //
 | ||
|  | // However, even in such a pathological case, only a single written chunk
 | ||
|  | // would be consumed, and then the rest would wait (un-transformed) until
 | ||
|  | // the results of the previous transformed chunk were consumed.
 | ||
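// Minimal Transform sketch for the flow described above (assuming a Node-style
// environment exposing an equivalent Transform via require('stream')): data
// written to the writable side is handed to transform(), whatever it pushes (or
// passes to its callback) comes out of the readable side, and flush() runs once
// the writable side ends.
/*
const { Transform } = require('stream');

const upper = new Transform({
  transform(chunk, encoding, cb) {
    cb(null, chunk.toString().toUpperCase()); // push the transformed chunk, signal done
  },
  flush(cb) {
    cb(null, '!\n'); // emitted after end() has been called and all chunks transformed
  }
});

upper.on('data', (chunk) => process.stdout.write(chunk));
upper.write('hello ');
upper.end('world'); // prints "HELLO WORLD!"
*/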
|  | 'use strict'; | ||
|  | 
 | ||
|  | module.exports = Transform; | ||
|  | 
 | ||
|  | var _require$codes = _dereq_(43).codes, | ||
|  |     ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, | ||
|  |     ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, | ||
|  |     ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, | ||
|  |     ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; | ||
|  | 
 | ||
|  | var Duplex = _dereq_(44); | ||
|  | 
 | ||
|  | _dereq_(37)(Transform, Duplex); | ||
|  | 
 | ||
|  | function afterTransform(er, data) { | ||
|  |   var ts = this._transformState; | ||
|  |   ts.transforming = false; | ||
|  |   var cb = ts.writecb; | ||
|  | 
 | ||
|  |   if (cb === null) { | ||
|  |     return this.emit('error', new ERR_MULTIPLE_CALLBACK()); | ||
|  |   } | ||
|  | 
 | ||
|  |   ts.writechunk = null; | ||
|  |   ts.writecb = null; | ||
|  |   if (data != null) // single equals check for both `null` and `undefined`
 | ||
|  |     this.push(data); | ||
|  |   cb(er); | ||
|  |   var rs = this._readableState; | ||
|  |   rs.reading = false; | ||
|  | 
 | ||
|  |   if (rs.needReadable || rs.length < rs.highWaterMark) { | ||
|  |     this._read(rs.highWaterMark); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function Transform(options) { | ||
|  |   if (!(this instanceof Transform)) return new Transform(options); | ||
|  |   Duplex.call(this, options); | ||
|  |   this._transformState = { | ||
|  |     afterTransform: afterTransform.bind(this), | ||
|  |     needTransform: false, | ||
|  |     transforming: false, | ||
|  |     writecb: null, | ||
|  |     writechunk: null, | ||
|  |     writeencoding: null | ||
|  |   }; // start out asking for a readable event once data is transformed.
 | ||
|  | 
 | ||
|  |   this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
 | ||
|  |   // that Readable wants before the first _read call, so unset the
 | ||
|  |   // sync guard flag.
 | ||
|  | 
 | ||
|  |   this._readableState.sync = false; | ||
|  | 
 | ||
|  |   if (options) { | ||
|  |     if (typeof options.transform === 'function') this._transform = options.transform; | ||
|  |     if (typeof options.flush === 'function') this._flush = options.flush; | ||
|  |   } // When the writable side finishes, then flush out anything remaining.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   this.on('prefinish', prefinish); | ||
|  | } | ||
|  | 
 | ||
|  | function prefinish() { | ||
|  |   var _this = this; | ||
|  | 
 | ||
|  |   if (typeof this._flush === 'function' && !this._readableState.destroyed) { | ||
|  |     this._flush(function (er, data) { | ||
|  |       done(_this, er, data); | ||
|  |     }); | ||
|  |   } else { | ||
|  |     done(this, null, null); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | Transform.prototype.push = function (chunk, encoding) { | ||
|  |   this._transformState.needTransform = false; | ||
|  |   return Duplex.prototype.push.call(this, chunk, encoding); | ||
|  | }; // This is the part where you do stuff!
 | ||
|  | // override this function in implementation classes.
 | ||
|  | // 'chunk' is an input chunk.
 | ||
|  | //
 | ||
|  | // Call `push(newChunk)` to pass along transformed output
 | ||
|  | // to the readable side.  You may call 'push' zero or more times.
 | ||
|  | //
 | ||
|  | // Call `cb(err)` when you are done with this chunk.  If you pass
 | ||
|  | // an error, then that'll put the hurt on the whole operation.  If you
 | ||
|  | // never call cb(), then you'll never get another chunk.
 | ||
|  | 
 | ||
|  | 
 | ||
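// Minimal subclass sketch for the contract described above (assuming a Node-style
// environment): _transform() may call push() zero or more times per input chunk,
// and must call cb() exactly once when it is finished with that chunk.
/*
const { Transform } = require('stream');

class SplitWords extends Transform {
  _transform(chunk, encoding, cb) {
    for (const word of chunk.toString().split(/\s+/)) {
      if (word) this.push(word); // zero or more pushes per chunk
    }
    cb(); // exactly one callback per chunk
  }
}

const splitter = new SplitWords();
splitter.on('data', (word) => console.log(word.toString()));
splitter.end('readable writable transform');
*/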
|  | Transform.prototype._transform = function (chunk, encoding, cb) { | ||
|  |   cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); | ||
|  | }; | ||
|  | 
 | ||
|  | Transform.prototype._write = function (chunk, encoding, cb) { | ||
|  |   var ts = this._transformState; | ||
|  |   ts.writecb = cb; | ||
|  |   ts.writechunk = chunk; | ||
|  |   ts.writeencoding = encoding; | ||
|  | 
 | ||
|  |   if (!ts.transforming) { | ||
|  |     var rs = this._readableState; | ||
|  |     if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); | ||
|  |   } | ||
|  | }; // Doesn't matter what the args are here.
 | ||
|  | // _transform does all the work.
 | ||
|  | // That we got here means that the readable side wants more data.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Transform.prototype._read = function (n) { | ||
|  |   var ts = this._transformState; | ||
|  | 
 | ||
|  |   if (ts.writechunk !== null && !ts.transforming) { | ||
|  |     ts.transforming = true; | ||
|  | 
 | ||
|  |     this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); | ||
|  |   } else { | ||
|  |     // mark that we need a transform, so that any data that comes in
 | ||
|  |     // will get processed, now that we've asked for it.
 | ||
|  |     ts.needTransform = true; | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | Transform.prototype._destroy = function (err, cb) { | ||
|  |   Duplex.prototype._destroy.call(this, err, function (err2) { | ||
|  |     cb(err2); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | function done(stream, er, data) { | ||
|  |   if (er) return stream.emit('error', er); | ||
|  |   if (data != null) // single equals check for both `null` and `undefined`
 | ||
|  |     stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
 | ||
|  |   // if there's nothing in the write buffer, then that means
 | ||
|  |   // that nothing more will ever be provided
 | ||
|  | 
 | ||
|  |   if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); | ||
|  |   if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); | ||
|  |   return stream.push(null); | ||
|  | } | ||
|  | },{"37":37,"43":43,"44":44}],48:[function(_dereq_,module,exports){ | ||
|  | (function (process,global){(function (){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | // A bit simpler than readable streams.
 | ||
|  | // Implement an async ._write(chunk, encoding, cb), and it'll handle all
 | ||
|  | // the drain event emission and buffering.
 | ||
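// Minimal Writable sketch for the contract described above (assuming a Node-style
// environment exposing an equivalent Writable via require('stream')): supply
// _write via the constructor options and the base class takes care of buffering,
// 'drain' and 'finish'.
/*
const { Writable } = require('stream');

const sink = new Writable({
  write(chunk, encoding, cb) {
    console.log('writing', chunk.toString());
    cb(); // signal that this chunk has been handled
  }
});

sink.on('finish', () => console.log('all writes flushed'));
sink.write('a');
sink.end('b'); // 'finish' fires once the final chunk's callback has run
*/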
|  | 'use strict'; | ||
|  | 
 | ||
|  | module.exports = Writable; | ||
|  | /* <replacement> */ | ||
|  | 
 | ||
|  | function WriteReq(chunk, encoding, cb) { | ||
|  |   this.chunk = chunk; | ||
|  |   this.encoding = encoding; | ||
|  |   this.callback = cb; | ||
|  |   this.next = null; | ||
|  | } // It looks like a linked list, but it is not:
 | ||
|  | // there will only ever be two of these per stream
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function CorkedRequest(state) { | ||
|  |   var _this = this; | ||
|  | 
 | ||
|  |   this.next = null; | ||
|  |   this.entry = null; | ||
|  | 
 | ||
|  |   this.finish = function () { | ||
|  |     onCorkedFinish(_this, state); | ||
|  |   }; | ||
|  | } | ||
|  | /* </replacement> */ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | var Duplex; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | Writable.WritableState = WritableState; | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var internalUtil = { | ||
|  |   deprecate: _dereq_(121) | ||
|  | }; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var Stream = _dereq_(56); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | var Buffer = _dereq_(13).Buffer; | ||
|  | 
 | ||
|  | var OurUint8Array = global.Uint8Array || function () {}; | ||
|  | 
 | ||
|  | function _uint8ArrayToBuffer(chunk) { | ||
|  |   return Buffer.from(chunk); | ||
|  | } | ||
|  | 
 | ||
|  | function _isUint8Array(obj) { | ||
|  |   return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; | ||
|  | } | ||
|  | 
 | ||
|  | var destroyImpl = _dereq_(51); | ||
|  | 
 | ||
|  | var _require = _dereq_(55), | ||
|  |     getHighWaterMark = _require.getHighWaterMark; | ||
|  | 
 | ||
|  | var _require$codes = _dereq_(43).codes, | ||
|  |     ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, | ||
|  |     ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, | ||
|  |     ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, | ||
|  |     ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, | ||
|  |     ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, | ||
|  |     ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, | ||
|  |     ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, | ||
|  |     ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; | ||
|  | 
 | ||
|  | var errorOrDestroy = destroyImpl.errorOrDestroy; | ||
|  | 
 | ||
|  | _dereq_(37)(Writable, Stream); | ||
|  | 
 | ||
|  | function nop() {} | ||
|  | 
 | ||
|  | function WritableState(options, stream, isDuplex) { | ||
|  |   Duplex = Duplex || _dereq_(44); | ||
|  |   options = options || {}; // Duplex streams are both readable and writable, but share
 | ||
|  |   // the same options object.
 | ||
|  |   // However, some cases require setting options to different
 | ||
|  |   // values for the readable and the writable sides of the duplex stream,
 | ||
|  |   // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
 | ||
|  | 
 | ||
|  |   if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
 | ||
|  |   // contains buffers or objects.
 | ||
|  | 
 | ||
|  |   this.objectMode = !!options.objectMode; | ||
|  |   if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
 | ||
|  |   // Note: 0 is a valid value; it means that we always return false if
 | ||
|  |   // the entire buffer is not flushed immediately on write()
 | ||
|  | 
 | ||
|  |   this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
 | ||
|  | 
 | ||
|  |   this.finalCalled = false; // drain event flag.
 | ||
|  | 
 | ||
|  |   this.needDrain = false; // at the start of calling end()
 | ||
|  | 
 | ||
|  |   this.ending = false; // when end() has been called, and returned
 | ||
|  | 
 | ||
|  |   this.ended = false; // when 'finish' is emitted
 | ||
|  | 
 | ||
|  |   this.finished = false; // has it been destroyed
 | ||
|  | 
 | ||
|  |   this.destroyed = false; // should we decode strings into buffers before passing to _write?
 | ||
|  |   // this is here so that some node-core streams can optimize string
 | ||
|  |   // handling at a lower level.
 | ||
|  | 
 | ||
|  |   var noDecode = options.decodeStrings === false; | ||
|  |   this.decodeStrings = !noDecode; // Crypto is kind of old and crusty.  Historically, its default string
 | ||
|  |   // encoding is 'binary' so we have to make this configurable.
 | ||
|  |   // Everything else in the universe uses 'utf8', though.
 | ||
|  | 
 | ||
|  |   this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
 | ||
|  |   // of how much we're waiting to get pushed to some underlying
 | ||
|  |   // socket or file.
 | ||
|  | 
 | ||
|  |   this.length = 0; // a flag to see when we're in the middle of a write.
 | ||
|  | 
 | ||
|  |   this.writing = false; // when true all writes will be buffered until .uncork() call
 | ||
|  | 
 | ||
|  |   this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
 | ||
|  |   // or on a later tick.  We set this to true at first, because any
 | ||
|  |   // actions that shouldn't happen until "later" should generally also
 | ||
|  |   // not happen before the first write call.
 | ||
|  | 
 | ||
|  |   this.sync = true; // a flag to know if we're processing previously buffered items, which
 | ||
|  |   // may call the _write() callback in the same tick, so that we don't
 | ||
|  |   // end up in an overlapped onwrite situation.
 | ||
|  | 
 | ||
|  |   this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
 | ||
|  | 
 | ||
|  |   this.onwrite = function (er) { | ||
|  |     onwrite(stream, er); | ||
|  |   }; // the callback that the user supplies to write(chunk,encoding,cb)
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   this.writecb = null; // the amount that is being written when _write is called.
 | ||
|  | 
 | ||
|  |   this.writelen = 0; | ||
|  |   this.bufferedRequest = null; | ||
|  |   this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
 | ||
|  |   // this must be 0 before 'finish' can be emitted
 | ||
|  | 
 | ||
|  |   this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
 | ||
|  |   // This is relevant for synchronous Transform streams
 | ||
|  | 
 | ||
|  |   this.prefinished = false; // True if the error was already emitted and should not be thrown again
 | ||
|  | 
 | ||
|  |   this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
 | ||
|  | 
 | ||
|  |   this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')
 | ||
|  | 
 | ||
|  |   this.autoDestroy = !!options.autoDestroy; // count buffered requests
 | ||
|  | 
 | ||
|  |   this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
 | ||
|  |   // one allocated and free to use, and we maintain at most two
 | ||
|  | 
 | ||
|  |   this.corkedRequestsFree = new CorkedRequest(this); | ||
|  | } | ||
|  | 
 | ||
|  | WritableState.prototype.getBuffer = function getBuffer() { | ||
|  |   var current = this.bufferedRequest; | ||
|  |   var out = []; | ||
|  | 
 | ||
|  |   while (current) { | ||
|  |     out.push(current); | ||
|  |     current = current.next; | ||
|  |   } | ||
|  | 
 | ||
|  |   return out; | ||
|  | }; | ||
|  | 
 | ||
|  | (function () { | ||
|  |   try { | ||
|  |     Object.defineProperty(WritableState.prototype, 'buffer', { | ||
|  |       get: internalUtil.deprecate(function writableStateBufferGetter() { | ||
|  |         return this.getBuffer(); | ||
|  |       }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') | ||
|  |     }); | ||
|  |   } catch (_) {} | ||
|  | })(); // Test _writableState for inheritance to account for Duplex streams,
 | ||
|  | // whose prototype chain only points to Readable.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | var realHasInstance; | ||
|  | 
 | ||
|  | if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { | ||
|  |   realHasInstance = Function.prototype[Symbol.hasInstance]; | ||
|  |   Object.defineProperty(Writable, Symbol.hasInstance, { | ||
|  |     value: function value(object) { | ||
|  |       if (realHasInstance.call(this, object)) return true; | ||
|  |       if (this !== Writable) return false; | ||
|  |       return object && object._writableState instanceof WritableState; | ||
|  |     } | ||
|  |   }); | ||
|  | } else { | ||
|  |   realHasInstance = function realHasInstance(object) { | ||
|  |     return object instanceof this; | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | function Writable(options) { | ||
|  |   Duplex = Duplex || _dereq_(44); // Writable ctor is applied to Duplexes, too.
 | ||
|  |   // `realHasInstance` is necessary because using plain `instanceof`
 | ||
|  |   // would return false, as no `_writableState` property is attached.
 | ||
|  |   // Trying to use the custom `instanceof` for Writable here will also break the
 | ||
|  |   // Node.js LazyTransform implementation, which has a non-trivial getter for
 | ||
|  |   // `_writableState` that would lead to infinite recursion.
 | ||
|  |   // Checking for a Stream.Duplex instance is faster here instead of inside
 | ||
|  |   // the WritableState constructor, at least with V8 6.5
 | ||
|  | 
 | ||
|  |   var isDuplex = this instanceof Duplex; | ||
|  |   if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); | ||
|  |   this._writableState = new WritableState(options, this, isDuplex); // legacy.
 | ||
|  | 
 | ||
|  |   this.writable = true; | ||
|  | 
 | ||
|  |   if (options) { | ||
|  |     if (typeof options.write === 'function') this._write = options.write; | ||
|  |     if (typeof options.writev === 'function') this._writev = options.writev; | ||
|  |     if (typeof options.destroy === 'function') this._destroy = options.destroy; | ||
|  |     if (typeof options.final === 'function') this._final = options.final; | ||
|  |   } | ||
|  | 
 | ||
|  |   Stream.call(this); | ||
|  | } // Otherwise people can pipe Writable streams, which is just wrong.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | Writable.prototype.pipe = function () { | ||
|  |   errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); | ||
|  | }; | ||
|  | 
 | ||
|  | function writeAfterEnd(stream, cb) { | ||
|  |   var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
 | ||
|  | 
 | ||
|  |   errorOrDestroy(stream, er); | ||
|  |   process.nextTick(cb, er); | ||
|  | } // Checks that a user-supplied chunk is valid, especially for the particular
 | ||
|  | // mode the stream is in. Currently this means that `null` is never accepted
 | ||
|  | // and undefined/non-string values are only allowed in object mode.
 | ||
|  | 
 | ||
|  | 
 | ||
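// Minimal sketch of the chunk validation described above (assuming a Node-style
// environment): null is always rejected, and non-string/non-Buffer values are
// rejected unless the stream is in object mode.
/*
const { Writable } = require('stream');

const bytes = new Writable({ write(chunk, encoding, cb) { cb(); } });
bytes.on('error', (err) => console.log(err.code)); // ERR_STREAM_NULL_VALUES
bytes.write('ok');   // strings and Buffers are fine
bytes.write(null);   // rejected

const objects = new Writable({ objectMode: true, write(chunk, encoding, cb) { cb(); } });
objects.write({ any: 'value' }); // arbitrary values are fine in object mode
*/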
|  | function validChunk(stream, state, chunk, cb) { | ||
|  |   var er; | ||
|  | 
 | ||
|  |   if (chunk === null) { | ||
|  |     er = new ERR_STREAM_NULL_VALUES(); | ||
|  |   } else if (typeof chunk !== 'string' && !state.objectMode) { | ||
|  |     er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (er) { | ||
|  |     errorOrDestroy(stream, er); | ||
|  |     process.nextTick(cb, er); | ||
|  |     return false; | ||
|  |   } | ||
|  | 
 | ||
|  |   return true; | ||
|  | } | ||
|  | 
 | ||
|  | Writable.prototype.write = function (chunk, encoding, cb) { | ||
|  |   var state = this._writableState; | ||
|  |   var ret = false; | ||
|  | 
 | ||
|  |   var isBuf = !state.objectMode && _isUint8Array(chunk); | ||
|  | 
 | ||
|  |   if (isBuf && !Buffer.isBuffer(chunk)) { | ||
|  |     chunk = _uint8ArrayToBuffer(chunk); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof encoding === 'function') { | ||
|  |     cb = encoding; | ||
|  |     encoding = null; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; | ||
|  |   if (typeof cb !== 'function') cb = nop; | ||
|  |   if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { | ||
|  |     state.pendingcb++; | ||
|  |     ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); | ||
|  |   } | ||
|  |   return ret; | ||
|  | }; | ||
|  | 
 | ||
|  | Writable.prototype.cork = function () { | ||
|  |   this._writableState.corked++; | ||
|  | }; | ||
|  | 
 | ||
|  | Writable.prototype.uncork = function () { | ||
|  |   var state = this._writableState; | ||
|  | 
 | ||
|  |   if (state.corked) { | ||
|  |     state.corked--; | ||
|  |     if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
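// Minimal cork()/uncork() sketch (assuming a Node-style environment): while a
// stream is corked, writes are buffered instead of being passed to _write;
// uncork() flushes them, using _writev() in one batch when it is implemented.
/*
const { Writable } = require('stream');

const w = new Writable({
  write(chunk, encoding, cb) { cb(); },
  writev(chunks, cb) { console.log(chunks.length, 'chunks flushed together'); cb(); }
});

w.cork();
w.write('a');
w.write('b');
w.uncork(); // both buffered chunks reach _writev in a single call
*/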
|  | Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { | ||
|  |   // node::ParseEncoding() requires lower case.
 | ||
|  |   if (typeof encoding === 'string') encoding = encoding.toLowerCase(); | ||
|  |   if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); | ||
|  |   this._writableState.defaultEncoding = encoding; | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | Object.defineProperty(Writable.prototype, 'writableBuffer', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._writableState && this._writableState.getBuffer(); | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | function decodeChunk(state, chunk, encoding) { | ||
|  |   if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { | ||
|  |     chunk = Buffer.from(chunk, encoding); | ||
|  |   } | ||
|  | 
 | ||
|  |   return chunk; | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._writableState.highWaterMark; | ||
|  |   } | ||
|  | }); // if we're already writing something, then just put this
 | ||
|  | // in the queue, and wait our turn.  Otherwise, call _write
 | ||
|  | // If we return false, then we need a drain event, so set that flag.
 | ||
|  | 
 | ||
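// Minimal back-pressure sketch for the buffering described above (assuming a
// Node-style environment): once the internal buffer reaches highWaterMark,
// write() returns false and the producer should wait for 'drain'.
/*
const { Writable } = require('stream');

const slow = new Writable({
  highWaterMark: 4,                                  // tiny buffer for demonstration
  write(chunk, encoding, cb) { setTimeout(cb, 10); } // pretend the chunk takes time
});

function writeLots(n) {
  while (n-- > 0) {
    if (!slow.write('data')) {                 // false: buffer is full, needDrain is set
      slow.once('drain', () => writeLots(n));  // resume once the buffer empties
      return;
    }
  }
  slow.end();
}
writeLots(100);
*/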
|  | function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { | ||
|  |   if (!isBuf) { | ||
|  |     var newChunk = decodeChunk(state, chunk, encoding); | ||
|  | 
 | ||
|  |     if (chunk !== newChunk) { | ||
|  |       isBuf = true; | ||
|  |       encoding = 'buffer'; | ||
|  |       chunk = newChunk; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   var len = state.objectMode ? 1 : chunk.length; | ||
|  |   state.length += len; | ||
|  |   var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
 | ||
|  | 
 | ||
|  |   if (!ret) state.needDrain = true; | ||
|  | 
 | ||
|  |   if (state.writing || state.corked) { | ||
|  |     var last = state.lastBufferedRequest; | ||
|  |     state.lastBufferedRequest = { | ||
|  |       chunk: chunk, | ||
|  |       encoding: encoding, | ||
|  |       isBuf: isBuf, | ||
|  |       callback: cb, | ||
|  |       next: null | ||
|  |     }; | ||
|  | 
 | ||
|  |     if (last) { | ||
|  |       last.next = state.lastBufferedRequest; | ||
|  |     } else { | ||
|  |       state.bufferedRequest = state.lastBufferedRequest; | ||
|  |     } | ||
|  | 
 | ||
|  |     state.bufferedRequestCount += 1; | ||
|  |   } else { | ||
|  |     doWrite(stream, state, false, len, chunk, encoding, cb); | ||
|  |   } | ||
|  | 
 | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | function doWrite(stream, state, writev, len, chunk, encoding, cb) { | ||
|  |   state.writelen = len; | ||
|  |   state.writecb = cb; | ||
|  |   state.writing = true; | ||
|  |   state.sync = true; | ||
|  |   if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); | ||
|  |   state.sync = false; | ||
|  | } | ||
|  | 
 | ||
|  | function onwriteError(stream, state, sync, er, cb) { | ||
|  |   --state.pendingcb; | ||
|  | 
 | ||
|  |   if (sync) { | ||
|  |     // defer the callback if we are being called synchronously
 | ||
|  |     // to avoid piling up things on the stack
 | ||
|  |     process.nextTick(cb, er); // this can emit finish, and it will always happen
 | ||
|  |     // after error
 | ||
|  | 
 | ||
|  |     process.nextTick(finishMaybe, stream, state); | ||
|  |     stream._writableState.errorEmitted = true; | ||
|  |     errorOrDestroy(stream, er); | ||
|  |   } else { | ||
|  |     // if the write was async, the caller expects the callback
 | ||
|  |     // to be invoked before the error is emitted
 | ||
|  |     cb(er); | ||
|  |     stream._writableState.errorEmitted = true; | ||
|  |     errorOrDestroy(stream, er); // this can emit finish, but finish must
 | ||
|  |     // always follow error
 | ||
|  | 
 | ||
|  |     finishMaybe(stream, state); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function onwriteStateUpdate(state) { | ||
|  |   state.writing = false; | ||
|  |   state.writecb = null; | ||
|  |   state.length -= state.writelen; | ||
|  |   state.writelen = 0; | ||
|  | } | ||
|  | 
 | ||
|  | function onwrite(stream, er) { | ||
|  |   var state = stream._writableState; | ||
|  |   var sync = state.sync; | ||
|  |   var cb = state.writecb; | ||
|  |   if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); | ||
|  |   onwriteStateUpdate(state); | ||
|  |   if (er) onwriteError(stream, state, sync, er, cb);else { | ||
|  |     // Check if we're actually ready to finish, but don't emit yet
 | ||
|  |     var finished = needFinish(state) || stream.destroyed; | ||
|  | 
 | ||
|  |     if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { | ||
|  |       clearBuffer(stream, state); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (sync) { | ||
|  |       process.nextTick(afterWrite, stream, state, finished, cb); | ||
|  |     } else { | ||
|  |       afterWrite(stream, state, finished, cb); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function afterWrite(stream, state, finished, cb) { | ||
|  |   if (!finished) onwriteDrain(stream, state); | ||
|  |   state.pendingcb--; | ||
|  |   cb(); | ||
|  |   finishMaybe(stream, state); | ||
|  | } // Must force callback to be called on nextTick, so that we don't
 | ||
|  | // emit 'drain' before the write() consumer gets the 'false' return
 | ||
|  | // value, and has a chance to attach a 'drain' listener.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function onwriteDrain(stream, state) { | ||
|  |   if (state.length === 0 && state.needDrain) { | ||
|  |     state.needDrain = false; | ||
|  |     stream.emit('drain'); | ||
|  |   } | ||
|  | } // if there's something in the buffer waiting, then process it
 | ||
|  | 
 | ||
|  | 
 | ||
|  | function clearBuffer(stream, state) { | ||
|  |   state.bufferProcessing = true; | ||
|  |   var entry = state.bufferedRequest; | ||
|  | 
 | ||
|  |   if (stream._writev && entry && entry.next) { | ||
|  |     // Fast case, write everything using _writev()
 | ||
|  |     var l = state.bufferedRequestCount; | ||
|  |     var buffer = new Array(l); | ||
|  |     var holder = state.corkedRequestsFree; | ||
|  |     holder.entry = entry; | ||
|  |     var count = 0; | ||
|  |     var allBuffers = true; | ||
|  | 
 | ||
|  |     while (entry) { | ||
|  |       buffer[count] = entry; | ||
|  |       if (!entry.isBuf) allBuffers = false; | ||
|  |       entry = entry.next; | ||
|  |       count += 1; | ||
|  |     } | ||
|  | 
 | ||
|  |     buffer.allBuffers = allBuffers; | ||
|  |     doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
 | ||
|  |     // as the hot path ends with doWrite
 | ||
|  | 
 | ||
|  |     state.pendingcb++; | ||
|  |     state.lastBufferedRequest = null; | ||
|  | 
 | ||
|  |     if (holder.next) { | ||
|  |       state.corkedRequestsFree = holder.next; | ||
|  |       holder.next = null; | ||
|  |     } else { | ||
|  |       state.corkedRequestsFree = new CorkedRequest(state); | ||
|  |     } | ||
|  | 
 | ||
|  |     state.bufferedRequestCount = 0; | ||
|  |   } else { | ||
|  |     // Slow case, write chunks one-by-one
 | ||
|  |     while (entry) { | ||
|  |       var chunk = entry.chunk; | ||
|  |       var encoding = entry.encoding; | ||
|  |       var cb = entry.callback; | ||
|  |       var len = state.objectMode ? 1 : chunk.length; | ||
|  |       doWrite(stream, state, false, len, chunk, encoding, cb); | ||
|  |       entry = entry.next; | ||
|  |       state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
 | ||
|  |       // it means that we need to wait until it does.
 | ||
|  |       // also, that means that the chunk and cb are currently
 | ||
|  |       // being processed, so move the buffer counter past them.
 | ||
|  | 
 | ||
|  |       if (state.writing) { | ||
|  |         break; | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     if (entry === null) state.lastBufferedRequest = null; | ||
|  |   } | ||
|  | 
 | ||
|  |   state.bufferedRequest = entry; | ||
|  |   state.bufferProcessing = false; | ||
|  | } | ||
|  | 
 | ||
|  | Writable.prototype._write = function (chunk, encoding, cb) { | ||
|  |   cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); | ||
|  | }; | ||
|  | 
 | ||
|  | Writable.prototype._writev = null; | ||
|  | 
 | ||
|  | Writable.prototype.end = function (chunk, encoding, cb) { | ||
|  |   var state = this._writableState; | ||
|  | 
 | ||
|  |   if (typeof chunk === 'function') { | ||
|  |     cb = chunk; | ||
|  |     chunk = null; | ||
|  |     encoding = null; | ||
|  |   } else if (typeof encoding === 'function') { | ||
|  |     cb = encoding; | ||
|  |     encoding = null; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks
 | ||
|  | 
 | ||
|  |   if (state.corked) { | ||
|  |     state.corked = 1; | ||
|  |     this.uncork(); | ||
|  |   } // ignore unnecessary end() calls.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   if (!state.ending) endWritable(this, state, cb); | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | Object.defineProperty(Writable.prototype, 'writableLength', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     return this._writableState.length; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | function needFinish(state) { | ||
|  |   return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; | ||
|  | } | ||
|  | 
 | ||
|  | function callFinal(stream, state) { | ||
|  |   stream._final(function (err) { | ||
|  |     state.pendingcb--; | ||
|  | 
 | ||
|  |     if (err) { | ||
|  |       errorOrDestroy(stream, err); | ||
|  |     } | ||
|  | 
 | ||
|  |     state.prefinished = true; | ||
|  |     stream.emit('prefinish'); | ||
|  |     finishMaybe(stream, state); | ||
|  |   }); | ||
|  | } | ||
|  | 
 | ||
|  | function prefinish(stream, state) { | ||
|  |   if (!state.prefinished && !state.finalCalled) { | ||
|  |     if (typeof stream._final === 'function' && !state.destroyed) { | ||
|  |       state.pendingcb++; | ||
|  |       state.finalCalled = true; | ||
|  |       process.nextTick(callFinal, stream, state); | ||
|  |     } else { | ||
|  |       state.prefinished = true; | ||
|  |       stream.emit('prefinish'); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function finishMaybe(stream, state) { | ||
|  |   var need = needFinish(state); | ||
|  | 
 | ||
|  |   if (need) { | ||
|  |     prefinish(stream, state); | ||
|  | 
 | ||
|  |     if (state.pendingcb === 0) { | ||
|  |       state.finished = true; | ||
|  |       stream.emit('finish'); | ||
|  | 
 | ||
|  |       if (state.autoDestroy) { | ||
|  |         // In case of duplex streams we need a way to detect
 | ||
|  |         // if the readable side is ready for autoDestroy as well
 | ||
|  |         var rState = stream._readableState; | ||
|  | 
 | ||
|  |         if (!rState || rState.autoDestroy && rState.endEmitted) { | ||
|  |           stream.destroy(); | ||
|  |         } | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return need; | ||
|  | } | ||
|  | 
 | ||
|  | function endWritable(stream, state, cb) { | ||
|  |   state.ending = true; | ||
|  |   finishMaybe(stream, state); | ||
|  | 
 | ||
|  |   if (cb) { | ||
|  |     if (state.finished) process.nextTick(cb);else stream.once('finish', cb); | ||
|  |   } | ||
|  | 
 | ||
|  |   state.ended = true; | ||
|  |   stream.writable = false; | ||
|  | } | ||
|  | 
 | ||
|  | function onCorkedFinish(corkReq, state, err) { | ||
|  |   var entry = corkReq.entry; | ||
|  |   corkReq.entry = null; | ||
|  | 
 | ||
|  |   while (entry) { | ||
|  |     var cb = entry.callback; | ||
|  |     state.pendingcb--; | ||
|  |     cb(err); | ||
|  |     entry = entry.next; | ||
|  |   } // reuse the free corkReq.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   state.corkedRequestsFree.next = corkReq; | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Writable.prototype, 'destroyed', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function get() { | ||
|  |     if (this._writableState === undefined) { | ||
|  |       return false; | ||
|  |     } | ||
|  | 
 | ||
|  |     return this._writableState.destroyed; | ||
|  |   }, | ||
|  |   set: function set(value) { | ||
|  |     // we ignore the value if the stream
 | ||
|  |     // has not been initialized yet
 | ||
|  |     if (!this._writableState) { | ||
|  |       return; | ||
|  |     } // backward compatibility, the user is explicitly
 | ||
|  |     // managing destroyed
 | ||
|  | 
 | ||
|  | 
 | ||
|  |     this._writableState.destroyed = value; | ||
|  |   } | ||
|  | }); | ||
|  | Writable.prototype.destroy = destroyImpl.destroy; | ||
|  | Writable.prototype._undestroy = destroyImpl.undestroy; | ||
|  | 
 | ||
|  | Writable.prototype._destroy = function (err, cb) { | ||
|  |   cb(err); | ||
|  | }; | ||
|  | }).call(this)}).call(this,_dereq_(73),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{"121":121,"13":13,"37":37,"43":43,"44":44,"51":51,"55":55,"56":56,"73":73}],49:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | var _Object$setPrototypeO; | ||
|  | 
 | ||
|  | function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } | ||
|  | 
 | ||
|  | var finished = _dereq_(52); | ||
|  | 
 | ||
|  | var kLastResolve = Symbol('lastResolve'); | ||
|  | var kLastReject = Symbol('lastReject'); | ||
|  | var kError = Symbol('error'); | ||
|  | var kEnded = Symbol('ended'); | ||
|  | var kLastPromise = Symbol('lastPromise'); | ||
|  | var kHandlePromise = Symbol('handlePromise'); | ||
|  | var kStream = Symbol('stream'); | ||
|  | 
 | ||
|  | function createIterResult(value, done) { | ||
|  |   return { | ||
|  |     value: value, | ||
|  |     done: done | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | function readAndResolve(iter) { | ||
|  |   var resolve = iter[kLastResolve]; | ||
|  | 
 | ||
|  |   if (resolve !== null) { | ||
|  |     var data = iter[kStream].read(); // we defer if data is null
 | ||
|  |     // we can be expecting either 'end' or
 | ||
|  |     // 'error'
 | ||
|  | 
 | ||
|  |     if (data !== null) { | ||
|  |       iter[kLastPromise] = null; | ||
|  |       iter[kLastResolve] = null; | ||
|  |       iter[kLastReject] = null; | ||
|  |       resolve(createIterResult(data, false)); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function onReadable(iter) { | ||
|  |   // we wait for the next tick, because it might
 | ||
|  |   // emit an error with process.nextTick
 | ||
|  |   process.nextTick(readAndResolve, iter); | ||
|  | } | ||
|  | 
 | ||
|  | function wrapForNext(lastPromise, iter) { | ||
|  |   return function (resolve, reject) { | ||
|  |     lastPromise.then(function () { | ||
|  |       if (iter[kEnded]) { | ||
|  |         resolve(createIterResult(undefined, true)); | ||
|  |         return; | ||
|  |       } | ||
|  | 
 | ||
|  |       iter[kHandlePromise](resolve, reject); | ||
|  |     }, reject); | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); | ||
|  | var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { | ||
|  |   get stream() { | ||
|  |     return this[kStream]; | ||
|  |   }, | ||
|  | 
 | ||
|  |   next: function next() { | ||
|  |     var _this = this; | ||
|  | 
 | ||
|  |     // if we have detected an error in the meanwhile
 | ||
|  |     // reject straight away
 | ||
|  |     var error = this[kError]; | ||
|  | 
 | ||
|  |     if (error !== null) { | ||
|  |       return Promise.reject(error); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (this[kEnded]) { | ||
|  |       return Promise.resolve(createIterResult(undefined, true)); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (this[kStream].destroyed) { | ||
|  |       // We need to defer via nextTick because if .destroy(err) is
 | ||
|  |       // called, the error will be emitted via nextTick, and
 | ||
|  |       // we cannot guarantee that there is no error lingering around
 | ||
|  |       // waiting to be emitted.
 | ||
|  |       return new Promise(function (resolve, reject) { | ||
|  |         process.nextTick(function () { | ||
|  |           if (_this[kError]) { | ||
|  |             reject(_this[kError]); | ||
|  |           } else { | ||
|  |             resolve(createIterResult(undefined, true)); | ||
|  |           } | ||
|  |         }); | ||
|  |       }); | ||
|  |     } // if we have multiple next() calls
 | ||
|  |     // we will wait for the previous Promise to finish
 | ||
|  |     // this logic is optimized to support for await loops,
 | ||
|  |     // where next() is only called once at a time
 | ||
|  | 
 | ||
|  | 
 | ||
|  |     var lastPromise = this[kLastPromise]; | ||
|  |     var promise; | ||
|  | 
 | ||
|  |     if (lastPromise) { | ||
|  |       promise = new Promise(wrapForNext(lastPromise, this)); | ||
|  |     } else { | ||
|  |       // fast path needed to support multiple this.push()
 | ||
|  |       // without triggering the next() queue
 | ||
|  |       var data = this[kStream].read(); | ||
|  | 
 | ||
|  |       if (data !== null) { | ||
|  |         return Promise.resolve(createIterResult(data, false)); | ||
|  |       } | ||
|  | 
 | ||
|  |       promise = new Promise(this[kHandlePromise]); | ||
|  |     } | ||
|  | 
 | ||
|  |     this[kLastPromise] = promise; | ||
|  |     return promise; | ||
|  |   } | ||
|  | }, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { | ||
|  |   return this; | ||
|  | }), _defineProperty(_Object$setPrototypeO, "return", function _return() { | ||
|  |   var _this2 = this; | ||
|  | 
 | ||
|  |   // destroy(err, cb) is a private API
 | ||
|  |   // we can guarantee we have that here, because we control the
 | ||
|  |   // Readable class this is attached to
 | ||
|  |   return new Promise(function (resolve, reject) { | ||
|  |     _this2[kStream].destroy(null, function (err) { | ||
|  |       if (err) { | ||
|  |         reject(err); | ||
|  |         return; | ||
|  |       } | ||
|  | 
 | ||
|  |       resolve(createIterResult(undefined, true)); | ||
|  |     }); | ||
|  |   }); | ||
|  | }), _Object$setPrototypeO), AsyncIteratorPrototype); | ||
|  | 
 | ||
|  | var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { | ||
|  |   var _Object$create; | ||
|  | 
 | ||
|  |   var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { | ||
|  |     value: stream, | ||
|  |     writable: true | ||
|  |   }), _defineProperty(_Object$create, kLastResolve, { | ||
|  |     value: null, | ||
|  |     writable: true | ||
|  |   }), _defineProperty(_Object$create, kLastReject, { | ||
|  |     value: null, | ||
|  |     writable: true | ||
|  |   }), _defineProperty(_Object$create, kError, { | ||
|  |     value: null, | ||
|  |     writable: true | ||
|  |   }), _defineProperty(_Object$create, kEnded, { | ||
|  |     value: stream._readableState.endEmitted, | ||
|  |     writable: true | ||
|  |   }), _defineProperty(_Object$create, kHandlePromise, { | ||
|  |     value: function value(resolve, reject) { | ||
|  |       var data = iterator[kStream].read(); | ||
|  | 
 | ||
|  |       if (data) { | ||
|  |         iterator[kLastPromise] = null; | ||
|  |         iterator[kLastResolve] = null; | ||
|  |         iterator[kLastReject] = null; | ||
|  |         resolve(createIterResult(data, false)); | ||
|  |       } else { | ||
|  |         iterator[kLastResolve] = resolve; | ||
|  |         iterator[kLastReject] = reject; | ||
|  |       } | ||
|  |     }, | ||
|  |     writable: true | ||
|  |   }), _Object$create)); | ||
|  |   iterator[kLastPromise] = null; | ||
|  |   finished(stream, function (err) { | ||
|  |     if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { | ||
|  |       var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
 | ||
|  |       // returned by next() and store the error
 | ||
|  | 
 | ||
|  |       if (reject !== null) { | ||
|  |         iterator[kLastPromise] = null; | ||
|  |         iterator[kLastResolve] = null; | ||
|  |         iterator[kLastReject] = null; | ||
|  |         reject(err); | ||
|  |       } | ||
|  | 
 | ||
|  |       iterator[kError] = err; | ||
|  |       return; | ||
|  |     } | ||
|  | 
 | ||
|  |     var resolve = iterator[kLastResolve]; | ||
|  | 
 | ||
|  |     if (resolve !== null) { | ||
|  |       iterator[kLastPromise] = null; | ||
|  |       iterator[kLastResolve] = null; | ||
|  |       iterator[kLastReject] = null; | ||
|  |       resolve(createIterResult(undefined, true)); | ||
|  |     } | ||
|  | 
 | ||
|  |     iterator[kEnded] = true; | ||
|  |   }); | ||
|  |   stream.on('readable', onReadable.bind(null, iterator)); | ||
|  |   return iterator; | ||
|  | }; | ||
|  | 
 | ||
|  | module.exports = createReadableStreamAsyncIterator; | ||
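|  | // Illustrative consumption sketch (comment only, not part of the bundle). It assumes the
|  | // surrounding readable-stream build wires this iterator to Readable.prototype[Symbol.asyncIterator];
|  | // `readable` stands for any such stream.
|  | /*
|  | async function collect(readable) {
|  |   var chunks = [];
|  |   for await (var chunk of readable) {   // each iteration awaits one next() promise
|  |     chunks.push(chunk);
|  |   }
|  |   return chunks;                        // the loop exits on 'end'; an 'error' rejects the pending promise
|  | }
|  | */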
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"52":52,"73":73}],50:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } | ||
|  | 
 | ||
|  | function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } | ||
|  | 
 | ||
|  | function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } | ||
|  | 
 | ||
|  | function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } | ||
|  | 
 | ||
|  | function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } | ||
|  | 
 | ||
|  | function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } | ||
|  | 
 | ||
|  | var _require = _dereq_(13), | ||
|  |     Buffer = _require.Buffer; | ||
|  | 
 | ||
|  | var _require2 = _dereq_(11), | ||
|  |     inspect = _require2.inspect; | ||
|  | 
 | ||
|  | var custom = inspect && inspect.custom || 'inspect'; | ||
|  | 
 | ||
|  | function copyBuffer(src, target, offset) { | ||
|  |   Buffer.prototype.copy.call(src, target, offset); | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = | ||
|  | /*#__PURE__*/ | ||
|  | function () { | ||
|  |   function BufferList() { | ||
|  |     _classCallCheck(this, BufferList); | ||
|  | 
 | ||
|  |     this.head = null; | ||
|  |     this.tail = null; | ||
|  |     this.length = 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   _createClass(BufferList, [{ | ||
|  |     key: "push", | ||
|  |     value: function push(v) { | ||
|  |       var entry = { | ||
|  |         data: v, | ||
|  |         next: null | ||
|  |       }; | ||
|  |       if (this.length > 0) this.tail.next = entry;else this.head = entry; | ||
|  |       this.tail = entry; | ||
|  |       ++this.length; | ||
|  |     } | ||
|  |   }, { | ||
|  |     key: "unshift", | ||
|  |     value: function unshift(v) { | ||
|  |       var entry = { | ||
|  |         data: v, | ||
|  |         next: this.head | ||
|  |       }; | ||
|  |       if (this.length === 0) this.tail = entry; | ||
|  |       this.head = entry; | ||
|  |       ++this.length; | ||
|  |     } | ||
|  |   }, { | ||
|  |     key: "shift", | ||
|  |     value: function shift() { | ||
|  |       if (this.length === 0) return; | ||
|  |       var ret = this.head.data; | ||
|  |       if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; | ||
|  |       --this.length; | ||
|  |       return ret; | ||
|  |     } | ||
|  |   }, { | ||
|  |     key: "clear", | ||
|  |     value: function clear() { | ||
|  |       this.head = this.tail = null; | ||
|  |       this.length = 0; | ||
|  |     } | ||
|  |   }, { | ||
|  |     key: "join", | ||
|  |     value: function join(s) { | ||
|  |       if (this.length === 0) return ''; | ||
|  |       var p = this.head; | ||
|  |       var ret = '' + p.data; | ||
|  | 
 | ||
|  |       while (p = p.next) { | ||
|  |         ret += s + p.data; | ||
|  |       } | ||
|  | 
 | ||
|  |       return ret; | ||
|  |     } | ||
|  |   }, { | ||
|  |     key: "concat", | ||
|  |     value: function concat(n) { | ||
|  |       if (this.length === 0) return Buffer.alloc(0); | ||
|  |       var ret = Buffer.allocUnsafe(n >>> 0); | ||
|  |       var p = this.head; | ||
|  |       var i = 0; | ||
|  | 
 | ||
|  |       while (p) { | ||
|  |         copyBuffer(p.data, ret, i); | ||
|  |         i += p.data.length; | ||
|  |         p = p.next; | ||
|  |       } | ||
|  | 
 | ||
|  |       return ret; | ||
|  |     } // Consumes a specified number of bytes or characters from the buffered data.
 | ||
|  | 
 | ||
|  |   }, { | ||
|  |     key: "consume", | ||
|  |     value: function consume(n, hasStrings) { | ||
|  |       var ret; | ||
|  | 
 | ||
|  |       if (n < this.head.data.length) { | ||
|  |         // `slice` is the same for buffers and strings.
 | ||
|  |         ret = this.head.data.slice(0, n); | ||
|  |         this.head.data = this.head.data.slice(n); | ||
|  |       } else if (n === this.head.data.length) { | ||
|  |         // First chunk is a perfect match.
 | ||
|  |         ret = this.shift(); | ||
|  |       } else { | ||
|  |         // Result spans more than one buffer.
 | ||
|  |         ret = hasStrings ? this._getString(n) : this._getBuffer(n); | ||
|  |       } | ||
|  | 
 | ||
|  |       return ret; | ||
|  |     } | ||
|  |   }, { | ||
|  |     key: "first", | ||
|  |     value: function first() { | ||
|  |       return this.head.data; | ||
|  |     } // Consumes a specified number of characters from the buffered data.
 | ||
|  | 
 | ||
|  |   }, { | ||
|  |     key: "_getString", | ||
|  |     value: function _getString(n) { | ||
|  |       var p = this.head; | ||
|  |       var c = 1; | ||
|  |       var ret = p.data; | ||
|  |       n -= ret.length; | ||
|  | 
 | ||
|  |       while (p = p.next) { | ||
|  |         var str = p.data; | ||
|  |         var nb = n > str.length ? str.length : n; | ||
|  |         if (nb === str.length) ret += str;else ret += str.slice(0, n); | ||
|  |         n -= nb; | ||
|  | 
 | ||
|  |         if (n === 0) { | ||
|  |           if (nb === str.length) { | ||
|  |             ++c; | ||
|  |             if (p.next) this.head = p.next;else this.head = this.tail = null; | ||
|  |           } else { | ||
|  |             this.head = p; | ||
|  |             p.data = str.slice(nb); | ||
|  |           } | ||
|  | 
 | ||
|  |           break; | ||
|  |         } | ||
|  | 
 | ||
|  |         ++c; | ||
|  |       } | ||
|  | 
 | ||
|  |       this.length -= c; | ||
|  |       return ret; | ||
|  |     } // Consumes a specified number of bytes from the buffered data.
 | ||
|  | 
 | ||
|  |   }, { | ||
|  |     key: "_getBuffer", | ||
|  |     value: function _getBuffer(n) { | ||
|  |       var ret = Buffer.allocUnsafe(n); | ||
|  |       var p = this.head; | ||
|  |       var c = 1; | ||
|  |       p.data.copy(ret); | ||
|  |       n -= p.data.length; | ||
|  | 
 | ||
|  |       while (p = p.next) { | ||
|  |         var buf = p.data; | ||
|  |         var nb = n > buf.length ? buf.length : n; | ||
|  |         buf.copy(ret, ret.length - n, 0, nb); | ||
|  |         n -= nb; | ||
|  | 
 | ||
|  |         if (n === 0) { | ||
|  |           if (nb === buf.length) { | ||
|  |             ++c; | ||
|  |             if (p.next) this.head = p.next;else this.head = this.tail = null; | ||
|  |           } else { | ||
|  |             this.head = p; | ||
|  |             p.data = buf.slice(nb); | ||
|  |           } | ||
|  | 
 | ||
|  |           break; | ||
|  |         } | ||
|  | 
 | ||
|  |         ++c; | ||
|  |       } | ||
|  | 
 | ||
|  |       this.length -= c; | ||
|  |       return ret; | ||
|  |     } // Make sure the linked list only shows the minimal necessary information.
 | ||
|  | 
 | ||
|  |   }, { | ||
|  |     key: custom, | ||
|  |     value: function value(_, options) { | ||
|  |       return inspect(this, _objectSpread({}, options, { | ||
|  |         // Only inspect one level.
 | ||
|  |         depth: 0, | ||
|  |         // It should not recurse.
 | ||
|  |         customInspect: false | ||
|  |       })); | ||
|  |     } | ||
|  |   }]); | ||
|  | 
 | ||
|  |   return BufferList; | ||
|  | }(); | ||
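|  | // Illustrative sketch (comment only): how the BufferList exported above queues chunks for
|  | // readable-stream. Assumes a Node-style Buffer implementation is available.
|  | /*
|  | var BufferList = module.exports;        // the list class defined above
|  | var list = new BufferList();
|  | list.push(Buffer.from('ab'));
|  | list.push(Buffer.from('cd'));
|  | list.join('-');                         // 'ab-cd'
|  | list.concat(4);                         // Buffer 'abcd' - concat copies, it does not consume
|  | list.consume(3, false);                 // Buffer 'abc', leaving 'd' queued in the list
|  | */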
|  | },{"11":11,"13":13}],51:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | 'use strict'; // undocumented cb() API, needed for core, not for public API
 | ||
|  | 
 | ||
|  | function destroy(err, cb) { | ||
|  |   var _this = this; | ||
|  | 
 | ||
|  |   var readableDestroyed = this._readableState && this._readableState.destroyed; | ||
|  |   var writableDestroyed = this._writableState && this._writableState.destroyed; | ||
|  | 
 | ||
|  |   if (readableDestroyed || writableDestroyed) { | ||
|  |     if (cb) { | ||
|  |       cb(err); | ||
|  |     } else if (err) { | ||
|  |       if (!this._writableState) { | ||
|  |         process.nextTick(emitErrorNT, this, err); | ||
|  |       } else if (!this._writableState.errorEmitted) { | ||
|  |         this._writableState.errorEmitted = true; | ||
|  |         process.nextTick(emitErrorNT, this, err); | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     return this; | ||
|  |   } // we set destroyed to true before firing error callbacks in order
 | ||
|  |   // to make it re-entrance safe in case destroy() is called within callbacks
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   if (this._readableState) { | ||
|  |     this._readableState.destroyed = true; | ||
|  |   } // if this is a duplex stream mark the writable part as destroyed as well
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   if (this._writableState) { | ||
|  |     this._writableState.destroyed = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   this._destroy(err || null, function (err) { | ||
|  |     if (!cb && err) { | ||
|  |       if (!_this._writableState) { | ||
|  |         process.nextTick(emitErrorAndCloseNT, _this, err); | ||
|  |       } else if (!_this._writableState.errorEmitted) { | ||
|  |         _this._writableState.errorEmitted = true; | ||
|  |         process.nextTick(emitErrorAndCloseNT, _this, err); | ||
|  |       } else { | ||
|  |         process.nextTick(emitCloseNT, _this); | ||
|  |       } | ||
|  |     } else if (cb) { | ||
|  |       process.nextTick(emitCloseNT, _this); | ||
|  |       cb(err); | ||
|  |     } else { | ||
|  |       process.nextTick(emitCloseNT, _this); | ||
|  |     } | ||
|  |   }); | ||
|  | 
 | ||
|  |   return this; | ||
|  | } | ||
|  | 
 | ||
|  | function emitErrorAndCloseNT(self, err) { | ||
|  |   emitErrorNT(self, err); | ||
|  |   emitCloseNT(self); | ||
|  | } | ||
|  | 
 | ||
|  | function emitCloseNT(self) { | ||
|  |   if (self._writableState && !self._writableState.emitClose) return; | ||
|  |   if (self._readableState && !self._readableState.emitClose) return; | ||
|  |   self.emit('close'); | ||
|  | } | ||
|  | 
 | ||
|  | function undestroy() { | ||
|  |   if (this._readableState) { | ||
|  |     this._readableState.destroyed = false; | ||
|  |     this._readableState.reading = false; | ||
|  |     this._readableState.ended = false; | ||
|  |     this._readableState.endEmitted = false; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (this._writableState) { | ||
|  |     this._writableState.destroyed = false; | ||
|  |     this._writableState.ended = false; | ||
|  |     this._writableState.ending = false; | ||
|  |     this._writableState.finalCalled = false; | ||
|  |     this._writableState.prefinished = false; | ||
|  |     this._writableState.finished = false; | ||
|  |     this._writableState.errorEmitted = false; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function emitErrorNT(self, err) { | ||
|  |   self.emit('error', err); | ||
|  | } | ||
|  | 
 | ||
|  | function errorOrDestroy(stream, err) { | ||
|  |   // We have tests that rely on errors being emitted
 | ||
|  |   // in the same tick, so changing this is semver major.
 | ||
|  |   // For now when you opt-in to autoDestroy we allow
 | ||
|  |   // the error to be emitted nextTick. In a future
 | ||
|  |   // semver major update we should change the default to this.
 | ||
|  |   var rState = stream._readableState; | ||
|  |   var wState = stream._writableState; | ||
|  |   if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = { | ||
|  |   destroy: destroy, | ||
|  |   undestroy: undestroy, | ||
|  |   errorOrDestroy: errorOrDestroy | ||
|  | }; | ||
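|  | // Behaviour sketch (comment only): errorOrDestroy either destroys the stream or emits the
|  | // error directly, depending on the autoDestroy flag; the stream names below are placeholders.
|  | /*
|  | errorOrDestroy(streamWithAutoDestroy, new Error('boom')); // -> stream.destroy(err)
|  | errorOrDestroy(legacyStream, new Error('boom'));          // -> 'error' emitted in the same tick
|  | */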
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"73":73}],52:[function(_dereq_,module,exports){ | ||
|  | // Ported from https://github.com/mafintosh/end-of-stream with
 | ||
|  | // permission from the author, Mathias Buus (@mafintosh).
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | var ERR_STREAM_PREMATURE_CLOSE = _dereq_(43).codes.ERR_STREAM_PREMATURE_CLOSE; | ||
|  | 
 | ||
|  | function once(callback) { | ||
|  |   var called = false; | ||
|  |   return function () { | ||
|  |     if (called) return; | ||
|  |     called = true; | ||
|  | 
 | ||
|  |     for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { | ||
|  |       args[_key] = arguments[_key]; | ||
|  |     } | ||
|  | 
 | ||
|  |     callback.apply(this, args); | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | function noop() {} | ||
|  | 
 | ||
|  | function isRequest(stream) { | ||
|  |   return stream.setHeader && typeof stream.abort === 'function'; | ||
|  | } | ||
|  | 
 | ||
|  | function eos(stream, opts, callback) { | ||
|  |   if (typeof opts === 'function') return eos(stream, null, opts); | ||
|  |   if (!opts) opts = {}; | ||
|  |   callback = once(callback || noop); | ||
|  |   var readable = opts.readable || opts.readable !== false && stream.readable; | ||
|  |   var writable = opts.writable || opts.writable !== false && stream.writable; | ||
|  | 
 | ||
|  |   var onlegacyfinish = function onlegacyfinish() { | ||
|  |     if (!stream.writable) onfinish(); | ||
|  |   }; | ||
|  | 
 | ||
|  |   var writableEnded = stream._writableState && stream._writableState.finished; | ||
|  | 
 | ||
|  |   var onfinish = function onfinish() { | ||
|  |     writable = false; | ||
|  |     writableEnded = true; | ||
|  |     if (!readable) callback.call(stream); | ||
|  |   }; | ||
|  | 
 | ||
|  |   var readableEnded = stream._readableState && stream._readableState.endEmitted; | ||
|  | 
 | ||
|  |   var onend = function onend() { | ||
|  |     readable = false; | ||
|  |     readableEnded = true; | ||
|  |     if (!writable) callback.call(stream); | ||
|  |   }; | ||
|  | 
 | ||
|  |   var onerror = function onerror(err) { | ||
|  |     callback.call(stream, err); | ||
|  |   }; | ||
|  | 
 | ||
|  |   var onclose = function onclose() { | ||
|  |     var err; | ||
|  | 
 | ||
|  |     if (readable && !readableEnded) { | ||
|  |       if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); | ||
|  |       return callback.call(stream, err); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (writable && !writableEnded) { | ||
|  |       if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); | ||
|  |       return callback.call(stream, err); | ||
|  |     } | ||
|  |   }; | ||
|  | 
 | ||
|  |   var onrequest = function onrequest() { | ||
|  |     stream.req.on('finish', onfinish); | ||
|  |   }; | ||
|  | 
 | ||
|  |   if (isRequest(stream)) { | ||
|  |     stream.on('complete', onfinish); | ||
|  |     stream.on('abort', onclose); | ||
|  |     if (stream.req) onrequest();else stream.on('request', onrequest); | ||
|  |   } else if (writable && !stream._writableState) { | ||
|  |     // legacy streams
 | ||
|  |     stream.on('end', onlegacyfinish); | ||
|  |     stream.on('close', onlegacyfinish); | ||
|  |   } | ||
|  | 
 | ||
|  |   stream.on('end', onend); | ||
|  |   stream.on('finish', onfinish); | ||
|  |   if (opts.error !== false) stream.on('error', onerror); | ||
|  |   stream.on('close', onclose); | ||
|  |   return function () { | ||
|  |     stream.removeListener('complete', onfinish); | ||
|  |     stream.removeListener('abort', onclose); | ||
|  |     stream.removeListener('request', onrequest); | ||
|  |     if (stream.req) stream.req.removeListener('finish', onfinish); | ||
|  |     stream.removeListener('end', onlegacyfinish); | ||
|  |     stream.removeListener('close', onlegacyfinish); | ||
|  |     stream.removeListener('finish', onfinish); | ||
|  |     stream.removeListener('end', onend); | ||
|  |     stream.removeListener('error', onerror); | ||
|  |     stream.removeListener('close', onclose); | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = eos; | ||
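|  | // Usage sketch (comment only): the callback fires once the stream has finished, ended or
|  | // closed; a premature close reports ERR_STREAM_PREMATURE_CLOSE. `someStream` is a placeholder
|  | // for any readable and/or writable stream.
|  | /*
|  | var detach = eos(someStream, function (err) {
|  |   if (err) console.error('stream did not complete cleanly', err);
|  |   else console.log('stream is done');
|  | });
|  | // detach() removes the listeners again if the outcome no longer matters
|  | */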
|  | },{"43":43}],53:[function(_dereq_,module,exports){ | ||
|  | module.exports = function () { | ||
|  |   throw new Error('Readable.from is not available in the browser') | ||
|  | }; | ||
|  | 
 | ||
|  | },{}],54:[function(_dereq_,module,exports){ | ||
|  | // Ported from https://github.com/mafintosh/pump with
 | ||
|  | // permission from the author, Mathias Buus (@mafintosh).
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | var eos; | ||
|  | 
 | ||
|  | function once(callback) { | ||
|  |   var called = false; | ||
|  |   return function () { | ||
|  |     if (called) return; | ||
|  |     called = true; | ||
|  |     callback.apply(void 0, arguments); | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | var _require$codes = _dereq_(43).codes, | ||
|  |     ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, | ||
|  |     ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; | ||
|  | 
 | ||
|  | function noop(err) { | ||
|  |   // Rethrow the error if it exists to avoid swallowing it
 | ||
|  |   if (err) throw err; | ||
|  | } | ||
|  | 
 | ||
|  | function isRequest(stream) { | ||
|  |   return stream.setHeader && typeof stream.abort === 'function'; | ||
|  | } | ||
|  | 
 | ||
|  | function destroyer(stream, reading, writing, callback) { | ||
|  |   callback = once(callback); | ||
|  |   var closed = false; | ||
|  |   stream.on('close', function () { | ||
|  |     closed = true; | ||
|  |   }); | ||
|  |   if (eos === undefined) eos = _dereq_(52); | ||
|  |   eos(stream, { | ||
|  |     readable: reading, | ||
|  |     writable: writing | ||
|  |   }, function (err) { | ||
|  |     if (err) return callback(err); | ||
|  |     closed = true; | ||
|  |     callback(); | ||
|  |   }); | ||
|  |   var destroyed = false; | ||
|  |   return function (err) { | ||
|  |     if (closed) return; | ||
|  |     if (destroyed) return; | ||
|  |     destroyed = true; // request.destroy just does .end - .abort is what we want
 | ||
|  | 
 | ||
|  |     if (isRequest(stream)) return stream.abort(); | ||
|  |     if (typeof stream.destroy === 'function') return stream.destroy(); | ||
|  |     callback(err || new ERR_STREAM_DESTROYED('pipe')); | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | function call(fn) { | ||
|  |   fn(); | ||
|  | } | ||
|  | 
 | ||
|  | function pipe(from, to) { | ||
|  |   return from.pipe(to); | ||
|  | } | ||
|  | 
 | ||
|  | function popCallback(streams) { | ||
|  |   if (!streams.length) return noop; | ||
|  |   if (typeof streams[streams.length - 1] !== 'function') return noop; | ||
|  |   return streams.pop(); | ||
|  | } | ||
|  | 
 | ||
|  | function pipeline() { | ||
|  |   for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { | ||
|  |     streams[_key] = arguments[_key]; | ||
|  |   } | ||
|  | 
 | ||
|  |   var callback = popCallback(streams); | ||
|  |   if (Array.isArray(streams[0])) streams = streams[0]; | ||
|  | 
 | ||
|  |   if (streams.length < 2) { | ||
|  |     throw new ERR_MISSING_ARGS('streams'); | ||
|  |   } | ||
|  | 
 | ||
|  |   var error; | ||
|  |   var destroys = streams.map(function (stream, i) { | ||
|  |     var reading = i < streams.length - 1; | ||
|  |     var writing = i > 0; | ||
|  |     return destroyer(stream, reading, writing, function (err) { | ||
|  |       if (!error) error = err; | ||
|  |       if (err) destroys.forEach(call); | ||
|  |       if (reading) return; | ||
|  |       destroys.forEach(call); | ||
|  |       callback(error); | ||
|  |     }); | ||
|  |   }); | ||
|  |   return streams.reduce(pipe); | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = pipeline; | ||
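|  | // Usage sketch (comment only): pipeline() pipes the streams in order and destroys all of
|  | // them if any one fails. The three stream names below are placeholders.
|  | /*
|  | pipeline(sourceStream, transformStream, destinationStream, function (err) {
|  |   if (err) console.error('pipeline failed', err);
|  |   else console.log('pipeline succeeded');
|  | });
|  | */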
|  | },{"43":43,"52":52}],55:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | var ERR_INVALID_OPT_VALUE = _dereq_(43).codes.ERR_INVALID_OPT_VALUE; | ||
|  | 
 | ||
|  | function highWaterMarkFrom(options, isDuplex, duplexKey) { | ||
|  |   return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; | ||
|  | } | ||
|  | 
 | ||
|  | function getHighWaterMark(state, options, duplexKey, isDuplex) { | ||
|  |   var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); | ||
|  | 
 | ||
|  |   if (hwm != null) { | ||
|  |     if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { | ||
|  |       var name = isDuplex ? duplexKey : 'highWaterMark'; | ||
|  |       throw new ERR_INVALID_OPT_VALUE(name, hwm); | ||
|  |     } | ||
|  | 
 | ||
|  |     return Math.floor(hwm); | ||
|  |   } // Default value
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   return state.objectMode ? 16 : 16 * 1024; | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = { | ||
|  |   getHighWaterMark: getHighWaterMark | ||
|  | }; | ||
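|  | // Behaviour sketch (comment only): with no highWaterMark option the default is 16 objects in
|  | // objectMode and 16 KiB otherwise; non-integer or negative values throw.
|  | /*
|  | getHighWaterMark({ objectMode: false }, {}, 'highWaterMark', false); // 16384
|  | getHighWaterMark({ objectMode: true }, {}, 'highWaterMark', false);  // 16
|  | getHighWaterMark({}, { highWaterMark: -1 }, 'highWaterMark', false); // throws ERR_INVALID_OPT_VALUE
|  | */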
|  | },{"43":43}],56:[function(_dereq_,module,exports){ | ||
|  | module.exports = _dereq_(26).EventEmitter; | ||
|  | 
 | ||
|  | },{"26":26}],57:[function(_dereq_,module,exports){ | ||
|  | exports = module.exports = _dereq_(46); | ||
|  | exports.Stream = exports; | ||
|  | exports.Readable = exports; | ||
|  | exports.Writable = _dereq_(48); | ||
|  | exports.Duplex = _dereq_(44); | ||
|  | exports.Transform = _dereq_(47); | ||
|  | exports.PassThrough = _dereq_(45); | ||
|  | exports.finished = _dereq_(52); | ||
|  | exports.pipeline = _dereq_(54); | ||
|  | 
 | ||
|  | },{"44":44,"45":45,"46":46,"47":47,"48":48,"52":52,"54":54}],58:[function(_dereq_,module,exports){ | ||
|  | 'use strict' | ||
|  | 
 | ||
|  | // For (old) browser support
 | ||
|  | var xtend = _dereq_(141) | ||
|  | var assign = _dereq_(142) | ||
|  | 
 | ||
|  | module.exports = function supports () { | ||
|  |   var manifest = xtend.apply(null, arguments) | ||
|  | 
 | ||
|  |   return assign(manifest, { | ||
|  |     // Features of abstract-leveldown
 | ||
|  |     bufferKeys: manifest.bufferKeys || false, | ||
|  |     snapshots: manifest.snapshots || false, | ||
|  |     permanence: manifest.permanence || false, | ||
|  |     seek: manifest.seek || false, | ||
|  |     clear: manifest.clear || false, | ||
|  | 
 | ||
|  |     // Features of abstract-leveldown that levelup doesn't have
 | ||
|  |     status: manifest.status || false, | ||
|  | 
 | ||
|  |     // Features of disk-based implementations
 | ||
|  |     createIfMissing: manifest.createIfMissing || false, | ||
|  |     errorIfExists: manifest.errorIfExists || false, | ||
|  | 
 | ||
|  |     // Features of level(up) that abstract-leveldown doesn't have yet
 | ||
|  |     deferredOpen: manifest.deferredOpen || false, | ||
|  |     openCallback: manifest.openCallback || false, | ||
|  |     promises: manifest.promises || false, | ||
|  |     streams: manifest.streams || false, | ||
|  |     encodings: manifest.encodings || false, | ||
|  | 
 | ||
|  |     // Methods that are not part of abstract-leveldown or levelup
 | ||
|  |     additionalMethods: xtend(manifest.additionalMethods) | ||
|  |   }) | ||
|  | } | ||
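|  | // Usage sketch (comment only): supports() merges any number of partial manifests and fills
|  | // every known feature flag with an explicit boolean.
|  | /*
|  | supports();                                    // all flags false, additionalMethods {}
|  | supports({ snapshots: true }, { seek: true }); // merged manifest with both flags true
|  | */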
|  | 
 | ||
|  | },{"141":141,"142":142}],59:[function(_dereq_,module,exports){ | ||
|  | var WriteError = _dereq_(41).WriteError | ||
|  | var promisify = _dereq_(62) | ||
|  | var getCallback = _dereq_(60).getCallback | ||
|  | var getOptions = _dereq_(60).getOptions | ||
|  | 
 | ||
|  | function Batch (levelup) { | ||
|  |   // TODO (next major): remove this._levelup alias
 | ||
|  |   this.db = this._levelup = levelup | ||
|  |   this.batch = levelup.db.batch() | ||
|  |   this.ops = [] | ||
|  |   this.length = 0 | ||
|  | } | ||
|  | 
 | ||
|  | Batch.prototype.put = function (key, value) { | ||
|  |   try { | ||
|  |     this.batch.put(key, value) | ||
|  |   } catch (e) { | ||
|  |     throw new WriteError(e) | ||
|  |   } | ||
|  | 
 | ||
|  |   this.ops.push({ type: 'put', key: key, value: value }) | ||
|  |   this.length++ | ||
|  | 
 | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | Batch.prototype.del = function (key) { | ||
|  |   try { | ||
|  |     this.batch.del(key) | ||
|  |   } catch (err) { | ||
|  |     throw new WriteError(err) | ||
|  |   } | ||
|  | 
 | ||
|  |   this.ops.push({ type: 'del', key: key }) | ||
|  |   this.length++ | ||
|  | 
 | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | Batch.prototype.clear = function () { | ||
|  |   try { | ||
|  |     this.batch.clear() | ||
|  |   } catch (err) { | ||
|  |     throw new WriteError(err) | ||
|  |   } | ||
|  | 
 | ||
|  |   this.ops = [] | ||
|  |   this.length = 0 | ||
|  | 
 | ||
|  |   return this | ||
|  | } | ||
|  | 
 | ||
|  | Batch.prototype.write = function (options, callback) { | ||
|  |   var levelup = this._levelup | ||
|  |   var ops = this.ops | ||
|  |   var promise | ||
|  | 
 | ||
|  |   callback = getCallback(options, callback) | ||
|  | 
 | ||
|  |   if (!callback) { | ||
|  |     callback = promisify() | ||
|  |     promise = callback.promise | ||
|  |   } | ||
|  | 
 | ||
|  |   options = getOptions(options) | ||
|  | 
 | ||
|  |   try { | ||
|  |     this.batch.write(options, function (err) { | ||
|  |       if (err) { return callback(new WriteError(err)) } | ||
|  |       levelup.emit('batch', ops) | ||
|  |       callback() | ||
|  |     }) | ||
|  |   } catch (err) { | ||
|  |     throw new WriteError(err) | ||
|  |   } | ||
|  | 
 | ||
|  |   return promise | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = Batch | ||
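|  | // Usage sketch (comment only): the chained batch form exposed by levelup. `db` stands for any
|  | // opened levelup instance; write() also returns a promise when the callback is omitted.
|  | /*
|  | db.batch()
|  |   .put('name', 'marge')
|  |   .put('age', '34')
|  |   .del('temp')
|  |   .write(function (err) {
|  |     if (err) throw err;
|  |     // all queued operations were handed to the underlying store in one batch
|  |   });
|  | */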
|  | 
 | ||
|  | },{"41":41,"60":60,"62":62}],60:[function(_dereq_,module,exports){ | ||
|  | exports.getCallback = function (options, callback) { | ||
|  |   return typeof options === 'function' ? options : callback | ||
|  | } | ||
|  | 
 | ||
|  | exports.getOptions = function (options) { | ||
|  |   return typeof options === 'object' && options !== null ? options : {} | ||
|  | } | ||
|  | 
 | ||
|  | },{}],61:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | var EventEmitter = _dereq_(26).EventEmitter | ||
|  | var inherits = _dereq_(124).inherits | ||
|  | var extend = _dereq_(141) | ||
|  | var DeferredLevelDOWN = _dereq_(17) | ||
|  | var IteratorStream = _dereq_(42) | ||
|  | var Batch = _dereq_(59) | ||
|  | var errors = _dereq_(41) | ||
|  | var supports = _dereq_(58) | ||
|  | var assert = _dereq_(6) | ||
|  | var promisify = _dereq_(62) | ||
|  | var getCallback = _dereq_(60).getCallback | ||
|  | var getOptions = _dereq_(60).getOptions | ||
|  | 
 | ||
|  | var WriteError = errors.WriteError | ||
|  | var ReadError = errors.ReadError | ||
|  | var NotFoundError = errors.NotFoundError | ||
|  | var OpenError = errors.OpenError | ||
|  | var InitializationError = errors.InitializationError | ||
|  | 
 | ||
|  | // Possible AbstractLevelDOWN#status values:
 | ||
|  | //  - 'new'     - newly created, not opened or closed
 | ||
|  | //  - 'opening' - waiting for the database to be opened, post open()
 | ||
|  | //  - 'open'    - successfully opened the database, available for use
 | ||
|  | //  - 'closing' - waiting for the database to be closed, post close()
 | ||
|  | //  - 'closed'  - database has been successfully closed, should not be
 | ||
|  | //                 used except for another open() operation
 | ||
|  | 
 | ||
|  | function LevelUP (db, options, callback) { | ||
|  |   if (!(this instanceof LevelUP)) { | ||
|  |     return new LevelUP(db, options, callback) | ||
|  |   } | ||
|  | 
 | ||
|  |   var error | ||
|  |   var self = this | ||
|  | 
 | ||
|  |   EventEmitter.call(this) | ||
|  |   this.setMaxListeners(Infinity) | ||
|  | 
 | ||
|  |   if (typeof options === 'function') { | ||
|  |     callback = options | ||
|  |     options = {} | ||
|  |   } | ||
|  | 
 | ||
|  |   options = options || {} | ||
|  | 
 | ||
|  |   if (!db || typeof db !== 'object') { | ||
|  |     error = new InitializationError('First argument must be an abstract-leveldown compliant store') | ||
|  |     if (typeof callback === 'function') { | ||
|  |       return process.nextTick(callback, error) | ||
|  |     } | ||
|  |     throw error | ||
|  |   } | ||
|  | 
 | ||
|  |   assert.strictEqual(typeof db.status, 'string', '.status required, old abstract-leveldown') | ||
|  | 
 | ||
|  |   this.options = getOptions(options) | ||
|  |   this._db = db | ||
|  |   this.db = new DeferredLevelDOWN(db) | ||
|  |   this.open(callback || function (err) { | ||
|  |     if (err) self.emit('error', err) | ||
|  |   }) | ||
|  | 
 | ||
|  |   // Create manifest based on deferred-leveldown's
 | ||
|  |   this.supports = supports(this.db.supports, { | ||
|  |     status: false, | ||
|  |     deferredOpen: true, | ||
|  |     openCallback: true, | ||
|  |     promises: true, | ||
|  |     streams: true | ||
|  |   }) | ||
|  | 
 | ||
|  |   // Experimental: enrich levelup interface
 | ||
|  |   Object.keys(this.supports.additionalMethods).forEach(function (method) { | ||
|  |     if (this[method] != null) return | ||
|  | 
 | ||
|  |     // Don't do this.db[method].bind() because this.db is dynamic.
 | ||
|  |     this[method] = function () { | ||
|  |       return this.db[method].apply(this.db, arguments) | ||
|  |     } | ||
|  |   }, this) | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.emit = EventEmitter.prototype.emit | ||
|  | LevelUP.prototype.once = EventEmitter.prototype.once | ||
|  | inherits(LevelUP, EventEmitter) | ||
|  | 
 | ||
|  | LevelUP.prototype.open = function (opts, callback) { | ||
|  |   var self = this | ||
|  |   var promise | ||
|  | 
 | ||
|  |   if (typeof opts === 'function') { | ||
|  |     callback = opts | ||
|  |     opts = null | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!callback) { | ||
|  |     callback = promisify() | ||
|  |     promise = callback.promise | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!opts) { | ||
|  |     opts = this.options | ||
|  |   } | ||
|  | 
 | ||
|  |   if (this.isOpen()) { | ||
|  |     process.nextTick(callback, null, self) | ||
|  |     return promise | ||
|  |   } | ||
|  | 
 | ||
|  |   if (this._isOpening()) { | ||
|  |     this.once('open', function () { callback(null, self) }) | ||
|  |     return promise | ||
|  |   } | ||
|  | 
 | ||
|  |   this.emit('opening') | ||
|  | 
 | ||
|  |   this.db.open(opts, function (err) { | ||
|  |     if (err) { | ||
|  |       return callback(new OpenError(err)) | ||
|  |     } | ||
|  |     self.db = self._db | ||
|  |     callback(null, self) | ||
|  |     self.emit('open') | ||
|  |     self.emit('ready') | ||
|  |   }) | ||
|  | 
 | ||
|  |   return promise | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.close = function (callback) { | ||
|  |   var self = this | ||
|  |   var promise | ||
|  | 
 | ||
|  |   if (!callback) { | ||
|  |     callback = promisify() | ||
|  |     promise = callback.promise | ||
|  |   } | ||
|  | 
 | ||
|  |   if (this.isOpen()) { | ||
|  |     this.db.close(function () { | ||
|  |       self.emit('closed') | ||
|  |       callback.apply(null, arguments) | ||
|  |     }) | ||
|  |     this.emit('closing') | ||
|  |     this.db = new DeferredLevelDOWN(this._db) | ||
|  |   } else if (this.isClosed()) { | ||
|  |     process.nextTick(callback) | ||
|  |   } else if (this.db.status === 'closing') { | ||
|  |     this.once('closed', callback) | ||
|  |   } else if (this._isOpening()) { | ||
|  |     this.once('open', function () { | ||
|  |       self.close(callback) | ||
|  |     }) | ||
|  |   } | ||
|  | 
 | ||
|  |   return promise | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.isOpen = function () { | ||
|  |   return this.db.status === 'open' | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype._isOpening = function () { | ||
|  |   return this.db.status === 'opening' | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.isClosed = function () { | ||
|  |   return (/^clos|new/).test(this.db.status) | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.get = function (key, options, callback) { | ||
|  |   var promise | ||
|  | 
 | ||
|  |   callback = getCallback(options, callback) | ||
|  | 
 | ||
|  |   if (!callback) { | ||
|  |     callback = promisify() | ||
|  |     promise = callback.promise | ||
|  |   } | ||
|  | 
 | ||
|  |   if (maybeError(this, callback)) { return promise } | ||
|  | 
 | ||
|  |   options = getOptions(options) | ||
|  | 
 | ||
|  |   this.db.get(key, options, function (err, value) { | ||
|  |     if (err) { | ||
|  |       if ((/notfound/i).test(err) || err.notFound) { | ||
|  |         err = new NotFoundError('Key not found in database [' + key + ']', err) | ||
|  |       } else { | ||
|  |         err = new ReadError(err) | ||
|  |       } | ||
|  |       return callback(err) | ||
|  |     } | ||
|  |     callback(null, value) | ||
|  |   }) | ||
|  | 
 | ||
|  |   return promise | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.put = function (key, value, options, callback) { | ||
|  |   var self = this | ||
|  |   var promise | ||
|  | 
 | ||
|  |   callback = getCallback(options, callback) | ||
|  | 
 | ||
|  |   if (!callback) { | ||
|  |     callback = promisify() | ||
|  |     promise = callback.promise | ||
|  |   } | ||
|  | 
 | ||
|  |   if (maybeError(this, callback)) { return promise } | ||
|  | 
 | ||
|  |   options = getOptions(options) | ||
|  | 
 | ||
|  |   this.db.put(key, value, options, function (err) { | ||
|  |     if (err) { | ||
|  |       return callback(new WriteError(err)) | ||
|  |     } | ||
|  |     self.emit('put', key, value) | ||
|  |     callback() | ||
|  |   }) | ||
|  | 
 | ||
|  |   return promise | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.del = function (key, options, callback) { | ||
|  |   var self = this | ||
|  |   var promise | ||
|  | 
 | ||
|  |   callback = getCallback(options, callback) | ||
|  | 
 | ||
|  |   if (!callback) { | ||
|  |     callback = promisify() | ||
|  |     promise = callback.promise | ||
|  |   } | ||
|  | 
 | ||
|  |   if (maybeError(this, callback)) { return promise } | ||
|  | 
 | ||
|  |   options = getOptions(options) | ||
|  | 
 | ||
|  |   this.db.del(key, options, function (err) { | ||
|  |     if (err) { | ||
|  |       return callback(new WriteError(err)) | ||
|  |     } | ||
|  |     self.emit('del', key) | ||
|  |     callback() | ||
|  |   }) | ||
|  | 
 | ||
|  |   return promise | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.batch = function (arr, options, callback) { | ||
|  |   if (!arguments.length) { | ||
|  |     return new Batch(this) | ||
|  |   } | ||
|  | 
 | ||
|  |   var self = this | ||
|  |   var promise | ||
|  | 
 | ||
|  |   if (typeof arr === 'function') callback = arr | ||
|  |   else callback = getCallback(options, callback) | ||
|  | 
 | ||
|  |   if (!callback) { | ||
|  |     callback = promisify() | ||
|  |     promise = callback.promise | ||
|  |   } | ||
|  | 
 | ||
|  |   if (maybeError(this, callback)) { return promise } | ||
|  | 
 | ||
|  |   options = getOptions(options) | ||
|  | 
 | ||
|  |   this.db.batch(arr, options, function (err) { | ||
|  |     if (err) { | ||
|  |       return callback(new WriteError(err)) | ||
|  |     } | ||
|  |     self.emit('batch', arr) | ||
|  |     callback() | ||
|  |   }) | ||
|  | 
 | ||
|  |   return promise | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.iterator = function (options) { | ||
|  |   return this.db.iterator(options) | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.clear = function (options, callback) { | ||
|  |   var self = this | ||
|  |   var promise | ||
|  | 
 | ||
|  |   callback = getCallback(options, callback) | ||
|  |   options = getOptions(options) | ||
|  | 
 | ||
|  |   if (!callback) { | ||
|  |     callback = promisify() | ||
|  |     promise = callback.promise | ||
|  |   } | ||
|  | 
 | ||
|  |   if (maybeError(this, callback)) { | ||
|  |     return promise | ||
|  |   } | ||
|  | 
 | ||
|  |   this.db.clear(options, function (err) { | ||
|  |     if (err) { | ||
|  |       return callback(new WriteError(err)) | ||
|  |     } | ||
|  |     self.emit('clear', options) | ||
|  |     callback() | ||
|  |   }) | ||
|  | 
 | ||
|  |   return promise | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.readStream = | ||
|  | LevelUP.prototype.createReadStream = function (options) { | ||
|  |   options = extend({ keys: true, values: true }, options) | ||
|  |   if (typeof options.limit !== 'number') { options.limit = -1 } | ||
|  |   return new IteratorStream(this.db.iterator(options), options) | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.keyStream = | ||
|  | LevelUP.prototype.createKeyStream = function (options) { | ||
|  |   return this.createReadStream(extend(options, { keys: true, values: false })) | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.valueStream = | ||
|  | LevelUP.prototype.createValueStream = function (options) { | ||
|  |   return this.createReadStream(extend(options, { keys: false, values: true })) | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.toString = function () { | ||
|  |   return 'LevelUP' | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.prototype.type = 'levelup' | ||
|  | 
 | ||
|  | function maybeError (db, callback) { | ||
|  |   if (!db._isOpening() && !db.isOpen()) { | ||
|  |     process.nextTick(callback, new ReadError('Database is not open')) | ||
|  |     return true | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | LevelUP.errors = errors | ||
|  | module.exports = LevelUP.default = LevelUP | ||
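|  | // Usage sketch (comment only): levelup wraps an abstract-leveldown store; operations issued
|  | // before the store finishes opening are queued by deferred-leveldown, and every method returns
|  | // a promise when no callback is given. The store argument below is a placeholder.
|  | /*
|  | var db = LevelUP(someAbstractLevelDOWNStore);
|  | db.put('key', 'value')
|  |   .then(function () { return db.get('key'); })
|  |   .then(function (value) { console.log(value); })
|  |   .catch(function (err) { console.error(err); });
|  | */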
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"124":124,"141":141,"17":17,"26":26,"41":41,"42":42,"58":58,"59":59,"6":6,"60":60,"62":62,"73":73}],62:[function(_dereq_,module,exports){ | ||
|  | function promisify () { | ||
|  |   var callback | ||
|  |   var promise = new Promise(function (resolve, reject) { | ||
|  |     callback = function callback (err, value) { | ||
|  |       if (err) reject(err) | ||
|  |       else resolve(value) | ||
|  |     } | ||
|  |   }) | ||
|  |   callback.promise = promise | ||
|  |   return callback | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = promisify | ||
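|  | // Sketch (comment only) of the callback-or-promise pattern levelup builds with this helper;
|  | // `someAsyncLookup` is a placeholder for any node-style async function.
|  | /*
|  | function readValue(key, callback) {
|  |   var promise;
|  |   if (!callback) {
|  |     callback = promisify();
|  |     promise = callback.promise;  // resolved/rejected when callback is invoked
|  |   }
|  |   someAsyncLookup(key, callback);
|  |   return promise;                // undefined when the caller supplied a callback
|  | }
|  | */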
|  | 
 | ||
|  | },{}],63:[function(_dereq_,module,exports){ | ||
|  | (function (Buffer,process,global){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | var inherits = _dereq_(37); | ||
|  | var bufferFrom = _dereq_(68); | ||
|  | var AbstractLevelDOWN = _dereq_(3).AbstractLevelDOWN; | ||
|  | var AbstractIterator = _dereq_(3).AbstractIterator; | ||
|  | 
 | ||
|  | var LocalStorage = _dereq_(65).LocalStorage; | ||
|  | var LocalStorageCore = _dereq_(64); | ||
|  | var utils = _dereq_(67); | ||
|  | 
 | ||
|  | // see http://stackoverflow.com/a/15349865/680742
 | ||
|  | var nextTick = global.setImmediate || process.nextTick; | ||
|  | 
 | ||
|  | function LDIterator(db, options) { | ||
|  | 
 | ||
|  |   AbstractIterator.call(this, db); | ||
|  | 
 | ||
|  |   this._reverse = !!options.reverse; | ||
|  |   this._endkey     = options.end; | ||
|  |   this._startkey   = options.start; | ||
|  |   this._gt      = options.gt; | ||
|  |   this._gte     = options.gte; | ||
|  |   this._lt      = options.lt; | ||
|  |   this._lte     = options.lte; | ||
|  |   this._exclusiveStart = options.exclusiveStart; | ||
|  |   this._keysOnly = options.values === false; | ||
|  |   this._limit = options.limit; | ||
|  |   this._count = 0; | ||
|  | 
 | ||
|  |   this.onInitCompleteListeners = []; | ||
|  | } | ||
|  | 
 | ||
|  | inherits(LDIterator, AbstractIterator); | ||
|  | 
 | ||
|  | LDIterator.prototype._init = function (callback) { | ||
|  |   nextTick(function () { | ||
|  |     callback(); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LDIterator.prototype._next = function (callback) { | ||
|  |   var self = this; | ||
|  | 
 | ||
|  |   function onInitComplete() { | ||
|  |     if (self._pos === self._keys.length || self._pos < 0) { // done reading
 | ||
|  |       return callback(); | ||
|  |     } | ||
|  | 
 | ||
|  |     var key = self._keys[self._pos]; | ||
|  | 
 | ||
|  |     if (!!self._endkey && (self._reverse ? key < self._endkey : key > self._endkey)) { | ||
|  |       return callback(); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (!!self._limit && self._limit > 0 && self._count++ >= self._limit) { | ||
|  |       return callback(); | ||
|  |     } | ||
|  | 
 | ||
|  |     if ((self._lt  && key >= self._lt) || | ||
|  |       (self._lte && key > self._lte) || | ||
|  |       (self._gt  && key <= self._gt) || | ||
|  |       (self._gte && key < self._gte)) { | ||
|  |       return callback(); | ||
|  |     } | ||
|  | 
 | ||
|  |     self._pos += self._reverse ? -1 : 1; | ||
|  |     if (self._keysOnly) { | ||
|  |       return callback(null, key); | ||
|  |     } | ||
|  | 
 | ||
|  |     self.db.container.getItem(key, function (err, value) { | ||
|  |       if (err) { | ||
|  |         if (err.message === 'NotFound') { | ||
|  |           return nextTick(function () { | ||
|  |             self._next(callback); | ||
|  |           }); | ||
|  |         } | ||
|  |         return callback(err); | ||
|  |       } | ||
|  |       callback(null, key, value); | ||
|  |     }); | ||
|  |   } | ||
|  |   if (!self.initStarted) { | ||
|  |     process.nextTick(function () { | ||
|  |       self.initStarted = true; | ||
|  |       self._init(function (err) { | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         self.db.container.keys(function (err, keys) { | ||
|  |           if (err) { | ||
|  |             return callback(err); | ||
|  |           } | ||
|  |           self._keys = keys; | ||
|  |           if (self._startkey) { | ||
|  |             var index = utils.sortedIndexOf(self._keys, self._startkey); | ||
|  |             var startkey = (index >= self._keys.length || index < 0) ? | ||
|  |               undefined : self._keys[index]; | ||
|  |             self._pos = index; | ||
|  |             if (self._reverse) { | ||
|  |               if (self._exclusiveStart || startkey !== self._startkey) { | ||
|  |                 self._pos--; | ||
|  |               } | ||
|  |             } else if (self._exclusiveStart && startkey === self._startkey) { | ||
|  |               self._pos++; | ||
|  |             } | ||
|  |           } else { | ||
|  |             self._pos = self._reverse ? self._keys.length - 1 : 0; | ||
|  |           } | ||
|  |           onInitComplete(); | ||
|  | 
 | ||
|  |           self.initCompleted = true; | ||
|  |           var i = -1; | ||
|  |           while (++i < self.onInitCompleteListeners.length) { | ||
|  |             nextTick(self.onInitCompleteListeners[i]); | ||
|  |           } | ||
|  |         }); | ||
|  |       }); | ||
|  |     }); | ||
|  |   } else if (!self.initCompleted) { | ||
|  |     self.onInitCompleteListeners.push(onInitComplete); | ||
|  |   } else { | ||
|  |     process.nextTick(onInitComplete); | ||
|  |   } | ||
|  | }; | ||
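|  | // Option sketch (comment only): the iterator walks the sorted key list and stops at the first
|  | // boundary it hits. Assuming a store created with LD() below that holds the keys 'a' through 'e':
|  | /*
|  | store.iterator({ start: 'b', end: 'd' });    // yields 'b', 'c', 'd' (end is inclusive)
|  | store.iterator({ reverse: true, limit: 2 }); // yields 'e', then 'd'
|  | store.iterator({ values: false });           // keys only, skips the getItem() lookups
|  | */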
|  | 
 | ||
|  | function LD(location) { | ||
|  |   if (!(this instanceof LD)) { | ||
|  |     return new LD(location); | ||
|  |   } | ||
|  |   AbstractLevelDOWN.call(this, location); | ||
|  |   this.container = new LocalStorage(location); | ||
|  | } | ||
|  | 
 | ||
|  | inherits(LD, AbstractLevelDOWN); | ||
|  | 
 | ||
|  | LD.prototype._open = function (options, callback) { | ||
|  |   this.container.init(callback); | ||
|  | }; | ||
|  | 
 | ||
|  | LD.prototype._put = function (key, value, options, callback) { | ||
|  | 
 | ||
|  |   var err = checkKeyValue(key, 'key'); | ||
|  | 
 | ||
|  |   if (err) { | ||
|  |     return nextTick(function () { | ||
|  |       callback(err); | ||
|  |     }); | ||
|  |   } | ||
|  | 
 | ||
|  |   err = checkKeyValue(value, 'value'); | ||
|  | 
 | ||
|  |   if (err) { | ||
|  |     return nextTick(function () { | ||
|  |       callback(err); | ||
|  |     }); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof value === 'object' && !Buffer.isBuffer(value) && value.buffer === undefined) { | ||
|  |     var obj = {}; | ||
|  |     obj.storetype = "json"; | ||
|  |     obj.data = value; | ||
|  |     value = JSON.stringify(obj); | ||
|  |   } | ||
|  | 
 | ||
|  |   this.container.setItem(key, value, callback); | ||
|  | }; | ||
|  | 
 | ||
|  | LD.prototype._get = function (key, options, callback) { | ||
|  | 
 | ||
|  |   var err = checkKeyValue(key, 'key'); | ||
|  | 
 | ||
|  |   if (err) { | ||
|  |     return nextTick(function () { | ||
|  |       callback(err); | ||
|  |     }); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!Buffer.isBuffer(key)) { | ||
|  |     key = String(key); | ||
|  |   } | ||
|  |   this.container.getItem(key, function (err, value) { | ||
|  | 
 | ||
|  |     if (err) { | ||
|  |       return callback(err); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (options.asBuffer !== false && !Buffer.isBuffer(value)) { | ||
|  |       value = bufferFrom(value); | ||
|  |     } | ||
|  | 
 | ||
|  | 
 | ||
|  |     if (options.asBuffer === false) { | ||
|  |       if (value.indexOf("{\"storetype\":\"json\",\"data\"") > -1) { | ||
|  |         var res = JSON.parse(value); | ||
|  |         value = res.data; | ||
|  |       } | ||
|  |     } | ||
|  |     callback(null, value); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LD.prototype._del = function (key, options, callback) { | ||
|  | 
 | ||
|  |   var err = checkKeyValue(key, 'key'); | ||
|  | 
 | ||
|  |   if (err) { | ||
|  |     return nextTick(function () { | ||
|  |       callback(err); | ||
|  |     }); | ||
|  |   } | ||
|  |   if (!Buffer.isBuffer(key)) { | ||
|  |     key = String(key); | ||
|  |   } | ||
|  | 
 | ||
|  |   this.container.removeItem(key, callback); | ||
|  | }; | ||
|  | 
 | ||
|  | LD.prototype._batch = function (array, options, callback) { | ||
|  |   var self = this; | ||
|  |   nextTick(function () { | ||
|  |     var err; | ||
|  |     var key; | ||
|  |     var value; | ||
|  | 
 | ||
|  |     var numDone = 0; | ||
|  |     var overallErr; | ||
|  |     function checkDone() { | ||
|  |       if (++numDone === array.length) { | ||
|  |         callback(overallErr); | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     if (Array.isArray(array) && array.length) { | ||
|  |       for (var i = 0; i < array.length; i++) { | ||
|  |         var task = array[i]; | ||
|  |         if (task) { | ||
|  |           key = Buffer.isBuffer(task.key) ? task.key : String(task.key); | ||
|  |           err = checkKeyValue(key, 'key'); | ||
|  |           if (err) { | ||
|  |             overallErr = err; | ||
|  |             checkDone(); | ||
|  |           } else if (task.type === 'del') { | ||
|  |             self._del(task.key, options, checkDone); | ||
|  |           } else if (task.type === 'put') { | ||
|  |             value = Buffer.isBuffer(task.value) ? task.value : String(task.value); | ||
|  |             err = checkKeyValue(value, 'value'); | ||
|  |             if (err) { | ||
|  |               overallErr = err; | ||
|  |               checkDone(); | ||
|  |             } else { | ||
|  |               self._put(key, value, options, checkDone); | ||
|  |             } | ||
|  |           } | ||
|  |         } else { | ||
|  |           checkDone(); | ||
|  |         } | ||
|  |       } | ||
|  |     } else { | ||
|  |       callback(); | ||
|  |     } | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LD.prototype._iterator = function (options) { | ||
|  |   return new LDIterator(this, options); | ||
|  | }; | ||
|  | 
 | ||
|  | LD.destroy = function (name, callback) { | ||
|  |   LocalStorageCore.destroy(name, callback); | ||
|  | }; | ||
|  | 
 | ||
|  | function checkKeyValue(obj, type) { | ||
|  |   if (obj === null || obj === undefined) { | ||
|  |     return new Error(type + ' cannot be `null` or `undefined`'); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (type === 'key') { | ||
|  | 
 | ||
|  |     if (obj instanceof Boolean) { | ||
|  |       return new Error(type + ' cannot be a Boolean instance'); | ||
|  |     } | ||
|  |     if (obj === '') { | ||
|  |       return new Error(type + ' cannot be empty'); | ||
|  |     } | ||
|  |   } | ||
|  |   if (obj.toString().indexOf("[object ArrayBuffer]") === 0) { | ||
|  |     if (obj.byteLength === 0 || obj.byteLength === undefined) { | ||
|  |       return new Error(type + ' cannot be an empty Buffer'); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   if (Buffer.isBuffer(obj)) { | ||
|  |     if (obj.length === 0) { | ||
|  |       return new Error(type + ' cannot be an empty Buffer'); | ||
|  |     } | ||
|  |   } else if (String(obj) === '') { | ||
|  |     return new Error(type + ' cannot be an empty String'); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = LD; | ||
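|  | // Usage sketch (comment only): LD implements the abstract-leveldown interface over localStorage,
|  | // so it can be driven directly with callbacks (or wrapped in levelup). The database name
|  | // 'example-db' is arbitrary.
|  | /*
|  | var store = LD('example-db');
|  | store.open(function (err) {
|  |   if (err) throw err;
|  |   store.put('greeting', 'hello', function (err) {
|  |     if (err) throw err;
|  |     store.get('greeting', { asBuffer: false }, function (err, value) {
|  |       console.log(value); // 'hello'
|  |     });
|  |   });
|  | });
|  | */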
|  | 
 | ||
|  | }).call(this)}).call(this,{"isBuffer":_dereq_(38)},_dereq_(73),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{"3":3,"37":37,"38":38,"64":64,"65":65,"67":67,"68":68,"73":73}],64:[function(_dereq_,module,exports){ | ||
|  | (function (process,global){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | //
 | ||
|  | // Class that should contain everything necessary to interact
 | ||
|  | // with localStorage as a generic key-value store.
 | ||
|  | // The idea is that authors who want to create an AbstractKeyValueDOWN
 | ||
|  | // module (e.g. on lawnchair, S3, whatever) will only have to
 | ||
|  | // reimplement this file.
 | ||
|  | //
 | ||
|  | 
 | ||
|  | // see http://stackoverflow.com/a/15349865/680742
 | ||
|  | var nextTick = global.setImmediate || process.nextTick; | ||
|  | 
 | ||
|  | // We use humble-localstorage as a wrapper for localStorage because
 | ||
|  | // it falls back to an in-memory implementation in environments without
 | ||
|  | // localStorage, like Node or Safari private browsing.
 | ||
|  | var storage = _dereq_(29); | ||
|  | 
 | ||
|  | function callbackify(callback, fun) { | ||
|  |   var val; | ||
|  |   var err; | ||
|  |   try { | ||
|  |     val = fun(); | ||
|  |   } catch (e) { | ||
|  |     err = e; | ||
|  |   } | ||
|  |   nextTick(function () { | ||
|  |     callback(err, val); | ||
|  |   }); | ||
|  | } | ||
|  | 
 | ||
|  | function createPrefix(dbname) { | ||
|  |   return dbname.replace(/!/g, '!!') + '!'; // escape bangs in dbname
 | ||
|  | } | ||
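|  | // Example of the prefix scheme above: bangs inside the database name are doubled so the
|  | // trailing '!' separator stays unambiguous.
|  | /*
|  | createPrefix('mydb');   // 'mydb!'
|  | createPrefix('my!db');  // 'my!!db!'
|  | */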
|  | 
 | ||
|  | function LocalStorageCore(dbname) { | ||
|  |   this._prefix = createPrefix(dbname); | ||
|  | } | ||
|  | 
 | ||
|  | LocalStorageCore.prototype.getKeys = function (callback) { | ||
|  |   var self = this; | ||
|  |   callbackify(callback, function () { | ||
|  |     var keys = []; | ||
|  |     var prefixLen = self._prefix.length; | ||
|  |     var i = -1; | ||
|  |     var len = storage.length; | ||
|  |     while (++i < len) { | ||
|  |       var fullKey = storage.key(i); | ||
|  |       if (fullKey.substring(0, prefixLen) === self._prefix) { | ||
|  |         keys.push(fullKey.substring(prefixLen)); | ||
|  |       } | ||
|  |     } | ||
|  |     keys.sort(); | ||
|  |     return keys; | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LocalStorageCore.prototype.put = function (key, value, callback) { | ||
|  |   var self = this; | ||
|  |   callbackify(callback, function () { | ||
|  |     storage.setItem(self._prefix + key, value); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LocalStorageCore.prototype.get = function (key, callback) { | ||
|  |   var self = this; | ||
|  |   callbackify(callback, function () { | ||
|  |     return storage.getItem(self._prefix + key); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LocalStorageCore.prototype.remove = function (key, callback) { | ||
|  |   var self = this; | ||
|  |   callbackify(callback, function () { | ||
|  |     storage.removeItem(self._prefix + key); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LocalStorageCore.destroy = function (dbname, callback) { | ||
|  |   var prefix = createPrefix(dbname); | ||
|  |   callbackify(callback, function () { | ||
|  |     var keysToDelete = []; | ||
|  |     var i = -1; | ||
|  |     var len = storage.length; | ||
|  |     while (++i < len) { | ||
|  |       var key = storage.key(i); | ||
|  |       if (key.substring(0, prefix.length) === prefix) { | ||
|  |         keysToDelete.push(key); | ||
|  |       } | ||
|  |     } | ||
|  |     keysToDelete.forEach(function (key) { | ||
|  |       storage.removeItem(key); | ||
|  |     }); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | module.exports = LocalStorageCore; | ||
|  | }).call(this)}).call(this,_dereq_(73),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{"29":29,"73":73}],65:[function(_dereq_,module,exports){ | ||
|  | (function (Buffer){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | // ArrayBuffer/Uint8Array are old formats that date back to before we
 | ||
|  | // had a proper browserified buffer type. they may be removed later
 | ||
|  | var arrayBuffPrefix = 'ArrayBuffer:'; | ||
|  | var arrayBuffRegex = new RegExp('^' + arrayBuffPrefix); | ||
|  | var uintPrefix = 'Uint8Array:'; | ||
|  | var uintRegex = new RegExp('^' + uintPrefix); | ||
|  | 
 | ||
|  | // this is the new encoding format used going forward
 | ||
|  | var bufferPrefix = 'Buff:'; | ||
|  | var bufferRegex = new RegExp('^' + bufferPrefix); | ||
|  | 
 | ||
|  | var utils = _dereq_(67); | ||
|  | var LocalStorageCore = _dereq_(64); | ||
|  | var TaskQueue = _dereq_(66); | ||
|  | var d64 = _dereq_(15); | ||
|  | 
 | ||
|  | function LocalStorage(dbname) { | ||
|  |   this._store = new LocalStorageCore(dbname); | ||
|  |   this._queue = new TaskQueue(); | ||
|  | } | ||
|  | 
 | ||
|  | LocalStorage.prototype.sequentialize = function (callback, fun) { | ||
|  |   this._queue.add(fun, callback); | ||
|  | }; | ||
|  | 
 | ||
|  | LocalStorage.prototype.init = function (callback) { | ||
|  |   var self = this; | ||
|  |   self.sequentialize(callback, function (callback) { | ||
|  |     self._store.getKeys(function (err, keys) { | ||
|  |       if (err) { | ||
|  |         return callback(err); | ||
|  |       } | ||
|  |       self._keys = keys; | ||
|  |       return callback(); | ||
|  |     }); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LocalStorage.prototype.keys = function (callback) { | ||
|  |   var self = this; | ||
|  |   self.sequentialize(callback, function (callback) { | ||
|  |     self._store.getKeys(function (err, keys) { | ||
|  |       if (err) { | ||
|  |         return callback(err); | ||
|  |       } | ||
|  |       callback(null, keys.slice()); | ||
|  |     }); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | // setItem: Saves an item at the key provided.
 | ||
|  | LocalStorage.prototype.setItem = function (key, value, callback) { | ||
|  |   var self = this; | ||
|  |   self.sequentialize(callback, function (callback) { | ||
|  |     if (Buffer.isBuffer(value)) { | ||
|  |       value = bufferPrefix + d64.encode(value); | ||
|  |     } | ||
|  | 
 | ||
|  |     var idx = utils.sortedIndexOf(self._keys, key); | ||
|  |     if (self._keys[idx] !== key) { | ||
|  |       self._keys.splice(idx, 0, key); | ||
|  |     } | ||
|  |     self._store.put(key, value, callback); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | // getItem: Returns the item identified by its key.
 | ||
|  | LocalStorage.prototype.getItem = function (key, callback) { | ||
|  |   var self = this; | ||
|  |   self.sequentialize(callback, function (callback) { | ||
|  |     self._store.get(key, function (err, retval) { | ||
|  |       if (err) { | ||
|  |         return callback(err); | ||
|  |       } | ||
|  |       if (typeof retval === 'undefined' || retval === null) { | ||
|  |         // 'NotFound' error, consistent with LevelDOWN API
 | ||
|  |         return callback(new Error('NotFound')); | ||
|  |       } | ||
|  |       if (typeof retval !== 'undefined') { | ||
|  |         if (bufferRegex.test(retval)) { | ||
|  |           retval = d64.decode(retval.substring(bufferPrefix.length)); | ||
|  |         } else if (arrayBuffRegex.test(retval)) { | ||
|  |           // this type is kept for backwards
 | ||
|  |           // compatibility with older databases, but may be removed
 | ||
|  |           // after a major version bump
 | ||
|  |           retval = retval.substring(arrayBuffPrefix.length); | ||
|  |           // the ArrayBuffer constructor takes a byte length, not an array
|  |           // of byte values, so build the buffer via a Uint8Array
|  |           retval = new Uint8Array(atob(retval).split('').map(function (c) { | ||
|  |             return c.charCodeAt(0); | ||
|  |           })).buffer; | ||
|  |         } else if (uintRegex.test(retval)) { | ||
|  |           // ditto
 | ||
|  |           retval = retval.substring(uintPrefix.length); | ||
|  |           retval = new Uint8Array(atob(retval).split('').map(function (c) { | ||
|  |             return c.charCodeAt(0); | ||
|  |           })); | ||
|  |         } | ||
|  |       } | ||
|  |       callback(null, retval); | ||
|  |     }); | ||
|  |   }); | ||
|  | }; | ||
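|  | 
|  | // Round-trip sketch (illustrative, assuming a Node-style Buffer value):
|  | // setItem() above stores a Buffer as the string 'Buff:' + d64.encode(buf),
|  | // and getItem() strips that prefix and d64.decode()s the rest back to a
|  | // Buffer:
|  | //
|  | //   var store = new LocalStorage('example-db');   // hypothetical instance
|  | //   store.init(function () {
|  | //     store.setItem('att', someBuffer, function () {
|  | //       store.getItem('att', function (err, val) {
|  | //         // val is a Buffer with the same bytes as someBuffer
|  | //       });
|  | //     });
|  | //   });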
|  | 
 | ||
|  | // removeItem: Removes the item identified by its key.
 | ||
|  | LocalStorage.prototype.removeItem = function (key, callback) { | ||
|  |   var self = this; | ||
|  |   self.sequentialize(callback, function (callback) { | ||
|  |     var idx = utils.sortedIndexOf(self._keys, key); | ||
|  |     if (self._keys[idx] === key) { | ||
|  |       self._keys.splice(idx, 1); | ||
|  |       self._store.remove(key, function (err) { | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         callback(); | ||
|  |       }); | ||
|  |     } else { | ||
|  |       callback(); | ||
|  |     } | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LocalStorage.prototype.length = function (callback) { | ||
|  |   var self = this; | ||
|  |   self.sequentialize(callback, function (callback) { | ||
|  |     callback(null, self._keys.length); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | exports.LocalStorage = LocalStorage; | ||
|  | 
 | ||
|  | }).call(this)}).call(this,{"isBuffer":_dereq_(38)}) | ||
|  | },{"15":15,"38":38,"64":64,"66":66,"67":67}],66:[function(_dereq_,module,exports){ | ||
|  | (function (process,global){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | var argsarray = _dereq_(5); | ||
|  | var Queue = _dereq_(120); | ||
|  | 
 | ||
|  | // see http://stackoverflow.com/a/15349865/680742
 | ||
|  | var nextTick = global.setImmediate || process.nextTick; | ||
|  | 
 | ||
|  | function TaskQueue() { | ||
|  |   this.queue = new Queue(); | ||
|  |   this.running = false; | ||
|  | } | ||
|  | 
 | ||
|  | TaskQueue.prototype.add = function (fun, callback) { | ||
|  |   this.queue.push({fun: fun, callback: callback}); | ||
|  |   this.processNext(); | ||
|  | }; | ||
|  | 
 | ||
|  | TaskQueue.prototype.processNext = function () { | ||
|  |   var self = this; | ||
|  |   if (self.running || !self.queue.length) { | ||
|  |     return; | ||
|  |   } | ||
|  |   self.running = true; | ||
|  | 
 | ||
|  |   var task = self.queue.shift(); | ||
|  |   nextTick(function () { | ||
|  |     task.fun(argsarray(function (args) { | ||
|  |       task.callback.apply(null, args); | ||
|  |       self.running = false; | ||
|  |       self.processNext(); | ||
|  |     })); | ||
|  |   }); | ||
|  | }; | ||
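|  | 
|  | // Usage sketch (illustrative, not part of the original source): tasks run
|  | // strictly one at a time; each task gets a "done" callback, and the next
|  | // task only starts after that callback fires:
|  | //
|  | //   var q = new TaskQueue();
|  | //   q.add(function (done) { done(null, 'first'); }, function (err, res) { /* res === 'first' */ });
|  | //   q.add(function (done) { done(null, 'second'); }, function (err, res) { /* runs only after the first task finished */ });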
|  | 
 | ||
|  | module.exports = TaskQueue; | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{"120":120,"5":5,"73":73}],67:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | // taken from rvagg/memdown commit 2078b40
 | ||
|  | exports.sortedIndexOf = function(arr, item) { | ||
|  |   var low = 0; | ||
|  |   var high = arr.length; | ||
|  |   var mid; | ||
|  |   while (low < high) { | ||
|  |     mid = (low + high) >>> 1; | ||
|  |     if (arr[mid] < item) { | ||
|  |       low = mid + 1; | ||
|  |     } else { | ||
|  |       high = mid; | ||
|  |     } | ||
|  |   } | ||
|  |   return low; | ||
|  | }; | ||
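|  | 
|  | // Worked example (illustrative): binary search for the insertion point in a
|  | // sorted array, e.g. sortedIndexOf(['a', 'c', 'e'], 'c') === 1 (the index of
|  | // an existing element) and sortedIndexOf(['a', 'c', 'e'], 'd') === 2 (where
|  | // 'd' would be inserted), which is how setItem()/removeItem() keep _keys
|  | // sorted.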
|  | 
 | ||
|  | },{}],68:[function(_dereq_,module,exports){ | ||
|  | (function (Buffer){(function (){ | ||
|  | var toString = Object.prototype.toString | ||
|  | 
 | ||
|  | var isModern = ( | ||
|  |   typeof Buffer.alloc === 'function' && | ||
|  |   typeof Buffer.allocUnsafe === 'function' && | ||
|  |   typeof Buffer.from === 'function' | ||
|  | ) | ||
|  | 
 | ||
|  | function isArrayBuffer (input) { | ||
|  |   return toString.call(input).slice(8, -1) === 'ArrayBuffer' | ||
|  | } | ||
|  | 
 | ||
|  | function fromArrayBuffer (obj, byteOffset, length) { | ||
|  |   byteOffset >>>= 0 | ||
|  | 
 | ||
|  |   var maxLength = obj.byteLength - byteOffset | ||
|  | 
 | ||
|  |   if (maxLength < 0) { | ||
|  |     throw new RangeError("'offset' is out of bounds") | ||
|  |   } | ||
|  | 
 | ||
|  |   if (length === undefined) { | ||
|  |     length = maxLength | ||
|  |   } else { | ||
|  |     length >>>= 0 | ||
|  | 
 | ||
|  |     if (length > maxLength) { | ||
|  |       throw new RangeError("'length' is out of bounds") | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return isModern | ||
|  |     ? Buffer.from(obj.slice(byteOffset, byteOffset + length)) | ||
|  |     : new Buffer(new Uint8Array(obj.slice(byteOffset, byteOffset + length))) | ||
|  | } | ||
|  | 
 | ||
|  | function fromString (string, encoding) { | ||
|  |   if (typeof encoding !== 'string' || encoding === '') { | ||
|  |     encoding = 'utf8' | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!Buffer.isEncoding(encoding)) { | ||
|  |     throw new TypeError('"encoding" must be a valid string encoding') | ||
|  |   } | ||
|  | 
 | ||
|  |   return isModern | ||
|  |     ? Buffer.from(string, encoding) | ||
|  |     : new Buffer(string, encoding) | ||
|  | } | ||
|  | 
 | ||
|  | function bufferFrom (value, encodingOrOffset, length) { | ||
|  |   if (typeof value === 'number') { | ||
|  |     throw new TypeError('"value" argument must not be a number') | ||
|  |   } | ||
|  | 
 | ||
|  |   if (isArrayBuffer(value)) { | ||
|  |     return fromArrayBuffer(value, encodingOrOffset, length) | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof value === 'string') { | ||
|  |     return fromString(value, encodingOrOffset) | ||
|  |   } | ||
|  | 
 | ||
|  |   return isModern | ||
|  |     ? Buffer.from(value) | ||
|  |     : new Buffer(value) | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = bufferFrom | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(13).Buffer) | ||
|  | },{"13":13}],69:[function(_dereq_,module,exports){ | ||
|  | (function (root) { | ||
|  |   var localStorageMemory = {} | ||
|  |   var cache = {} | ||
|  | 
 | ||
|  |   /** | ||
|  |    * number of stored items. | ||
|  |    */ | ||
|  |   localStorageMemory.length = 0 | ||
|  | 
 | ||
|  |   /** | ||
|  |    * returns item for passed key, or null | ||
|  |    * | ||
|  |    * @param {String} key | ||
|  |    *       name of item to be returned | ||
|  |    * @returns {String|null} | ||
|  |    */ | ||
|  |   localStorageMemory.getItem = function (key) { | ||
|  |     if (key in cache) { | ||
|  |       return cache[key] | ||
|  |     } | ||
|  | 
 | ||
|  |     return null | ||
|  |   } | ||
|  | 
 | ||
|  |   /** | ||
|  |    * sets item for key to passed value, as String | ||
|  |    * | ||
|  |    * @param {String} key | ||
|  |    *       name of item to be set | ||
|  |    * @param {String} value | ||
|  |    *       value, will always be turned into a String | ||
|  |    * @returns {undefined} | ||
|  |    */ | ||
|  |   localStorageMemory.setItem = function (key, value) { | ||
|  |     if (typeof value === 'undefined') { | ||
|  |       localStorageMemory.removeItem(key) | ||
|  |     } else { | ||
|  |       if (!(cache.hasOwnProperty(key))) { | ||
|  |         localStorageMemory.length++ | ||
|  |       } | ||
|  | 
 | ||
|  |       cache[key] = '' + value | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   /** | ||
|  |    * removes item for passed key | ||
|  |    * | ||
|  |    * @param {String} key | ||
|  |    *       name of item to be removed | ||
|  |    * @returns {undefined} | ||
|  |    */ | ||
|  |   localStorageMemory.removeItem = function (key) { | ||
|  |     if (cache.hasOwnProperty(key)) { | ||
|  |       delete cache[key] | ||
|  |       localStorageMemory.length-- | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   /** | ||
|  |    * returns name of key at passed index | ||
|  |    * | ||
|  |    * @param {Number} index | ||
|  |    *       Position for key to be returned (starts at 0) | ||
|  |    * @returns {String|null} | ||
|  |    */ | ||
|  |   localStorageMemory.key = function (index) { | ||
|  |     return Object.keys(cache)[index] || null | ||
|  |   } | ||
|  | 
 | ||
|  |   /** | ||
|  |    * removes all stored items and sets length to 0 | ||
|  |    * | ||
|  |    * @returns {undefined} | ||
|  |    */ | ||
|  |   localStorageMemory.clear = function () { | ||
|  |     cache = {} | ||
|  |     localStorageMemory.length = 0 | ||
|  |   } | ||
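|  | 
|  |   // Usage sketch (illustrative, not part of the original source): the object
|  |   // mimics the window.localStorage API, e.g.
|  |   //
|  |   //   localStorageMemory.setItem('a', 1)    // stored as the string '1'
|  |   //   localStorageMemory.getItem('a')       // '1'
|  |   //   localStorageMemory.length             // 1
|  |   //   localStorageMemory.key(0)             // 'a'
|  |   //   localStorageMemory.removeItem('a')    // getItem('a') is now null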
|  | 
 | ||
|  |   if (typeof exports === 'object') { | ||
|  |     module.exports = localStorageMemory | ||
|  |   } else { | ||
|  |     root.localStorageMemory = localStorageMemory | ||
|  |   } | ||
|  | })(this) | ||
|  | 
 | ||
|  | },{}],70:[function(_dereq_,module,exports){ | ||
|  | (function (Buffer){(function (){ | ||
|  | 
 | ||
|  | exports.compare = function (a, b) { | ||
|  | 
 | ||
|  |   if(Buffer.isBuffer(a)) { | ||
|  |     var l = Math.min(a.length, b.length) | ||
|  |     for(var i = 0; i < l; i++) { | ||
|  |       var cmp = a[i] - b[i] | ||
|  |       if(cmp) return cmp | ||
|  |     } | ||
|  |     return a.length - b.length | ||
|  |   } | ||
|  | 
 | ||
|  |   return a < b ? -1 : a > b ? 1 : 0 | ||
|  | } | ||
|  | 
 | ||
|  | // to be compatible with the current abstract-leveldown tests,
|  | // treat undefined values and empty strings as "no bound".
|  | // I could use !!val but I want to permit numbers and booleans,
|  | // if possible.
|  | 
 | ||
|  | function isDef (val) { | ||
|  |   return val !== undefined && val !== '' | ||
|  | } | ||
|  | 
 | ||
|  | function has (range, name) { | ||
|  |   return Object.hasOwnProperty.call(range, name) | ||
|  | } | ||
|  | 
 | ||
|  | function hasKey(range, name) { | ||
|  |   return Object.hasOwnProperty.call(range, name) && name | ||
|  | } | ||
|  | 
 | ||
|  | var lowerBoundKey = exports.lowerBoundKey = function (range) { | ||
|  |     return ( | ||
|  |        hasKey(range, 'gt') | ||
|  |     || hasKey(range, 'gte') | ||
|  |     || hasKey(range, 'min') | ||
|  |     || (range.reverse ? hasKey(range, 'end') : hasKey(range, 'start')) | ||
|  |     || undefined | ||
|  |     ) | ||
|  | } | ||
|  | 
 | ||
|  | var lowerBound = exports.lowerBound = function (range, def) { | ||
|  |   var k = lowerBoundKey(range) | ||
|  |   return k ? range[k] : def | ||
|  | } | ||
|  | 
 | ||
|  | var lowerBoundInclusive = exports.lowerBoundInclusive = function (range) { | ||
|  |   return has(range, 'gt') ? false : true | ||
|  | } | ||
|  | 
 | ||
|  | var upperBoundInclusive = exports.upperBoundInclusive = | ||
|  |   function (range) { | ||
|  |     return (has(range, 'lt') /*&& !range.maxEx*/) ? false : true | ||
|  |   } | ||
|  | 
 | ||
|  | var lowerBoundExclusive = exports.lowerBoundExclusive = | ||
|  |   function (range) { | ||
|  |     return !lowerBoundInclusive(range) | ||
|  |   } | ||
|  | 
 | ||
|  | var upperBoundExclusive = exports.upperBoundExclusive = | ||
|  |   function (range) { | ||
|  |     return !upperBoundInclusive(range) | ||
|  |   } | ||
|  | 
 | ||
|  | var upperBoundKey = exports.upperBoundKey = function (range) { | ||
|  |     return ( | ||
|  |        hasKey(range, 'lt') | ||
|  |     || hasKey(range, 'lte') | ||
|  |     || hasKey(range, 'max') | ||
|  |     || (range.reverse ? hasKey(range, 'start') : hasKey(range, 'end')) | ||
|  |     || undefined | ||
|  |     ) | ||
|  | } | ||
|  | 
 | ||
|  | var upperBound = exports.upperBound = function (range, def) { | ||
|  |   var k = upperBoundKey(range) | ||
|  |   return k ? range[k] : def | ||
|  | } | ||
|  | 
 | ||
|  | exports.start = function (range, def) { | ||
|  |   return range.reverse ? upperBound(range, def) : lowerBound(range, def) | ||
|  | } | ||
|  | exports.end = function (range, def) { | ||
|  |   return range.reverse ? lowerBound(range, def) : upperBound(range, def) | ||
|  | } | ||
|  | exports.startInclusive = function (range) { | ||
|  |   return ( | ||
|  |     range.reverse | ||
|  |   ? upperBoundInclusive(range) | ||
|  |   : lowerBoundInclusive(range) | ||
|  |   ) | ||
|  | } | ||
|  | exports.endInclusive = function (range) { | ||
|  |   return ( | ||
|  |     range.reverse | ||
|  |   ? lowerBoundInclusive(range) | ||
|  |   : upperBoundInclusive(range) | ||
|  |   ) | ||
|  | } | ||
|  | 
 | ||
|  | function id (e) { return e } | ||
|  | 
 | ||
|  | exports.toLtgt = function (range, _range, map, lower, upper) { | ||
|  |   _range = _range || {} | ||
|  |   map = map || id | ||
|  |   var defaults = arguments.length > 3 | ||
|  |   var lb = exports.lowerBoundKey(range) | ||
|  |   var ub = exports.upperBoundKey(range) | ||
|  |   if(lb) { | ||
|  |     if(lb === 'gt') _range.gt = map(range.gt, false) | ||
|  |     else            _range.gte = map(range[lb], false) | ||
|  |   } | ||
|  |   else if(defaults) | ||
|  |     _range.gte = map(lower, false) | ||
|  | 
 | ||
|  |   if(ub) { | ||
|  |     if(ub === 'lt') _range.lt = map(range.lt, true) | ||
|  |     else            _range.lte = map(range[ub], true) | ||
|  |   } | ||
|  |   else if(defaults) | ||
|  |     _range.lte = map(upper, true) | ||
|  | 
 | ||
|  |   if(range.reverse != null) | ||
|  |     _range.reverse = !!range.reverse | ||
|  | 
 | ||
|  |   //if range was used mutably
 | ||
|  |   //(in level-sublevel it's part of an options object
 | ||
|  |   //that has more properties on it.)
 | ||
|  |   if(has(_range, 'max'))   delete _range.max | ||
|  |   if(has(_range, 'min'))   delete _range.min | ||
|  |   if(has(_range, 'start')) delete _range.start | ||
|  |   if(has(_range, 'end'))   delete _range.end | ||
|  | 
 | ||
|  |   return _range | ||
|  | } | ||
|  | 
 | ||
|  | exports.contains = function (range, key, compare) { | ||
|  |   compare = compare || exports.compare | ||
|  | 
 | ||
|  |   var lb = lowerBound(range) | ||
|  |   if(isDef(lb)) { | ||
|  |     var cmp = compare(key, lb) | ||
|  |     if(cmp < 0 || (cmp === 0 && lowerBoundExclusive(range))) | ||
|  |       return false | ||
|  |   } | ||
|  | 
 | ||
|  |   var ub = upperBound(range) | ||
|  |   if(isDef(ub)) { | ||
|  |     var cmp = compare(key, ub) | ||
|  |     if(cmp > 0 || (cmp === 0) && upperBoundExclusive(range)) | ||
|  |       return false | ||
|  |   } | ||
|  | 
 | ||
|  |   return true | ||
|  | } | ||
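|  | 
|  | // Worked example (illustrative): with the default compare,
|  | //   contains({gte: 'b', lt: 'd'}, 'b') === true   // lower bound is inclusive
|  | //   contains({gte: 'b', lt: 'd'}, 'c') === true
|  | //   contains({gte: 'b', lt: 'd'}, 'd') === false  // 'lt' makes the upper bound exclusive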
|  | 
 | ||
|  | exports.filter = function (range, compare) { | ||
|  |   return function (key) { | ||
|  |     return exports.contains(range, key, compare) | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | 
 | ||
|  | }).call(this)}).call(this,{"isBuffer":_dereq_(38)}) | ||
|  | },{"38":38}],71:[function(_dereq_,module,exports){ | ||
|  | /* | ||
|  | object-assign | ||
|  | (c) Sindre Sorhus | ||
|  | @license MIT | ||
|  | */ | ||
|  | 
 | ||
|  | 'use strict'; | ||
|  | /* eslint-disable no-unused-vars */ | ||
|  | var getOwnPropertySymbols = Object.getOwnPropertySymbols; | ||
|  | var hasOwnProperty = Object.prototype.hasOwnProperty; | ||
|  | var propIsEnumerable = Object.prototype.propertyIsEnumerable; | ||
|  | 
 | ||
|  | function toObject(val) { | ||
|  | 	if (val === null || val === undefined) { | ||
|  | 		throw new TypeError('Object.assign cannot be called with null or undefined'); | ||
|  | 	} | ||
|  | 
 | ||
|  | 	return Object(val); | ||
|  | } | ||
|  | 
 | ||
|  | function shouldUseNative() { | ||
|  | 	try { | ||
|  | 		if (!Object.assign) { | ||
|  | 			return false; | ||
|  | 		} | ||
|  | 
 | ||
|  | 		// Detect buggy property enumeration order in older V8 versions.
 | ||
|  | 
 | ||
|  | 		// https://bugs.chromium.org/p/v8/issues/detail?id=4118
 | ||
|  | 		var test1 = new String('abc');  // eslint-disable-line no-new-wrappers
 | ||
|  | 		test1[5] = 'de'; | ||
|  | 		if (Object.getOwnPropertyNames(test1)[0] === '5') { | ||
|  | 			return false; | ||
|  | 		} | ||
|  | 
 | ||
|  | 		// https://bugs.chromium.org/p/v8/issues/detail?id=3056
 | ||
|  | 		var test2 = {}; | ||
|  | 		for (var i = 0; i < 10; i++) { | ||
|  | 			test2['_' + String.fromCharCode(i)] = i; | ||
|  | 		} | ||
|  | 		var order2 = Object.getOwnPropertyNames(test2).map(function (n) { | ||
|  | 			return test2[n]; | ||
|  | 		}); | ||
|  | 		if (order2.join('') !== '0123456789') { | ||
|  | 			return false; | ||
|  | 		} | ||
|  | 
 | ||
|  | 		// https://bugs.chromium.org/p/v8/issues/detail?id=3056
 | ||
|  | 		var test3 = {}; | ||
|  | 		'abcdefghijklmnopqrst'.split('').forEach(function (letter) { | ||
|  | 			test3[letter] = letter; | ||
|  | 		}); | ||
|  | 		if (Object.keys(Object.assign({}, test3)).join('') !== | ||
|  | 				'abcdefghijklmnopqrst') { | ||
|  | 			return false; | ||
|  | 		} | ||
|  | 
 | ||
|  | 		return true; | ||
|  | 	} catch (err) { | ||
|  | 		// We don't expect any of the above to throw, but better to be safe.
 | ||
|  | 		return false; | ||
|  | 	} | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = shouldUseNative() ? Object.assign : function (target, source) { | ||
|  | 	var from; | ||
|  | 	var to = toObject(target); | ||
|  | 	var symbols; | ||
|  | 
 | ||
|  | 	for (var s = 1; s < arguments.length; s++) { | ||
|  | 		from = Object(arguments[s]); | ||
|  | 
 | ||
|  | 		for (var key in from) { | ||
|  | 			if (hasOwnProperty.call(from, key)) { | ||
|  | 				to[key] = from[key]; | ||
|  | 			} | ||
|  | 		} | ||
|  | 
 | ||
|  | 		if (getOwnPropertySymbols) { | ||
|  | 			symbols = getOwnPropertySymbols(from); | ||
|  | 			for (var i = 0; i < symbols.length; i++) { | ||
|  | 				if (propIsEnumerable.call(from, symbols[i])) { | ||
|  | 					to[symbols[i]] = from[symbols[i]]; | ||
|  | 				} | ||
|  | 			} | ||
|  | 		} | ||
|  | 	} | ||
|  | 
 | ||
|  | 	return to; | ||
|  | }; | ||
|  | 
 | ||
|  | },{}],72:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | if (typeof process === 'undefined' || | ||
|  |     !process.version || | ||
|  |     process.version.indexOf('v0.') === 0 || | ||
|  |     process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { | ||
|  |   module.exports = { nextTick: nextTick }; | ||
|  | } else { | ||
|  |   module.exports = process | ||
|  | } | ||
|  | 
 | ||
|  | function nextTick(fn, arg1, arg2, arg3) { | ||
|  |   if (typeof fn !== 'function') { | ||
|  |     throw new TypeError('"callback" argument must be a function'); | ||
|  |   } | ||
|  |   var len = arguments.length; | ||
|  |   var args, i; | ||
|  |   switch (len) { | ||
|  |   case 0: | ||
|  |   case 1: | ||
|  |     return process.nextTick(fn); | ||
|  |   case 2: | ||
|  |     return process.nextTick(function afterTickOne() { | ||
|  |       fn.call(null, arg1); | ||
|  |     }); | ||
|  |   case 3: | ||
|  |     return process.nextTick(function afterTickTwo() { | ||
|  |       fn.call(null, arg1, arg2); | ||
|  |     }); | ||
|  |   case 4: | ||
|  |     return process.nextTick(function afterTickThree() { | ||
|  |       fn.call(null, arg1, arg2, arg3); | ||
|  |     }); | ||
|  |   default: | ||
|  |     args = new Array(len - 1); | ||
|  |     i = 0; | ||
|  |     while (i < args.length) { | ||
|  |       args[i++] = arguments[i]; | ||
|  |     } | ||
|  |     return process.nextTick(function afterTick() { | ||
|  |       fn.apply(null, args); | ||
|  |     }); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"73":73}],73:[function(_dereq_,module,exports){ | ||
|  | // shim for using process in browser
 | ||
|  | var process = module.exports = {}; | ||
|  | 
 | ||
|  | // cached from whatever global is present so that test runners that stub it
 | ||
|  | // don't break things.  But we need to wrap it in a try catch in case it is
 | ||
|  | // wrapped in strict mode code which doesn't define any globals.  It's inside a
 | ||
|  | // function because try/catches deoptimize in certain engines.
 | ||
|  | 
 | ||
|  | var cachedSetTimeout; | ||
|  | var cachedClearTimeout; | ||
|  | 
 | ||
|  | function defaultSetTimout() { | ||
|  |     throw new Error('setTimeout has not been defined'); | ||
|  | } | ||
|  | function defaultClearTimeout () { | ||
|  |     throw new Error('clearTimeout has not been defined'); | ||
|  | } | ||
|  | (function () { | ||
|  |     try { | ||
|  |         if (typeof setTimeout === 'function') { | ||
|  |             cachedSetTimeout = setTimeout; | ||
|  |         } else { | ||
|  |             cachedSetTimeout = defaultSetTimout; | ||
|  |         } | ||
|  |     } catch (e) { | ||
|  |         cachedSetTimeout = defaultSetTimout; | ||
|  |     } | ||
|  |     try { | ||
|  |         if (typeof clearTimeout === 'function') { | ||
|  |             cachedClearTimeout = clearTimeout; | ||
|  |         } else { | ||
|  |             cachedClearTimeout = defaultClearTimeout; | ||
|  |         } | ||
|  |     } catch (e) { | ||
|  |         cachedClearTimeout = defaultClearTimeout; | ||
|  |     } | ||
|  | } ()) | ||
|  | function runTimeout(fun) { | ||
|  |     if (cachedSetTimeout === setTimeout) { | ||
|  |         // normal environments in sane situations
 | ||
|  |         return setTimeout(fun, 0); | ||
|  |     } | ||
|  |     // if setTimeout wasn't available but was later defined
 | ||
|  |     if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { | ||
|  |         cachedSetTimeout = setTimeout; | ||
|  |         return setTimeout(fun, 0); | ||
|  |     } | ||
|  |     try { | ||
|  |         // when somebody has screwed with setTimeout but there's no I.E. madness
 | ||
|  |         return cachedSetTimeout(fun, 0); | ||
|  |     } catch(e){ | ||
|  |         try { | ||
|  |             // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
 | ||
|  |             return cachedSetTimeout.call(null, fun, 0); | ||
|  |         } catch(e){ | ||
|  |             // same as above, but for a version of I.E. that must have the global object for 'this'; hopefully our context is correct, otherwise it will throw a global error
 | ||
|  |             return cachedSetTimeout.call(this, fun, 0); | ||
|  |         } | ||
|  |     } | ||
|  | 
 | ||
|  | 
 | ||
|  | } | ||
|  | function runClearTimeout(marker) { | ||
|  |     if (cachedClearTimeout === clearTimeout) { | ||
|  |         // normal environments in sane situations
 | ||
|  |         return clearTimeout(marker); | ||
|  |     } | ||
|  |     // if clearTimeout wasn't available but was later defined
 | ||
|  |     if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { | ||
|  |         cachedClearTimeout = clearTimeout; | ||
|  |         return clearTimeout(marker); | ||
|  |     } | ||
|  |     try { | ||
|  |         // when somebody has screwed with setTimeout but there's no I.E. madness
 | ||
|  |         return cachedClearTimeout(marker); | ||
|  |     } catch (e){ | ||
|  |         try { | ||
|  |             // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
 | ||
|  |             return cachedClearTimeout.call(null, marker); | ||
|  |         } catch (e){ | ||
|  |             // same as above, but for a version of I.E. that must have the global object for 'this'; hopefully our context is correct, otherwise it will throw a global error.
 | ||
|  |             // Some versions of I.E. have different rules for clearTimeout vs setTimeout
 | ||
|  |             return cachedClearTimeout.call(this, marker); | ||
|  |         } | ||
|  |     } | ||
|  | 
 | ||
|  | 
 | ||
|  | 
 | ||
|  | } | ||
|  | var queue = []; | ||
|  | var draining = false; | ||
|  | var currentQueue; | ||
|  | var queueIndex = -1; | ||
|  | 
 | ||
|  | function cleanUpNextTick() { | ||
|  |     if (!draining || !currentQueue) { | ||
|  |         return; | ||
|  |     } | ||
|  |     draining = false; | ||
|  |     if (currentQueue.length) { | ||
|  |         queue = currentQueue.concat(queue); | ||
|  |     } else { | ||
|  |         queueIndex = -1; | ||
|  |     } | ||
|  |     if (queue.length) { | ||
|  |         drainQueue(); | ||
|  |     } | ||
|  | } | ||
|  | 
 | ||
|  | function drainQueue() { | ||
|  |     if (draining) { | ||
|  |         return; | ||
|  |     } | ||
|  |     var timeout = runTimeout(cleanUpNextTick); | ||
|  |     draining = true; | ||
|  | 
 | ||
|  |     var len = queue.length; | ||
|  |     while(len) { | ||
|  |         currentQueue = queue; | ||
|  |         queue = []; | ||
|  |         while (++queueIndex < len) { | ||
|  |             if (currentQueue) { | ||
|  |                 currentQueue[queueIndex].run(); | ||
|  |             } | ||
|  |         } | ||
|  |         queueIndex = -1; | ||
|  |         len = queue.length; | ||
|  |     } | ||
|  |     currentQueue = null; | ||
|  |     draining = false; | ||
|  |     runClearTimeout(timeout); | ||
|  | } | ||
|  | 
 | ||
|  | process.nextTick = function (fun) { | ||
|  |     var args = new Array(arguments.length - 1); | ||
|  |     if (arguments.length > 1) { | ||
|  |         for (var i = 1; i < arguments.length; i++) { | ||
|  |             args[i - 1] = arguments[i]; | ||
|  |         } | ||
|  |     } | ||
|  |     queue.push(new Item(fun, args)); | ||
|  |     if (queue.length === 1 && !draining) { | ||
|  |         runTimeout(drainQueue); | ||
|  |     } | ||
|  | }; | ||
|  | 
 | ||
|  | // v8 likes predictable objects
 | ||
|  | function Item(fun, array) { | ||
|  |     this.fun = fun; | ||
|  |     this.array = array; | ||
|  | } | ||
|  | Item.prototype.run = function () { | ||
|  |     this.fun.apply(null, this.array); | ||
|  | }; | ||
|  | process.title = 'browser'; | ||
|  | process.browser = true; | ||
|  | process.env = {}; | ||
|  | process.argv = []; | ||
|  | process.version = ''; // empty string to avoid regexp issues
 | ||
|  | process.versions = {}; | ||
|  | 
 | ||
|  | function noop() {} | ||
|  | 
 | ||
|  | process.on = noop; | ||
|  | process.addListener = noop; | ||
|  | process.once = noop; | ||
|  | process.off = noop; | ||
|  | process.removeListener = noop; | ||
|  | process.removeAllListeners = noop; | ||
|  | process.emit = noop; | ||
|  | process.prependListener = noop; | ||
|  | process.prependOnceListener = noop; | ||
|  | 
 | ||
|  | process.listeners = function (name) { return [] } | ||
|  | 
 | ||
|  | process.binding = function (name) { | ||
|  |     throw new Error('process.binding is not supported'); | ||
|  | }; | ||
|  | 
 | ||
|  | process.cwd = function () { return '/' }; | ||
|  | process.chdir = function (dir) { | ||
|  |     throw new Error('process.chdir is not supported'); | ||
|  | }; | ||
|  | process.umask = function() { return 0; }; | ||
|  | 
 | ||
|  | },{}],74:[function(_dereq_,module,exports){ | ||
|  | /*! | ||
|  |   * prr | ||
|  |   * (c) 2013 Rod Vagg <rod@vagg.org> | ||
|  |   * https://github.com/rvagg/prr
 | ||
|  |   * License: MIT | ||
|  |   */ | ||
|  | 
 | ||
|  | (function (name, context, definition) { | ||
|  |   if (typeof module != 'undefined' && module.exports) | ||
|  |     module.exports = definition() | ||
|  |   else | ||
|  |     context[name] = definition() | ||
|  | })('prr', this, function() { | ||
|  | 
 | ||
|  |   var setProperty = typeof Object.defineProperty == 'function' | ||
|  |       ? function (obj, key, options) { | ||
|  |           Object.defineProperty(obj, key, options) | ||
|  |           return obj | ||
|  |         } | ||
|  |       : function (obj, key, options) { // < es5
 | ||
|  |           obj[key] = options.value | ||
|  |           return obj | ||
|  |         } | ||
|  | 
 | ||
|  |     , makeOptions = function (value, options) { | ||
|  |         var oo = typeof options == 'object' | ||
|  |           , os = !oo && typeof options == 'string' | ||
|  |           , op = function (p) { | ||
|  |               return oo | ||
|  |                 ? !!options[p] | ||
|  |                 : os | ||
|  |                   ? options.indexOf(p[0]) > -1 | ||
|  |                   : false | ||
|  |             } | ||
|  | 
 | ||
|  |         return { | ||
|  |             enumerable   : op('enumerable') | ||
|  |           , configurable : op('configurable') | ||
|  |           , writable     : op('writable') | ||
|  |           , value        : value | ||
|  |         } | ||
|  |       } | ||
|  | 
 | ||
|  |     , prr = function (obj, key, value, options) { | ||
|  |         var k | ||
|  | 
 | ||
|  |         options = makeOptions(value, options) | ||
|  | 
 | ||
|  |         if (typeof key == 'object') { | ||
|  |           for (k in key) { | ||
|  |             if (Object.hasOwnProperty.call(key, k)) { | ||
|  |               options.value = key[k] | ||
|  |               setProperty(obj, k, options) | ||
|  |             } | ||
|  |           } | ||
|  |           return obj | ||
|  |         } | ||
|  | 
 | ||
|  |         return setProperty(obj, key, options) | ||
|  |       } | ||
|  | 
 | ||
|  |   return prr | ||
|  | }) | ||
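|  | 
|  | // Usage sketch (illustrative, not part of the original source): prr defines a
|  | // property (via Object.defineProperty when available); options may be an
|  | // object or a shorthand string whose letters switch flags on by first letter
|  | // ('e' = enumerable, 'c' = configurable, 'w' = writable):
|  | //
|  | //   prr({}, 'answer', 42, 'ew')   // enumerable + writable, not configurable
|  | //   prr({}, 'hidden', 1)          // all three flags default to false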
|  | },{}],75:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | // a duplex stream is just a stream that is both readable and writable.
 | ||
|  | // Since JS doesn't have multiple prototypal inheritance, this class
 | ||
|  | // prototypally inherits from Readable, and then parasitically from
 | ||
|  | // Writable.
 | ||
|  | 
 | ||
|  | module.exports = Duplex; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var objectKeys = Object.keys || function (obj) { | ||
|  |   var keys = []; | ||
|  |   for (var key in obj) keys.push(key); | ||
|  |   return keys; | ||
|  | } | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = _dereq_(14); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | var Readable = _dereq_(77); | ||
|  | var Writable = _dereq_(79); | ||
|  | 
 | ||
|  | util.inherits(Duplex, Readable); | ||
|  | 
 | ||
|  | forEach(objectKeys(Writable.prototype), function(method) { | ||
|  |   if (!Duplex.prototype[method]) | ||
|  |     Duplex.prototype[method] = Writable.prototype[method]; | ||
|  | }); | ||
|  | 
 | ||
|  | function Duplex(options) { | ||
|  |   if (!(this instanceof Duplex)) | ||
|  |     return new Duplex(options); | ||
|  | 
 | ||
|  |   Readable.call(this, options); | ||
|  |   Writable.call(this, options); | ||
|  | 
 | ||
|  |   if (options && options.readable === false) | ||
|  |     this.readable = false; | ||
|  | 
 | ||
|  |   if (options && options.writable === false) | ||
|  |     this.writable = false; | ||
|  | 
 | ||
|  |   this.allowHalfOpen = true; | ||
|  |   if (options && options.allowHalfOpen === false) | ||
|  |     this.allowHalfOpen = false; | ||
|  | 
 | ||
|  |   this.once('end', onend); | ||
|  | } | ||
|  | 
 | ||
|  | // the no-half-open enforcer
 | ||
|  | function onend() { | ||
|  |   // if we allow half-open state, or if the writable side ended,
 | ||
|  |   // then we're ok.
 | ||
|  |   if (this.allowHalfOpen || this._writableState.ended) | ||
|  |     return; | ||
|  | 
 | ||
|  |   // no more data can be written.
 | ||
|  |   // But allow more writes to happen in this tick.
 | ||
|  |   process.nextTick(this.end.bind(this)); | ||
|  | } | ||
|  | 
 | ||
|  | function forEach (xs, f) { | ||
|  |   for (var i = 0, l = xs.length; i < l; i++) { | ||
|  |     f(xs[i], i); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"14":14,"37":37,"73":73,"77":77,"79":79}],76:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | // a passthrough stream.
 | ||
|  | // basically just the most minimal sort of Transform stream.
 | ||
|  | // Every written chunk gets output as-is.
 | ||
|  | 
 | ||
|  | module.exports = PassThrough; | ||
|  | 
 | ||
|  | var Transform = _dereq_(78); | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = _dereq_(14); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | util.inherits(PassThrough, Transform); | ||
|  | 
 | ||
|  | function PassThrough(options) { | ||
|  |   if (!(this instanceof PassThrough)) | ||
|  |     return new PassThrough(options); | ||
|  | 
 | ||
|  |   Transform.call(this, options); | ||
|  | } | ||
|  | 
 | ||
|  | PassThrough.prototype._transform = function(chunk, encoding, cb) { | ||
|  |   cb(null, chunk); | ||
|  | }; | ||
|  | 
 | ||
|  | },{"14":14,"37":37,"78":78}],77:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | module.exports = Readable; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var isArray = _dereq_(80); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var Buffer = _dereq_(13).Buffer; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | Readable.ReadableState = ReadableState; | ||
|  | 
 | ||
|  | var EE = _dereq_(26).EventEmitter; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | if (!EE.listenerCount) EE.listenerCount = function(emitter, type) { | ||
|  |   return emitter.listeners(type).length; | ||
|  | }; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | var Stream = _dereq_(85); | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = _dereq_(14); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | var StringDecoder; | ||
|  | 
 | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var debug = _dereq_(11); | ||
|  | if (debug && debug.debuglog) { | ||
|  |   debug = debug.debuglog('stream'); | ||
|  | } else { | ||
|  |   debug = function () {}; | ||
|  | } | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | 
 | ||
|  | util.inherits(Readable, Stream); | ||
|  | 
 | ||
|  | function ReadableState(options, stream) { | ||
|  |   var Duplex = _dereq_(75); | ||
|  | 
 | ||
|  |   options = options || {}; | ||
|  | 
 | ||
|  |   // the point at which it stops calling _read() to fill the buffer
 | ||
|  |   // Note: 0 is a valid value, means "don't call _read preemptively ever"
 | ||
|  |   var hwm = options.highWaterMark; | ||
|  |   var defaultHwm = options.objectMode ? 16 : 16 * 1024; | ||
|  |   this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm; | ||
|  | 
 | ||
|  |   // cast to ints.
 | ||
|  |   this.highWaterMark = ~~this.highWaterMark; | ||
|  | 
 | ||
|  |   this.buffer = []; | ||
|  |   this.length = 0; | ||
|  |   this.pipes = null; | ||
|  |   this.pipesCount = 0; | ||
|  |   this.flowing = null; | ||
|  |   this.ended = false; | ||
|  |   this.endEmitted = false; | ||
|  |   this.reading = false; | ||
|  | 
 | ||
|  |   // a flag to be able to tell if the onwrite cb is called immediately,
 | ||
|  |   // or on a later tick.  We set this to true at first, because any
 | ||
|  |   // actions that shouldn't happen until "later" should generally also
 | ||
|  |   // not happen before the first write call.
 | ||
|  |   this.sync = true; | ||
|  | 
 | ||
|  |   // whenever we return null, then we set a flag to say
 | ||
|  |   // that we're awaiting a 'readable' event emission.
 | ||
|  |   this.needReadable = false; | ||
|  |   this.emittedReadable = false; | ||
|  |   this.readableListening = false; | ||
|  | 
 | ||
|  | 
 | ||
|  |   // object stream flag. Used to make read(n) ignore n and to
 | ||
|  |   // make all the buffer merging and length checks go away
 | ||
|  |   this.objectMode = !!options.objectMode; | ||
|  | 
 | ||
|  |   if (stream instanceof Duplex) | ||
|  |     this.objectMode = this.objectMode || !!options.readableObjectMode; | ||
|  | 
 | ||
|  |   // Crypto is kind of old and crusty.  Historically, its default string
 | ||
|  |   // encoding is 'binary' so we have to make this configurable.
 | ||
|  |   // Everything else in the universe uses 'utf8', though.
 | ||
|  |   this.defaultEncoding = options.defaultEncoding || 'utf8'; | ||
|  | 
 | ||
|  |   // when piping, we only care about 'readable' events that happen
 | ||
|  |   // after read()ing all the bytes and not getting any pushback.
 | ||
|  |   this.ranOut = false; | ||
|  | 
 | ||
|  |   // the number of writers that are awaiting a drain event in .pipe()s
 | ||
|  |   this.awaitDrain = 0; | ||
|  | 
 | ||
|  |   // if true, a maybeReadMore has been scheduled
 | ||
|  |   this.readingMore = false; | ||
|  | 
 | ||
|  |   this.decoder = null; | ||
|  |   this.encoding = null; | ||
|  |   if (options.encoding) { | ||
|  |     if (!StringDecoder) | ||
|  |       StringDecoder = _dereq_(81).StringDecoder; | ||
|  |     this.decoder = new StringDecoder(options.encoding); | ||
|  |     this.encoding = options.encoding; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function Readable(options) { | ||
|  |   var Duplex = _dereq_(75); | ||
|  | 
 | ||
|  |   if (!(this instanceof Readable)) | ||
|  |     return new Readable(options); | ||
|  | 
 | ||
|  |   this._readableState = new ReadableState(options, this); | ||
|  | 
 | ||
|  |   // legacy
 | ||
|  |   this.readable = true; | ||
|  | 
 | ||
|  |   Stream.call(this); | ||
|  | } | ||
|  | 
 | ||
|  | // Manually shove something into the read() buffer.
 | ||
|  | // This returns true if the highWaterMark has not been hit yet,
 | ||
|  | // similar to how Writable.write() returns true if you should
 | ||
|  | // write() some more.
 | ||
|  | Readable.prototype.push = function(chunk, encoding) { | ||
|  |   var state = this._readableState; | ||
|  | 
 | ||
|  |   if (util.isString(chunk) && !state.objectMode) { | ||
|  |     encoding = encoding || state.defaultEncoding; | ||
|  |     if (encoding !== state.encoding) { | ||
|  |       chunk = new Buffer(chunk, encoding); | ||
|  |       encoding = ''; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return readableAddChunk(this, state, chunk, encoding, false); | ||
|  | }; | ||
|  | 
 | ||
|  | // Unshift should *always* be something directly out of read()
 | ||
|  | Readable.prototype.unshift = function(chunk) { | ||
|  |   var state = this._readableState; | ||
|  |   return readableAddChunk(this, state, chunk, '', true); | ||
|  | }; | ||
|  | 
 | ||
|  | function readableAddChunk(stream, state, chunk, encoding, addToFront) { | ||
|  |   var er = chunkInvalid(state, chunk); | ||
|  |   if (er) { | ||
|  |     stream.emit('error', er); | ||
|  |   } else if (util.isNullOrUndefined(chunk)) { | ||
|  |     state.reading = false; | ||
|  |     if (!state.ended) | ||
|  |       onEofChunk(stream, state); | ||
|  |   } else if (state.objectMode || chunk && chunk.length > 0) { | ||
|  |     if (state.ended && !addToFront) { | ||
|  |       var e = new Error('stream.push() after EOF'); | ||
|  |       stream.emit('error', e); | ||
|  |     } else if (state.endEmitted && addToFront) { | ||
|  |       var e = new Error('stream.unshift() after end event'); | ||
|  |       stream.emit('error', e); | ||
|  |     } else { | ||
|  |       if (state.decoder && !addToFront && !encoding) | ||
|  |         chunk = state.decoder.write(chunk); | ||
|  | 
 | ||
|  |       if (!addToFront) | ||
|  |         state.reading = false; | ||
|  | 
 | ||
|  |       // if we want the data now, just emit it.
 | ||
|  |       if (state.flowing && state.length === 0 && !state.sync) { | ||
|  |         stream.emit('data', chunk); | ||
|  |         stream.read(0); | ||
|  |       } else { | ||
|  |         // update the buffer info.
 | ||
|  |         state.length += state.objectMode ? 1 : chunk.length; | ||
|  |         if (addToFront) | ||
|  |           state.buffer.unshift(chunk); | ||
|  |         else | ||
|  |           state.buffer.push(chunk); | ||
|  | 
 | ||
|  |         if (state.needReadable) | ||
|  |           emitReadable(stream); | ||
|  |       } | ||
|  | 
 | ||
|  |       maybeReadMore(stream, state); | ||
|  |     } | ||
|  |   } else if (!addToFront) { | ||
|  |     state.reading = false; | ||
|  |   } | ||
|  | 
 | ||
|  |   return needMoreData(state); | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | 
 | ||
|  | // if it's past the high water mark, we can push in some more.
 | ||
|  | // Also, if we have no data yet, we can stand some
 | ||
|  | // more bytes.  This is to work around cases where hwm=0,
 | ||
|  | // such as the repl.  Also, if the push() triggered a
 | ||
|  | // readable event, and the user called read(largeNumber) such that
 | ||
|  | // needReadable was set, then we ought to push more, so that another
 | ||
|  | // 'readable' event will be triggered.
 | ||
|  | function needMoreData(state) { | ||
|  |   return !state.ended && | ||
|  |          (state.needReadable || | ||
|  |           state.length < state.highWaterMark || | ||
|  |           state.length === 0); | ||
|  | } | ||
|  | 
 | ||
|  | // backwards compatibility.
 | ||
|  | Readable.prototype.setEncoding = function(enc) { | ||
|  |   if (!StringDecoder) | ||
|  |     StringDecoder = _dereq_(81).StringDecoder; | ||
|  |   this._readableState.decoder = new StringDecoder(enc); | ||
|  |   this._readableState.encoding = enc; | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | // Don't raise the hwm above MAX_HWM (0x800000 bytes, i.e. 8MB)
 | ||
|  | var MAX_HWM = 0x800000; | ||
|  | function roundUpToNextPowerOf2(n) { | ||
|  |   if (n >= MAX_HWM) { | ||
|  |     n = MAX_HWM; | ||
|  |   } else { | ||
|  |     // Get the next highest power of 2
 | ||
|  |     n--; | ||
|  |     for (var p = 1; p < 32; p <<= 1) n |= n >> p; | ||
|  |     n++; | ||
|  |   } | ||
|  |   return n; | ||
|  | } | ||
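|  | 
|  | // Worked example (illustrative): the loop ORs every lower bit into n and the
|  | // final n++ carries up to a power of two, e.g. roundUpToNextPowerOf2(5) === 8,
|  | // roundUpToNextPowerOf2(8) === 8 and roundUpToNextPowerOf2(9) === 16, capped
|  | // at MAX_HWM for larger requests.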
|  | 
 | ||
|  | function howMuchToRead(n, state) { | ||
|  |   if (state.length === 0 && state.ended) | ||
|  |     return 0; | ||
|  | 
 | ||
|  |   if (state.objectMode) | ||
|  |     return n === 0 ? 0 : 1; | ||
|  | 
 | ||
|  |   if (isNaN(n) || util.isNull(n)) { | ||
|  |     // only flow one buffer at a time
 | ||
|  |     if (state.flowing && state.buffer.length) | ||
|  |       return state.buffer[0].length; | ||
|  |     else | ||
|  |       return state.length; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (n <= 0) | ||
|  |     return 0; | ||
|  | 
 | ||
|  |   // If we're asking for more than the target buffer level,
 | ||
|  |   // then raise the water mark.  Bump up to the next highest
 | ||
|  |   // power of 2, to prevent increasing it excessively in tiny
 | ||
|  |   // amounts.
 | ||
|  |   if (n > state.highWaterMark) | ||
|  |     state.highWaterMark = roundUpToNextPowerOf2(n); | ||
|  | 
 | ||
|  |   // don't have that much.  return null, unless we've ended.
 | ||
|  |   if (n > state.length) { | ||
|  |     if (!state.ended) { | ||
|  |       state.needReadable = true; | ||
|  |       return 0; | ||
|  |     } else | ||
|  |       return state.length; | ||
|  |   } | ||
|  | 
 | ||
|  |   return n; | ||
|  | } | ||
|  | 
 | ||
|  | // you can override either this method, or the async _read(n) below.
 | ||
|  | Readable.prototype.read = function(n) { | ||
|  |   debug('read', n); | ||
|  |   var state = this._readableState; | ||
|  |   var nOrig = n; | ||
|  | 
 | ||
|  |   if (!util.isNumber(n) || n > 0) | ||
|  |     state.emittedReadable = false; | ||
|  | 
 | ||
|  |   // if we're doing read(0) to trigger a readable event, but we
 | ||
|  |   // already have a bunch of data in the buffer, then just trigger
 | ||
|  |   // the 'readable' event and move on.
 | ||
|  |   if (n === 0 && | ||
|  |       state.needReadable && | ||
|  |       (state.length >= state.highWaterMark || state.ended)) { | ||
|  |     debug('read: emitReadable', state.length, state.ended); | ||
|  |     if (state.length === 0 && state.ended) | ||
|  |       endReadable(this); | ||
|  |     else | ||
|  |       emitReadable(this); | ||
|  |     return null; | ||
|  |   } | ||
|  | 
 | ||
|  |   n = howMuchToRead(n, state); | ||
|  | 
 | ||
|  |   // if we've ended, and we're now clear, then finish it up.
 | ||
|  |   if (n === 0 && state.ended) { | ||
|  |     if (state.length === 0) | ||
|  |       endReadable(this); | ||
|  |     return null; | ||
|  |   } | ||
|  | 
 | ||
|  |   // All the actual chunk generation logic needs to be
 | ||
|  |   // *below* the call to _read.  The reason is that in certain
 | ||
|  |   // synthetic stream cases, such as passthrough streams, _read
 | ||
|  |   // may be a completely synchronous operation which may change
 | ||
|  |   // the state of the read buffer, providing enough data when
 | ||
|  |   // before there was *not* enough.
 | ||
|  |   //
 | ||
|  |   // So, the steps are:
 | ||
|  |   // 1. Figure out what the state of things will be after we do
 | ||
|  |   // a read from the buffer.
 | ||
|  |   //
 | ||
|  |   // 2. If that resulting state will trigger a _read, then call _read.
 | ||
|  |   // Note that this may be asynchronous, or synchronous.  Yes, it is
 | ||
|  |   // deeply ugly to write APIs this way, but that still doesn't mean
 | ||
|  |   // that the Readable class should behave improperly, as streams are
 | ||
|  |   // designed to be sync/async agnostic.
 | ||
|  |   // Take note if the _read call is sync or async (ie, if the read call
 | ||
|  |   // has returned yet), so that we know whether or not it's safe to emit
 | ||
|  |   // 'readable' etc.
 | ||
|  |   //
 | ||
|  |   // 3. Actually pull the requested chunks out of the buffer and return.
 | ||
|  | 
 | ||
|  |   // if we need a readable event, then we need to do some reading.
 | ||
|  |   var doRead = state.needReadable; | ||
|  |   debug('need readable', doRead); | ||
|  | 
 | ||
|  |   // if we currently have less than the highWaterMark, then also read some
 | ||
|  |   if (state.length === 0 || state.length - n < state.highWaterMark) { | ||
|  |     doRead = true; | ||
|  |     debug('length less than watermark', doRead); | ||
|  |   } | ||
|  | 
 | ||
|  |   // however, if we've ended, then there's no point, and if we're already
 | ||
|  |   // reading, then it's unnecessary.
 | ||
|  |   if (state.ended || state.reading) { | ||
|  |     doRead = false; | ||
|  |     debug('reading or ended', doRead); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (doRead) { | ||
|  |     debug('do read'); | ||
|  |     state.reading = true; | ||
|  |     state.sync = true; | ||
|  |     // if the length is currently zero, then we *need* a readable event.
 | ||
|  |     if (state.length === 0) | ||
|  |       state.needReadable = true; | ||
|  |     // call internal read method
 | ||
|  |     this._read(state.highWaterMark); | ||
|  |     state.sync = false; | ||
|  |   } | ||
|  | 
 | ||
|  |   // If _read pushed data synchronously, then `reading` will be false,
 | ||
|  |   // and we need to re-evaluate how much data we can return to the user.
 | ||
|  |   if (doRead && !state.reading) | ||
|  |     n = howMuchToRead(nOrig, state); | ||
|  | 
 | ||
|  |   var ret; | ||
|  |   if (n > 0) | ||
|  |     ret = fromList(n, state); | ||
|  |   else | ||
|  |     ret = null; | ||
|  | 
 | ||
|  |   if (util.isNull(ret)) { | ||
|  |     state.needReadable = true; | ||
|  |     n = 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   state.length -= n; | ||
|  | 
 | ||
|  |   // If we have nothing in the buffer, then we want to know
 | ||
|  |   // as soon as we *do* get something into the buffer.
 | ||
|  |   if (state.length === 0 && !state.ended) | ||
|  |     state.needReadable = true; | ||
|  | 
 | ||
|  |   // If we tried to read() past the EOF, then emit end on the next tick.
 | ||
|  |   if (nOrig !== n && state.ended && state.length === 0) | ||
|  |     endReadable(this); | ||
|  | 
 | ||
|  |   if (!util.isNull(ret)) | ||
|  |     this.emit('data', ret); | ||
|  | 
 | ||
|  |   return ret; | ||
|  | }; | ||
|  | 
 | ||
|  | function chunkInvalid(state, chunk) { | ||
|  |   var er = null; | ||
|  |   if (!util.isBuffer(chunk) && | ||
|  |       !util.isString(chunk) && | ||
|  |       !util.isNullOrUndefined(chunk) && | ||
|  |       !state.objectMode) { | ||
|  |     er = new TypeError('Invalid non-string/buffer chunk'); | ||
|  |   } | ||
|  |   return er; | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | function onEofChunk(stream, state) { | ||
|  |   if (state.decoder && !state.ended) { | ||
|  |     var chunk = state.decoder.end(); | ||
|  |     if (chunk && chunk.length) { | ||
|  |       state.buffer.push(chunk); | ||
|  |       state.length += state.objectMode ? 1 : chunk.length; | ||
|  |     } | ||
|  |   } | ||
|  |   state.ended = true; | ||
|  | 
 | ||
|  |   // emit 'readable' now to make sure it gets picked up.
 | ||
|  |   emitReadable(stream); | ||
|  | } | ||
|  | 
 | ||
|  | // Don't emit readable right away in sync mode, because this can trigger
 | ||
|  | // another read() call => stack overflow.  This way, it might trigger
 | ||
|  | // a nextTick recursion warning, but that's not so bad.
 | ||
|  | function emitReadable(stream) { | ||
|  |   var state = stream._readableState; | ||
|  |   state.needReadable = false; | ||
|  |   if (!state.emittedReadable) { | ||
|  |     debug('emitReadable', state.flowing); | ||
|  |     state.emittedReadable = true; | ||
|  |     if (state.sync) | ||
|  |       process.nextTick(function() { | ||
|  |         emitReadable_(stream); | ||
|  |       }); | ||
|  |     else | ||
|  |       emitReadable_(stream); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function emitReadable_(stream) { | ||
|  |   debug('emit readable'); | ||
|  |   stream.emit('readable'); | ||
|  |   flow(stream); | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | // at this point, the user has presumably seen the 'readable' event,
 | ||
|  | // and called read() to consume some data.  that may have triggered
 | ||
|  | // in turn another _read(n) call, in which case reading = true if
 | ||
|  | // it's in progress.
 | ||
|  | // However, if we're not ended, or reading, and the length < hwm,
 | ||
|  | // then go ahead and try to read some more preemptively.
 | ||
|  | function maybeReadMore(stream, state) { | ||
|  |   if (!state.readingMore) { | ||
|  |     state.readingMore = true; | ||
|  |     process.nextTick(function() { | ||
|  |       maybeReadMore_(stream, state); | ||
|  |     }); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function maybeReadMore_(stream, state) { | ||
|  |   var len = state.length; | ||
|  |   while (!state.reading && !state.flowing && !state.ended && | ||
|  |          state.length < state.highWaterMark) { | ||
|  |     debug('maybeReadMore read 0'); | ||
|  |     stream.read(0); | ||
|  |     if (len === state.length) | ||
|  |       // didn't get any data, stop spinning.
 | ||
|  |       break; | ||
|  |     else | ||
|  |       len = state.length; | ||
|  |   } | ||
|  |   state.readingMore = false; | ||
|  | } | ||
|  | 
 | ||
|  | // abstract method.  to be overridden in specific implementation classes.
 | ||
|  | // call cb(er, data) where data is <= n in length.
 | ||
|  | // for virtual (non-string, non-buffer) streams, "length" is somewhat
 | ||
|  | // arbitrary, and perhaps not very meaningful.
 | ||
|  | Readable.prototype._read = function(n) { | ||
|  |   this.emit('error', new Error('not implemented')); | ||
|  | }; | ||
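|  | // Illustrative sketch (commented out): concrete Readables in this version
|  | // of the library typically satisfy _read by pushing data (and push(null)
|  | // to signal EOF) rather than using a callback. `Counter` is a hypothetical
|  | // subclass name.
|  | //
|  | //   function Counter(opts) {
|  | //     Readable.call(this, opts);
|  | //     this._n = 0;
|  | //   }
|  | //   util.inherits(Counter, Readable);
|  | //   Counter.prototype._read = function(size) {
|  | //     this._n += 1;
|  | //     this.push(this._n <= 3 ? String(this._n) : null);
|  | //   };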
|  | 
 | ||
|  | Readable.prototype.pipe = function(dest, pipeOpts) { | ||
|  |   var src = this; | ||
|  |   var state = this._readableState; | ||
|  | 
 | ||
|  |   switch (state.pipesCount) { | ||
|  |     case 0: | ||
|  |       state.pipes = dest; | ||
|  |       break; | ||
|  |     case 1: | ||
|  |       state.pipes = [state.pipes, dest]; | ||
|  |       break; | ||
|  |     default: | ||
|  |       state.pipes.push(dest); | ||
|  |       break; | ||
|  |   } | ||
|  |   state.pipesCount += 1; | ||
|  |   debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); | ||
|  | 
 | ||
|  |   var doEnd = (!pipeOpts || pipeOpts.end !== false) && | ||
|  |               dest !== process.stdout && | ||
|  |               dest !== process.stderr; | ||
|  | 
 | ||
|  |   var endFn = doEnd ? onend : cleanup; | ||
|  |   if (state.endEmitted) | ||
|  |     process.nextTick(endFn); | ||
|  |   else | ||
|  |     src.once('end', endFn); | ||
|  | 
 | ||
|  |   dest.on('unpipe', onunpipe); | ||
|  |   function onunpipe(readable) { | ||
|  |     debug('onunpipe'); | ||
|  |     if (readable === src) { | ||
|  |       cleanup(); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   function onend() { | ||
|  |     debug('onend'); | ||
|  |     dest.end(); | ||
|  |   } | ||
|  | 
 | ||
|  |   // when the dest drains, it reduces the awaitDrain counter
 | ||
|  |   // on the source.  This would be more elegant with a .once()
 | ||
|  |   // handler in flow(), but adding and removing repeatedly is
 | ||
|  |   // too slow.
 | ||
|  |   var ondrain = pipeOnDrain(src); | ||
|  |   dest.on('drain', ondrain); | ||
|  | 
 | ||
|  |   function cleanup() { | ||
|  |     debug('cleanup'); | ||
|  |     // cleanup event handlers once the pipe is broken
 | ||
|  |     dest.removeListener('close', onclose); | ||
|  |     dest.removeListener('finish', onfinish); | ||
|  |     dest.removeListener('drain', ondrain); | ||
|  |     dest.removeListener('error', onerror); | ||
|  |     dest.removeListener('unpipe', onunpipe); | ||
|  |     src.removeListener('end', onend); | ||
|  |     src.removeListener('end', cleanup); | ||
|  |     src.removeListener('data', ondata); | ||
|  | 
 | ||
|  |     // if the reader is waiting for a drain event from this
 | ||
|  |     // specific writer, then it would cause it to never start
 | ||
|  |     // flowing again.
 | ||
|  |     // So, if this is awaiting a drain, then we just call it now.
 | ||
|  |     // If we don't know, then assume that we are waiting for one.
 | ||
|  |     if (state.awaitDrain && | ||
|  |         (!dest._writableState || dest._writableState.needDrain)) | ||
|  |       ondrain(); | ||
|  |   } | ||
|  | 
 | ||
|  |   src.on('data', ondata); | ||
|  |   function ondata(chunk) { | ||
|  |     debug('ondata'); | ||
|  |     var ret = dest.write(chunk); | ||
|  |     if (false === ret) { | ||
|  |       debug('false write response, pause', | ||
|  |             src._readableState.awaitDrain); | ||
|  |       src._readableState.awaitDrain++; | ||
|  |       src.pause(); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   // if the dest has an error, then stop piping into it.
 | ||
|  |   // however, don't suppress the throwing behavior for this.
 | ||
|  |   function onerror(er) { | ||
|  |     debug('onerror', er); | ||
|  |     unpipe(); | ||
|  |     dest.removeListener('error', onerror); | ||
|  |     if (EE.listenerCount(dest, 'error') === 0) | ||
|  |       dest.emit('error', er); | ||
|  |   } | ||
|  |   // This is a brutally ugly hack to make sure that our error handler
 | ||
|  |   // is attached before any userland ones.  NEVER DO THIS.
 | ||
|  |   if (!dest._events || !dest._events.error) | ||
|  |     dest.on('error', onerror); | ||
|  |   else if (isArray(dest._events.error)) | ||
|  |     dest._events.error.unshift(onerror); | ||
|  |   else | ||
|  |     dest._events.error = [onerror, dest._events.error]; | ||
|  | 
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   // Both close and finish should trigger unpipe, but only once.
 | ||
|  |   function onclose() { | ||
|  |     dest.removeListener('finish', onfinish); | ||
|  |     unpipe(); | ||
|  |   } | ||
|  |   dest.once('close', onclose); | ||
|  |   function onfinish() { | ||
|  |     debug('onfinish'); | ||
|  |     dest.removeListener('close', onclose); | ||
|  |     unpipe(); | ||
|  |   } | ||
|  |   dest.once('finish', onfinish); | ||
|  | 
 | ||
|  |   function unpipe() { | ||
|  |     debug('unpipe'); | ||
|  |     src.unpipe(dest); | ||
|  |   } | ||
|  | 
 | ||
|  |   // tell the dest that it's being piped to
 | ||
|  |   dest.emit('pipe', src); | ||
|  | 
 | ||
|  |   // start the flow if it hasn't been started already.
 | ||
|  |   if (!state.flowing) { | ||
|  |     debug('pipe resume'); | ||
|  |     src.resume(); | ||
|  |   } | ||
|  | 
 | ||
|  |   return dest; | ||
|  | }; | ||
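|  | // Illustrative sketch (commented out): how the wiring above is used. A
|  | // write() that returns false bumps awaitDrain and pauses the source; the
|  | // destination's 'drain' event (see pipeOnDrain below) restarts the flow.
|  | // `src` and `dest` are hypothetical streams.
|  | //
|  | //   src.pipe(dest);                   // dest.end() is called when src ends
|  | //   src.pipe(dest, { end: false });   // keep dest open after src ends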
|  | 
 | ||
|  | function pipeOnDrain(src) { | ||
|  |   return function() { | ||
|  |     var state = src._readableState; | ||
|  |     debug('pipeOnDrain', state.awaitDrain); | ||
|  |     if (state.awaitDrain) | ||
|  |       state.awaitDrain--; | ||
|  |     if (state.awaitDrain === 0 && EE.listenerCount(src, 'data')) { | ||
|  |       state.flowing = true; | ||
|  |       flow(src); | ||
|  |     } | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | Readable.prototype.unpipe = function(dest) { | ||
|  |   var state = this._readableState; | ||
|  | 
 | ||
|  |   // if we're not piping anywhere, then do nothing.
 | ||
|  |   if (state.pipesCount === 0) | ||
|  |     return this; | ||
|  | 
 | ||
|  |   // just one destination.  most common case.
 | ||
|  |   if (state.pipesCount === 1) { | ||
|  |     // passed in one, but it's not the right one.
 | ||
|  |     if (dest && dest !== state.pipes) | ||
|  |       return this; | ||
|  | 
 | ||
|  |     if (!dest) | ||
|  |       dest = state.pipes; | ||
|  | 
 | ||
|  |     // got a match.
 | ||
|  |     state.pipes = null; | ||
|  |     state.pipesCount = 0; | ||
|  |     state.flowing = false; | ||
|  |     if (dest) | ||
|  |       dest.emit('unpipe', this); | ||
|  |     return this; | ||
|  |   } | ||
|  | 
 | ||
|  |   // slow case. multiple pipe destinations.
 | ||
|  | 
 | ||
|  |   if (!dest) { | ||
|  |     // remove all.
 | ||
|  |     var dests = state.pipes; | ||
|  |     var len = state.pipesCount; | ||
|  |     state.pipes = null; | ||
|  |     state.pipesCount = 0; | ||
|  |     state.flowing = false; | ||
|  | 
 | ||
|  |     for (var i = 0; i < len; i++) | ||
|  |       dests[i].emit('unpipe', this); | ||
|  |     return this; | ||
|  |   } | ||
|  | 
 | ||
|  |   // try to find the right one.
 | ||
|  |   var i = indexOf(state.pipes, dest); | ||
|  |   if (i === -1) | ||
|  |     return this; | ||
|  | 
 | ||
|  |   state.pipes.splice(i, 1); | ||
|  |   state.pipesCount -= 1; | ||
|  |   if (state.pipesCount === 1) | ||
|  |     state.pipes = state.pipes[0]; | ||
|  | 
 | ||
|  |   dest.emit('unpipe', this); | ||
|  | 
 | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | // set up data events if they are asked for
 | ||
|  | // Ensure readable listeners eventually get something
 | ||
|  | Readable.prototype.on = function(ev, fn) { | ||
|  |   var res = Stream.prototype.on.call(this, ev, fn); | ||
|  | 
 | ||
|  |   // If listening to data, and it has not explicitly been paused,
 | ||
|  |   // then call resume to start the flow of data on the next tick.
 | ||
|  |   if (ev === 'data' && false !== this._readableState.flowing) { | ||
|  |     this.resume(); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (ev === 'readable' && this.readable) { | ||
|  |     var state = this._readableState; | ||
|  |     if (!state.readableListening) { | ||
|  |       state.readableListening = true; | ||
|  |       state.emittedReadable = false; | ||
|  |       state.needReadable = true; | ||
|  |       if (!state.reading) { | ||
|  |         var self = this; | ||
|  |         process.nextTick(function() { | ||
|  |           debug('readable nexttick read 0'); | ||
|  |           self.read(0); | ||
|  |         }); | ||
|  |       } else if (state.length) { | ||
|  |         emitReadable(this, state); | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return res; | ||
|  | }; | ||
|  | Readable.prototype.addListener = Readable.prototype.on; | ||
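|  | // Illustrative sketch (commented out): the two consumption styles enabled
|  | // by the listener handling above. `rs` and `handle` are hypothetical names.
|  | //
|  | //   rs.on('data', function(chunk) {   // flowing mode: chunks are pushed
|  | //     handle(chunk);
|  | //   });
|  | //
|  | //   rs.on('readable', function() {    // paused mode: the consumer pulls
|  | //     var chunk;
|  | //     while ((chunk = rs.read()) !== null) handle(chunk);
|  | //   });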
|  | 
 | ||
|  | // pause() and resume() are remnants of the legacy readable stream API
 | ||
|  | // If the user uses them, then switch into old mode.
 | ||
|  | Readable.prototype.resume = function() { | ||
|  |   var state = this._readableState; | ||
|  |   if (!state.flowing) { | ||
|  |     debug('resume'); | ||
|  |     state.flowing = true; | ||
|  |     if (!state.reading) { | ||
|  |       debug('resume read 0'); | ||
|  |       this.read(0); | ||
|  |     } | ||
|  |     resume(this, state); | ||
|  |   } | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | function resume(stream, state) { | ||
|  |   if (!state.resumeScheduled) { | ||
|  |     state.resumeScheduled = true; | ||
|  |     process.nextTick(function() { | ||
|  |       resume_(stream, state); | ||
|  |     }); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function resume_(stream, state) { | ||
|  |   state.resumeScheduled = false; | ||
|  |   stream.emit('resume'); | ||
|  |   flow(stream); | ||
|  |   if (state.flowing && !state.reading) | ||
|  |     stream.read(0); | ||
|  | } | ||
|  | 
 | ||
|  | Readable.prototype.pause = function() { | ||
|  |   debug('call pause flowing=%j', this._readableState.flowing); | ||
|  |   if (false !== this._readableState.flowing) { | ||
|  |     debug('pause'); | ||
|  |     this._readableState.flowing = false; | ||
|  |     this.emit('pause'); | ||
|  |   } | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | function flow(stream) { | ||
|  |   var state = stream._readableState; | ||
|  |   debug('flow', state.flowing); | ||
|  |   if (state.flowing) { | ||
|  |     do { | ||
|  |       var chunk = stream.read(); | ||
|  |     } while (null !== chunk && state.flowing); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // wrap an old-style stream as the async data source.
 | ||
|  | // This is *not* part of the readable stream interface.
 | ||
|  | // It is an ugly unfortunate mess of history.
 | ||
|  | Readable.prototype.wrap = function(stream) { | ||
|  |   var state = this._readableState; | ||
|  |   var paused = false; | ||
|  | 
 | ||
|  |   var self = this; | ||
|  |   stream.on('end', function() { | ||
|  |     debug('wrapped end'); | ||
|  |     if (state.decoder && !state.ended) { | ||
|  |       var chunk = state.decoder.end(); | ||
|  |       if (chunk && chunk.length) | ||
|  |         self.push(chunk); | ||
|  |     } | ||
|  | 
 | ||
|  |     self.push(null); | ||
|  |   }); | ||
|  | 
 | ||
|  |   stream.on('data', function(chunk) { | ||
|  |     debug('wrapped data'); | ||
|  |     if (state.decoder) | ||
|  |       chunk = state.decoder.write(chunk); | ||
|  |     if (!chunk || !state.objectMode && !chunk.length) | ||
|  |       return; | ||
|  | 
 | ||
|  |     var ret = self.push(chunk); | ||
|  |     if (!ret) { | ||
|  |       paused = true; | ||
|  |       stream.pause(); | ||
|  |     } | ||
|  |   }); | ||
|  | 
 | ||
|  |   // proxy all the other methods.
 | ||
|  |   // important when wrapping filters and duplexes.
 | ||
|  |   for (var i in stream) { | ||
|  |     if (util.isFunction(stream[i]) && util.isUndefined(this[i])) { | ||
|  |       this[i] = function(method) { return function() { | ||
|  |         return stream[method].apply(stream, arguments); | ||
|  |       }}(i); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   // proxy certain important events.
 | ||
|  |   var events = ['error', 'close', 'destroy', 'pause', 'resume']; | ||
|  |   forEach(events, function(ev) { | ||
|  |     stream.on(ev, self.emit.bind(self, ev)); | ||
|  |   }); | ||
|  | 
 | ||
|  |   // when we try to consume some more bytes, simply unpause the
 | ||
|  |   // underlying stream.
 | ||
|  |   self._read = function(n) { | ||
|  |     debug('wrapped _read', n); | ||
|  |     if (paused) { | ||
|  |       paused = false; | ||
|  |       stream.resume(); | ||
|  |     } | ||
|  |   }; | ||
|  | 
 | ||
|  |   return self; | ||
|  | }; | ||
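|  | // Illustrative sketch (commented out): adapting a pre-streams2 source with
|  | // wrap(). `oldStream` and `writableDest` are hypothetical.
|  | //
|  | //   var rs = new Readable();
|  | //   rs.wrap(oldStream);
|  | //   rs.pipe(writableDest);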
|  | 
 | ||
|  | 
 | ||
|  | 
 | ||
|  | // exposed for testing purposes only.
 | ||
|  | Readable._fromList = fromList; | ||
|  | 
 | ||
|  | // Pluck off n bytes from an array of buffers.
 | ||
|  | // Length is the combined lengths of all the buffers in the list.
 | ||
|  | function fromList(n, state) { | ||
|  |   var list = state.buffer; | ||
|  |   var length = state.length; | ||
|  |   var stringMode = !!state.decoder; | ||
|  |   var objectMode = !!state.objectMode; | ||
|  |   var ret; | ||
|  | 
 | ||
|  |   // nothing in the list, definitely empty.
 | ||
|  |   if (list.length === 0) | ||
|  |     return null; | ||
|  | 
 | ||
|  |   if (length === 0) | ||
|  |     ret = null; | ||
|  |   else if (objectMode) | ||
|  |     ret = list.shift(); | ||
|  |   else if (!n || n >= length) { | ||
|  |     // read it all, truncate the array.
 | ||
|  |     if (stringMode) | ||
|  |       ret = list.join(''); | ||
|  |     else | ||
|  |       ret = Buffer.concat(list, length); | ||
|  |     list.length = 0; | ||
|  |   } else { | ||
|  |     // read just some of it.
 | ||
|  |     if (n < list[0].length) { | ||
|  |       // just take a part of the first list item.
 | ||
|  |       // slice is the same for buffers and strings.
 | ||
|  |       var buf = list[0]; | ||
|  |       ret = buf.slice(0, n); | ||
|  |       list[0] = buf.slice(n); | ||
|  |     } else if (n === list[0].length) { | ||
|  |       // first list is a perfect match
 | ||
|  |       ret = list.shift(); | ||
|  |     } else { | ||
|  |       // complex case.
 | ||
|  |       // we have enough to cover it, but it spans past the first buffer.
 | ||
|  |       if (stringMode) | ||
|  |         ret = ''; | ||
|  |       else | ||
|  |         ret = new Buffer(n); | ||
|  | 
 | ||
|  |       var c = 0; | ||
|  |       for (var i = 0, l = list.length; i < l && c < n; i++) { | ||
|  |         var buf = list[0]; | ||
|  |         var cpy = Math.min(n - c, buf.length); | ||
|  | 
 | ||
|  |         if (stringMode) | ||
|  |           ret += buf.slice(0, cpy); | ||
|  |         else | ||
|  |           buf.copy(ret, c, 0, cpy); | ||
|  | 
 | ||
|  |         if (cpy < buf.length) | ||
|  |           list[0] = buf.slice(cpy); | ||
|  |         else | ||
|  |           list.shift(); | ||
|  | 
 | ||
|  |         c += cpy; | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return ret; | ||
|  | } | ||
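|  | // Worked example of the partial-read branch above (string mode): with
|  | // state.buffer = ['abc', 'de'] and n = 4, the loop consumes 'abc' entirely
|  | // (shifting it off) plus 'd' from the next entry, returning 'abcd' and
|  | // leaving state.buffer = ['e']; the caller then subtracts 4 from
|  | // state.length.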
|  | 
 | ||
|  | function endReadable(stream) { | ||
|  |   var state = stream._readableState; | ||
|  | 
 | ||
|  |   // If we get here before consuming all the bytes, then that is a
 | ||
|  |   // bug in node.  Should never happen.
 | ||
|  |   if (state.length > 0) | ||
|  |     throw new Error('endReadable called on non-empty stream'); | ||
|  | 
 | ||
|  |   if (!state.endEmitted) { | ||
|  |     state.ended = true; | ||
|  |     process.nextTick(function() { | ||
|  |       // Check that we didn't get one last unshift.
 | ||
|  |       if (!state.endEmitted && state.length === 0) { | ||
|  |         state.endEmitted = true; | ||
|  |         stream.readable = false; | ||
|  |         stream.emit('end'); | ||
|  |       } | ||
|  |     }); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function forEach (xs, f) { | ||
|  |   for (var i = 0, l = xs.length; i < l; i++) { | ||
|  |     f(xs[i], i); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function indexOf (xs, x) { | ||
|  |   for (var i = 0, l = xs.length; i < l; i++) { | ||
|  |     if (xs[i] === x) return i; | ||
|  |   } | ||
|  |   return -1; | ||
|  | } | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"11":11,"13":13,"14":14,"26":26,"37":37,"73":73,"75":75,"80":80,"81":81,"85":85}],78:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | 
 | ||
|  | // a transform stream is a readable/writable stream where you do
 | ||
|  | // something with the data.  Sometimes it's called a "filter",
 | ||
|  | // but that's not a great name for it, since that implies a thing where
 | ||
|  | // some bits pass through, and others are simply ignored.  (That would
 | ||
|  | // be a valid example of a transform, of course.)
 | ||
|  | //
 | ||
|  | // While the output is causally related to the input, it's not a
 | ||
|  | // necessarily symmetric or synchronous transformation.  For example,
 | ||
|  | // a zlib stream might take multiple plain-text writes(), and then
 | ||
|  | // emit a single compressed chunk some time in the future.
 | ||
|  | //
 | ||
|  | // Here's how this works:
 | ||
|  | //
 | ||
|  | // The Transform stream has all the aspects of the readable and writable
 | ||
|  | // stream classes.  When you write(chunk), that calls _write(chunk,cb)
 | ||
|  | // internally, and returns false if there's a lot of pending writes
 | ||
|  | // buffered up.  When you call read(), that calls _read(n) until
 | ||
|  | // there's enough pending readable data buffered up.
 | ||
|  | //
 | ||
|  | // In a transform stream, the written data is placed in a buffer.  When
 | ||
|  | // _read(n) is called, it transforms the queued up data, calling the
 | ||
|  | // buffered _write cb's as it consumes chunks.  If consuming a single
 | ||
|  | // written chunk would result in multiple output chunks, then the first
 | ||
|  | // outputted bit calls the readcb, and subsequent chunks just go into
 | ||
|  | // the read buffer, and will cause it to emit 'readable' if necessary.
 | ||
|  | //
 | ||
|  | // This way, back-pressure is actually determined by the reading side,
 | ||
|  | // since _read has to be called to start processing a new chunk.  However,
 | ||
|  | // a pathological inflate type of transform can cause excessive buffering
 | ||
|  | // here.  For example, imagine a stream where every byte of input is
 | ||
|  | // interpreted as an integer from 0-255, and then results in that many
 | ||
|  | // bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
 | ||
|  | // 1kb of data being output.  In this case, you could write a very small
 | ||
|  | // amount of input, and end up with a very large amount of output.  In
 | ||
|  | // such a pathological inflating mechanism, there'd be no way to tell
 | ||
|  | // the system to stop doing the transform.  A single 4MB write could
 | ||
|  | // cause the system to run out of memory.
 | ||
|  | //
 | ||
|  | // However, even in such a pathological case, only a single written chunk
 | ||
|  | // would be consumed, and then the rest would wait (un-transformed) until
 | ||
|  | // the results of the previous transformed chunk were consumed.
 | ||
|  | 
 | ||
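|  | // Illustrative sketch (commented out) of the causal-but-asynchronous
|  | // relation described above: a hypothetical `gzipLike` Transform may absorb
|  | // several write() calls before pushing a single compressed chunk to its
|  | // readable side.
|  | //
|  | //   source.pipe(gzipLike).pipe(destination);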
|  | module.exports = Transform; | ||
|  | 
 | ||
|  | var Duplex = _dereq_(75); | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = _dereq_(14); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | util.inherits(Transform, Duplex); | ||
|  | 
 | ||
|  | 
 | ||
|  | function TransformState(options, stream) { | ||
|  |   this.afterTransform = function(er, data) { | ||
|  |     return afterTransform(stream, er, data); | ||
|  |   }; | ||
|  | 
 | ||
|  |   this.needTransform = false; | ||
|  |   this.transforming = false; | ||
|  |   this.writecb = null; | ||
|  |   this.writechunk = null; | ||
|  | } | ||
|  | 
 | ||
|  | function afterTransform(stream, er, data) { | ||
|  |   var ts = stream._transformState; | ||
|  |   ts.transforming = false; | ||
|  | 
 | ||
|  |   var cb = ts.writecb; | ||
|  | 
 | ||
|  |   if (!cb) | ||
|  |     return stream.emit('error', new Error('no writecb in Transform class')); | ||
|  | 
 | ||
|  |   ts.writechunk = null; | ||
|  |   ts.writecb = null; | ||
|  | 
 | ||
|  |   if (!util.isNullOrUndefined(data)) | ||
|  |     stream.push(data); | ||
|  | 
 | ||
|  |   if (cb) | ||
|  |     cb(er); | ||
|  | 
 | ||
|  |   var rs = stream._readableState; | ||
|  |   rs.reading = false; | ||
|  |   if (rs.needReadable || rs.length < rs.highWaterMark) { | ||
|  |     stream._read(rs.highWaterMark); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | function Transform(options) { | ||
|  |   if (!(this instanceof Transform)) | ||
|  |     return new Transform(options); | ||
|  | 
 | ||
|  |   Duplex.call(this, options); | ||
|  | 
 | ||
|  |   this._transformState = new TransformState(options, this); | ||
|  | 
 | ||
|  |   // when the writable side finishes, then flush out anything remaining.
 | ||
|  |   var stream = this; | ||
|  | 
 | ||
|  |   // start out asking for a readable event once data is transformed.
 | ||
|  |   this._readableState.needReadable = true; | ||
|  | 
 | ||
|  |   // we have implemented the _read method, and done the other things
 | ||
|  |   // that Readable wants before the first _read call, so unset the
 | ||
|  |   // sync guard flag.
 | ||
|  |   this._readableState.sync = false; | ||
|  | 
 | ||
|  |   this.once('prefinish', function() { | ||
|  |     if (util.isFunction(this._flush)) | ||
|  |       this._flush(function(er) { | ||
|  |         done(stream, er); | ||
|  |       }); | ||
|  |     else | ||
|  |       done(stream); | ||
|  |   }); | ||
|  | } | ||
|  | 
 | ||
|  | Transform.prototype.push = function(chunk, encoding) { | ||
|  |   this._transformState.needTransform = false; | ||
|  |   return Duplex.prototype.push.call(this, chunk, encoding); | ||
|  | }; | ||
|  | 
 | ||
|  | // This is the part where you do stuff!
 | ||
|  | // override this function in implementation classes.
 | ||
|  | // 'chunk' is an input chunk.
 | ||
|  | //
 | ||
|  | // Call `push(newChunk)` to pass along transformed output
 | ||
|  | // to the readable side.  You may call 'push' zero or more times.
 | ||
|  | //
 | ||
|  | // Call `cb(err)` when you are done with this chunk.  If you pass
 | ||
|  | // an error, then that'll put the hurt on the whole operation.  If you
 | ||
|  | // never call cb(), then you'll never get another chunk.
 | ||
|  | Transform.prototype._transform = function(chunk, encoding, cb) { | ||
|  |   throw new Error('not implemented'); | ||
|  | }; | ||
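|  | // Illustrative sketch (commented out): a minimal Transform subclass that
|  | // follows the contract above -- push() zero or more times, then cb().
|  | // `Upper` is a hypothetical name.
|  | //
|  | //   function Upper(opts) { Transform.call(this, opts); }
|  | //   util.inherits(Upper, Transform);
|  | //   Upper.prototype._transform = function(chunk, encoding, cb) {
|  | //     this.push(chunk.toString().toUpperCase());
|  | //     cb();
|  | //   };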
|  | 
 | ||
|  | Transform.prototype._write = function(chunk, encoding, cb) { | ||
|  |   var ts = this._transformState; | ||
|  |   ts.writecb = cb; | ||
|  |   ts.writechunk = chunk; | ||
|  |   ts.writeencoding = encoding; | ||
|  |   if (!ts.transforming) { | ||
|  |     var rs = this._readableState; | ||
|  |     if (ts.needTransform || | ||
|  |         rs.needReadable || | ||
|  |         rs.length < rs.highWaterMark) | ||
|  |       this._read(rs.highWaterMark); | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | // Doesn't matter what the args are here.
 | ||
|  | // _transform does all the work.
 | ||
|  | // Reaching this point means that the readable side wants more data.
 | ||
|  | Transform.prototype._read = function(n) { | ||
|  |   var ts = this._transformState; | ||
|  | 
 | ||
|  |   if (!util.isNull(ts.writechunk) && ts.writecb && !ts.transforming) { | ||
|  |     ts.transforming = true; | ||
|  |     this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); | ||
|  |   } else { | ||
|  |     // mark that we need a transform, so that any data that comes in
 | ||
|  |     // will get processed, now that we've asked for it.
 | ||
|  |     ts.needTransform = true; | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | 
 | ||
|  | function done(stream, er) { | ||
|  |   if (er) | ||
|  |     return stream.emit('error', er); | ||
|  | 
 | ||
|  |   // if there's nothing in the write buffer, then that means
 | ||
|  |   // that nothing more will ever be provided
 | ||
|  |   var ws = stream._writableState; | ||
|  |   var ts = stream._transformState; | ||
|  | 
 | ||
|  |   if (ws.length) | ||
|  |     throw new Error('calling transform done when ws.length != 0'); | ||
|  | 
 | ||
|  |   if (ts.transforming) | ||
|  |     throw new Error('calling transform done when still transforming'); | ||
|  | 
 | ||
|  |   return stream.push(null); | ||
|  | } | ||
|  | 
 | ||
|  | },{"14":14,"37":37,"75":75}],79:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | // A bit simpler than readable streams.
 | ||
|  | // Implement an async ._write(chunk, cb), and it'll handle all
 | ||
|  | // the drain event emission and buffering.
 | ||
|  | 
 | ||
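|  | // Illustrative sketch (commented out): a minimal Writable that implements
|  | // the async _write(chunk, encoding, cb) contract described above; buffering
|  | // and 'drain' emission are handled by this module. `Sink` and `store` are
|  | // hypothetical names.
|  | //
|  | //   function Sink(opts) { Writable.call(this, opts); }
|  | //   util.inherits(Sink, Writable);
|  | //   Sink.prototype._write = function(chunk, encoding, cb) {
|  | //     store(chunk, cb);   // invoke cb(err) once the chunk is handled
|  | //   };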
|  | module.exports = Writable; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var Buffer = _dereq_(13).Buffer; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | Writable.WritableState = WritableState; | ||
|  | 
 | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = _dereq_(14); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | var Stream = _dereq_(85); | ||
|  | 
 | ||
|  | util.inherits(Writable, Stream); | ||
|  | 
 | ||
|  | function WriteReq(chunk, encoding, cb) { | ||
|  |   this.chunk = chunk; | ||
|  |   this.encoding = encoding; | ||
|  |   this.callback = cb; | ||
|  | } | ||
|  | 
 | ||
|  | function WritableState(options, stream) { | ||
|  |   var Duplex = _dereq_(75); | ||
|  | 
 | ||
|  |   options = options || {}; | ||
|  | 
 | ||
|  |   // the point at which write() starts returning false
 | ||
|  |   // Note: 0 is a valid value, means that we always return false if
 | ||
|  |   // the entire buffer is not flushed immediately on write()
 | ||
|  |   var hwm = options.highWaterMark; | ||
|  |   var defaultHwm = options.objectMode ? 16 : 16 * 1024; | ||
|  |   this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm; | ||
|  | 
 | ||
|  |   // object stream flag to indicate whether or not this stream
 | ||
|  |   // contains buffers or objects.
 | ||
|  |   this.objectMode = !!options.objectMode; | ||
|  | 
 | ||
|  |   if (stream instanceof Duplex) | ||
|  |     this.objectMode = this.objectMode || !!options.writableObjectMode; | ||
|  | 
 | ||
|  |   // cast to ints.
 | ||
|  |   this.highWaterMark = ~~this.highWaterMark; | ||
|  | 
 | ||
|  |   this.needDrain = false; | ||
|  |   // at the start of calling end()
 | ||
|  |   this.ending = false; | ||
|  |   // when end() has been called, and returned
 | ||
|  |   this.ended = false; | ||
|  |   // when 'finish' is emitted
 | ||
|  |   this.finished = false; | ||
|  | 
 | ||
|  |   // should we decode strings into buffers before passing to _write?
 | ||
|  |   // this is here so that some node-core streams can optimize string
 | ||
|  |   // handling at a lower level.
 | ||
|  |   var noDecode = options.decodeStrings === false; | ||
|  |   this.decodeStrings = !noDecode; | ||
|  | 
 | ||
|  |   // Crypto is kind of old and crusty.  Historically, its default string
 | ||
|  |   // encoding is 'binary' so we have to make this configurable.
 | ||
|  |   // Everything else in the universe uses 'utf8', though.
 | ||
|  |   this.defaultEncoding = options.defaultEncoding || 'utf8'; | ||
|  | 
 | ||
|  |   // not an actual buffer we keep track of, but a measurement
 | ||
|  |   // of how much we're waiting to get pushed to some underlying
 | ||
|  |   // socket or file.
 | ||
|  |   this.length = 0; | ||
|  | 
 | ||
|  |   // a flag to see when we're in the middle of a write.
 | ||
|  |   this.writing = false; | ||
|  | 
 | ||
|  |   // when true all writes will be buffered until .uncork() call
 | ||
|  |   this.corked = 0; | ||
|  | 
 | ||
|  |   // a flag to be able to tell if the onwrite cb is called immediately,
 | ||
|  |   // or on a later tick.  We set this to true at first, because any
 | ||
|  |   // actions that shouldn't happen until "later" should generally also
 | ||
|  |   // not happen before the first write call.
 | ||
|  |   this.sync = true; | ||
|  | 
 | ||
|  |   // a flag to know if we're processing previously buffered items, which
 | ||
|  |   // may call the _write() callback in the same tick, so that we don't
 | ||
|  |   // end up in an overlapped onwrite situation.
 | ||
|  |   this.bufferProcessing = false; | ||
|  | 
 | ||
|  |   // the callback that's passed to _write(chunk,cb)
 | ||
|  |   this.onwrite = function(er) { | ||
|  |     onwrite(stream, er); | ||
|  |   }; | ||
|  | 
 | ||
|  |   // the callback that the user supplies to write(chunk,encoding,cb)
 | ||
|  |   this.writecb = null; | ||
|  | 
 | ||
|  |   // the amount that is being written when _write is called.
 | ||
|  |   this.writelen = 0; | ||
|  | 
 | ||
|  |   this.buffer = []; | ||
|  | 
 | ||
|  |   // number of pending user-supplied write callbacks
 | ||
|  |   // this must be 0 before 'finish' can be emitted
 | ||
|  |   this.pendingcb = 0; | ||
|  | 
 | ||
|  |   // emit prefinish if the only thing we're waiting for is _write cbs
 | ||
|  |   // This is relevant for synchronous Transform streams
 | ||
|  |   this.prefinished = false; | ||
|  | 
 | ||
|  |   // True if the error was already emitted and should not be thrown again
 | ||
|  |   this.errorEmitted = false; | ||
|  | } | ||
|  | 
 | ||
|  | function Writable(options) { | ||
|  |   var Duplex = _dereq_(75); | ||
|  | 
 | ||
|  |   // Writable ctor is applied to Duplexes, though they're not
 | ||
|  |   // instanceof Writable, they're instanceof Readable.
 | ||
|  |   if (!(this instanceof Writable) && !(this instanceof Duplex)) | ||
|  |     return new Writable(options); | ||
|  | 
 | ||
|  |   this._writableState = new WritableState(options, this); | ||
|  | 
 | ||
|  |   // legacy.
 | ||
|  |   this.writable = true; | ||
|  | 
 | ||
|  |   Stream.call(this); | ||
|  | } | ||
|  | 
 | ||
|  | // Otherwise people can pipe Writable streams, which is just wrong.
 | ||
|  | Writable.prototype.pipe = function() { | ||
|  |   this.emit('error', new Error('Cannot pipe. Not readable.')); | ||
|  | }; | ||
|  | 
 | ||
|  | 
 | ||
|  | function writeAfterEnd(stream, state, cb) { | ||
|  |   var er = new Error('write after end'); | ||
|  |   // TODO: defer error events consistently everywhere, not just the cb
 | ||
|  |   stream.emit('error', er); | ||
|  |   process.nextTick(function() { | ||
|  |     cb(er); | ||
|  |   }); | ||
|  | } | ||
|  | 
 | ||
|  | // If we get something that is not a buffer, string, null, or undefined,
 | ||
|  | // and we're not in objectMode, then that's an error.
 | ||
|  | // Otherwise stream chunks are all considered to be of length=1, and the
 | ||
|  | // watermarks determine how many objects to keep in the buffer, rather than
 | ||
|  | // how many bytes or characters.
 | ||
|  | function validChunk(stream, state, chunk, cb) { | ||
|  |   var valid = true; | ||
|  |   if (!util.isBuffer(chunk) && | ||
|  |       !util.isString(chunk) && | ||
|  |       !util.isNullOrUndefined(chunk) && | ||
|  |       !state.objectMode) { | ||
|  |     var er = new TypeError('Invalid non-string/buffer chunk'); | ||
|  |     stream.emit('error', er); | ||
|  |     process.nextTick(function() { | ||
|  |       cb(er); | ||
|  |     }); | ||
|  |     valid = false; | ||
|  |   } | ||
|  |   return valid; | ||
|  | } | ||
|  | 
 | ||
|  | Writable.prototype.write = function(chunk, encoding, cb) { | ||
|  |   var state = this._writableState; | ||
|  |   var ret = false; | ||
|  | 
 | ||
|  |   if (util.isFunction(encoding)) { | ||
|  |     cb = encoding; | ||
|  |     encoding = null; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (util.isBuffer(chunk)) | ||
|  |     encoding = 'buffer'; | ||
|  |   else if (!encoding) | ||
|  |     encoding = state.defaultEncoding; | ||
|  | 
 | ||
|  |   if (!util.isFunction(cb)) | ||
|  |     cb = function() {}; | ||
|  | 
 | ||
|  |   if (state.ended) | ||
|  |     writeAfterEnd(this, state, cb); | ||
|  |   else if (validChunk(this, state, chunk, cb)) { | ||
|  |     state.pendingcb++; | ||
|  |     ret = writeOrBuffer(this, state, chunk, encoding, cb); | ||
|  |   } | ||
|  | 
 | ||
|  |   return ret; | ||
|  | }; | ||
|  | 
 | ||
|  | Writable.prototype.cork = function() { | ||
|  |   var state = this._writableState; | ||
|  | 
 | ||
|  |   state.corked++; | ||
|  | }; | ||
|  | 
 | ||
|  | Writable.prototype.uncork = function() { | ||
|  |   var state = this._writableState; | ||
|  | 
 | ||
|  |   if (state.corked) { | ||
|  |     state.corked--; | ||
|  | 
 | ||
|  |     if (!state.writing && | ||
|  |         !state.corked && | ||
|  |         !state.finished && | ||
|  |         !state.bufferProcessing && | ||
|  |         state.buffer.length) | ||
|  |       clearBuffer(this, state); | ||
|  |   } | ||
|  | }; | ||
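|  | // Illustrative sketch (commented out): cork() buffers writes (corked is a
|  | // counter, so calls may nest) and uncork() flushes them, in a single
|  | // _writev call when the stream implements one. `ws` is hypothetical.
|  | //
|  | //   ws.cork();
|  | //   ws.write('header');
|  | //   ws.write('body');
|  | //   ws.uncork();   // both buffered chunks are flushed here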
|  | 
 | ||
|  | function decodeChunk(state, chunk, encoding) { | ||
|  |   if (!state.objectMode && | ||
|  |       state.decodeStrings !== false && | ||
|  |       util.isString(chunk)) { | ||
|  |     chunk = new Buffer(chunk, encoding); | ||
|  |   } | ||
|  |   return chunk; | ||
|  | } | ||
|  | 
 | ||
|  | // if we're already writing something, then just put this
 | ||
|  | // in the queue, and wait our turn.  Otherwise, call _write
 | ||
|  | // If we return false, then we need a drain event, so set that flag.
 | ||
|  | function writeOrBuffer(stream, state, chunk, encoding, cb) { | ||
|  |   chunk = decodeChunk(state, chunk, encoding); | ||
|  |   if (util.isBuffer(chunk)) | ||
|  |     encoding = 'buffer'; | ||
|  |   var len = state.objectMode ? 1 : chunk.length; | ||
|  | 
 | ||
|  |   state.length += len; | ||
|  | 
 | ||
|  |   var ret = state.length < state.highWaterMark; | ||
|  |   // we must ensure that previous needDrain will not be reset to false.
 | ||
|  |   if (!ret) | ||
|  |     state.needDrain = true; | ||
|  | 
 | ||
|  |   if (state.writing || state.corked) | ||
|  |     state.buffer.push(new WriteReq(chunk, encoding, cb)); | ||
|  |   else | ||
|  |     doWrite(stream, state, false, len, chunk, encoding, cb); | ||
|  | 
 | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | function doWrite(stream, state, writev, len, chunk, encoding, cb) { | ||
|  |   state.writelen = len; | ||
|  |   state.writecb = cb; | ||
|  |   state.writing = true; | ||
|  |   state.sync = true; | ||
|  |   if (writev) | ||
|  |     stream._writev(chunk, state.onwrite); | ||
|  |   else | ||
|  |     stream._write(chunk, encoding, state.onwrite); | ||
|  |   state.sync = false; | ||
|  | } | ||
|  | 
 | ||
|  | function onwriteError(stream, state, sync, er, cb) { | ||
|  |   if (sync) | ||
|  |     process.nextTick(function() { | ||
|  |       state.pendingcb--; | ||
|  |       cb(er); | ||
|  |     }); | ||
|  |   else { | ||
|  |     state.pendingcb--; | ||
|  |     cb(er); | ||
|  |   } | ||
|  | 
 | ||
|  |   stream._writableState.errorEmitted = true; | ||
|  |   stream.emit('error', er); | ||
|  | } | ||
|  | 
 | ||
|  | function onwriteStateUpdate(state) { | ||
|  |   state.writing = false; | ||
|  |   state.writecb = null; | ||
|  |   state.length -= state.writelen; | ||
|  |   state.writelen = 0; | ||
|  | } | ||
|  | 
 | ||
|  | function onwrite(stream, er) { | ||
|  |   var state = stream._writableState; | ||
|  |   var sync = state.sync; | ||
|  |   var cb = state.writecb; | ||
|  | 
 | ||
|  |   onwriteStateUpdate(state); | ||
|  | 
 | ||
|  |   if (er) | ||
|  |     onwriteError(stream, state, sync, er, cb); | ||
|  |   else { | ||
|  |     // Check if we're actually ready to finish, but don't emit yet
 | ||
|  |     var finished = needFinish(stream, state); | ||
|  | 
 | ||
|  |     if (!finished && | ||
|  |         !state.corked && | ||
|  |         !state.bufferProcessing && | ||
|  |         state.buffer.length) { | ||
|  |       clearBuffer(stream, state); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (sync) { | ||
|  |       process.nextTick(function() { | ||
|  |         afterWrite(stream, state, finished, cb); | ||
|  |       }); | ||
|  |     } else { | ||
|  |       afterWrite(stream, state, finished, cb); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function afterWrite(stream, state, finished, cb) { | ||
|  |   if (!finished) | ||
|  |     onwriteDrain(stream, state); | ||
|  |   state.pendingcb--; | ||
|  |   cb(); | ||
|  |   finishMaybe(stream, state); | ||
|  | } | ||
|  | 
 | ||
|  | // Must force callback to be called on nextTick, so that we don't
 | ||
|  | // emit 'drain' before the write() consumer gets the 'false' return
 | ||
|  | // value, and has a chance to attach a 'drain' listener.
 | ||
|  | function onwriteDrain(stream, state) { | ||
|  |   if (state.length === 0 && state.needDrain) { | ||
|  |     state.needDrain = false; | ||
|  |     stream.emit('drain'); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | // if there's something in the buffer waiting, then process it
 | ||
|  | function clearBuffer(stream, state) { | ||
|  |   state.bufferProcessing = true; | ||
|  | 
 | ||
|  |   if (stream._writev && state.buffer.length > 1) { | ||
|  |     // Fast case, write everything using _writev()
 | ||
|  |     var cbs = []; | ||
|  |     for (var c = 0; c < state.buffer.length; c++) | ||
|  |       cbs.push(state.buffer[c].callback); | ||
|  | 
 | ||
|  |     // count the one we are adding, as well.
 | ||
|  |     // TODO(isaacs) clean this up
 | ||
|  |     state.pendingcb++; | ||
|  |     doWrite(stream, state, true, state.length, state.buffer, '', function(err) { | ||
|  |       for (var i = 0; i < cbs.length; i++) { | ||
|  |         state.pendingcb--; | ||
|  |         cbs[i](err); | ||
|  |       } | ||
|  |     }); | ||
|  | 
 | ||
|  |     // Clear buffer
 | ||
|  |     state.buffer = []; | ||
|  |   } else { | ||
|  |     // Slow case, write chunks one-by-one
 | ||
|  |     for (var c = 0; c < state.buffer.length; c++) { | ||
|  |       var entry = state.buffer[c]; | ||
|  |       var chunk = entry.chunk; | ||
|  |       var encoding = entry.encoding; | ||
|  |       var cb = entry.callback; | ||
|  |       var len = state.objectMode ? 1 : chunk.length; | ||
|  | 
 | ||
|  |       doWrite(stream, state, false, len, chunk, encoding, cb); | ||
|  | 
 | ||
|  |       // if we didn't call the onwrite immediately, then
 | ||
|  |       // it means that we need to wait until it does.
 | ||
|  |       // also, that means that the chunk and cb are currently
 | ||
|  |       // being processed, so move the buffer counter past them.
 | ||
|  |       if (state.writing) { | ||
|  |         c++; | ||
|  |         break; | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     if (c < state.buffer.length) | ||
|  |       state.buffer = state.buffer.slice(c); | ||
|  |     else | ||
|  |       state.buffer.length = 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   state.bufferProcessing = false; | ||
|  | } | ||
|  | 
 | ||
|  | Writable.prototype._write = function(chunk, encoding, cb) { | ||
|  |   cb(new Error('not implemented')); | ||
|  | 
 | ||
|  | }; | ||
|  | 
 | ||
|  | Writable.prototype._writev = null; | ||
|  | 
 | ||
|  | Writable.prototype.end = function(chunk, encoding, cb) { | ||
|  |   var state = this._writableState; | ||
|  | 
 | ||
|  |   if (util.isFunction(chunk)) { | ||
|  |     cb = chunk; | ||
|  |     chunk = null; | ||
|  |     encoding = null; | ||
|  |   } else if (util.isFunction(encoding)) { | ||
|  |     cb = encoding; | ||
|  |     encoding = null; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!util.isNullOrUndefined(chunk)) | ||
|  |     this.write(chunk, encoding); | ||
|  | 
 | ||
|  |   // .end() fully uncorks
 | ||
|  |   if (state.corked) { | ||
|  |     state.corked = 1; | ||
|  |     this.uncork(); | ||
|  |   } | ||
|  | 
 | ||
|  |   // ignore unnecessary end() calls.
 | ||
|  |   if (!state.ending && !state.finished) | ||
|  |     endWritable(this, state, cb); | ||
|  | }; | ||
|  | 
 | ||
|  | 
 | ||
|  | function needFinish(stream, state) { | ||
|  |   return (state.ending && | ||
|  |           state.length === 0 && | ||
|  |           !state.finished && | ||
|  |           !state.writing); | ||
|  | } | ||
|  | 
 | ||
|  | function prefinish(stream, state) { | ||
|  |   if (!state.prefinished) { | ||
|  |     state.prefinished = true; | ||
|  |     stream.emit('prefinish'); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function finishMaybe(stream, state) { | ||
|  |   var need = needFinish(stream, state); | ||
|  |   if (need) { | ||
|  |     if (state.pendingcb === 0) { | ||
|  |       prefinish(stream, state); | ||
|  |       state.finished = true; | ||
|  |       stream.emit('finish'); | ||
|  |     } else | ||
|  |       prefinish(stream, state); | ||
|  |   } | ||
|  |   return need; | ||
|  | } | ||
|  | 
 | ||
|  | function endWritable(stream, state, cb) { | ||
|  |   state.ending = true; | ||
|  |   finishMaybe(stream, state); | ||
|  |   if (cb) { | ||
|  |     if (state.finished) | ||
|  |       process.nextTick(cb); | ||
|  |     else | ||
|  |       stream.once('finish', cb); | ||
|  |   } | ||
|  |   state.ended = true; | ||
|  | } | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"13":13,"14":14,"37":37,"73":73,"75":75,"85":85}],80:[function(_dereq_,module,exports){ | ||
|  | module.exports = Array.isArray || function (arr) { | ||
|  |   return Object.prototype.toString.call(arr) == '[object Array]'; | ||
|  | }; | ||
|  | 
 | ||
|  | },{}],81:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | var Buffer = _dereq_(13).Buffer; | ||
|  | 
 | ||
|  | var isBufferEncoding = Buffer.isEncoding | ||
|  |   || function(encoding) { | ||
|  |        switch (encoding && encoding.toLowerCase()) { | ||
|  |          case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; | ||
|  |          default: return false; | ||
|  |        } | ||
|  |      } | ||
|  | 
 | ||
|  | 
 | ||
|  | function assertEncoding(encoding) { | ||
|  |   if (encoding && !isBufferEncoding(encoding)) { | ||
|  |     throw new Error('Unknown encoding: ' + encoding); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // StringDecoder provides an interface for efficiently splitting a series of
 | ||
|  | // buffers into a series of JS strings without breaking apart multi-byte
 | ||
|  | // characters. CESU-8 is handled as part of the UTF-8 encoding.
 | ||
|  | //
 | ||
|  | // @TODO Handling all encodings inside a single object makes it very difficult
 | ||
|  | // to reason about this code, so it should be split up in the future.
 | ||
|  | // @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
 | ||
|  | // points as used by CESU-8.
 | ||
|  | var StringDecoder = exports.StringDecoder = function(encoding) { | ||
|  |   this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); | ||
|  |   assertEncoding(encoding); | ||
|  |   switch (this.encoding) { | ||
|  |     case 'utf8': | ||
|  |       // CESU-8 encodes each half of a surrogate pair as 3 bytes
 | ||
|  |       this.surrogateSize = 3; | ||
|  |       break; | ||
|  |     case 'ucs2': | ||
|  |     case 'utf16le': | ||
|  |       // UTF-16 encodes each half of a surrogate pair as 2 bytes
 | ||
|  |       this.surrogateSize = 2; | ||
|  |       this.detectIncompleteChar = utf16DetectIncompleteChar; | ||
|  |       break; | ||
|  |     case 'base64': | ||
|  |       // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
 | ||
|  |       this.surrogateSize = 3; | ||
|  |       this.detectIncompleteChar = base64DetectIncompleteChar; | ||
|  |       break; | ||
|  |     default: | ||
|  |       this.write = passThroughWrite; | ||
|  |       return; | ||
|  |   } | ||
|  | 
 | ||
|  |   // Enough space to store all bytes of a single character. UTF-8 needs 4
 | ||
|  |   // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
 | ||
|  |   this.charBuffer = new Buffer(6); | ||
|  |   // Number of bytes received for the current incomplete multi-byte character.
 | ||
|  |   this.charReceived = 0; | ||
|  |   // Number of bytes expected for the current incomplete multi-byte character.
 | ||
|  |   this.charLength = 0; | ||
|  | }; | ||
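|  | // Illustrative sketch (commented out): decoding UTF-8 split across buffers
|  | // without emitting a broken character ('€' is 0xE2 0x82 0xAC). `sd` is a
|  | // hypothetical decoder instance.
|  | //
|  | //   var sd = new StringDecoder('utf8');
|  | //   sd.write(new Buffer([0xE2, 0x82]));   // '' - incomplete char is buffered
|  | //   sd.write(new Buffer([0xAC]));         // '€' - completed on this write
|  | //   sd.end();                             // flushes anything still buffered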
|  | 
 | ||
|  | 
 | ||
|  | // write decodes the given buffer and returns it as JS string that is
 | ||
|  | // guaranteed to not contain any partial multi-byte characters. Any partial
 | ||
|  | // character found at the end of the buffer is buffered up, and will be
 | ||
|  | // returned when calling write again with the remaining bytes.
 | ||
|  | //
 | ||
|  | // Note: Converting a Buffer containing an orphan surrogate to a String
 | ||
|  | // currently works, but converting a String to a Buffer (via `new Buffer`, or
 | ||
|  | // Buffer#write) will replace incomplete surrogates with the unicode
 | ||
|  | // replacement character. See https://codereview.chromium.org/121173009/ .
 | ||
|  | StringDecoder.prototype.write = function(buffer) { | ||
|  |   var charStr = ''; | ||
|  |   // if our last write ended with an incomplete multibyte character
 | ||
|  |   while (this.charLength) { | ||
|  |     // determine how many remaining bytes this buffer has to offer for this char
 | ||
|  |     var available = (buffer.length >= this.charLength - this.charReceived) ? | ||
|  |         this.charLength - this.charReceived : | ||
|  |         buffer.length; | ||
|  | 
 | ||
|  |     // add the new bytes to the char buffer
 | ||
|  |     buffer.copy(this.charBuffer, this.charReceived, 0, available); | ||
|  |     this.charReceived += available; | ||
|  | 
 | ||
|  |     if (this.charReceived < this.charLength) { | ||
|  |       // still not enough chars in this buffer? wait for more ...
 | ||
|  |       return ''; | ||
|  |     } | ||
|  | 
 | ||
|  |     // remove bytes belonging to the current character from the buffer
 | ||
|  |     buffer = buffer.slice(available, buffer.length); | ||
|  | 
 | ||
|  |     // get the character that was split
 | ||
|  |     charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); | ||
|  | 
 | ||
|  |     // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
 | ||
|  |     var charCode = charStr.charCodeAt(charStr.length - 1); | ||
|  |     if (charCode >= 0xD800 && charCode <= 0xDBFF) { | ||
|  |       this.charLength += this.surrogateSize; | ||
|  |       charStr = ''; | ||
|  |       continue; | ||
|  |     } | ||
|  |     this.charReceived = this.charLength = 0; | ||
|  | 
 | ||
|  |     // if there are no more bytes in this buffer, just emit our char
 | ||
|  |     if (buffer.length === 0) { | ||
|  |       return charStr; | ||
|  |     } | ||
|  |     break; | ||
|  |   } | ||
|  | 
 | ||
|  |   // determine and set charLength / charReceived
 | ||
|  |   this.detectIncompleteChar(buffer); | ||
|  | 
 | ||
|  |   var end = buffer.length; | ||
|  |   if (this.charLength) { | ||
|  |     // buffer the incomplete character bytes we got
 | ||
|  |     buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); | ||
|  |     end -= this.charReceived; | ||
|  |   } | ||
|  | 
 | ||
|  |   charStr += buffer.toString(this.encoding, 0, end); | ||
|  | 
 | ||
|  |   var end = charStr.length - 1; | ||
|  |   var charCode = charStr.charCodeAt(end); | ||
|  |   // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
 | ||
|  |   if (charCode >= 0xD800 && charCode <= 0xDBFF) { | ||
|  |     var size = this.surrogateSize; | ||
|  |     this.charLength += size; | ||
|  |     this.charReceived += size; | ||
|  |     this.charBuffer.copy(this.charBuffer, size, 0, size); | ||
|  |     buffer.copy(this.charBuffer, 0, 0, size); | ||
|  |     return charStr.substring(0, end); | ||
|  |   } | ||
|  | 
 | ||
|  |   // or just emit the charStr
 | ||
|  |   return charStr; | ||
|  | }; | ||
|  | 
 | ||
|  | // detectIncompleteChar determines if there is an incomplete UTF-8 character at
 | ||
|  | // the end of the given buffer. If so, it sets this.charLength to the byte
 | ||
|  | // length of that character, and sets this.charReceived to the number of bytes
 | ||
|  | // that are available for this character.
 | ||
|  | StringDecoder.prototype.detectIncompleteChar = function(buffer) { | ||
|  |   // determine how many bytes we have to check at the end of this buffer
 | ||
|  |   var i = (buffer.length >= 3) ? 3 : buffer.length; | ||
|  | 
 | ||
|  |   // Figure out if one of the last i bytes of our buffer announces an
 | ||
|  |   // incomplete char.
 | ||
|  |   for (; i > 0; i--) { | ||
|  |     var c = buffer[buffer.length - i]; | ||
|  | 
 | ||
|  |     // See http://en.wikipedia.org/wiki/UTF-8#Description
 | ||
|  | 
 | ||
|  |     // 110XXXXX
 | ||
|  |     if (i == 1 && c >> 5 == 0x06) { | ||
|  |       this.charLength = 2; | ||
|  |       break; | ||
|  |     } | ||
|  | 
 | ||
|  |     // 1110XXXX
 | ||
|  |     if (i <= 2 && c >> 4 == 0x0E) { | ||
|  |       this.charLength = 3; | ||
|  |       break; | ||
|  |     } | ||
|  | 
 | ||
|  |     // 11110XXX
 | ||
|  |     if (i <= 3 && c >> 3 == 0x1E) { | ||
|  |       this.charLength = 4; | ||
|  |       break; | ||
|  |     } | ||
|  |   } | ||
|  |   this.charReceived = i; | ||
|  | }; | ||
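|  | // Worked example: '€' is 0xE2 0x82 0xAC in UTF-8. If a buffer ends with
|  | // 0xE2 0x82, the loop above finds 0xE2 (1110XXXX) two bytes from the end,
|  | // so charLength becomes 3 and charReceived becomes 2; the trailing 0xAC in
|  | // the next write() completes the character.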
|  | 
 | ||
|  | StringDecoder.prototype.end = function(buffer) { | ||
|  |   var res = ''; | ||
|  |   if (buffer && buffer.length) | ||
|  |     res = this.write(buffer); | ||
|  | 
 | ||
|  |   if (this.charReceived) { | ||
|  |     var cr = this.charReceived; | ||
|  |     var buf = this.charBuffer; | ||
|  |     var enc = this.encoding; | ||
|  |     res += buf.slice(0, cr).toString(enc); | ||
|  |   } | ||
|  | 
 | ||
|  |   return res; | ||
|  | }; | ||
|  | 
 | ||
|  | function passThroughWrite(buffer) { | ||
|  |   return buffer.toString(this.encoding); | ||
|  | } | ||
|  | 
 | ||
|  | function utf16DetectIncompleteChar(buffer) { | ||
|  |   this.charReceived = buffer.length % 2; | ||
|  |   this.charLength = this.charReceived ? 2 : 0; | ||
|  | } | ||
|  | 
 | ||
|  | function base64DetectIncompleteChar(buffer) { | ||
|  |   this.charReceived = buffer.length % 3; | ||
|  |   this.charLength = this.charReceived ? 3 : 0; | ||
|  | } | ||
|  | 
 | ||
|  | },{"13":13}],82:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | exports = module.exports = _dereq_(77); | ||
|  | exports.Stream = _dereq_(85); | ||
|  | exports.Readable = exports; | ||
|  | exports.Writable = _dereq_(79); | ||
|  | exports.Duplex = _dereq_(75); | ||
|  | exports.Transform = _dereq_(78); | ||
|  | exports.PassThrough = _dereq_(76); | ||
|  | if (!process.browser && process.env.READABLE_STREAM === 'disable') { | ||
|  |   module.exports = _dereq_(85); | ||
|  | } | ||
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"73":73,"75":75,"76":76,"77":77,"78":78,"79":79,"85":85}],83:[function(_dereq_,module,exports){ | ||
|  | /*! safe-buffer. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */ | ||
|  | /* eslint-disable node/no-deprecated-api */ | ||
|  | var buffer = _dereq_(13) | ||
|  | var Buffer = buffer.Buffer | ||
|  | 
 | ||
|  | // alternative to using Object.keys for old browsers
 | ||
|  | function copyProps (src, dst) { | ||
|  |   for (var key in src) { | ||
|  |     dst[key] = src[key] | ||
|  |   } | ||
|  | } | ||
|  | if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { | ||
|  |   module.exports = buffer | ||
|  | } else { | ||
|  |   // Copy properties from require('buffer')
 | ||
|  |   copyProps(buffer, exports) | ||
|  |   exports.Buffer = SafeBuffer | ||
|  | } | ||
|  | 
 | ||
|  | function SafeBuffer (arg, encodingOrOffset, length) { | ||
|  |   return Buffer(arg, encodingOrOffset, length) | ||
|  | } | ||
|  | 
 | ||
|  | SafeBuffer.prototype = Object.create(Buffer.prototype) | ||
|  | 
 | ||
|  | // Copy static methods from Buffer
 | ||
|  | copyProps(Buffer, SafeBuffer) | ||
|  | 
 | ||
|  | SafeBuffer.from = function (arg, encodingOrOffset, length) { | ||
|  |   if (typeof arg === 'number') { | ||
|  |     throw new TypeError('Argument must not be a number') | ||
|  |   } | ||
|  |   return Buffer(arg, encodingOrOffset, length) | ||
|  | } | ||
|  | 
 | ||
|  | SafeBuffer.alloc = function (size, fill, encoding) { | ||
|  |   if (typeof size !== 'number') { | ||
|  |     throw new TypeError('Argument must be a number') | ||
|  |   } | ||
|  |   var buf = Buffer(size) | ||
|  |   if (fill !== undefined) { | ||
|  |     if (typeof encoding === 'string') { | ||
|  |       buf.fill(fill, encoding) | ||
|  |     } else { | ||
|  |       buf.fill(fill) | ||
|  |     } | ||
|  |   } else { | ||
|  |     buf.fill(0) | ||
|  |   } | ||
|  |   return buf | ||
|  | } | ||
|  | 
 | ||
|  | SafeBuffer.allocUnsafe = function (size) { | ||
|  |   if (typeof size !== 'number') { | ||
|  |     throw new TypeError('Argument must be a number') | ||
|  |   } | ||
|  |   return Buffer(size) | ||
|  | } | ||
|  | 
 | ||
|  | SafeBuffer.allocUnsafeSlow = function (size) { | ||
|  |   if (typeof size !== 'number') { | ||
|  |     throw new TypeError('Argument must be a number') | ||
|  |   } | ||
|  |   return buffer.SlowBuffer(size) | ||
|  | } | ||
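|  | // Example (illustrative): the wrapper exposes the modern Buffer API on top of | ||
|  | // the legacy constructor, e.g.: | ||
|  | //   var SB = SafeBuffer;            // or require('safe-buffer').Buffer standalone | ||
|  | //   SB.from('abc');                 // <Buffer 61 62 63> | ||
|  | //   SB.alloc(4);                    // zero-filled <Buffer 00 00 00 00> | ||
|  | //   SB.from(4);                     // throws TypeError -- numbers are rejected | ||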
|  | 
 | ||
|  | },{"13":13}],84:[function(_dereq_,module,exports){ | ||
|  | (function (factory) { | ||
|  |     if (typeof exports === 'object') { | ||
|  |         // Node/CommonJS
 | ||
|  |         module.exports = factory(); | ||
|  |     } else if (typeof define === 'function' && define.amd) { | ||
|  |         // AMD
 | ||
|  |         define(factory); | ||
|  |     } else { | ||
|  |         // Browser globals (with support for web workers)
 | ||
|  |         var glob; | ||
|  | 
 | ||
|  |         try { | ||
|  |             glob = window; | ||
|  |         } catch (e) { | ||
|  |             glob = self; | ||
|  |         } | ||
|  | 
 | ||
|  |         glob.SparkMD5 = factory(); | ||
|  |     } | ||
|  | }(function (undefined) { | ||
|  | 
 | ||
|  |     'use strict'; | ||
|  | 
 | ||
|  |     /* | ||
|  |      * Fastest md5 implementation around (JKM md5). | ||
|  |      * Credits: Joseph Myers | ||
|  |      * | ||
|  |      * @see http://www.myersdaily.org/joseph/javascript/md5-text.html
 | ||
|  |      * @see http://jsperf.com/md5-shootout/7
 | ||
|  |      */ | ||
|  | 
 | ||
|  |     /* this function is much faster, | ||
|  |       so if possible we use it. Some IEs | ||
|  |       are the only ones I know of that | ||
|  |       need the idiotic second function, | ||
|  |       generated by an if clause.  */ | ||
|  |     var add32 = function (a, b) { | ||
|  |         return (a + b) & 0xFFFFFFFF; | ||
|  |     }, | ||
|  |         hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f']; | ||
|  | 
 | ||
|  | 
 | ||
|  |     function cmn(q, a, b, x, s, t) { | ||
|  |         a = add32(add32(a, q), add32(x, t)); | ||
|  |         return add32((a << s) | (a >>> (32 - s)), b); | ||
|  |     } | ||
|  | 
 | ||
|  |     function md5cycle(x, k) { | ||
|  |         var a = x[0], | ||
|  |             b = x[1], | ||
|  |             c = x[2], | ||
|  |             d = x[3]; | ||
|  | 
 | ||
|  |         a += (b & c | ~b & d) + k[0] - 680876936 | 0; | ||
|  |         a  = (a << 7 | a >>> 25) + b | 0; | ||
|  |         d += (a & b | ~a & c) + k[1] - 389564586 | 0; | ||
|  |         d  = (d << 12 | d >>> 20) + a | 0; | ||
|  |         c += (d & a | ~d & b) + k[2] + 606105819 | 0; | ||
|  |         c  = (c << 17 | c >>> 15) + d | 0; | ||
|  |         b += (c & d | ~c & a) + k[3] - 1044525330 | 0; | ||
|  |         b  = (b << 22 | b >>> 10) + c | 0; | ||
|  |         a += (b & c | ~b & d) + k[4] - 176418897 | 0; | ||
|  |         a  = (a << 7 | a >>> 25) + b | 0; | ||
|  |         d += (a & b | ~a & c) + k[5] + 1200080426 | 0; | ||
|  |         d  = (d << 12 | d >>> 20) + a | 0; | ||
|  |         c += (d & a | ~d & b) + k[6] - 1473231341 | 0; | ||
|  |         c  = (c << 17 | c >>> 15) + d | 0; | ||
|  |         b += (c & d | ~c & a) + k[7] - 45705983 | 0; | ||
|  |         b  = (b << 22 | b >>> 10) + c | 0; | ||
|  |         a += (b & c | ~b & d) + k[8] + 1770035416 | 0; | ||
|  |         a  = (a << 7 | a >>> 25) + b | 0; | ||
|  |         d += (a & b | ~a & c) + k[9] - 1958414417 | 0; | ||
|  |         d  = (d << 12 | d >>> 20) + a | 0; | ||
|  |         c += (d & a | ~d & b) + k[10] - 42063 | 0; | ||
|  |         c  = (c << 17 | c >>> 15) + d | 0; | ||
|  |         b += (c & d | ~c & a) + k[11] - 1990404162 | 0; | ||
|  |         b  = (b << 22 | b >>> 10) + c | 0; | ||
|  |         a += (b & c | ~b & d) + k[12] + 1804603682 | 0; | ||
|  |         a  = (a << 7 | a >>> 25) + b | 0; | ||
|  |         d += (a & b | ~a & c) + k[13] - 40341101 | 0; | ||
|  |         d  = (d << 12 | d >>> 20) + a | 0; | ||
|  |         c += (d & a | ~d & b) + k[14] - 1502002290 | 0; | ||
|  |         c  = (c << 17 | c >>> 15) + d | 0; | ||
|  |         b += (c & d | ~c & a) + k[15] + 1236535329 | 0; | ||
|  |         b  = (b << 22 | b >>> 10) + c | 0; | ||
|  | 
 | ||
|  |         a += (b & d | c & ~d) + k[1] - 165796510 | 0; | ||
|  |         a  = (a << 5 | a >>> 27) + b | 0; | ||
|  |         d += (a & c | b & ~c) + k[6] - 1069501632 | 0; | ||
|  |         d  = (d << 9 | d >>> 23) + a | 0; | ||
|  |         c += (d & b | a & ~b) + k[11] + 643717713 | 0; | ||
|  |         c  = (c << 14 | c >>> 18) + d | 0; | ||
|  |         b += (c & a | d & ~a) + k[0] - 373897302 | 0; | ||
|  |         b  = (b << 20 | b >>> 12) + c | 0; | ||
|  |         a += (b & d | c & ~d) + k[5] - 701558691 | 0; | ||
|  |         a  = (a << 5 | a >>> 27) + b | 0; | ||
|  |         d += (a & c | b & ~c) + k[10] + 38016083 | 0; | ||
|  |         d  = (d << 9 | d >>> 23) + a | 0; | ||
|  |         c += (d & b | a & ~b) + k[15] - 660478335 | 0; | ||
|  |         c  = (c << 14 | c >>> 18) + d | 0; | ||
|  |         b += (c & a | d & ~a) + k[4] - 405537848 | 0; | ||
|  |         b  = (b << 20 | b >>> 12) + c | 0; | ||
|  |         a += (b & d | c & ~d) + k[9] + 568446438 | 0; | ||
|  |         a  = (a << 5 | a >>> 27) + b | 0; | ||
|  |         d += (a & c | b & ~c) + k[14] - 1019803690 | 0; | ||
|  |         d  = (d << 9 | d >>> 23) + a | 0; | ||
|  |         c += (d & b | a & ~b) + k[3] - 187363961 | 0; | ||
|  |         c  = (c << 14 | c >>> 18) + d | 0; | ||
|  |         b += (c & a | d & ~a) + k[8] + 1163531501 | 0; | ||
|  |         b  = (b << 20 | b >>> 12) + c | 0; | ||
|  |         a += (b & d | c & ~d) + k[13] - 1444681467 | 0; | ||
|  |         a  = (a << 5 | a >>> 27) + b | 0; | ||
|  |         d += (a & c | b & ~c) + k[2] - 51403784 | 0; | ||
|  |         d  = (d << 9 | d >>> 23) + a | 0; | ||
|  |         c += (d & b | a & ~b) + k[7] + 1735328473 | 0; | ||
|  |         c  = (c << 14 | c >>> 18) + d | 0; | ||
|  |         b += (c & a | d & ~a) + k[12] - 1926607734 | 0; | ||
|  |         b  = (b << 20 | b >>> 12) + c | 0; | ||
|  | 
 | ||
|  |         a += (b ^ c ^ d) + k[5] - 378558 | 0; | ||
|  |         a  = (a << 4 | a >>> 28) + b | 0; | ||
|  |         d += (a ^ b ^ c) + k[8] - 2022574463 | 0; | ||
|  |         d  = (d << 11 | d >>> 21) + a | 0; | ||
|  |         c += (d ^ a ^ b) + k[11] + 1839030562 | 0; | ||
|  |         c  = (c << 16 | c >>> 16) + d | 0; | ||
|  |         b += (c ^ d ^ a) + k[14] - 35309556 | 0; | ||
|  |         b  = (b << 23 | b >>> 9) + c | 0; | ||
|  |         a += (b ^ c ^ d) + k[1] - 1530992060 | 0; | ||
|  |         a  = (a << 4 | a >>> 28) + b | 0; | ||
|  |         d += (a ^ b ^ c) + k[4] + 1272893353 | 0; | ||
|  |         d  = (d << 11 | d >>> 21) + a | 0; | ||
|  |         c += (d ^ a ^ b) + k[7] - 155497632 | 0; | ||
|  |         c  = (c << 16 | c >>> 16) + d | 0; | ||
|  |         b += (c ^ d ^ a) + k[10] - 1094730640 | 0; | ||
|  |         b  = (b << 23 | b >>> 9) + c | 0; | ||
|  |         a += (b ^ c ^ d) + k[13] + 681279174 | 0; | ||
|  |         a  = (a << 4 | a >>> 28) + b | 0; | ||
|  |         d += (a ^ b ^ c) + k[0] - 358537222 | 0; | ||
|  |         d  = (d << 11 | d >>> 21) + a | 0; | ||
|  |         c += (d ^ a ^ b) + k[3] - 722521979 | 0; | ||
|  |         c  = (c << 16 | c >>> 16) + d | 0; | ||
|  |         b += (c ^ d ^ a) + k[6] + 76029189 | 0; | ||
|  |         b  = (b << 23 | b >>> 9) + c | 0; | ||
|  |         a += (b ^ c ^ d) + k[9] - 640364487 | 0; | ||
|  |         a  = (a << 4 | a >>> 28) + b | 0; | ||
|  |         d += (a ^ b ^ c) + k[12] - 421815835 | 0; | ||
|  |         d  = (d << 11 | d >>> 21) + a | 0; | ||
|  |         c += (d ^ a ^ b) + k[15] + 530742520 | 0; | ||
|  |         c  = (c << 16 | c >>> 16) + d | 0; | ||
|  |         b += (c ^ d ^ a) + k[2] - 995338651 | 0; | ||
|  |         b  = (b << 23 | b >>> 9) + c | 0; | ||
|  | 
 | ||
|  |         a += (c ^ (b | ~d)) + k[0] - 198630844 | 0; | ||
|  |         a  = (a << 6 | a >>> 26) + b | 0; | ||
|  |         d += (b ^ (a | ~c)) + k[7] + 1126891415 | 0; | ||
|  |         d  = (d << 10 | d >>> 22) + a | 0; | ||
|  |         c += (a ^ (d | ~b)) + k[14] - 1416354905 | 0; | ||
|  |         c  = (c << 15 | c >>> 17) + d | 0; | ||
|  |         b += (d ^ (c | ~a)) + k[5] - 57434055 | 0; | ||
|  |         b  = (b << 21 |b >>> 11) + c | 0; | ||
|  |         a += (c ^ (b | ~d)) + k[12] + 1700485571 | 0; | ||
|  |         a  = (a << 6 | a >>> 26) + b | 0; | ||
|  |         d += (b ^ (a | ~c)) + k[3] - 1894986606 | 0; | ||
|  |         d  = (d << 10 | d >>> 22) + a | 0; | ||
|  |         c += (a ^ (d | ~b)) + k[10] - 1051523 | 0; | ||
|  |         c  = (c << 15 | c >>> 17) + d | 0; | ||
|  |         b += (d ^ (c | ~a)) + k[1] - 2054922799 | 0; | ||
|  |         b  = (b << 21 |b >>> 11) + c | 0; | ||
|  |         a += (c ^ (b | ~d)) + k[8] + 1873313359 | 0; | ||
|  |         a  = (a << 6 | a >>> 26) + b | 0; | ||
|  |         d += (b ^ (a | ~c)) + k[15] - 30611744 | 0; | ||
|  |         d  = (d << 10 | d >>> 22) + a | 0; | ||
|  |         c += (a ^ (d | ~b)) + k[6] - 1560198380 | 0; | ||
|  |         c  = (c << 15 | c >>> 17) + d | 0; | ||
|  |         b += (d ^ (c | ~a)) + k[13] + 1309151649 | 0; | ||
|  |         b  = (b << 21 |b >>> 11) + c | 0; | ||
|  |         a += (c ^ (b | ~d)) + k[4] - 145523070 | 0; | ||
|  |         a  = (a << 6 | a >>> 26) + b | 0; | ||
|  |         d += (b ^ (a | ~c)) + k[11] - 1120210379 | 0; | ||
|  |         d  = (d << 10 | d >>> 22) + a | 0; | ||
|  |         c += (a ^ (d | ~b)) + k[2] + 718787259 | 0; | ||
|  |         c  = (c << 15 | c >>> 17) + d | 0; | ||
|  |         b += (d ^ (c | ~a)) + k[9] - 343485551 | 0; | ||
|  |         b  = (b << 21 | b >>> 11) + c | 0; | ||
|  | 
 | ||
|  |         x[0] = a + x[0] | 0; | ||
|  |         x[1] = b + x[1] | 0; | ||
|  |         x[2] = c + x[2] | 0; | ||
|  |         x[3] = d + x[3] | 0; | ||
|  |     } | ||
|  | 
 | ||
|  |     function md5blk(s) { | ||
|  |         var md5blks = [], | ||
|  |             i; /* Andy King said do it this way. */ | ||
|  | 
 | ||
|  |         for (i = 0; i < 64; i += 4) { | ||
|  |             md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24); | ||
|  |         } | ||
|  |         return md5blks; | ||
|  |     } | ||
|  | 
 | ||
|  |     function md5blk_array(a) { | ||
|  |         var md5blks = [], | ||
|  |             i; /* Andy King said do it this way. */ | ||
|  | 
 | ||
|  |         for (i = 0; i < 64; i += 4) { | ||
|  |             md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24); | ||
|  |         } | ||
|  |         return md5blks; | ||
|  |     } | ||
|  | 
 | ||
|  |     function md51(s) { | ||
|  |         var n = s.length, | ||
|  |             state = [1732584193, -271733879, -1732584194, 271733878], | ||
|  |             i, | ||
|  |             length, | ||
|  |             tail, | ||
|  |             tmp, | ||
|  |             lo, | ||
|  |             hi; | ||
|  | 
 | ||
|  |         for (i = 64; i <= n; i += 64) { | ||
|  |             md5cycle(state, md5blk(s.substring(i - 64, i))); | ||
|  |         } | ||
|  |         s = s.substring(i - 64); | ||
|  |         length = s.length; | ||
|  |         tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; | ||
|  |         for (i = 0; i < length; i += 1) { | ||
|  |             tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3); | ||
|  |         } | ||
|  |         tail[i >> 2] |= 0x80 << ((i % 4) << 3); | ||
|  |         if (i > 55) { | ||
|  |             md5cycle(state, tail); | ||
|  |             for (i = 0; i < 16; i += 1) { | ||
|  |                 tail[i] = 0; | ||
|  |             } | ||
|  |         } | ||
|  | 
 | ||
|  |         // Beware that the final length might not fit in 32 bits so we take care of that
 | ||
|  |         tmp = n * 8; | ||
|  |         tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/); | ||
|  |         lo = parseInt(tmp[2], 16); | ||
|  |         hi = parseInt(tmp[1], 16) || 0; | ||
|  | 
 | ||
|  |         tail[14] = lo; | ||
|  |         tail[15] = hi; | ||
|  | 
 | ||
|  |         md5cycle(state, tail); | ||
|  |         return state; | ||
|  |     } | ||
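|  |     // Worked example (illustrative): tail[14]/tail[15] hold the 64-bit message | ||
|  |     // length in bits, low word first -- for a 100-byte input, n * 8 = 800 | ||
|  |     // (0x320), so lo = 0x320 and hi = 0. | ||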
|  | 
 | ||
|  |     function md51_array(a) { | ||
|  |         var n = a.length, | ||
|  |             state = [1732584193, -271733879, -1732584194, 271733878], | ||
|  |             i, | ||
|  |             length, | ||
|  |             tail, | ||
|  |             tmp, | ||
|  |             lo, | ||
|  |             hi; | ||
|  | 
 | ||
|  |         for (i = 64; i <= n; i += 64) { | ||
|  |             md5cycle(state, md5blk_array(a.subarray(i - 64, i))); | ||
|  |         } | ||
|  | 
 | ||
|  |         // Not sure if it is a bug; however, IE10 will always produce a sub array of length 1
 | ||
|  |         // containing the last element of the parent array if the sub array specified starts
 | ||
|  |         // beyond the length of the parent array - weird.
 | ||
|  |         // https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue
 | ||
|  |         a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0); | ||
|  | 
 | ||
|  |         length = a.length; | ||
|  |         tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; | ||
|  |         for (i = 0; i < length; i += 1) { | ||
|  |             tail[i >> 2] |= a[i] << ((i % 4) << 3); | ||
|  |         } | ||
|  | 
 | ||
|  |         tail[i >> 2] |= 0x80 << ((i % 4) << 3); | ||
|  |         if (i > 55) { | ||
|  |             md5cycle(state, tail); | ||
|  |             for (i = 0; i < 16; i += 1) { | ||
|  |                 tail[i] = 0; | ||
|  |             } | ||
|  |         } | ||
|  | 
 | ||
|  |         // Beware that the final length might not fit in 32 bits so we take care of that
 | ||
|  |         tmp = n * 8; | ||
|  |         tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/); | ||
|  |         lo = parseInt(tmp[2], 16); | ||
|  |         hi = parseInt(tmp[1], 16) || 0; | ||
|  | 
 | ||
|  |         tail[14] = lo; | ||
|  |         tail[15] = hi; | ||
|  | 
 | ||
|  |         md5cycle(state, tail); | ||
|  | 
 | ||
|  |         return state; | ||
|  |     } | ||
|  | 
 | ||
|  |     function rhex(n) { | ||
|  |         var s = '', | ||
|  |             j; | ||
|  |         for (j = 0; j < 4; j += 1) { | ||
|  |             s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F]; | ||
|  |         } | ||
|  |         return s; | ||
|  |     } | ||
|  | 
 | ||
|  |     function hex(x) { | ||
|  |         var i; | ||
|  |         for (i = 0; i < x.length; i += 1) { | ||
|  |             x[i] = rhex(x[i]); | ||
|  |         } | ||
|  |         return x.join(''); | ||
|  |     } | ||
|  | 
 | ||
|  |     // In some cases the fast add32 function cannot be used.
 | ||
|  |     if (hex(md51('hello')) !== '5d41402abc4b2a76b9719d911017c592') { | ||
|  |         add32 = function (x, y) { | ||
|  |             var lsw = (x & 0xFFFF) + (y & 0xFFFF), | ||
|  |                 msw = (x >> 16) + (y >> 16) + (lsw >> 16); | ||
|  |             return (msw << 16) | (lsw & 0xFFFF); | ||
|  |         }; | ||
|  |     } | ||
|  | 
 | ||
|  |     // ---------------------------------------------------
 | ||
|  | 
 | ||
|  |     /** | ||
|  |      * ArrayBuffer slice polyfill. | ||
|  |      * | ||
|  |      * @see https://github.com/ttaubert/node-arraybuffer-slice
 | ||
|  |      */ | ||
|  | 
 | ||
|  |     if (typeof ArrayBuffer !== 'undefined' && !ArrayBuffer.prototype.slice) { | ||
|  |         (function () { | ||
|  |             function clamp(val, length) { | ||
|  |                 val = (val | 0) || 0; | ||
|  | 
 | ||
|  |                 if (val < 0) { | ||
|  |                     return Math.max(val + length, 0); | ||
|  |                 } | ||
|  | 
 | ||
|  |                 return Math.min(val, length); | ||
|  |             } | ||
|  | 
 | ||
|  |             ArrayBuffer.prototype.slice = function (from, to) { | ||
|  |                 var length = this.byteLength, | ||
|  |                     begin = clamp(from, length), | ||
|  |                     end = length, | ||
|  |                     num, | ||
|  |                     target, | ||
|  |                     targetArray, | ||
|  |                     sourceArray; | ||
|  | 
 | ||
|  |                 if (to !== undefined) { | ||
|  |                     end = clamp(to, length); | ||
|  |                 } | ||
|  | 
 | ||
|  |                 if (begin > end) { | ||
|  |                     return new ArrayBuffer(0); | ||
|  |                 } | ||
|  | 
 | ||
|  |                 num = end - begin; | ||
|  |                 target = new ArrayBuffer(num); | ||
|  |                 targetArray = new Uint8Array(target); | ||
|  | 
 | ||
|  |                 sourceArray = new Uint8Array(this, begin, num); | ||
|  |                 targetArray.set(sourceArray); | ||
|  | 
 | ||
|  |                 return target; | ||
|  |             }; | ||
|  |         })(); | ||
|  |     } | ||
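|  |     // Example (illustrative): with the polyfill in place, | ||
|  |     //   new Uint8Array(new ArrayBuffer(8).slice(2, 6)).length === 4 | ||
|  |     // and negative indices are clamped relative to the end, as with Array#slice. | ||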
|  | 
 | ||
|  |     // ---------------------------------------------------
 | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Helpers. | ||
|  |      */ | ||
|  | 
 | ||
|  |     function toUtf8(str) { | ||
|  |         if (/[\u0080-\uFFFF]/.test(str)) { | ||
|  |             str = unescape(encodeURIComponent(str)); | ||
|  |         } | ||
|  | 
 | ||
|  |         return str; | ||
|  |     } | ||
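|  |     // Example (illustrative): toUtf8('é') === '\u00C3\u00A9' -- the two UTF-8 | ||
|  |     // bytes of U+00E9 expressed as single-byte "characters", ready for the | ||
|  |     // charCodeAt-based packing in md5blk(). | ||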
|  | 
 | ||
|  |     function utf8Str2ArrayBuffer(str, returnUInt8Array) { | ||
|  |         var length = str.length, | ||
|  |            buff = new ArrayBuffer(length), | ||
|  |            arr = new Uint8Array(buff), | ||
|  |            i; | ||
|  | 
 | ||
|  |         for (i = 0; i < length; i += 1) { | ||
|  |             arr[i] = str.charCodeAt(i); | ||
|  |         } | ||
|  | 
 | ||
|  |         return returnUInt8Array ? arr : buff; | ||
|  |     } | ||
|  | 
 | ||
|  |     function arrayBuffer2Utf8Str(buff) { | ||
|  |         return String.fromCharCode.apply(null, new Uint8Array(buff)); | ||
|  |     } | ||
|  | 
 | ||
|  |     function concatenateArrayBuffers(first, second, returnUInt8Array) { | ||
|  |         var result = new Uint8Array(first.byteLength + second.byteLength); | ||
|  | 
 | ||
|  |         result.set(new Uint8Array(first)); | ||
|  |         result.set(new Uint8Array(second), first.byteLength); | ||
|  | 
 | ||
|  |         return returnUInt8Array ? result : result.buffer; | ||
|  |     } | ||
|  | 
 | ||
|  |     function hexToBinaryString(hex) { | ||
|  |         var bytes = [], | ||
|  |             length = hex.length, | ||
|  |             x; | ||
|  | 
 | ||
|  |         for (x = 0; x < length - 1; x += 2) { | ||
|  |             bytes.push(parseInt(hex.substr(x, 2), 16)); | ||
|  |         } | ||
|  | 
 | ||
|  |         return String.fromCharCode.apply(String, bytes); | ||
|  |     } | ||
|  | 
 | ||
|  |     // ---------------------------------------------------
 | ||
|  | 
 | ||
|  |     /** | ||
|  |      * SparkMD5 OOP implementation. | ||
|  |      * | ||
|  |      * Use this class to perform an incremental md5, otherwise use the | ||
|  |      * static methods instead. | ||
|  |      */ | ||
|  | 
 | ||
|  |     function SparkMD5() { | ||
|  |         // call reset to init the instance
 | ||
|  |         this.reset(); | ||
|  |     } | ||
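|  |     // Example (illustrative): incremental hashing yields the same digest as the | ||
|  |     // one-shot helper: | ||
|  |     //   var spark = new SparkMD5(); | ||
|  |     //   spark.append('Hi'); | ||
|  |     //   spark.append(' there'); | ||
|  |     //   spark.end();                  // === SparkMD5.hash('Hi there') | ||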
|  | 
 | ||
|  |     /** | ||
|  |      * Appends a string. | ||
|  |      * A conversion will be applied if a UTF-8 string is detected. | ||
|  |      * | ||
|  |      * @param {String} str The string to be appended | ||
|  |      * | ||
|  |      * @return {SparkMD5} The instance itself | ||
|  |      */ | ||
|  |     SparkMD5.prototype.append = function (str) { | ||
|  |         // Converts the string to utf8 bytes if necessary
 | ||
|  |         // Then append as binary
 | ||
|  |         this.appendBinary(toUtf8(str)); | ||
|  | 
 | ||
|  |         return this; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Appends a binary string. | ||
|  |      * | ||
|  |      * @param {String} contents The binary string to be appended | ||
|  |      * | ||
|  |      * @return {SparkMD5} The instance itself | ||
|  |      */ | ||
|  |     SparkMD5.prototype.appendBinary = function (contents) { | ||
|  |         this._buff += contents; | ||
|  |         this._length += contents.length; | ||
|  | 
 | ||
|  |         var length = this._buff.length, | ||
|  |             i; | ||
|  | 
 | ||
|  |         for (i = 64; i <= length; i += 64) { | ||
|  |             md5cycle(this._hash, md5blk(this._buff.substring(i - 64, i))); | ||
|  |         } | ||
|  | 
 | ||
|  |         this._buff = this._buff.substring(i - 64); | ||
|  | 
 | ||
|  |         return this; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Finishes the incremental computation, resetting the internal state and | ||
|  |      * returning the result. | ||
|  |      * | ||
|  |      * @param {Boolean} raw True to get the raw string, false to get the hex string | ||
|  |      * | ||
|  |      * @return {String} The result | ||
|  |      */ | ||
|  |     SparkMD5.prototype.end = function (raw) { | ||
|  |         var buff = this._buff, | ||
|  |             length = buff.length, | ||
|  |             i, | ||
|  |             tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], | ||
|  |             ret; | ||
|  | 
 | ||
|  |         for (i = 0; i < length; i += 1) { | ||
|  |             tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3); | ||
|  |         } | ||
|  | 
 | ||
|  |         this._finish(tail, length); | ||
|  |         ret = hex(this._hash); | ||
|  | 
 | ||
|  |         if (raw) { | ||
|  |             ret = hexToBinaryString(ret); | ||
|  |         } | ||
|  | 
 | ||
|  |         this.reset(); | ||
|  | 
 | ||
|  |         return ret; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Resets the internal state of the computation. | ||
|  |      * | ||
|  |      * @return {SparkMD5} The instance itself | ||
|  |      */ | ||
|  |     SparkMD5.prototype.reset = function () { | ||
|  |         this._buff = ''; | ||
|  |         this._length = 0; | ||
|  |         this._hash = [1732584193, -271733879, -1732584194, 271733878]; | ||
|  | 
 | ||
|  |         return this; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Gets the internal state of the computation. | ||
|  |      * | ||
|  |      * @return {Object} The state | ||
|  |      */ | ||
|  |     SparkMD5.prototype.getState = function () { | ||
|  |         return { | ||
|  |             buff: this._buff, | ||
|  |             length: this._length, | ||
|  |             hash: this._hash.slice() | ||
|  |         }; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Sets the internal state of the computation. | ||
|  |      * | ||
|  |      * @param {Object} state The state | ||
|  |      * | ||
|  |      * @return {SparkMD5} The instance itself | ||
|  |      */ | ||
|  |     SparkMD5.prototype.setState = function (state) { | ||
|  |         this._buff = state.buff; | ||
|  |         this._length = state.length; | ||
|  |         this._hash = state.hash; | ||
|  | 
 | ||
|  |         return this; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Releases memory used by the incremental buffer and other additional | ||
|  |      * resources. If you plan to use the instance again, use reset instead. | ||
|  |      */ | ||
|  |     SparkMD5.prototype.destroy = function () { | ||
|  |         delete this._hash; | ||
|  |         delete this._buff; | ||
|  |         delete this._length; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Finish the final calculation based on the tail. | ||
|  |      * | ||
|  |      * @param {Array}  tail   The tail (will be modified) | ||
|  |      * @param {Number} length The length of the remaining buffer | ||
|  |      */ | ||
|  |     SparkMD5.prototype._finish = function (tail, length) { | ||
|  |         var i = length, | ||
|  |             tmp, | ||
|  |             lo, | ||
|  |             hi; | ||
|  | 
 | ||
|  |         tail[i >> 2] |= 0x80 << ((i % 4) << 3); | ||
|  |         if (i > 55) { | ||
|  |             md5cycle(this._hash, tail); | ||
|  |             for (i = 0; i < 16; i += 1) { | ||
|  |                 tail[i] = 0; | ||
|  |             } | ||
|  |         } | ||
|  | 
 | ||
|  |         // Do the final computation based on the tail and length
 | ||
|  |         // Beware that the final length may not fit in 32 bits so we take care of that
 | ||
|  |         tmp = this._length * 8; | ||
|  |         tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/); | ||
|  |         lo = parseInt(tmp[2], 16); | ||
|  |         hi = parseInt(tmp[1], 16) || 0; | ||
|  | 
 | ||
|  |         tail[14] = lo; | ||
|  |         tail[15] = hi; | ||
|  |         md5cycle(this._hash, tail); | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Performs the md5 hash on a string. | ||
|  |      * A conversion will be applied if a UTF-8 string is detected. | ||
|  |      * | ||
|  |      * @param {String}  str The string | ||
|  |      * @param {Boolean} [raw] True to get the raw string, false to get the hex string | ||
|  |      * | ||
|  |      * @return {String} The result | ||
|  |      */ | ||
|  |     SparkMD5.hash = function (str, raw) { | ||
|  |         // Converts the string to utf8 bytes if necessary
 | ||
|  |         // Then compute it using the binary function
 | ||
|  |         return SparkMD5.hashBinary(toUtf8(str), raw); | ||
|  |     }; | ||
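|  |     // Example: SparkMD5.hash('hello') === '5d41402abc4b2a76b9719d911017c592', | ||
|  |     // the same vector used by the add32 self-test above. | ||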
|  | 
 | ||
|  |     /** | ||
|  |      * Performs the md5 hash on a binary string. | ||
|  |      * | ||
|  |      * @param {String}  content The binary string | ||
|  |      * @param {Boolean} [raw]     True to get the raw string, false to get the hex string | ||
|  |      * | ||
|  |      * @return {String} The result | ||
|  |      */ | ||
|  |     SparkMD5.hashBinary = function (content, raw) { | ||
|  |         var hash = md51(content), | ||
|  |             ret = hex(hash); | ||
|  | 
 | ||
|  |         return raw ? hexToBinaryString(ret) : ret; | ||
|  |     }; | ||
|  | 
 | ||
|  |     // ---------------------------------------------------
 | ||
|  | 
 | ||
|  |     /** | ||
|  |      * SparkMD5 OOP implementation for array buffers. | ||
|  |      * | ||
|  |      * Use this class to perform an incremental md5 ONLY for array buffers. | ||
|  |      */ | ||
|  |     SparkMD5.ArrayBuffer = function () { | ||
|  |         // call reset to init the instance
 | ||
|  |         this.reset(); | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Appends an array buffer. | ||
|  |      * | ||
|  |      * @param {ArrayBuffer} arr The array to be appended | ||
|  |      * | ||
|  |      * @return {SparkMD5.ArrayBuffer} The instance itself | ||
|  |      */ | ||
|  |     SparkMD5.ArrayBuffer.prototype.append = function (arr) { | ||
|  |         var buff = concatenateArrayBuffers(this._buff.buffer, arr, true), | ||
|  |             length = buff.length, | ||
|  |             i; | ||
|  | 
 | ||
|  |         this._length += arr.byteLength; | ||
|  | 
 | ||
|  |         for (i = 64; i <= length; i += 64) { | ||
|  |             md5cycle(this._hash, md5blk_array(buff.subarray(i - 64, i))); | ||
|  |         } | ||
|  | 
 | ||
|  |         this._buff = (i - 64) < length ? new Uint8Array(buff.buffer.slice(i - 64)) : new Uint8Array(0); | ||
|  | 
 | ||
|  |         return this; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Finishes the incremental computation, resetting the internal state and | ||
|  |      * returning the result. | ||
|  |      * | ||
|  |      * @param {Boolean} raw True to get the raw string, false to get the hex string | ||
|  |      * | ||
|  |      * @return {String} The result | ||
|  |      */ | ||
|  |     SparkMD5.ArrayBuffer.prototype.end = function (raw) { | ||
|  |         var buff = this._buff, | ||
|  |             length = buff.length, | ||
|  |             tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], | ||
|  |             i, | ||
|  |             ret; | ||
|  | 
 | ||
|  |         for (i = 0; i < length; i += 1) { | ||
|  |             tail[i >> 2] |= buff[i] << ((i % 4) << 3); | ||
|  |         } | ||
|  | 
 | ||
|  |         this._finish(tail, length); | ||
|  |         ret = hex(this._hash); | ||
|  | 
 | ||
|  |         if (raw) { | ||
|  |             ret = hexToBinaryString(ret); | ||
|  |         } | ||
|  | 
 | ||
|  |         this.reset(); | ||
|  | 
 | ||
|  |         return ret; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Resets the internal state of the computation. | ||
|  |      * | ||
|  |      * @return {SparkMD5.ArrayBuffer} The instance itself | ||
|  |      */ | ||
|  |     SparkMD5.ArrayBuffer.prototype.reset = function () { | ||
|  |         this._buff = new Uint8Array(0); | ||
|  |         this._length = 0; | ||
|  |         this._hash = [1732584193, -271733879, -1732584194, 271733878]; | ||
|  | 
 | ||
|  |         return this; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Gets the internal state of the computation. | ||
|  |      * | ||
|  |      * @return {Object} The state | ||
|  |      */ | ||
|  |     SparkMD5.ArrayBuffer.prototype.getState = function () { | ||
|  |         var state = SparkMD5.prototype.getState.call(this); | ||
|  | 
 | ||
|  |         // Convert buffer to a string
 | ||
|  |         state.buff = arrayBuffer2Utf8Str(state.buff); | ||
|  | 
 | ||
|  |         return state; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Sets the internal state of the computation. | ||
|  |      * | ||
|  |      * @param {Object} state The state | ||
|  |      * | ||
|  |      * @return {SparkMD5.ArrayBuffer} The instance itself | ||
|  |      */ | ||
|  |     SparkMD5.ArrayBuffer.prototype.setState = function (state) { | ||
|  |         // Convert string to buffer
 | ||
|  |         state.buff = utf8Str2ArrayBuffer(state.buff, true); | ||
|  | 
 | ||
|  |         return SparkMD5.prototype.setState.call(this, state); | ||
|  |     }; | ||
|  | 
 | ||
|  |     SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy; | ||
|  | 
 | ||
|  |     SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Performs the md5 hash on an array buffer. | ||
|  |      * | ||
|  |      * @param {ArrayBuffer} arr The array buffer | ||
|  |      * @param {Boolean}     [raw] True to get the raw string, false to get the hex one | ||
|  |      * | ||
|  |      * @return {String} The result | ||
|  |      */ | ||
|  |     SparkMD5.ArrayBuffer.hash = function (arr, raw) { | ||
|  |         var hash = md51_array(new Uint8Array(arr)), | ||
|  |             ret = hex(hash); | ||
|  | 
 | ||
|  |         return raw ? hexToBinaryString(ret) : ret; | ||
|  |     }; | ||
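|  |     // Example (illustrative): hashing the raw bytes of an ASCII string matches | ||
|  |     // the string API: | ||
|  |     //   SparkMD5.ArrayBuffer.hash(new Uint8Array([104, 101, 108, 108, 111]).buffer) | ||
|  |     //   // === SparkMD5.hash('hello') | ||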
|  | 
 | ||
|  |     return SparkMD5; | ||
|  | })); | ||
|  | 
 | ||
|  | },{}],85:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | module.exports = Stream; | ||
|  | 
 | ||
|  | var EE = _dereq_(26).EventEmitter; | ||
|  | var inherits = _dereq_(37); | ||
|  | 
 | ||
|  | inherits(Stream, EE); | ||
|  | Stream.Readable = _dereq_(97); | ||
|  | Stream.Writable = _dereq_(99); | ||
|  | Stream.Duplex = _dereq_(87); | ||
|  | Stream.Transform = _dereq_(98); | ||
|  | Stream.PassThrough = _dereq_(96); | ||
|  | 
 | ||
|  | // Backwards-compat with node 0.4.x
 | ||
|  | Stream.Stream = Stream; | ||
|  | 
 | ||
|  | 
 | ||
|  | 
 | ||
|  | // old-style streams.  Note that the pipe method (the only relevant
 | ||
|  | // part of this class) is overridden in the Readable class.
 | ||
|  | 
 | ||
|  | function Stream() { | ||
|  |   EE.call(this); | ||
|  | } | ||
|  | 
 | ||
|  | Stream.prototype.pipe = function(dest, options) { | ||
|  |   var source = this; | ||
|  | 
 | ||
|  |   function ondata(chunk) { | ||
|  |     if (dest.writable) { | ||
|  |       if (false === dest.write(chunk) && source.pause) { | ||
|  |         source.pause(); | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   source.on('data', ondata); | ||
|  | 
 | ||
|  |   function ondrain() { | ||
|  |     if (source.readable && source.resume) { | ||
|  |       source.resume(); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   dest.on('drain', ondrain); | ||
|  | 
 | ||
|  |   // If the 'end' option is not supplied, dest.end() will be called when
 | ||
|  |   // source gets the 'end' or 'close' events.  dest.end() is only called once.
 | ||
|  |   if (!dest._isStdio && (!options || options.end !== false)) { | ||
|  |     source.on('end', onend); | ||
|  |     source.on('close', onclose); | ||
|  |   } | ||
|  | 
 | ||
|  |   var didOnEnd = false; | ||
|  |   function onend() { | ||
|  |     if (didOnEnd) return; | ||
|  |     didOnEnd = true; | ||
|  | 
 | ||
|  |     dest.end(); | ||
|  |   } | ||
|  | 
 | ||
|  | 
 | ||
|  |   function onclose() { | ||
|  |     if (didOnEnd) return; | ||
|  |     didOnEnd = true; | ||
|  | 
 | ||
|  |     if (typeof dest.destroy === 'function') dest.destroy(); | ||
|  |   } | ||
|  | 
 | ||
|  |   // don't leave dangling pipes when there are errors.
 | ||
|  |   function onerror(er) { | ||
|  |     cleanup(); | ||
|  |     if (EE.listenerCount(this, 'error') === 0) { | ||
|  |       throw er; // Unhandled stream error in pipe.
 | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   source.on('error', onerror); | ||
|  |   dest.on('error', onerror); | ||
|  | 
 | ||
|  |   // remove all the event listeners that were added.
 | ||
|  |   function cleanup() { | ||
|  |     source.removeListener('data', ondata); | ||
|  |     dest.removeListener('drain', ondrain); | ||
|  | 
 | ||
|  |     source.removeListener('end', onend); | ||
|  |     source.removeListener('close', onclose); | ||
|  | 
 | ||
|  |     source.removeListener('error', onerror); | ||
|  |     dest.removeListener('error', onerror); | ||
|  | 
 | ||
|  |     source.removeListener('end', cleanup); | ||
|  |     source.removeListener('close', cleanup); | ||
|  | 
 | ||
|  |     dest.removeListener('close', cleanup); | ||
|  |   } | ||
|  | 
 | ||
|  |   source.on('end', cleanup); | ||
|  |   source.on('close', cleanup); | ||
|  | 
 | ||
|  |   dest.on('close', cleanup); | ||
|  | 
 | ||
|  |   dest.emit('pipe', source); | ||
|  | 
 | ||
|  |   // Allow for unix-like usage: A.pipe(B).pipe(C)
 | ||
|  |   return dest; | ||
|  | }; | ||
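|  | // Example (illustrative, with hypothetical streams): chained usage relies on | ||
|  | // the pause/resume backpressure handling above: | ||
|  | //   source.pipe(transform).pipe(destination); | ||
|  | // where each pipe() returns its destination, so further .pipe() calls chain. | ||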
|  | 
 | ||
|  | },{"26":26,"37":37,"87":87,"96":96,"97":97,"98":98,"99":99}],86:[function(_dereq_,module,exports){ | ||
|  | var toString = {}.toString; | ||
|  | 
 | ||
|  | module.exports = Array.isArray || function (arr) { | ||
|  |   return toString.call(arr) == '[object Array]'; | ||
|  | }; | ||
|  | 
 | ||
|  | },{}],87:[function(_dereq_,module,exports){ | ||
|  | module.exports = _dereq_(88); | ||
|  | 
 | ||
|  | },{"88":88}],88:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | // a duplex stream is just a stream that is both readable and writable.
 | ||
|  | // Since JS doesn't have multiple prototypal inheritance, this class
 | ||
|  | // prototypally inherits from Readable, and then parasitically from
 | ||
|  | // Writable.
 | ||
|  | 
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var pna = _dereq_(72); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var objectKeys = Object.keys || function (obj) { | ||
|  |   var keys = []; | ||
|  |   for (var key in obj) { | ||
|  |     keys.push(key); | ||
|  |   }return keys; | ||
|  | }; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | module.exports = Duplex; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = Object.create(_dereq_(14)); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | var Readable = _dereq_(90); | ||
|  | var Writable = _dereq_(92); | ||
|  | 
 | ||
|  | util.inherits(Duplex, Readable); | ||
|  | 
 | ||
|  | { | ||
|  |   // avoid scope creep, the keys array can then be collected
 | ||
|  |   var keys = objectKeys(Writable.prototype); | ||
|  |   for (var v = 0; v < keys.length; v++) { | ||
|  |     var method = keys[v]; | ||
|  |     if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function Duplex(options) { | ||
|  |   if (!(this instanceof Duplex)) return new Duplex(options); | ||
|  | 
 | ||
|  |   Readable.call(this, options); | ||
|  |   Writable.call(this, options); | ||
|  | 
 | ||
|  |   if (options && options.readable === false) this.readable = false; | ||
|  | 
 | ||
|  |   if (options && options.writable === false) this.writable = false; | ||
|  | 
 | ||
|  |   this.allowHalfOpen = true; | ||
|  |   if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; | ||
|  | 
 | ||
|  |   this.once('end', onend); | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function () { | ||
|  |     return this._writableState.highWaterMark; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | // the no-half-open enforcer
 | ||
|  | function onend() { | ||
|  |   // if we allow half-open state, or if the writable side ended,
 | ||
|  |   // then we're ok.
 | ||
|  |   if (this.allowHalfOpen || this._writableState.ended) return; | ||
|  | 
 | ||
|  |   // no more data can be written.
 | ||
|  |   // But allow more writes to happen in this tick.
 | ||
|  |   pna.nextTick(onEndNT, this); | ||
|  | } | ||
|  | 
 | ||
|  | function onEndNT(self) { | ||
|  |   self.end(); | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Duplex.prototype, 'destroyed', { | ||
|  |   get: function () { | ||
|  |     if (this._readableState === undefined || this._writableState === undefined) { | ||
|  |       return false; | ||
|  |     } | ||
|  |     return this._readableState.destroyed && this._writableState.destroyed; | ||
|  |   }, | ||
|  |   set: function (value) { | ||
|  |     // we ignore the value if the stream
 | ||
|  |     // has not been initialized yet
 | ||
|  |     if (this._readableState === undefined || this._writableState === undefined) { | ||
|  |       return; | ||
|  |     } | ||
|  | 
 | ||
|  |     // backward compatibility, the user is explicitly
 | ||
|  |     // managing destroyed
 | ||
|  |     this._readableState.destroyed = value; | ||
|  |     this._writableState.destroyed = value; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | Duplex.prototype._destroy = function (err, cb) { | ||
|  |   this.push(null); | ||
|  |   this.end(); | ||
|  | 
 | ||
|  |   pna.nextTick(cb, err); | ||
|  | }; | ||
|  | },{"14":14,"37":37,"72":72,"90":90,"92":92}],89:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | // a passthrough stream.
 | ||
|  | // basically just the most minimal sort of Transform stream.
 | ||
|  | // Every written chunk gets output as-is.
 | ||
|  | 
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | module.exports = PassThrough; | ||
|  | 
 | ||
|  | var Transform = _dereq_(91); | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = Object.create(_dereq_(14)); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | util.inherits(PassThrough, Transform); | ||
|  | 
 | ||
|  | function PassThrough(options) { | ||
|  |   if (!(this instanceof PassThrough)) return new PassThrough(options); | ||
|  | 
 | ||
|  |   Transform.call(this, options); | ||
|  | } | ||
|  | 
 | ||
|  | PassThrough.prototype._transform = function (chunk, encoding, cb) { | ||
|  |   cb(null, chunk); | ||
|  | }; | ||
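|  | // Example (illustrative): a PassThrough simply echoes what is written to it: | ||
|  | //   new PassThrough().on('data', function (c) { /* c === written chunk */ }).end('x'); | ||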
|  | },{"14":14,"37":37,"91":91}],90:[function(_dereq_,module,exports){ | ||
|  | (function (process,global){(function (){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var pna = _dereq_(72); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | module.exports = Readable; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var isArray = _dereq_(86); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var Duplex; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | Readable.ReadableState = ReadableState; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var EE = _dereq_(26).EventEmitter; | ||
|  | 
 | ||
|  | var EElistenerCount = function (emitter, type) { | ||
|  |   return emitter.listeners(type).length; | ||
|  | }; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var Stream = _dereq_(95); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var Buffer = _dereq_(100).Buffer; | ||
|  | var OurUint8Array = global.Uint8Array || function () {}; | ||
|  | function _uint8ArrayToBuffer(chunk) { | ||
|  |   return Buffer.from(chunk); | ||
|  | } | ||
|  | function _isUint8Array(obj) { | ||
|  |   return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; | ||
|  | } | ||
|  | 
 | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = Object.create(_dereq_(14)); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var debugUtil = _dereq_(11); | ||
|  | var debug = void 0; | ||
|  | if (debugUtil && debugUtil.debuglog) { | ||
|  |   debug = debugUtil.debuglog('stream'); | ||
|  | } else { | ||
|  |   debug = function () {}; | ||
|  | } | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | var BufferList = _dereq_(93); | ||
|  | var destroyImpl = _dereq_(94); | ||
|  | var StringDecoder; | ||
|  | 
 | ||
|  | util.inherits(Readable, Stream); | ||
|  | 
 | ||
|  | var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; | ||
|  | 
 | ||
|  | function prependListener(emitter, event, fn) { | ||
|  |   // Sadly this is not cacheable as some libraries bundle their own
 | ||
|  |   // event emitter implementation with them.
 | ||
|  |   if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); | ||
|  | 
 | ||
|  |   // This is a hack to make sure that our error handler is attached before any
 | ||
|  |   // userland ones.  NEVER DO THIS. This is here only because this code needs
 | ||
|  |   // to continue to work with older versions of Node.js that do not include
 | ||
|  |   // the prependListener() method. The goal is to eventually remove this hack.
 | ||
|  |   if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; | ||
|  | } | ||
|  | 
 | ||
|  | function ReadableState(options, stream) { | ||
|  |   Duplex = Duplex || _dereq_(88); | ||
|  | 
 | ||
|  |   options = options || {}; | ||
|  | 
 | ||
|  |   // Duplex streams are both readable and writable, but share
 | ||
|  |   // the same options object.
 | ||
|  |   // However, some cases require setting options to different
 | ||
|  |   // values for the readable and the writable sides of the duplex stream.
 | ||
|  |   // These options can be provided separately as readableXXX and writableXXX.
 | ||
|  |   var isDuplex = stream instanceof Duplex; | ||
|  | 
 | ||
|  |   // object stream flag. Used to make read(n) ignore n and to
 | ||
|  |   // make all the buffer merging and length checks go away
 | ||
|  |   this.objectMode = !!options.objectMode; | ||
|  | 
 | ||
|  |   if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; | ||
|  | 
 | ||
|  |   // the point at which it stops calling _read() to fill the buffer
 | ||
|  |   // Note: 0 is a valid value, means "don't call _read preemptively ever"
 | ||
|  |   var hwm = options.highWaterMark; | ||
|  |   var readableHwm = options.readableHighWaterMark; | ||
|  |   var defaultHwm = this.objectMode ? 16 : 16 * 1024; | ||
|  | 
 | ||
|  |   if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; | ||
|  | 
 | ||
|  |   // cast to ints.
 | ||
|  |   this.highWaterMark = Math.floor(this.highWaterMark); | ||
|  | 
 | ||
|  |   // A linked list is used to store data chunks instead of an array because the
 | ||
|  |   // linked list can remove elements from the beginning faster than
 | ||
|  |   // array.shift()
 | ||
|  |   this.buffer = new BufferList(); | ||
|  |   this.length = 0; | ||
|  |   this.pipes = null; | ||
|  |   this.pipesCount = 0; | ||
|  |   this.flowing = null; | ||
|  |   this.ended = false; | ||
|  |   this.endEmitted = false; | ||
|  |   this.reading = false; | ||
|  | 
 | ||
|  |   // a flag to be able to tell if the event 'readable'/'data' is emitted
 | ||
|  |   // immediately, or on a later tick.  We set this to true at first, because
 | ||
|  |   // any actions that shouldn't happen until "later" should generally also
 | ||
|  |   // not happen before the first read call.
 | ||
|  |   this.sync = true; | ||
|  | 
 | ||
|  |   // whenever we return null, then we set a flag to say
 | ||
|  |   // that we're awaiting a 'readable' event emission.
 | ||
|  |   this.needReadable = false; | ||
|  |   this.emittedReadable = false; | ||
|  |   this.readableListening = false; | ||
|  |   this.resumeScheduled = false; | ||
|  | 
 | ||
|  |   // has it been destroyed
 | ||
|  |   this.destroyed = false; | ||
|  | 
 | ||
|  |   // Crypto is kind of old and crusty.  Historically, its default string
 | ||
|  |   // encoding is 'binary' so we have to make this configurable.
 | ||
|  |   // Everything else in the universe uses 'utf8', though.
 | ||
|  |   this.defaultEncoding = options.defaultEncoding || 'utf8'; | ||
|  | 
 | ||
|  |   // the number of writers that are awaiting a drain event in .pipe()s
 | ||
|  |   this.awaitDrain = 0; | ||
|  | 
 | ||
|  |   // if true, a maybeReadMore has been scheduled
 | ||
|  |   this.readingMore = false; | ||
|  | 
 | ||
|  |   this.decoder = null; | ||
|  |   this.encoding = null; | ||
|  |   if (options.encoding) { | ||
|  |     if (!StringDecoder) StringDecoder = _dereq_(101).StringDecoder; | ||
|  |     this.decoder = new StringDecoder(options.encoding); | ||
|  |     this.encoding = options.encoding; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function Readable(options) { | ||
|  |   Duplex = Duplex || _dereq_(88); | ||
|  | 
 | ||
|  |   if (!(this instanceof Readable)) return new Readable(options); | ||
|  | 
 | ||
|  |   this._readableState = new ReadableState(options, this); | ||
|  | 
 | ||
|  |   // legacy
 | ||
|  |   this.readable = true; | ||
|  | 
 | ||
|  |   if (options) { | ||
|  |     if (typeof options.read === 'function') this._read = options.read; | ||
|  | 
 | ||
|  |     if (typeof options.destroy === 'function') this._destroy = options.destroy; | ||
|  |   } | ||
|  | 
 | ||
|  |   Stream.call(this); | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Readable.prototype, 'destroyed', { | ||
|  |   get: function () { | ||
|  |     if (this._readableState === undefined) { | ||
|  |       return false; | ||
|  |     } | ||
|  |     return this._readableState.destroyed; | ||
|  |   }, | ||
|  |   set: function (value) { | ||
|  |     // we ignore the value if the stream
 | ||
|  |     // has not been initialized yet
 | ||
|  |     if (!this._readableState) { | ||
|  |       return; | ||
|  |     } | ||
|  | 
 | ||
|  |     // backward compatibility, the user is explicitly
 | ||
|  |     // managing destroyed
 | ||
|  |     this._readableState.destroyed = value; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | Readable.prototype.destroy = destroyImpl.destroy; | ||
|  | Readable.prototype._undestroy = destroyImpl.undestroy; | ||
|  | Readable.prototype._destroy = function (err, cb) { | ||
|  |   this.push(null); | ||
|  |   cb(err); | ||
|  | }; | ||
|  | 
 | ||
|  | // Manually shove something into the read() buffer.
 | ||
|  | // This returns true if the highWaterMark has not been hit yet,
 | ||
|  | // similar to how Writable.write() returns true if you should
 | ||
|  | // write() some more.
 | ||
|  | Readable.prototype.push = function (chunk, encoding) { | ||
|  |   var state = this._readableState; | ||
|  |   var skipChunkCheck; | ||
|  | 
 | ||
|  |   if (!state.objectMode) { | ||
|  |     if (typeof chunk === 'string') { | ||
|  |       encoding = encoding || state.defaultEncoding; | ||
|  |       if (encoding !== state.encoding) { | ||
|  |         chunk = Buffer.from(chunk, encoding); | ||
|  |         encoding = ''; | ||
|  |       } | ||
|  |       skipChunkCheck = true; | ||
|  |     } | ||
|  |   } else { | ||
|  |     skipChunkCheck = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); | ||
|  | }; | ||
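|  | // Example (illustrative): push() reports backpressure once the buffered length | ||
|  | // reaches highWaterMark: | ||
|  | //   var r = new Readable({ highWaterMark: 2, read: function () {} }); | ||
|  | //   r.push('a');   // true  -- still below the high water mark | ||
|  | //   r.push('b');   // false -- caller should pause until _read() is called again | ||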
|  | 
 | ||
|  | // Unshift should *always* be something directly out of read()
 | ||
|  | Readable.prototype.unshift = function (chunk) { | ||
|  |   return readableAddChunk(this, chunk, null, true, false); | ||
|  | }; | ||
|  | 
 | ||
|  | function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { | ||
|  |   var state = stream._readableState; | ||
|  |   if (chunk === null) { | ||
|  |     state.reading = false; | ||
|  |     onEofChunk(stream, state); | ||
|  |   } else { | ||
|  |     var er; | ||
|  |     if (!skipChunkCheck) er = chunkInvalid(state, chunk); | ||
|  |     if (er) { | ||
|  |       stream.emit('error', er); | ||
|  |     } else if (state.objectMode || chunk && chunk.length > 0) { | ||
|  |       if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { | ||
|  |         chunk = _uint8ArrayToBuffer(chunk); | ||
|  |       } | ||
|  | 
 | ||
|  |       if (addToFront) { | ||
|  |         if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); | ||
|  |       } else if (state.ended) { | ||
|  |         stream.emit('error', new Error('stream.push() after EOF')); | ||
|  |       } else { | ||
|  |         state.reading = false; | ||
|  |         if (state.decoder && !encoding) { | ||
|  |           chunk = state.decoder.write(chunk); | ||
|  |           if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); | ||
|  |         } else { | ||
|  |           addChunk(stream, state, chunk, false); | ||
|  |         } | ||
|  |       } | ||
|  |     } else if (!addToFront) { | ||
|  |       state.reading = false; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return needMoreData(state); | ||
|  | } | ||
|  | 
 | ||
|  | function addChunk(stream, state, chunk, addToFront) { | ||
|  |   if (state.flowing && state.length === 0 && !state.sync) { | ||
|  |     stream.emit('data', chunk); | ||
|  |     stream.read(0); | ||
|  |   } else { | ||
|  |     // update the buffer info.
 | ||
|  |     state.length += state.objectMode ? 1 : chunk.length; | ||
|  |     if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); | ||
|  | 
 | ||
|  |     if (state.needReadable) emitReadable(stream); | ||
|  |   } | ||
|  |   maybeReadMore(stream, state); | ||
|  | } | ||
|  | 
 | ||
|  | function chunkInvalid(state, chunk) { | ||
|  |   var er; | ||
|  |   if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { | ||
|  |     er = new TypeError('Invalid non-string/buffer chunk'); | ||
|  |   } | ||
|  |   return er; | ||
|  | } | ||
|  | 
 | ||
|  | // if it's past the high water mark, we can push in some more.
 | ||
|  | // Also, if we have no data yet, we can stand some
 | ||
|  | // more bytes.  This is to work around cases where hwm=0,
 | ||
|  | // such as the repl.  Also, if the push() triggered a
 | ||
|  | // readable event, and the user called read(largeNumber) such that
 | ||
|  | // needReadable was set, then we ought to push more, so that another
 | ||
|  | // 'readable' event will be triggered.
 | ||
|  | function needMoreData(state) { | ||
|  |   return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); | ||
|  | } | ||
|  | 
 | ||
|  | Readable.prototype.isPaused = function () { | ||
|  |   return this._readableState.flowing === false; | ||
|  | }; | ||
|  | 
 | ||
|  | // backwards compatibility.
 | ||
|  | Readable.prototype.setEncoding = function (enc) { | ||
|  |   if (!StringDecoder) StringDecoder = _dereq_(101).StringDecoder; | ||
|  |   this._readableState.decoder = new StringDecoder(enc); | ||
|  |   this._readableState.encoding = enc; | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | // Don't raise the hwm > 8MB
 | ||
|  | var MAX_HWM = 0x800000; | ||
|  | function computeNewHighWaterMark(n) { | ||
|  |   if (n >= MAX_HWM) { | ||
|  |     n = MAX_HWM; | ||
|  |   } else { | ||
|  |     // Get the next highest power of 2 to prevent increasing hwm excessively in
 | ||
|  |     // tiny amounts
 | ||
|  |     n--; | ||
|  |     n |= n >>> 1; | ||
|  |     n |= n >>> 2; | ||
|  |     n |= n >>> 4; | ||
|  |     n |= n >>> 8; | ||
|  |     n |= n >>> 16; | ||
|  |     n++; | ||
|  |   } | ||
|  |   return n; | ||
|  | } | ||
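// --- Worked example (editorial addition) ---
// computeNewHighWaterMark(100000)  === 131072   (next power of two above the request)
// computeNewHighWaterMark(16384)   === 16384    (already a power of two, unchanged)
// computeNewHighWaterMark(9000000) === 8388608  (clamped to MAX_HWM, 8MB)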
|  | 
 | ||
|  | // This function is designed to be inlinable, so please take care when making
 | ||
|  | // changes to the function body.
 | ||
|  | function howMuchToRead(n, state) { | ||
|  |   if (n <= 0 || state.length === 0 && state.ended) return 0; | ||
|  |   if (state.objectMode) return 1; | ||
|  |   if (n !== n) { | ||
|  |     // Only flow one buffer at a time
 | ||
|  |     if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; | ||
|  |   } | ||
|  |   // If we're asking for more than the current hwm, then raise the hwm.
 | ||
|  |   if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); | ||
|  |   if (n <= state.length) return n; | ||
|  |   // Don't have enough
 | ||
|  |   if (!state.ended) { | ||
|  |     state.needReadable = true; | ||
|  |     return 0; | ||
|  |   } | ||
|  |   return state.length; | ||
|  | } | ||
|  | 
 | ||
|  | // you can override either this method, or the async _read(n) below.
 | ||
|  | Readable.prototype.read = function (n) { | ||
|  |   debug('read', n); | ||
|  |   n = parseInt(n, 10); | ||
|  |   var state = this._readableState; | ||
|  |   var nOrig = n; | ||
|  | 
 | ||
|  |   if (n !== 0) state.emittedReadable = false; | ||
|  | 
 | ||
|  |   // if we're doing read(0) to trigger a readable event, but we
 | ||
|  |   // already have a bunch of data in the buffer, then just trigger
 | ||
|  |   // the 'readable' event and move on.
 | ||
|  |   if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { | ||
|  |     debug('read: emitReadable', state.length, state.ended); | ||
|  |     if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); | ||
|  |     return null; | ||
|  |   } | ||
|  | 
 | ||
|  |   n = howMuchToRead(n, state); | ||
|  | 
 | ||
|  |   // if we've ended, and we're now clear, then finish it up.
 | ||
|  |   if (n === 0 && state.ended) { | ||
|  |     if (state.length === 0) endReadable(this); | ||
|  |     return null; | ||
|  |   } | ||
|  | 
 | ||
|  |   // All the actual chunk generation logic needs to be
 | ||
|  |   // *below* the call to _read.  The reason is that in certain
 | ||
|  |   // synthetic stream cases, such as passthrough streams, _read
 | ||
|  |   // may be a completely synchronous operation which may change
 | ||
|  |   // the state of the read buffer, providing enough data when
 | ||
|  |   // before there was *not* enough.
 | ||
|  |   //
 | ||
|  |   // So, the steps are:
 | ||
|  |   // 1. Figure out what the state of things will be after we do
 | ||
|  |   // a read from the buffer.
 | ||
|  |   //
 | ||
|  |   // 2. If that resulting state will trigger a _read, then call _read.
 | ||
|  |   // Note that this may be asynchronous, or synchronous.  Yes, it is
 | ||
|  |   // deeply ugly to write APIs this way, but that still doesn't mean
 | ||
|  |   // that the Readable class should behave improperly, as streams are
 | ||
|  |   // designed to be sync/async agnostic.
 | ||
|  |   // Take note if the _read call is sync or async (ie, if the read call
 | ||
|  |   // has returned yet), so that we know whether or not it's safe to emit
 | ||
|  |   // 'readable' etc.
 | ||
|  |   //
 | ||
|  |   // 3. Actually pull the requested chunks out of the buffer and return.
 | ||
|  | 
 | ||
|  |   // if we need a readable event, then we need to do some reading.
 | ||
|  |   var doRead = state.needReadable; | ||
|  |   debug('need readable', doRead); | ||
|  | 
 | ||
|  |   // if we currently have less than the highWaterMark, then also read some
 | ||
|  |   if (state.length === 0 || state.length - n < state.highWaterMark) { | ||
|  |     doRead = true; | ||
|  |     debug('length less than watermark', doRead); | ||
|  |   } | ||
|  | 
 | ||
|  |   // however, if we've ended, then there's no point, and if we're already
 | ||
|  |   // reading, then it's unnecessary.
 | ||
|  |   if (state.ended || state.reading) { | ||
|  |     doRead = false; | ||
|  |     debug('reading or ended', doRead); | ||
|  |   } else if (doRead) { | ||
|  |     debug('do read'); | ||
|  |     state.reading = true; | ||
|  |     state.sync = true; | ||
|  |     // if the length is currently zero, then we *need* a readable event.
 | ||
|  |     if (state.length === 0) state.needReadable = true; | ||
|  |     // call internal read method
 | ||
|  |     this._read(state.highWaterMark); | ||
|  |     state.sync = false; | ||
|  |     // If _read pushed data synchronously, then `reading` will be false,
 | ||
|  |     // and we need to re-evaluate how much data we can return to the user.
 | ||
|  |     if (!state.reading) n = howMuchToRead(nOrig, state); | ||
|  |   } | ||
|  | 
 | ||
|  |   var ret; | ||
|  |   if (n > 0) ret = fromList(n, state);else ret = null; | ||
|  | 
 | ||
|  |   if (ret === null) { | ||
|  |     state.needReadable = true; | ||
|  |     n = 0; | ||
|  |   } else { | ||
|  |     state.length -= n; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (state.length === 0) { | ||
|  |     // If we have nothing in the buffer, then we want to know
 | ||
|  |     // as soon as we *do* get something into the buffer.
 | ||
|  |     if (!state.ended) state.needReadable = true; | ||
|  | 
 | ||
|  |     // If we tried to read() past the EOF, then emit end on the next tick.
 | ||
|  |     if (nOrig !== n && state.ended) endReadable(this); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (ret !== null) this.emit('data', ret); | ||
|  | 
 | ||
|  |   return ret; | ||
|  | }; | ||
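// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// Typical "paused mode" consumption: wait for 'readable', then drain the buffer
// by calling read() until it returns null; 'end' fires once the source has
// pushed null and the buffer is empty. `onChunk`/`onDone` are hypothetical callbacks.
function exampleReadableConsumer_(rs, onChunk, onDone) {
  rs.on('readable', function () {
    var chunk;
    while ((chunk = rs.read()) !== null) {
      onChunk(chunk);
    }
  });
  rs.on('end', onDone);
}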
|  | 
 | ||
|  | function onEofChunk(stream, state) { | ||
|  |   if (state.ended) return; | ||
|  |   if (state.decoder) { | ||
|  |     var chunk = state.decoder.end(); | ||
|  |     if (chunk && chunk.length) { | ||
|  |       state.buffer.push(chunk); | ||
|  |       state.length += state.objectMode ? 1 : chunk.length; | ||
|  |     } | ||
|  |   } | ||
|  |   state.ended = true; | ||
|  | 
 | ||
|  |   // emit 'readable' now to make sure it gets picked up.
 | ||
|  |   emitReadable(stream); | ||
|  | } | ||
|  | 
 | ||
|  | // Don't emit readable right away in sync mode, because this can trigger
 | ||
|  | // another read() call => stack overflow.  This way, it might trigger
 | ||
|  | // a nextTick recursion warning, but that's not so bad.
 | ||
|  | function emitReadable(stream) { | ||
|  |   var state = stream._readableState; | ||
|  |   state.needReadable = false; | ||
|  |   if (!state.emittedReadable) { | ||
|  |     debug('emitReadable', state.flowing); | ||
|  |     state.emittedReadable = true; | ||
|  |     if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function emitReadable_(stream) { | ||
|  |   debug('emit readable'); | ||
|  |   stream.emit('readable'); | ||
|  |   flow(stream); | ||
|  | } | ||
|  | 
 | ||
|  | // at this point, the user has presumably seen the 'readable' event,
 | ||
|  | // and called read() to consume some data.  that may have triggered
 | ||
|  | // in turn another _read(n) call, in which case reading = true if
 | ||
|  | // it's in progress.
 | ||
|  | // However, if we're not ended, or reading, and the length < hwm,
 | ||
|  | // then go ahead and try to read some more preemptively.
 | ||
|  | function maybeReadMore(stream, state) { | ||
|  |   if (!state.readingMore) { | ||
|  |     state.readingMore = true; | ||
|  |     pna.nextTick(maybeReadMore_, stream, state); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function maybeReadMore_(stream, state) { | ||
|  |   var len = state.length; | ||
|  |   while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { | ||
|  |     debug('maybeReadMore read 0'); | ||
|  |     stream.read(0); | ||
|  |     if (len === state.length) | ||
|  |       // didn't get any data, stop spinning.
 | ||
|  |       break;else len = state.length; | ||
|  |   } | ||
|  |   state.readingMore = false; | ||
|  | } | ||
|  | 
 | ||
|  | // abstract method.  to be overridden in specific implementation classes.
 | ||
|  | // implementations should push() data (typically <= n in length), or push(null) at EOF.
 | ||
|  | // for virtual (non-string, non-buffer) streams, "length" is somewhat
 | ||
|  | // arbitrary, and perhaps not very meaningful.
 | ||
|  | Readable.prototype._read = function (n) { | ||
|  |   this.emit('error', new Error('_read() is not implemented')); | ||
|  | }; | ||
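// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// A minimal counting stream that supplies _read() through the constructor
// options; it pushes string chunks and then null to signal end-of-stream.
function exampleCountingStream_(limit) {
  var i = 0;
  return new Readable({
    encoding: 'utf8',                 // optional: hand strings to the consumer
    read: function () {
      i += 1;
      this.push(i <= limit ? String(i) + '\n' : null);
    }
  });
}
// Usage sketch: exampleCountingStream_(3).pipe(process.stdout) prints 1, 2, 3.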
|  | 
 | ||
|  | Readable.prototype.pipe = function (dest, pipeOpts) { | ||
|  |   var src = this; | ||
|  |   var state = this._readableState; | ||
|  | 
 | ||
|  |   switch (state.pipesCount) { | ||
|  |     case 0: | ||
|  |       state.pipes = dest; | ||
|  |       break; | ||
|  |     case 1: | ||
|  |       state.pipes = [state.pipes, dest]; | ||
|  |       break; | ||
|  |     default: | ||
|  |       state.pipes.push(dest); | ||
|  |       break; | ||
|  |   } | ||
|  |   state.pipesCount += 1; | ||
|  |   debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); | ||
|  | 
 | ||
|  |   var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; | ||
|  | 
 | ||
|  |   var endFn = doEnd ? onend : unpipe; | ||
|  |   if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); | ||
|  | 
 | ||
|  |   dest.on('unpipe', onunpipe); | ||
|  |   function onunpipe(readable, unpipeInfo) { | ||
|  |     debug('onunpipe'); | ||
|  |     if (readable === src) { | ||
|  |       if (unpipeInfo && unpipeInfo.hasUnpiped === false) { | ||
|  |         unpipeInfo.hasUnpiped = true; | ||
|  |         cleanup(); | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   function onend() { | ||
|  |     debug('onend'); | ||
|  |     dest.end(); | ||
|  |   } | ||
|  | 
 | ||
|  |   // when the dest drains, it reduces the awaitDrain counter
 | ||
|  |   // on the source.  This would be more elegant with a .once()
 | ||
|  |   // handler in flow(), but adding and removing repeatedly is
 | ||
|  |   // too slow.
 | ||
|  |   var ondrain = pipeOnDrain(src); | ||
|  |   dest.on('drain', ondrain); | ||
|  | 
 | ||
|  |   var cleanedUp = false; | ||
|  |   function cleanup() { | ||
|  |     debug('cleanup'); | ||
|  |     // cleanup event handlers once the pipe is broken
 | ||
|  |     dest.removeListener('close', onclose); | ||
|  |     dest.removeListener('finish', onfinish); | ||
|  |     dest.removeListener('drain', ondrain); | ||
|  |     dest.removeListener('error', onerror); | ||
|  |     dest.removeListener('unpipe', onunpipe); | ||
|  |     src.removeListener('end', onend); | ||
|  |     src.removeListener('end', unpipe); | ||
|  |     src.removeListener('data', ondata); | ||
|  | 
 | ||
|  |     cleanedUp = true; | ||
|  | 
 | ||
|  |     // if the reader is waiting for a drain event from this
 | ||
|  |     // specific writer, then it would cause it to never start
 | ||
|  |     // flowing again.
 | ||
|  |     // So, if this is awaiting a drain, then we just call it now.
 | ||
|  |     // If we don't know, then assume that we are waiting for one.
 | ||
|  |     if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); | ||
|  |   } | ||
|  | 
 | ||
|  |   // If the user pushes more data while we're writing to dest then we'll end up
 | ||
|  |   // in ondata again. However, we only want to increase awaitDrain once because
 | ||
|  |   // dest will only emit one 'drain' event for the multiple writes.
 | ||
|  |   // => Introduce a guard on increasing awaitDrain.
 | ||
|  |   var increasedAwaitDrain = false; | ||
|  |   src.on('data', ondata); | ||
|  |   function ondata(chunk) { | ||
|  |     debug('ondata'); | ||
|  |     increasedAwaitDrain = false; | ||
|  |     var ret = dest.write(chunk); | ||
|  |     if (false === ret && !increasedAwaitDrain) { | ||
|  |       // If the user unpiped during `dest.write()`, it is possible
 | ||
|  |       // to get stuck in a permanently paused state if that write
 | ||
|  |       // also returned false.
 | ||
|  |       // => Check whether `dest` is still a piping destination.
 | ||
|  |       if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { | ||
|  |         debug('false write response, pause', src._readableState.awaitDrain); | ||
|  |         src._readableState.awaitDrain++; | ||
|  |         increasedAwaitDrain = true; | ||
|  |       } | ||
|  |       src.pause(); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   // if the dest has an error, then stop piping into it.
 | ||
|  |   // however, don't suppress the throwing behavior for this.
 | ||
|  |   function onerror(er) { | ||
|  |     debug('onerror', er); | ||
|  |     unpipe(); | ||
|  |     dest.removeListener('error', onerror); | ||
|  |     if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); | ||
|  |   } | ||
|  | 
 | ||
|  |   // Make sure our error handler is attached before userland ones.
 | ||
|  |   prependListener(dest, 'error', onerror); | ||
|  | 
 | ||
|  |   // Both close and finish should trigger unpipe, but only once.
 | ||
|  |   function onclose() { | ||
|  |     dest.removeListener('finish', onfinish); | ||
|  |     unpipe(); | ||
|  |   } | ||
|  |   dest.once('close', onclose); | ||
|  |   function onfinish() { | ||
|  |     debug('onfinish'); | ||
|  |     dest.removeListener('close', onclose); | ||
|  |     unpipe(); | ||
|  |   } | ||
|  |   dest.once('finish', onfinish); | ||
|  | 
 | ||
|  |   function unpipe() { | ||
|  |     debug('unpipe'); | ||
|  |     src.unpipe(dest); | ||
|  |   } | ||
|  | 
 | ||
|  |   // tell the dest that it's being piped to
 | ||
|  |   dest.emit('pipe', src); | ||
|  | 
 | ||
|  |   // start the flow if it hasn't been started already.
 | ||
|  |   if (!state.flowing) { | ||
|  |     debug('pipe resume'); | ||
|  |     src.resume(); | ||
|  |   } | ||
|  | 
 | ||
|  |   return dest; | ||
|  | }; | ||
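// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// pipe() wires backpressure automatically: when dest.write() returns false the
// source pauses and awaitDrain is incremented, and the 'drain' handler above
// resumes the flow. A caller only connects the two streams:
function examplePipe_(src, dest) {
  // pipe() returns dest, so pipelines can be chained: a.pipe(b).pipe(c)
  return src.pipe(dest, { end: true });   // end: true is the default
}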
|  | 
 | ||
|  | function pipeOnDrain(src) { | ||
|  |   return function () { | ||
|  |     var state = src._readableState; | ||
|  |     debug('pipeOnDrain', state.awaitDrain); | ||
|  |     if (state.awaitDrain) state.awaitDrain--; | ||
|  |     if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { | ||
|  |       state.flowing = true; | ||
|  |       flow(src); | ||
|  |     } | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | Readable.prototype.unpipe = function (dest) { | ||
|  |   var state = this._readableState; | ||
|  |   var unpipeInfo = { hasUnpiped: false }; | ||
|  | 
 | ||
|  |   // if we're not piping anywhere, then do nothing.
 | ||
|  |   if (state.pipesCount === 0) return this; | ||
|  | 
 | ||
|  |   // just one destination.  most common case.
 | ||
|  |   if (state.pipesCount === 1) { | ||
|  |     // passed in one, but it's not the right one.
 | ||
|  |     if (dest && dest !== state.pipes) return this; | ||
|  | 
 | ||
|  |     if (!dest) dest = state.pipes; | ||
|  | 
 | ||
|  |     // got a match.
 | ||
|  |     state.pipes = null; | ||
|  |     state.pipesCount = 0; | ||
|  |     state.flowing = false; | ||
|  |     if (dest) dest.emit('unpipe', this, unpipeInfo); | ||
|  |     return this; | ||
|  |   } | ||
|  | 
 | ||
|  |   // slow case. multiple pipe destinations.
 | ||
|  | 
 | ||
|  |   if (!dest) { | ||
|  |     // remove all.
 | ||
|  |     var dests = state.pipes; | ||
|  |     var len = state.pipesCount; | ||
|  |     state.pipes = null; | ||
|  |     state.pipesCount = 0; | ||
|  |     state.flowing = false; | ||
|  | 
 | ||
|  |     for (var i = 0; i < len; i++) { | ||
|  |       dests[i].emit('unpipe', this, unpipeInfo); | ||
|  |     } | ||
|  |     return this; | ||
|  |   } | ||
|  | 
 | ||
|  |   // try to find the right one.
 | ||
|  |   var index = indexOf(state.pipes, dest); | ||
|  |   if (index === -1) return this; | ||
|  | 
 | ||
|  |   state.pipes.splice(index, 1); | ||
|  |   state.pipesCount -= 1; | ||
|  |   if (state.pipesCount === 1) state.pipes = state.pipes[0]; | ||
|  | 
 | ||
|  |   dest.emit('unpipe', this, unpipeInfo); | ||
|  | 
 | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | // set up data events if they are asked for
 | ||
|  | // Ensure readable listeners eventually get something
 | ||
|  | Readable.prototype.on = function (ev, fn) { | ||
|  |   var res = Stream.prototype.on.call(this, ev, fn); | ||
|  | 
 | ||
|  |   if (ev === 'data') { | ||
|  |     // Start flowing on next tick if stream isn't explicitly paused
 | ||
|  |     if (this._readableState.flowing !== false) this.resume(); | ||
|  |   } else if (ev === 'readable') { | ||
|  |     var state = this._readableState; | ||
|  |     if (!state.endEmitted && !state.readableListening) { | ||
|  |       state.readableListening = state.needReadable = true; | ||
|  |       state.emittedReadable = false; | ||
|  |       if (!state.reading) { | ||
|  |         pna.nextTick(nReadingNextTick, this); | ||
|  |       } else if (state.length) { | ||
|  |         emitReadable(this); | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return res; | ||
|  | }; | ||
|  | Readable.prototype.addListener = Readable.prototype.on; | ||
|  | 
 | ||
|  | function nReadingNextTick(self) { | ||
|  |   debug('readable nexttick read 0'); | ||
|  |   self.read(0); | ||
|  | } | ||
|  | 
 | ||
|  | // pause() and resume() are remnants of the legacy readable stream API
 | ||
|  | // If the user uses them, then switch into old mode.
 | ||
|  | Readable.prototype.resume = function () { | ||
|  |   var state = this._readableState; | ||
|  |   if (!state.flowing) { | ||
|  |     debug('resume'); | ||
|  |     state.flowing = true; | ||
|  |     resume(this, state); | ||
|  |   } | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | function resume(stream, state) { | ||
|  |   if (!state.resumeScheduled) { | ||
|  |     state.resumeScheduled = true; | ||
|  |     pna.nextTick(resume_, stream, state); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function resume_(stream, state) { | ||
|  |   if (!state.reading) { | ||
|  |     debug('resume read 0'); | ||
|  |     stream.read(0); | ||
|  |   } | ||
|  | 
 | ||
|  |   state.resumeScheduled = false; | ||
|  |   state.awaitDrain = 0; | ||
|  |   stream.emit('resume'); | ||
|  |   flow(stream); | ||
|  |   if (state.flowing && !state.reading) stream.read(0); | ||
|  | } | ||
|  | 
 | ||
|  | Readable.prototype.pause = function () { | ||
|  |   debug('call pause flowing=%j', this._readableState.flowing); | ||
|  |   if (false !== this._readableState.flowing) { | ||
|  |     debug('pause'); | ||
|  |     this._readableState.flowing = false; | ||
|  |     this.emit('pause'); | ||
|  |   } | ||
|  |   return this; | ||
|  | }; | ||
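// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// Flow can be throttled from a 'data' handler: pause() leaves flowing mode,
// resume() schedules it again on the next tick. `handleChunk` is a hypothetical
// async worker used only for illustration.
function exampleThrottle_(rs, handleChunk) {
  rs.on('data', function (chunk) {
    rs.pause();                         // stop the flowing loop while we work
    handleChunk(chunk, function () {
      rs.resume();                      // pick the flow back up
    });
  });
}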
|  | 
 | ||
|  | function flow(stream) { | ||
|  |   var state = stream._readableState; | ||
|  |   debug('flow', state.flowing); | ||
|  |   while (state.flowing && stream.read() !== null) {} | ||
|  | } | ||
|  | 
 | ||
|  | // wrap an old-style stream as the async data source.
 | ||
|  | // This is *not* part of the readable stream interface.
 | ||
|  | // It is an ugly unfortunate mess of history.
 | ||
|  | Readable.prototype.wrap = function (stream) { | ||
|  |   var _this = this; | ||
|  | 
 | ||
|  |   var state = this._readableState; | ||
|  |   var paused = false; | ||
|  | 
 | ||
|  |   stream.on('end', function () { | ||
|  |     debug('wrapped end'); | ||
|  |     if (state.decoder && !state.ended) { | ||
|  |       var chunk = state.decoder.end(); | ||
|  |       if (chunk && chunk.length) _this.push(chunk); | ||
|  |     } | ||
|  | 
 | ||
|  |     _this.push(null); | ||
|  |   }); | ||
|  | 
 | ||
|  |   stream.on('data', function (chunk) { | ||
|  |     debug('wrapped data'); | ||
|  |     if (state.decoder) chunk = state.decoder.write(chunk); | ||
|  | 
 | ||
|  |     // don't skip over falsy values in objectMode
 | ||
|  |     if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; | ||
|  | 
 | ||
|  |     var ret = _this.push(chunk); | ||
|  |     if (!ret) { | ||
|  |       paused = true; | ||
|  |       stream.pause(); | ||
|  |     } | ||
|  |   }); | ||
|  | 
 | ||
|  |   // proxy all the other methods.
 | ||
|  |   // important when wrapping filters and duplexes.
 | ||
|  |   for (var i in stream) { | ||
|  |     if (this[i] === undefined && typeof stream[i] === 'function') { | ||
|  |       this[i] = function (method) { | ||
|  |         return function () { | ||
|  |           return stream[method].apply(stream, arguments); | ||
|  |         }; | ||
|  |       }(i); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   // proxy certain important events.
 | ||
|  |   for (var n = 0; n < kProxyEvents.length; n++) { | ||
|  |     stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); | ||
|  |   } | ||
|  | 
 | ||
|  |   // when we try to consume some more bytes, simply unpause the
 | ||
|  |   // underlying stream.
 | ||
|  |   this._read = function (n) { | ||
|  |     debug('wrapped _read', n); | ||
|  |     if (paused) { | ||
|  |       paused = false; | ||
|  |       stream.resume(); | ||
|  |     } | ||
|  |   }; | ||
|  | 
 | ||
|  |   return this; | ||
|  | }; | ||
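// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// Wrapping a pre-streams2 source (an "old-style" stream that only emits 'data'
// and 'end') so it can be consumed with read()/pipe(). `oldStream` is assumed
// to be such a legacy stream.
function exampleWrap_(oldStream) {
  var rs = new Readable();
  rs.wrap(oldStream);     // wrap() installs a _read that unpauses oldStream
  return rs;              // rs.read() / rs.pipe(...) now work as usual
}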
|  | 
 | ||
|  | Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function () { | ||
|  |     return this._readableState.highWaterMark; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | // exposed for testing purposes only.
 | ||
|  | Readable._fromList = fromList; | ||
|  | 
 | ||
|  | // Pluck off n bytes from an array of buffers.
 | ||
|  | // Length is the combined lengths of all the buffers in the list.
 | ||
|  | // This function is designed to be inlinable, so please take care when making
 | ||
|  | // changes to the function body.
 | ||
|  | function fromList(n, state) { | ||
|  |   // nothing buffered
 | ||
|  |   if (state.length === 0) return null; | ||
|  | 
 | ||
|  |   var ret; | ||
|  |   if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { | ||
|  |     // read it all, truncate the list
 | ||
|  |     if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); | ||
|  |     state.buffer.clear(); | ||
|  |   } else { | ||
|  |     // read part of list
 | ||
|  |     ret = fromListPartial(n, state.buffer, state.decoder); | ||
|  |   } | ||
|  | 
 | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | // Extracts only enough buffered data to satisfy the amount requested.
 | ||
|  | // This function is designed to be inlinable, so please take care when making
 | ||
|  | // changes to the function body.
 | ||
|  | function fromListPartial(n, list, hasStrings) { | ||
|  |   var ret; | ||
|  |   if (n < list.head.data.length) { | ||
|  |     // slice is the same for buffers and strings
 | ||
|  |     ret = list.head.data.slice(0, n); | ||
|  |     list.head.data = list.head.data.slice(n); | ||
|  |   } else if (n === list.head.data.length) { | ||
|  |     // first chunk is a perfect match
 | ||
|  |     ret = list.shift(); | ||
|  |   } else { | ||
|  |     // result spans more than one buffer
 | ||
|  |     ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); | ||
|  |   } | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | // Copies a specified amount of characters from the list of buffered data
 | ||
|  | // chunks.
 | ||
|  | // This function is designed to be inlinable, so please take care when making
 | ||
|  | // changes to the function body.
 | ||
|  | function copyFromBufferString(n, list) { | ||
|  |   var p = list.head; | ||
|  |   var c = 1; | ||
|  |   var ret = p.data; | ||
|  |   n -= ret.length; | ||
|  |   while (p = p.next) { | ||
|  |     var str = p.data; | ||
|  |     var nb = n > str.length ? str.length : n; | ||
|  |     if (nb === str.length) ret += str;else ret += str.slice(0, n); | ||
|  |     n -= nb; | ||
|  |     if (n === 0) { | ||
|  |       if (nb === str.length) { | ||
|  |         ++c; | ||
|  |         if (p.next) list.head = p.next;else list.head = list.tail = null; | ||
|  |       } else { | ||
|  |         list.head = p; | ||
|  |         p.data = str.slice(nb); | ||
|  |       } | ||
|  |       break; | ||
|  |     } | ||
|  |     ++c; | ||
|  |   } | ||
|  |   list.length -= c; | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | // Copies a specified amount of bytes from the list of buffered data chunks.
 | ||
|  | // This function is designed to be inlinable, so please take care when making
 | ||
|  | // changes to the function body.
 | ||
|  | function copyFromBuffer(n, list) { | ||
|  |   var ret = Buffer.allocUnsafe(n); | ||
|  |   var p = list.head; | ||
|  |   var c = 1; | ||
|  |   p.data.copy(ret); | ||
|  |   n -= p.data.length; | ||
|  |   while (p = p.next) { | ||
|  |     var buf = p.data; | ||
|  |     var nb = n > buf.length ? buf.length : n; | ||
|  |     buf.copy(ret, ret.length - n, 0, nb); | ||
|  |     n -= nb; | ||
|  |     if (n === 0) { | ||
|  |       if (nb === buf.length) { | ||
|  |         ++c; | ||
|  |         if (p.next) list.head = p.next;else list.head = list.tail = null; | ||
|  |       } else { | ||
|  |         list.head = p; | ||
|  |         p.data = buf.slice(nb); | ||
|  |       } | ||
|  |       break; | ||
|  |     } | ||
|  |     ++c; | ||
|  |   } | ||
|  |   list.length -= c; | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | function endReadable(stream) { | ||
|  |   var state = stream._readableState; | ||
|  | 
 | ||
|  |   // If we get here before consuming all the bytes, then that is a
 | ||
|  |   // bug in node.  Should never happen.
 | ||
|  |   if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); | ||
|  | 
 | ||
|  |   if (!state.endEmitted) { | ||
|  |     state.ended = true; | ||
|  |     pna.nextTick(endReadableNT, state, stream); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function endReadableNT(state, stream) { | ||
|  |   // Check that we didn't get one last unshift.
 | ||
|  |   if (!state.endEmitted && state.length === 0) { | ||
|  |     state.endEmitted = true; | ||
|  |     stream.readable = false; | ||
|  |     stream.emit('end'); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function indexOf(xs, x) { | ||
|  |   for (var i = 0, l = xs.length; i < l; i++) { | ||
|  |     if (xs[i] === x) return i; | ||
|  |   } | ||
|  |   return -1; | ||
|  | } | ||
|  | }).call(this)}).call(this,_dereq_(73),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{"100":100,"101":101,"11":11,"14":14,"26":26,"37":37,"72":72,"73":73,"86":86,"88":88,"93":93,"94":94,"95":95}],91:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | // a transform stream is a readable/writable stream where you do
 | ||
|  | // something with the data.  Sometimes it's called a "filter",
 | ||
|  | // but that's not a great name for it, since that implies a thing where
 | ||
|  | // some bits pass through, and others are simply ignored.  (That would
 | ||
|  | // be a valid example of a transform, of course.)
 | ||
|  | //
 | ||
|  | // While the output is causally related to the input, it's not a
 | ||
|  | // necessarily symmetric or synchronous transformation.  For example,
 | ||
|  | // a zlib stream might take multiple plain-text writes(), and then
 | ||
|  | // emit a single compressed chunk some time in the future.
 | ||
|  | //
 | ||
|  | // Here's how this works:
 | ||
|  | //
 | ||
|  | // The Transform stream has all the aspects of the readable and writable
 | ||
|  | // stream classes.  When you write(chunk), that calls _write(chunk,cb)
 | ||
|  | // internally, and returns false if there's a lot of pending writes
 | ||
|  | // buffered up.  When you call read(), that calls _read(n) until
 | ||
|  | // there's enough pending readable data buffered up.
 | ||
|  | //
 | ||
|  | // In a transform stream, the written data is placed in a buffer.  When
 | ||
|  | // _read(n) is called, it transforms the queued up data, calling the
 | ||
|  | // buffered _write cb's as it consumes chunks.  If consuming a single
 | ||
|  | // written chunk would result in multiple output chunks, then the first
 | ||
|  | // outputted bit calls the readcb, and subsequent chunks just go into
 | ||
|  | // the read buffer, and will cause it to emit 'readable' if necessary.
 | ||
|  | //
 | ||
|  | // This way, back-pressure is actually determined by the reading side,
 | ||
|  | // since _read has to be called to start processing a new chunk.  However,
 | ||
|  | // a pathological inflate type of transform can cause excessive buffering
 | ||
|  | // here.  For example, imagine a stream where every byte of input is
 | ||
|  | // interpreted as an integer from 0-255, and then results in that many
 | ||
|  | // bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
 | ||
|  | // 1kb of data being output.  In this case, you could write a very small
 | ||
|  | // amount of input, and end up with a very large amount of output.  In
 | ||
|  | // such a pathological inflating mechanism, there'd be no way to tell
 | ||
|  | // the system to stop doing the transform.  A single 4MB write could
 | ||
|  | // cause the system to run out of memory.
 | ||
|  | //
 | ||
|  | // However, even in such a pathological case, only a single written chunk
 | ||
|  | // would be consumed, and then the rest would wait (un-transformed) until
 | ||
|  | // the results of the previous transformed chunk were consumed.
 | ||
|  | 
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | module.exports = Transform; | ||
|  | 
 | ||
|  | var Duplex = _dereq_(88); | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = Object.create(_dereq_(14)); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | util.inherits(Transform, Duplex); | ||
|  | 
 | ||
|  | function afterTransform(er, data) { | ||
|  |   var ts = this._transformState; | ||
|  |   ts.transforming = false; | ||
|  | 
 | ||
|  |   var cb = ts.writecb; | ||
|  | 
 | ||
|  |   if (!cb) { | ||
|  |     return this.emit('error', new Error('write callback called multiple times')); | ||
|  |   } | ||
|  | 
 | ||
|  |   ts.writechunk = null; | ||
|  |   ts.writecb = null; | ||
|  | 
 | ||
|  |   if (data != null) // single equals check for both `null` and `undefined`
 | ||
|  |     this.push(data); | ||
|  | 
 | ||
|  |   cb(er); | ||
|  | 
 | ||
|  |   var rs = this._readableState; | ||
|  |   rs.reading = false; | ||
|  |   if (rs.needReadable || rs.length < rs.highWaterMark) { | ||
|  |     this._read(rs.highWaterMark); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function Transform(options) { | ||
|  |   if (!(this instanceof Transform)) return new Transform(options); | ||
|  | 
 | ||
|  |   Duplex.call(this, options); | ||
|  | 
 | ||
|  |   this._transformState = { | ||
|  |     afterTransform: afterTransform.bind(this), | ||
|  |     needTransform: false, | ||
|  |     transforming: false, | ||
|  |     writecb: null, | ||
|  |     writechunk: null, | ||
|  |     writeencoding: null | ||
|  |   }; | ||
|  | 
 | ||
|  |   // start out asking for a readable event once data is transformed.
 | ||
|  |   this._readableState.needReadable = true; | ||
|  | 
 | ||
|  |   // we have implemented the _read method, and done the other things
 | ||
|  |   // that Readable wants before the first _read call, so unset the
 | ||
|  |   // sync guard flag.
 | ||
|  |   this._readableState.sync = false; | ||
|  | 
 | ||
|  |   if (options) { | ||
|  |     if (typeof options.transform === 'function') this._transform = options.transform; | ||
|  | 
 | ||
|  |     if (typeof options.flush === 'function') this._flush = options.flush; | ||
|  |   } | ||
|  | 
 | ||
|  |   // When the writable side finishes, then flush out anything remaining.
 | ||
|  |   this.on('prefinish', prefinish); | ||
|  | } | ||
|  | 
 | ||
|  | function prefinish() { | ||
|  |   var _this = this; | ||
|  | 
 | ||
|  |   if (typeof this._flush === 'function') { | ||
|  |     this._flush(function (er, data) { | ||
|  |       done(_this, er, data); | ||
|  |     }); | ||
|  |   } else { | ||
|  |     done(this, null, null); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | Transform.prototype.push = function (chunk, encoding) { | ||
|  |   this._transformState.needTransform = false; | ||
|  |   return Duplex.prototype.push.call(this, chunk, encoding); | ||
|  | }; | ||
|  | 
 | ||
|  | // This is the part where you do stuff!
 | ||
|  | // override this function in implementation classes.
 | ||
|  | // 'chunk' is an input chunk.
 | ||
|  | //
 | ||
|  | // Call `push(newChunk)` to pass along transformed output
 | ||
|  | // to the readable side.  You may call 'push' zero or more times.
 | ||
|  | //
 | ||
|  | // Call `cb(err)` when you are done with this chunk.  If you pass
 | ||
|  | // an error, then that'll put the hurt on the whole operation.  If you
 | ||
|  | // never call cb(), then you'll never get another chunk.
 | ||
|  | Transform.prototype._transform = function (chunk, encoding, cb) { | ||
|  |   throw new Error('_transform() is not implemented'); | ||
|  | }; | ||
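// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// A minimal Transform that upper-cases incoming text, implemented through the
// constructor options rather than by subclassing.
function exampleUpperCase_() {
  return new Transform({
    decodeStrings: false,              // hand strings straight to _transform
    transform: function (chunk, encoding, cb) {
      this.push(String(chunk).toUpperCase());
      cb();                            // signal readiness for the next chunk
    }
  });
}
// Usage sketch: source.pipe(exampleUpperCase_()).pipe(destination)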
|  | 
 | ||
|  | Transform.prototype._write = function (chunk, encoding, cb) { | ||
|  |   var ts = this._transformState; | ||
|  |   ts.writecb = cb; | ||
|  |   ts.writechunk = chunk; | ||
|  |   ts.writeencoding = encoding; | ||
|  |   if (!ts.transforming) { | ||
|  |     var rs = this._readableState; | ||
|  |     if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | // Doesn't matter what the args are here.
 | ||
|  | // _transform does all the work.
 | ||
|  | // The fact that we got here means that the readable side wants more data.
 | ||
|  | Transform.prototype._read = function (n) { | ||
|  |   var ts = this._transformState; | ||
|  | 
 | ||
|  |   if (ts.writechunk !== null && ts.writecb && !ts.transforming) { | ||
|  |     ts.transforming = true; | ||
|  |     this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); | ||
|  |   } else { | ||
|  |     // mark that we need a transform, so that any data that comes in
 | ||
|  |     // will get processed, now that we've asked for it.
 | ||
|  |     ts.needTransform = true; | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | Transform.prototype._destroy = function (err, cb) { | ||
|  |   var _this2 = this; | ||
|  | 
 | ||
|  |   Duplex.prototype._destroy.call(this, err, function (err2) { | ||
|  |     cb(err2); | ||
|  |     _this2.emit('close'); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | function done(stream, er, data) { | ||
|  |   if (er) return stream.emit('error', er); | ||
|  | 
 | ||
|  |   if (data != null) // single equals check for both `null` and `undefined`
 | ||
|  |     stream.push(data); | ||
|  | 
 | ||
|  |   // if there's nothing in the write buffer, then that means
 | ||
|  |   // that nothing more will ever be provided
 | ||
|  |   if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); | ||
|  | 
 | ||
|  |   if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); | ||
|  | 
 | ||
|  |   return stream.push(null); | ||
|  | } | ||
|  | },{"14":14,"37":37,"88":88}],92:[function(_dereq_,module,exports){ | ||
|  | (function (process,global,setImmediate){(function (){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | // A bit simpler than readable streams.
 | ||
|  | // Implement an async ._write(chunk, encoding, cb), and it'll handle all
 | ||
|  | // the drain event emission and buffering.
 | ||
|  | 
 | ||
|  | 'use strict'; | ||
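// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// A Writable only needs a _write(chunk, encoding, cb) implementation; buffering
// and 'drain' emission are handled here. `store` is a hypothetical sink (e.g. an
// array collecting chunks) used only for illustration.
function exampleCollector_(store) {
  return new Writable({
    write: function (chunk, encoding, cb) {
      store.push(chunk);   // consume the chunk
      cb();                // tell the stream we are ready for the next one
    }
  });
}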
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var pna = _dereq_(72); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | module.exports = Writable; | ||
|  | 
 | ||
|  | /* <replacement> */ | ||
|  | function WriteReq(chunk, encoding, cb) { | ||
|  |   this.chunk = chunk; | ||
|  |   this.encoding = encoding; | ||
|  |   this.callback = cb; | ||
|  |   this.next = null; | ||
|  | } | ||
|  | 
 | ||
|  | // This looks like a linked list, but it is not:
 | ||
|  | // there will only ever be two of these per stream.
 | ||
|  | function CorkedRequest(state) { | ||
|  |   var _this = this; | ||
|  | 
 | ||
|  |   this.next = null; | ||
|  |   this.entry = null; | ||
|  |   this.finish = function () { | ||
|  |     onCorkedFinish(_this, state); | ||
|  |   }; | ||
|  | } | ||
|  | /* </replacement> */ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var Duplex; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | Writable.WritableState = WritableState; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var util = Object.create(_dereq_(14)); | ||
|  | util.inherits = _dereq_(37); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var internalUtil = { | ||
|  |   deprecate: _dereq_(121) | ||
|  | }; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | var Stream = _dereq_(95); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var Buffer = _dereq_(100).Buffer; | ||
|  | var OurUint8Array = global.Uint8Array || function () {}; | ||
|  | function _uint8ArrayToBuffer(chunk) { | ||
|  |   return Buffer.from(chunk); | ||
|  | } | ||
|  | function _isUint8Array(obj) { | ||
|  |   return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; | ||
|  | } | ||
|  | 
 | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | var destroyImpl = _dereq_(94); | ||
|  | 
 | ||
|  | util.inherits(Writable, Stream); | ||
|  | 
 | ||
|  | function nop() {} | ||
|  | 
 | ||
|  | function WritableState(options, stream) { | ||
|  |   Duplex = Duplex || _dereq_(88); | ||
|  | 
 | ||
|  |   options = options || {}; | ||
|  | 
 | ||
|  |   // Duplex streams are both readable and writable, but share
 | ||
|  |   // the same options object.
 | ||
|  |   // However, some cases require setting options to different
 | ||
|  |   // values for the readable and the writable sides of the duplex stream.
 | ||
|  |   // These options can be provided separately as readableXXX and writableXXX.
 | ||
|  |   var isDuplex = stream instanceof Duplex; | ||
|  | 
 | ||
|  |   // object stream flag to indicate whether or not this stream
 | ||
|  |   // contains buffers or objects.
 | ||
|  |   this.objectMode = !!options.objectMode; | ||
|  | 
 | ||
|  |   if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; | ||
|  | 
 | ||
|  |   // the point at which write() starts returning false
 | ||
|  |   // Note: 0 is a valid value, means that we always return false if
 | ||
|  |   // the entire buffer is not flushed immediately on write()
 | ||
|  |   var hwm = options.highWaterMark; | ||
|  |   var writableHwm = options.writableHighWaterMark; | ||
|  |   var defaultHwm = this.objectMode ? 16 : 16 * 1024; | ||
|  | 
 | ||
|  |   if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; | ||
|  | 
 | ||
|  |   // cast to ints.
 | ||
|  |   this.highWaterMark = Math.floor(this.highWaterMark); | ||
|  | 
 | ||
|  |   // if _final has been called
 | ||
|  |   this.finalCalled = false; | ||
|  | 
 | ||
|  |   // drain event flag.
 | ||
|  |   this.needDrain = false; | ||
|  |   // at the start of calling end()
 | ||
|  |   this.ending = false; | ||
|  |   // when end() has been called, and returned
 | ||
|  |   this.ended = false; | ||
|  |   // when 'finish' is emitted
 | ||
|  |   this.finished = false; | ||
|  | 
 | ||
|  |   // has it been destroyed
 | ||
|  |   this.destroyed = false; | ||
|  | 
 | ||
|  |   // should we decode strings into buffers before passing to _write?
 | ||
|  |   // this is here so that some node-core streams can optimize string
 | ||
|  |   // handling at a lower level.
 | ||
|  |   var noDecode = options.decodeStrings === false; | ||
|  |   this.decodeStrings = !noDecode; | ||
|  | 
 | ||
|  |   // Crypto is kind of old and crusty.  Historically, its default string
 | ||
|  |   // encoding is 'binary' so we have to make this configurable.
 | ||
|  |   // Everything else in the universe uses 'utf8', though.
 | ||
|  |   this.defaultEncoding = options.defaultEncoding || 'utf8'; | ||
|  | 
 | ||
|  |   // not an actual buffer we keep track of, but a measurement
 | ||
|  |   // of how much we're waiting to get pushed to some underlying
 | ||
|  |   // socket or file.
 | ||
|  |   this.length = 0; | ||
|  | 
 | ||
|  |   // a flag to see when we're in the middle of a write.
 | ||
|  |   this.writing = false; | ||
|  | 
 | ||
|  |   // when true all writes will be buffered until .uncork() call
 | ||
|  |   this.corked = 0; | ||
|  | 
 | ||
|  |   // a flag to be able to tell if the onwrite cb is called immediately,
 | ||
|  |   // or on a later tick.  We set this to true at first, because any
 | ||
|  |   // actions that shouldn't happen until "later" should generally also
 | ||
|  |   // not happen before the first write call.
 | ||
|  |   this.sync = true; | ||
|  | 
 | ||
|  |   // a flag to know if we're processing previously buffered items, which
 | ||
|  |   // may call the _write() callback in the same tick, so that we don't
 | ||
|  |   // end up in an overlapped onwrite situation.
 | ||
|  |   this.bufferProcessing = false; | ||
|  | 
 | ||
|  |   // the callback that's passed to _write(chunk,cb)
 | ||
|  |   this.onwrite = function (er) { | ||
|  |     onwrite(stream, er); | ||
|  |   }; | ||
|  | 
 | ||
|  |   // the callback that the user supplies to write(chunk,encoding,cb)
 | ||
|  |   this.writecb = null; | ||
|  | 
 | ||
|  |   // the amount that is being written when _write is called.
 | ||
|  |   this.writelen = 0; | ||
|  | 
 | ||
|  |   this.bufferedRequest = null; | ||
|  |   this.lastBufferedRequest = null; | ||
|  | 
 | ||
|  |   // number of pending user-supplied write callbacks
 | ||
|  |   // this must be 0 before 'finish' can be emitted
 | ||
|  |   this.pendingcb = 0; | ||
|  | 
 | ||
|  |   // emit prefinish if the only thing we're waiting for is _write cbs
 | ||
|  |   // This is relevant for synchronous Transform streams
 | ||
|  |   this.prefinished = false; | ||
|  | 
 | ||
|  |   // True if the error was already emitted and should not be thrown again
 | ||
|  |   this.errorEmitted = false; | ||
|  | 
 | ||
|  |   // count buffered requests
 | ||
|  |   this.bufferedRequestCount = 0; | ||
|  | 
 | ||
|  |   // allocate the first CorkedRequest, there is always
 | ||
|  |   // one allocated and free to use, and we maintain at most two
 | ||
|  |   this.corkedRequestsFree = new CorkedRequest(this); | ||
|  | } | ||
|  | 
 | ||
|  | WritableState.prototype.getBuffer = function getBuffer() { | ||
|  |   var current = this.bufferedRequest; | ||
|  |   var out = []; | ||
|  |   while (current) { | ||
|  |     out.push(current); | ||
|  |     current = current.next; | ||
|  |   } | ||
|  |   return out; | ||
|  | }; | ||
|  | 
 | ||
|  | (function () { | ||
|  |   try { | ||
|  |     Object.defineProperty(WritableState.prototype, 'buffer', { | ||
|  |       get: internalUtil.deprecate(function () { | ||
|  |         return this.getBuffer(); | ||
|  |       }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') | ||
|  |     }); | ||
|  |   } catch (_) {} | ||
|  | })(); | ||
|  | 
 | ||
|  | // Test _writableState for inheritance to account for Duplex streams,
 | ||
|  | // whose prototype chain only points to Readable.
 | ||
|  | var realHasInstance; | ||
|  | if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { | ||
|  |   realHasInstance = Function.prototype[Symbol.hasInstance]; | ||
|  |   Object.defineProperty(Writable, Symbol.hasInstance, { | ||
|  |     value: function (object) { | ||
|  |       if (realHasInstance.call(this, object)) return true; | ||
|  |       if (this !== Writable) return false; | ||
|  | 
 | ||
|  |       return object && object._writableState instanceof WritableState; | ||
|  |     } | ||
|  |   }); | ||
|  | } else { | ||
|  |   realHasInstance = function (object) { | ||
|  |     return object instanceof this; | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | function Writable(options) { | ||
|  |   Duplex = Duplex || _dereq_(88); | ||
|  | 
 | ||
|  |   // Writable ctor is applied to Duplexes, too.
 | ||
|  |   // `realHasInstance` is necessary because using plain `instanceof`
 | ||
|  |   // would return false, as no `_writableState` property is attached.
 | ||
|  | 
 | ||
|  |   // Trying to use the custom `instanceof` for Writable here will also break the
 | ||
|  |   // Node.js LazyTransform implementation, which has a non-trivial getter for
 | ||
|  |   // `_writableState` that would lead to infinite recursion.
 | ||
|  |   if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { | ||
|  |     return new Writable(options); | ||
|  |   } | ||
|  | 
 | ||
|  |   this._writableState = new WritableState(options, this); | ||
|  | 
 | ||
|  |   // legacy.
 | ||
|  |   this.writable = true; | ||
|  | 
 | ||
|  |   if (options) { | ||
|  |     if (typeof options.write === 'function') this._write = options.write; | ||
|  | 
 | ||
|  |     if (typeof options.writev === 'function') this._writev = options.writev; | ||
|  | 
 | ||
|  |     if (typeof options.destroy === 'function') this._destroy = options.destroy; | ||
|  | 
 | ||
|  |     if (typeof options.final === 'function') this._final = options.final; | ||
|  |   } | ||
|  | 
 | ||
|  |   Stream.call(this); | ||
|  | } | ||
|  | 
 | ||
|  | // Otherwise people can pipe Writable streams, which is just wrong.
 | ||
|  | Writable.prototype.pipe = function () { | ||
|  |   this.emit('error', new Error('Cannot pipe, not readable')); | ||
|  | }; | ||
|  | 
 | ||
|  | function writeAfterEnd(stream, cb) { | ||
|  |   var er = new Error('write after end'); | ||
|  |   // TODO: defer error events consistently everywhere, not just the cb
 | ||
|  |   stream.emit('error', er); | ||
|  |   pna.nextTick(cb, er); | ||
|  | } | ||
|  | 
 | ||
|  | // Checks that a user-supplied chunk is valid, especially for the particular
 | ||
|  | // mode the stream is in. Currently this means that `null` is never accepted
 | ||
|  | // and undefined/non-string values are only allowed in object mode.
 | ||
|  | function validChunk(stream, state, chunk, cb) { | ||
|  |   var valid = true; | ||
|  |   var er = false; | ||
|  | 
 | ||
|  |   if (chunk === null) { | ||
|  |     er = new TypeError('May not write null values to stream'); | ||
|  |   } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { | ||
|  |     er = new TypeError('Invalid non-string/buffer chunk'); | ||
|  |   } | ||
|  |   if (er) { | ||
|  |     stream.emit('error', er); | ||
|  |     pna.nextTick(cb, er); | ||
|  |     valid = false; | ||
|  |   } | ||
|  |   return valid; | ||
|  | } | ||
|  | 
 | ||
|  | Writable.prototype.write = function (chunk, encoding, cb) { | ||
|  |   var state = this._writableState; | ||
|  |   var ret = false; | ||
|  |   var isBuf = !state.objectMode && _isUint8Array(chunk); | ||
|  | 
 | ||
|  |   if (isBuf && !Buffer.isBuffer(chunk)) { | ||
|  |     chunk = _uint8ArrayToBuffer(chunk); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (typeof encoding === 'function') { | ||
|  |     cb = encoding; | ||
|  |     encoding = null; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; | ||
|  | 
 | ||
|  |   if (typeof cb !== 'function') cb = nop; | ||
|  | 
 | ||
|  |   if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { | ||
|  |     state.pendingcb++; | ||
|  |     ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); | ||
|  |   } | ||
|  | 
 | ||
|  |   return ret; | ||
|  | }; | ||
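// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// Respecting the boolean returned by write(): when it is false the internal
// buffer is at or above highWaterMark, so the producer should wait for 'drain'
// before writing more. `nextChunk` is a hypothetical synchronous source that
// returns null when exhausted.
function exampleDrainLoop_(ws, nextChunk) {
  (function pump() {
    var chunk;
    while ((chunk = nextChunk()) !== null) {
      if (!ws.write(chunk)) {          // buffer full: pause until 'drain'
        ws.once('drain', pump);
        return;
      }
    }
    ws.end();                          // no more data
  })();
}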
|  | 
 | ||
|  | Writable.prototype.cork = function () { | ||
|  |   var state = this._writableState; | ||
|  | 
 | ||
|  |   state.corked++; | ||
|  | }; | ||
|  | 
 | ||
|  | Writable.prototype.uncork = function () { | ||
|  |   var state = this._writableState; | ||
|  | 
 | ||
|  |   if (state.corked) { | ||
|  |     state.corked--; | ||
|  | 
 | ||
|  |     if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); | ||
|  |   } | ||
|  | }; | ||
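// --- Illustrative sketch (editorial addition, not part of the upstream bundle) ---
// cork() buffers subsequent writes; uncork() flushes them in one pass (typically
// through a single _writev() call when the stream implements one). cork/uncork
// calls nest, so every cork() needs a matching uncork().
function exampleCorkedWrites_(ws, header, body) {
  ws.cork();
  ws.write(header);
  ws.write(body);
  process.nextTick(function () {
    ws.uncork();                       // flush both buffered chunks together
  });
}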
|  | 
 | ||
|  | Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { | ||
|  |   // node::ParseEncoding() requires lower case.
 | ||
|  |   if (typeof encoding === 'string') encoding = encoding.toLowerCase(); | ||
|  |   if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); | ||
|  |   this._writableState.defaultEncoding = encoding; | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | function decodeChunk(state, chunk, encoding) { | ||
|  |   if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { | ||
|  |     chunk = Buffer.from(chunk, encoding); | ||
|  |   } | ||
|  |   return chunk; | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { | ||
|  |   // making it explicit this property is not enumerable
 | ||
|  |   // because otherwise some prototype manipulation in
 | ||
|  |   // userland will fail
 | ||
|  |   enumerable: false, | ||
|  |   get: function () { | ||
|  |     return this._writableState.highWaterMark; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | // if we're already writing something, then just put this
 | ||
|  | // in the queue, and wait our turn. Otherwise, call _write.
 | ||
|  | // If we return false, then we need a drain event, so set that flag.
 | ||
|  | function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { | ||
|  |   if (!isBuf) { | ||
|  |     var newChunk = decodeChunk(state, chunk, encoding); | ||
|  |     if (chunk !== newChunk) { | ||
|  |       isBuf = true; | ||
|  |       encoding = 'buffer'; | ||
|  |       chunk = newChunk; | ||
|  |     } | ||
|  |   } | ||
|  |   var len = state.objectMode ? 1 : chunk.length; | ||
|  | 
 | ||
|  |   state.length += len; | ||
|  | 
 | ||
|  |   var ret = state.length < state.highWaterMark; | ||
|  |   // we must ensure that previous needDrain will not be reset to false.
 | ||
|  |   if (!ret) state.needDrain = true; | ||
|  | 
 | ||
|  |   if (state.writing || state.corked) { | ||
|  |     var last = state.lastBufferedRequest; | ||
|  |     state.lastBufferedRequest = { | ||
|  |       chunk: chunk, | ||
|  |       encoding: encoding, | ||
|  |       isBuf: isBuf, | ||
|  |       callback: cb, | ||
|  |       next: null | ||
|  |     }; | ||
|  |     if (last) { | ||
|  |       last.next = state.lastBufferedRequest; | ||
|  |     } else { | ||
|  |       state.bufferedRequest = state.lastBufferedRequest; | ||
|  |     } | ||
|  |     state.bufferedRequestCount += 1; | ||
|  |   } else { | ||
|  |     doWrite(stream, state, false, len, chunk, encoding, cb); | ||
|  |   } | ||
|  | 
 | ||
|  |   return ret; | ||
|  | } | ||
|  | 
 | ||
|  | function doWrite(stream, state, writev, len, chunk, encoding, cb) { | ||
|  |   state.writelen = len; | ||
|  |   state.writecb = cb; | ||
|  |   state.writing = true; | ||
|  |   state.sync = true; | ||
|  |   if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); | ||
|  |   state.sync = false; | ||
|  | } | ||
|  | 
 | ||
|  | function onwriteError(stream, state, sync, er, cb) { | ||
|  |   --state.pendingcb; | ||
|  | 
 | ||
|  |   if (sync) { | ||
|  |     // defer the callback if we are being called synchronously
 | ||
|  |     // to avoid piling up things on the stack
 | ||
|  |     pna.nextTick(cb, er); | ||
|  |     // this can emit finish, and it will always happen
 | ||
|  |     // after error
 | ||
|  |     pna.nextTick(finishMaybe, stream, state); | ||
|  |     stream._writableState.errorEmitted = true; | ||
|  |     stream.emit('error', er); | ||
|  |   } else { | ||
|  |     // the caller expects this to happen before if
 | ||
|  |     // it is async
 | ||
|  |     cb(er); | ||
|  |     stream._writableState.errorEmitted = true; | ||
|  |     stream.emit('error', er); | ||
|  |     // this can emit finish, but finish must
 | ||
|  |     // always follow error
 | ||
|  |     finishMaybe(stream, state); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function onwriteStateUpdate(state) { | ||
|  |   state.writing = false; | ||
|  |   state.writecb = null; | ||
|  |   state.length -= state.writelen; | ||
|  |   state.writelen = 0; | ||
|  | } | ||
|  | 
 | ||
|  | function onwrite(stream, er) { | ||
|  |   var state = stream._writableState; | ||
|  |   var sync = state.sync; | ||
|  |   var cb = state.writecb; | ||
|  | 
 | ||
|  |   onwriteStateUpdate(state); | ||
|  | 
 | ||
|  |   if (er) onwriteError(stream, state, sync, er, cb);else { | ||
|  |     // Check if we're actually ready to finish, but don't emit yet
 | ||
|  |     var finished = needFinish(state); | ||
|  | 
 | ||
|  |     if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { | ||
|  |       clearBuffer(stream, state); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (sync) { | ||
|  |       /*<replacement>*/ | ||
|  |       asyncWrite(afterWrite, stream, state, finished, cb); | ||
|  |       /*</replacement>*/ | ||
|  |     } else { | ||
|  |       afterWrite(stream, state, finished, cb); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function afterWrite(stream, state, finished, cb) { | ||
|  |   if (!finished) onwriteDrain(stream, state); | ||
|  |   state.pendingcb--; | ||
|  |   cb(); | ||
|  |   finishMaybe(stream, state); | ||
|  | } | ||
|  | 
 | ||
|  | // Must force callback to be called on nextTick, so that we don't
 | ||
|  | // emit 'drain' before the write() consumer gets the 'false' return
 | ||
|  | // value, and has a chance to attach a 'drain' listener.
 | ||
|  | function onwriteDrain(stream, state) { | ||
|  |   if (state.length === 0 && state.needDrain) { | ||
|  |     state.needDrain = false; | ||
|  |     stream.emit('drain'); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // if there's something in the buffer waiting, then process it
 | ||
|  | function clearBuffer(stream, state) { | ||
|  |   state.bufferProcessing = true; | ||
|  |   var entry = state.bufferedRequest; | ||
|  | 
 | ||
|  |   if (stream._writev && entry && entry.next) { | ||
|  |     // Fast case, write everything using _writev()
 | ||
|  |     var l = state.bufferedRequestCount; | ||
|  |     var buffer = new Array(l); | ||
|  |     var holder = state.corkedRequestsFree; | ||
|  |     holder.entry = entry; | ||
|  | 
 | ||
|  |     var count = 0; | ||
|  |     var allBuffers = true; | ||
|  |     while (entry) { | ||
|  |       buffer[count] = entry; | ||
|  |       if (!entry.isBuf) allBuffers = false; | ||
|  |       entry = entry.next; | ||
|  |       count += 1; | ||
|  |     } | ||
|  |     buffer.allBuffers = allBuffers; | ||
|  | 
 | ||
|  |     doWrite(stream, state, true, state.length, buffer, '', holder.finish); | ||
|  | 
 | ||
|  |     // doWrite is almost always async, defer these to save a bit of time
 | ||
|  |     // as the hot path ends with doWrite
 | ||
|  |     state.pendingcb++; | ||
|  |     state.lastBufferedRequest = null; | ||
|  |     if (holder.next) { | ||
|  |       state.corkedRequestsFree = holder.next; | ||
|  |       holder.next = null; | ||
|  |     } else { | ||
|  |       state.corkedRequestsFree = new CorkedRequest(state); | ||
|  |     } | ||
|  |     state.bufferedRequestCount = 0; | ||
|  |   } else { | ||
|  |     // Slow case, write chunks one-by-one
 | ||
|  |     while (entry) { | ||
|  |       var chunk = entry.chunk; | ||
|  |       var encoding = entry.encoding; | ||
|  |       var cb = entry.callback; | ||
|  |       var len = state.objectMode ? 1 : chunk.length; | ||
|  | 
 | ||
|  |       doWrite(stream, state, false, len, chunk, encoding, cb); | ||
|  |       entry = entry.next; | ||
|  |       state.bufferedRequestCount--; | ||
|  |       // if we didn't call the onwrite immediately, then
 | ||
|  |       // it means that we need to wait until it does.
 | ||
|  |       // also, that means that the chunk and cb are currently
 | ||
|  |       // being processed, so move the buffer counter past them.
 | ||
|  |       if (state.writing) { | ||
|  |         break; | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     if (entry === null) state.lastBufferedRequest = null; | ||
|  |   } | ||
|  | 
 | ||
|  |   state.bufferedRequest = entry; | ||
|  |   state.bufferProcessing = false; | ||
|  | } | ||
|  | 
 | ||
|  | Writable.prototype._write = function (chunk, encoding, cb) { | ||
|  |   cb(new Error('_write() is not implemented')); | ||
|  | }; | ||
|  | 
 | ||
|  | Writable.prototype._writev = null; | ||
|  | 
 | ||
|  | Writable.prototype.end = function (chunk, encoding, cb) { | ||
|  |   var state = this._writableState; | ||
|  | 
 | ||
|  |   if (typeof chunk === 'function') { | ||
|  |     cb = chunk; | ||
|  |     chunk = null; | ||
|  |     encoding = null; | ||
|  |   } else if (typeof encoding === 'function') { | ||
|  |     cb = encoding; | ||
|  |     encoding = null; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); | ||
|  | 
 | ||
|  |   // .end() fully uncorks
 | ||
|  |   if (state.corked) { | ||
|  |     state.corked = 1; | ||
|  |     this.uncork(); | ||
|  |   } | ||
|  | 
 | ||
|  |   // ignore unnecessary end() calls.
 | ||
|  |   if (!state.ending && !state.finished) endWritable(this, state, cb); | ||
|  | }; | ||
|  | 
 | ||
|  | function needFinish(state) { | ||
|  |   return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; | ||
|  | } | ||
|  | function callFinal(stream, state) { | ||
|  |   stream._final(function (err) { | ||
|  |     state.pendingcb--; | ||
|  |     if (err) { | ||
|  |       stream.emit('error', err); | ||
|  |     } | ||
|  |     state.prefinished = true; | ||
|  |     stream.emit('prefinish'); | ||
|  |     finishMaybe(stream, state); | ||
|  |   }); | ||
|  | } | ||
|  | function prefinish(stream, state) { | ||
|  |   if (!state.prefinished && !state.finalCalled) { | ||
|  |     if (typeof stream._final === 'function') { | ||
|  |       state.pendingcb++; | ||
|  |       state.finalCalled = true; | ||
|  |       pna.nextTick(callFinal, stream, state); | ||
|  |     } else { | ||
|  |       state.prefinished = true; | ||
|  |       stream.emit('prefinish'); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function finishMaybe(stream, state) { | ||
|  |   var need = needFinish(state); | ||
|  |   if (need) { | ||
|  |     prefinish(stream, state); | ||
|  |     if (state.pendingcb === 0) { | ||
|  |       state.finished = true; | ||
|  |       stream.emit('finish'); | ||
|  |     } | ||
|  |   } | ||
|  |   return need; | ||
|  | } | ||
|  | 
 | ||
|  | function endWritable(stream, state, cb) { | ||
|  |   state.ending = true; | ||
|  |   finishMaybe(stream, state); | ||
|  |   if (cb) { | ||
|  |     if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); | ||
|  |   } | ||
|  |   state.ended = true; | ||
|  |   stream.writable = false; | ||
|  | } | ||
|  | 
 | ||
|  | function onCorkedFinish(corkReq, state, err) { | ||
|  |   var entry = corkReq.entry; | ||
|  |   corkReq.entry = null; | ||
|  |   while (entry) { | ||
|  |     var cb = entry.callback; | ||
|  |     state.pendingcb--; | ||
|  |     cb(err); | ||
|  |     entry = entry.next; | ||
|  |   } | ||
|  |   if (state.corkedRequestsFree) { | ||
|  |     state.corkedRequestsFree.next = corkReq; | ||
|  |   } else { | ||
|  |     state.corkedRequestsFree = corkReq; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | Object.defineProperty(Writable.prototype, 'destroyed', { | ||
|  |   get: function () { | ||
|  |     if (this._writableState === undefined) { | ||
|  |       return false; | ||
|  |     } | ||
|  |     return this._writableState.destroyed; | ||
|  |   }, | ||
|  |   set: function (value) { | ||
|  |     // we ignore the value if the stream
 | ||
|  |     // has not been initialized yet
 | ||
|  |     if (!this._writableState) { | ||
|  |       return; | ||
|  |     } | ||
|  | 
 | ||
|  |     // backward compatibility, the user is explicitly
 | ||
|  |     // managing destroyed
 | ||
|  |     this._writableState.destroyed = value; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | Writable.prototype.destroy = destroyImpl.destroy; | ||
|  | Writable.prototype._undestroy = destroyImpl.undestroy; | ||
|  | Writable.prototype._destroy = function (err, cb) { | ||
|  |   this.end(); | ||
|  |   cb(err); | ||
|  | }; | ||
|  | }).call(this)}).call(this,_dereq_(73),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {},_dereq_(119).setImmediate) | ||
|  | },{"100":100,"119":119,"121":121,"14":14,"37":37,"72":72,"73":73,"88":88,"94":94,"95":95}],93:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } | ||
|  | 
 | ||
|  | var Buffer = _dereq_(100).Buffer; | ||
|  | var util = _dereq_(11); | ||
|  | 
 | ||
|  | function copyBuffer(src, target, offset) { | ||
|  |   src.copy(target, offset); | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = function () { | ||
|  |   function BufferList() { | ||
|  |     _classCallCheck(this, BufferList); | ||
|  | 
 | ||
|  |     this.head = null; | ||
|  |     this.tail = null; | ||
|  |     this.length = 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   BufferList.prototype.push = function push(v) { | ||
|  |     var entry = { data: v, next: null }; | ||
|  |     if (this.length > 0) this.tail.next = entry;else this.head = entry; | ||
|  |     this.tail = entry; | ||
|  |     ++this.length; | ||
|  |   }; | ||
|  | 
 | ||
|  |   BufferList.prototype.unshift = function unshift(v) { | ||
|  |     var entry = { data: v, next: this.head }; | ||
|  |     if (this.length === 0) this.tail = entry; | ||
|  |     this.head = entry; | ||
|  |     ++this.length; | ||
|  |   }; | ||
|  | 
 | ||
|  |   BufferList.prototype.shift = function shift() { | ||
|  |     if (this.length === 0) return; | ||
|  |     var ret = this.head.data; | ||
|  |     if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; | ||
|  |     --this.length; | ||
|  |     return ret; | ||
|  |   }; | ||
|  | 
 | ||
|  |   BufferList.prototype.clear = function clear() { | ||
|  |     this.head = this.tail = null; | ||
|  |     this.length = 0; | ||
|  |   }; | ||
|  | 
 | ||
|  |   BufferList.prototype.join = function join(s) { | ||
|  |     if (this.length === 0) return ''; | ||
|  |     var p = this.head; | ||
|  |     var ret = '' + p.data; | ||
|  |     while (p = p.next) { | ||
|  |       ret += s + p.data; | ||
|  |     }return ret; | ||
|  |   }; | ||
|  | 
 | ||
|  |   BufferList.prototype.concat = function concat(n) { | ||
|  |     if (this.length === 0) return Buffer.alloc(0); | ||
|  |     if (this.length === 1) return this.head.data; | ||
|  |     var ret = Buffer.allocUnsafe(n >>> 0); | ||
|  |     var p = this.head; | ||
|  |     var i = 0; | ||
|  |     while (p) { | ||
|  |       copyBuffer(p.data, ret, i); | ||
|  |       i += p.data.length; | ||
|  |       p = p.next; | ||
|  |     } | ||
|  |     return ret; | ||
|  |   }; | ||
|  | 
 | ||
|  |   return BufferList; | ||
|  | }(); | ||
|  | 
 | ||
|  | if (util && util.inspect && util.inspect.custom) { | ||
|  |   module.exports.prototype[util.inspect.custom] = function () { | ||
|  |     var obj = util.inspect({ length: this.length }); | ||
|  |     return this.constructor.name + ' ' + obj; | ||
|  |   }; | ||
|  | } | ||
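|  | // BufferList (above) is readable-stream's internal chunk queue, a singly
|  | // linked list: push()/unshift()/shift() are O(1), while join() and concat(n)
|  | // flatten the queued data when it is handed to the consumer.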
|  | },{"100":100,"11":11}],94:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var pna = _dereq_(72); | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | // undocumented cb() API, needed for core, not for public API
 | ||
|  | function destroy(err, cb) { | ||
|  |   var _this = this; | ||
|  | 
 | ||
|  |   var readableDestroyed = this._readableState && this._readableState.destroyed; | ||
|  |   var writableDestroyed = this._writableState && this._writableState.destroyed; | ||
|  | 
 | ||
|  |   if (readableDestroyed || writableDestroyed) { | ||
|  |     if (cb) { | ||
|  |       cb(err); | ||
|  |     } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { | ||
|  |       pna.nextTick(emitErrorNT, this, err); | ||
|  |     } | ||
|  |     return this; | ||
|  |   } | ||
|  | 
 | ||
|  |   // we set destroyed to true before firing error callbacks in order
 | ||
|  |   // to make it re-entrancy safe in case destroy() is called within callbacks
 | ||
|  | 
 | ||
|  |   if (this._readableState) { | ||
|  |     this._readableState.destroyed = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   // if this is a duplex stream mark the writable part as destroyed as well
 | ||
|  |   if (this._writableState) { | ||
|  |     this._writableState.destroyed = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   this._destroy(err || null, function (err) { | ||
|  |     if (!cb && err) { | ||
|  |       pna.nextTick(emitErrorNT, _this, err); | ||
|  |       if (_this._writableState) { | ||
|  |         _this._writableState.errorEmitted = true; | ||
|  |       } | ||
|  |     } else if (cb) { | ||
|  |       cb(err); | ||
|  |     } | ||
|  |   }); | ||
|  | 
 | ||
|  |   return this; | ||
|  | } | ||
|  | 
 | ||
|  | function undestroy() { | ||
|  |   if (this._readableState) { | ||
|  |     this._readableState.destroyed = false; | ||
|  |     this._readableState.reading = false; | ||
|  |     this._readableState.ended = false; | ||
|  |     this._readableState.endEmitted = false; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (this._writableState) { | ||
|  |     this._writableState.destroyed = false; | ||
|  |     this._writableState.ended = false; | ||
|  |     this._writableState.ending = false; | ||
|  |     this._writableState.finished = false; | ||
|  |     this._writableState.errorEmitted = false; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function emitErrorNT(self, err) { | ||
|  |   self.emit('error', err); | ||
|  | } | ||
|  | 
 | ||
|  | module.exports = { | ||
|  |   destroy: destroy, | ||
|  |   undestroy: undestroy | ||
|  | }; | ||
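|  | // Summary: destroy() flags both the readable and writable state as destroyed
|  | // before invoking _destroy(), so a second destroy() call short-circuits at the
|  | // check above; when no callback is given, the error is re-emitted as 'error'
|  | // on the next tick via emitErrorNT().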
|  | },{"72":72}],95:[function(_dereq_,module,exports){ | ||
|  | arguments[4][56][0].apply(exports,arguments) | ||
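|  | // Browserify module de-duplication: arguments[4] is the bundle's module map
|  | // (the `e` argument of the prelude), so this one-liner re-runs module 56's
|  | // factory against this module's exports instead of repeating its source.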
|  | },{"26":26,"56":56}],96:[function(_dereq_,module,exports){ | ||
|  | module.exports = _dereq_(97).PassThrough | ||
|  | 
 | ||
|  | },{"97":97}],97:[function(_dereq_,module,exports){ | ||
|  | exports = module.exports = _dereq_(90); | ||
|  | exports.Stream = exports; | ||
|  | exports.Readable = exports; | ||
|  | exports.Writable = _dereq_(92); | ||
|  | exports.Duplex = _dereq_(88); | ||
|  | exports.Transform = _dereq_(91); | ||
|  | exports.PassThrough = _dereq_(89); | ||
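|  | // This mirrors the shape of Node's require('stream'): the main export is the
|  | // Readable constructor (module 90), re-exported as .Stream and .Readable, with
|  | // Writable, Duplex, Transform and PassThrough attached; the nearby one-line
|  | // modules 96, 98 and 99 just re-export individual constructors from here.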
|  | 
 | ||
|  | },{"88":88,"89":89,"90":90,"91":91,"92":92}],98:[function(_dereq_,module,exports){ | ||
|  | module.exports = _dereq_(97).Transform | ||
|  | 
 | ||
|  | },{"97":97}],99:[function(_dereq_,module,exports){ | ||
|  | module.exports = _dereq_(92); | ||
|  | 
 | ||
|  | },{"92":92}],100:[function(_dereq_,module,exports){ | ||
|  | /* eslint-disable node/no-deprecated-api */ | ||
|  | var buffer = _dereq_(13) | ||
|  | var Buffer = buffer.Buffer | ||
|  | 
 | ||
|  | // alternative to using Object.keys for old browsers
 | ||
|  | function copyProps (src, dst) { | ||
|  |   for (var key in src) { | ||
|  |     dst[key] = src[key] | ||
|  |   } | ||
|  | } | ||
|  | if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { | ||
|  |   module.exports = buffer | ||
|  | } else { | ||
|  |   // Copy properties from require('buffer')
 | ||
|  |   copyProps(buffer, exports) | ||
|  |   exports.Buffer = SafeBuffer | ||
|  | } | ||
|  | 
 | ||
|  | function SafeBuffer (arg, encodingOrOffset, length) { | ||
|  |   return Buffer(arg, encodingOrOffset, length) | ||
|  | } | ||
|  | 
 | ||
|  | // Copy static methods from Buffer
 | ||
|  | copyProps(Buffer, SafeBuffer) | ||
|  | 
 | ||
|  | SafeBuffer.from = function (arg, encodingOrOffset, length) { | ||
|  |   if (typeof arg === 'number') { | ||
|  |     throw new TypeError('Argument must not be a number') | ||
|  |   } | ||
|  |   return Buffer(arg, encodingOrOffset, length) | ||
|  | } | ||
|  | 
 | ||
|  | SafeBuffer.alloc = function (size, fill, encoding) { | ||
|  |   if (typeof size !== 'number') { | ||
|  |     throw new TypeError('Argument must be a number') | ||
|  |   } | ||
|  |   var buf = Buffer(size) | ||
|  |   if (fill !== undefined) { | ||
|  |     if (typeof encoding === 'string') { | ||
|  |       buf.fill(fill, encoding) | ||
|  |     } else { | ||
|  |       buf.fill(fill) | ||
|  |     } | ||
|  |   } else { | ||
|  |     buf.fill(0) | ||
|  |   } | ||
|  |   return buf | ||
|  | } | ||
|  | 
 | ||
|  | SafeBuffer.allocUnsafe = function (size) { | ||
|  |   if (typeof size !== 'number') { | ||
|  |     throw new TypeError('Argument must be a number') | ||
|  |   } | ||
|  |   return Buffer(size) | ||
|  | } | ||
|  | 
 | ||
|  | SafeBuffer.allocUnsafeSlow = function (size) { | ||
|  |   if (typeof size !== 'number') { | ||
|  |     throw new TypeError('Argument must be a number') | ||
|  |   } | ||
|  |   return buffer.SlowBuffer(size) | ||
|  | } | ||
|  | 
 | ||
|  | },{"13":13}],101:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | /*<replacement>*/ | ||
|  | 
 | ||
|  | var Buffer = _dereq_(100).Buffer; | ||
|  | /*</replacement>*/ | ||
|  | 
 | ||
|  | var isEncoding = Buffer.isEncoding || function (encoding) { | ||
|  |   encoding = '' + encoding; | ||
|  |   switch (encoding && encoding.toLowerCase()) { | ||
|  |     case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': | ||
|  |       return true; | ||
|  |     default: | ||
|  |       return false; | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | function _normalizeEncoding(enc) { | ||
|  |   if (!enc) return 'utf8'; | ||
|  |   var retried; | ||
|  |   while (true) { | ||
|  |     switch (enc) { | ||
|  |       case 'utf8': | ||
|  |       case 'utf-8': | ||
|  |         return 'utf8'; | ||
|  |       case 'ucs2': | ||
|  |       case 'ucs-2': | ||
|  |       case 'utf16le': | ||
|  |       case 'utf-16le': | ||
|  |         return 'utf16le'; | ||
|  |       case 'latin1': | ||
|  |       case 'binary': | ||
|  |         return 'latin1'; | ||
|  |       case 'base64': | ||
|  |       case 'ascii': | ||
|  |       case 'hex': | ||
|  |         return enc; | ||
|  |       default: | ||
|  |         if (retried) return; // undefined
 | ||
|  |         enc = ('' + enc).toLowerCase(); | ||
|  |         retried = true; | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // Do not cache `Buffer.isEncoding` when checking encoding names as some
 | ||
|  | // modules monkey-patch it to support additional encodings
 | ||
|  | function normalizeEncoding(enc) { | ||
|  |   var nenc = _normalizeEncoding(enc); | ||
|  |   if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); | ||
|  |   return nenc || enc; | ||
|  | } | ||
|  | 
 | ||
|  | // StringDecoder provides an interface for efficiently splitting a series of
 | ||
|  | // buffers into a series of JS strings without breaking apart multi-byte
 | ||
|  | // characters.
 | ||
|  | exports.StringDecoder = StringDecoder; | ||
|  | function StringDecoder(encoding) { | ||
|  |   this.encoding = normalizeEncoding(encoding); | ||
|  |   var nb; | ||
|  |   switch (this.encoding) { | ||
|  |     case 'utf16le': | ||
|  |       this.text = utf16Text; | ||
|  |       this.end = utf16End; | ||
|  |       nb = 4; | ||
|  |       break; | ||
|  |     case 'utf8': | ||
|  |       this.fillLast = utf8FillLast; | ||
|  |       nb = 4; | ||
|  |       break; | ||
|  |     case 'base64': | ||
|  |       this.text = base64Text; | ||
|  |       this.end = base64End; | ||
|  |       nb = 3; | ||
|  |       break; | ||
|  |     default: | ||
|  |       this.write = simpleWrite; | ||
|  |       this.end = simpleEnd; | ||
|  |       return; | ||
|  |   } | ||
|  |   this.lastNeed = 0; | ||
|  |   this.lastTotal = 0; | ||
|  |   this.lastChar = Buffer.allocUnsafe(nb); | ||
|  | } | ||
|  | 
 | ||
|  | StringDecoder.prototype.write = function (buf) { | ||
|  |   if (buf.length === 0) return ''; | ||
|  |   var r; | ||
|  |   var i; | ||
|  |   if (this.lastNeed) { | ||
|  |     r = this.fillLast(buf); | ||
|  |     if (r === undefined) return ''; | ||
|  |     i = this.lastNeed; | ||
|  |     this.lastNeed = 0; | ||
|  |   } else { | ||
|  |     i = 0; | ||
|  |   } | ||
|  |   if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); | ||
|  |   return r || ''; | ||
|  | }; | ||
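|  | // Behaviour sketch (`d` is illustrative): a multi-byte character split across
|  | // write() calls is held back until it completes, e.g. for UTF-8, where
|  | // 0xE2 0x82 0xAC encodes '\u20ac':
|  | //
|  | //   var d = new StringDecoder('utf8');
|  | //   d.write(Buffer.from([0xe2, 0x82]));   // -> '' (2 bytes buffered)
|  | //   d.write(Buffer.from([0xac]));         // -> '\u20ac'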
|  | 
 | ||
|  | StringDecoder.prototype.end = utf8End; | ||
|  | 
 | ||
|  | // Returns only complete characters in a Buffer
 | ||
|  | StringDecoder.prototype.text = utf8Text; | ||
|  | 
 | ||
|  | // Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
 | ||
|  | StringDecoder.prototype.fillLast = function (buf) { | ||
|  |   if (this.lastNeed <= buf.length) { | ||
|  |     buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); | ||
|  |     return this.lastChar.toString(this.encoding, 0, this.lastTotal); | ||
|  |   } | ||
|  |   buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); | ||
|  |   this.lastNeed -= buf.length; | ||
|  | }; | ||
|  | 
 | ||
|  | // Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
 | ||
|  | // continuation byte. If an invalid byte is detected, -2 is returned.
 | ||
|  | function utf8CheckByte(byte) { | ||
|  |   if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; | ||
|  |   return byte >> 6 === 0x02 ? -1 : -2; | ||
|  | } | ||
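|  | // For example: utf8CheckByte(0x41) === 0 (ASCII), utf8CheckByte(0xC3) === 2,
|  | // utf8CheckByte(0xE2) === 3 and utf8CheckByte(0xF0) === 4 (lead bytes of 2-,
|  | // 3- and 4-byte sequences), utf8CheckByte(0x80) === -1 (continuation byte),
|  | // and utf8CheckByte(0xFF) === -2 (invalid).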
|  | 
 | ||
|  | // Checks at most 3 bytes at the end of a Buffer in order to detect an
 | ||
|  | // incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
 | ||
|  | // needed to complete the UTF-8 character (if applicable) is returned.
 | ||
|  | function utf8CheckIncomplete(self, buf, i) { | ||
|  |   var j = buf.length - 1; | ||
|  |   if (j < i) return 0; | ||
|  |   var nb = utf8CheckByte(buf[j]); | ||
|  |   if (nb >= 0) { | ||
|  |     if (nb > 0) self.lastNeed = nb - 1; | ||
|  |     return nb; | ||
|  |   } | ||
|  |   if (--j < i || nb === -2) return 0; | ||
|  |   nb = utf8CheckByte(buf[j]); | ||
|  |   if (nb >= 0) { | ||
|  |     if (nb > 0) self.lastNeed = nb - 2; | ||
|  |     return nb; | ||
|  |   } | ||
|  |   if (--j < i || nb === -2) return 0; | ||
|  |   nb = utf8CheckByte(buf[j]); | ||
|  |   if (nb >= 0) { | ||
|  |     if (nb > 0) { | ||
|  |       if (nb === 2) nb = 0;else self.lastNeed = nb - 3; | ||
|  |     } | ||
|  |     return nb; | ||
|  |   } | ||
|  |   return 0; | ||
|  | } | ||
|  | 
 | ||
|  | // Validates as many continuation bytes for a multi-byte UTF-8 character as
 | ||
|  | // needed or are available. If we see a non-continuation byte where we expect
 | ||
|  | // one, we "replace" the validated continuation bytes we've seen so far with
 | ||
|  | // a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
 | ||
|  | // behavior. The continuation byte check is included three times in the case
 | ||
|  | // where all of the continuation bytes for a character exist in the same buffer.
 | ||
|  | // It is also done this way as a slight performance increase instead of using a
 | ||
|  | // loop.
 | ||
|  | function utf8CheckExtraBytes(self, buf, p) { | ||
|  |   if ((buf[0] & 0xC0) !== 0x80) { | ||
|  |     self.lastNeed = 0; | ||
|  |     return '\ufffd'; | ||
|  |   } | ||
|  |   if (self.lastNeed > 1 && buf.length > 1) { | ||
|  |     if ((buf[1] & 0xC0) !== 0x80) { | ||
|  |       self.lastNeed = 1; | ||
|  |       return '\ufffd'; | ||
|  |     } | ||
|  |     if (self.lastNeed > 2 && buf.length > 2) { | ||
|  |       if ((buf[2] & 0xC0) !== 0x80) { | ||
|  |         self.lastNeed = 2; | ||
|  |         return '\ufffd'; | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
 | ||
|  | function utf8FillLast(buf) { | ||
|  |   var p = this.lastTotal - this.lastNeed; | ||
|  |   var r = utf8CheckExtraBytes(this, buf, p); | ||
|  |   if (r !== undefined) return r; | ||
|  |   if (this.lastNeed <= buf.length) { | ||
|  |     buf.copy(this.lastChar, p, 0, this.lastNeed); | ||
|  |     return this.lastChar.toString(this.encoding, 0, this.lastTotal); | ||
|  |   } | ||
|  |   buf.copy(this.lastChar, p, 0, buf.length); | ||
|  |   this.lastNeed -= buf.length; | ||
|  | } | ||
|  | 
 | ||
|  | // Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
 | ||
|  | // partial character, the character's bytes are buffered until the required
 | ||
|  | // number of bytes are available.
 | ||
|  | function utf8Text(buf, i) { | ||
|  |   var total = utf8CheckIncomplete(this, buf, i); | ||
|  |   if (!this.lastNeed) return buf.toString('utf8', i); | ||
|  |   this.lastTotal = total; | ||
|  |   var end = buf.length - (total - this.lastNeed); | ||
|  |   buf.copy(this.lastChar, 0, end); | ||
|  |   return buf.toString('utf8', i, end); | ||
|  | } | ||
|  | 
 | ||
|  | // For UTF-8, a replacement character is added when ending on a partial
 | ||
|  | // character.
 | ||
|  | function utf8End(buf) { | ||
|  |   var r = buf && buf.length ? this.write(buf) : ''; | ||
|  |   if (this.lastNeed) return r + '\ufffd'; | ||
|  |   return r; | ||
|  | } | ||
|  | 
 | ||
|  | // UTF-16LE typically needs two bytes per character, but even if we have an even
 | ||
|  | // number of bytes available, we need to check if we end on a leading/high
 | ||
|  | // surrogate. In that case, we need to wait for the next two bytes in order to
 | ||
|  | // decode the last character properly.
 | ||
|  | function utf16Text(buf, i) { | ||
|  |   if ((buf.length - i) % 2 === 0) { | ||
|  |     var r = buf.toString('utf16le', i); | ||
|  |     if (r) { | ||
|  |       var c = r.charCodeAt(r.length - 1); | ||
|  |       if (c >= 0xD800 && c <= 0xDBFF) { | ||
|  |         this.lastNeed = 2; | ||
|  |         this.lastTotal = 4; | ||
|  |         this.lastChar[0] = buf[buf.length - 2]; | ||
|  |         this.lastChar[1] = buf[buf.length - 1]; | ||
|  |         return r.slice(0, -1); | ||
|  |       } | ||
|  |     } | ||
|  |     return r; | ||
|  |   } | ||
|  |   this.lastNeed = 1; | ||
|  |   this.lastTotal = 2; | ||
|  |   this.lastChar[0] = buf[buf.length - 1]; | ||
|  |   return buf.toString('utf16le', i, buf.length - 1); | ||
|  | } | ||
|  | 
 | ||
|  | // For UTF-16LE we do not explicitly append special replacement characters if we
 | ||
|  | // end on a partial character; we simply let v8 handle that.
 | ||
|  | function utf16End(buf) { | ||
|  |   var r = buf && buf.length ? this.write(buf) : ''; | ||
|  |   if (this.lastNeed) { | ||
|  |     var end = this.lastTotal - this.lastNeed; | ||
|  |     return r + this.lastChar.toString('utf16le', 0, end); | ||
|  |   } | ||
|  |   return r; | ||
|  | } | ||
|  | 
 | ||
|  | function base64Text(buf, i) { | ||
|  |   var n = (buf.length - i) % 3; | ||
|  |   if (n === 0) return buf.toString('base64', i); | ||
|  |   this.lastNeed = 3 - n; | ||
|  |   this.lastTotal = 3; | ||
|  |   if (n === 1) { | ||
|  |     this.lastChar[0] = buf[buf.length - 1]; | ||
|  |   } else { | ||
|  |     this.lastChar[0] = buf[buf.length - 2]; | ||
|  |     this.lastChar[1] = buf[buf.length - 1]; | ||
|  |   } | ||
|  |   return buf.toString('base64', i, buf.length - n); | ||
|  | } | ||
|  | 
 | ||
|  | function base64End(buf) { | ||
|  |   var r = buf && buf.length ? this.write(buf) : ''; | ||
|  |   if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); | ||
|  |   return r; | ||
|  | } | ||
|  | 
 | ||
|  | // Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
 | ||
|  | function simpleWrite(buf) { | ||
|  |   return buf.toString(this.encoding); | ||
|  | } | ||
|  | 
 | ||
|  | function simpleEnd(buf) { | ||
|  |   return buf && buf.length ? this.write(buf) : ''; | ||
|  | } | ||
|  | },{"100":100}],102:[function(_dereq_,module,exports){ | ||
|  | arguments[4][101][0].apply(exports,arguments) | ||
|  | },{"101":101,"83":83}],103:[function(_dereq_,module,exports){ | ||
|  | arguments[4][43][0].apply(exports,arguments) | ||
|  | },{"43":43}],104:[function(_dereq_,module,exports){ | ||
|  | arguments[4][44][0].apply(exports,arguments) | ||
|  | },{"106":106,"108":108,"37":37,"44":44,"73":73}],105:[function(_dereq_,module,exports){ | ||
|  | arguments[4][45][0].apply(exports,arguments) | ||
|  | },{"107":107,"37":37,"45":45}],106:[function(_dereq_,module,exports){ | ||
|  | arguments[4][46][0].apply(exports,arguments) | ||
|  | },{"102":102,"103":103,"104":104,"109":109,"11":11,"110":110,"111":111,"113":113,"115":115,"116":116,"13":13,"26":26,"37":37,"46":46,"73":73}],107:[function(_dereq_,module,exports){ | ||
|  | arguments[4][47][0].apply(exports,arguments) | ||
|  | },{"103":103,"104":104,"37":37,"47":47}],108:[function(_dereq_,module,exports){ | ||
|  | arguments[4][48][0].apply(exports,arguments) | ||
|  | },{"103":103,"104":104,"111":111,"115":115,"116":116,"121":121,"13":13,"37":37,"48":48,"73":73}],109:[function(_dereq_,module,exports){ | ||
|  | arguments[4][49][0].apply(exports,arguments) | ||
|  | },{"112":112,"49":49,"73":73}],110:[function(_dereq_,module,exports){ | ||
|  | arguments[4][50][0].apply(exports,arguments) | ||
|  | },{"11":11,"13":13,"50":50}],111:[function(_dereq_,module,exports){ | ||
|  | arguments[4][51][0].apply(exports,arguments) | ||
|  | },{"51":51,"73":73}],112:[function(_dereq_,module,exports){ | ||
|  | arguments[4][52][0].apply(exports,arguments) | ||
|  | },{"103":103,"52":52}],113:[function(_dereq_,module,exports){ | ||
|  | arguments[4][53][0].apply(exports,arguments) | ||
|  | },{"53":53}],114:[function(_dereq_,module,exports){ | ||
|  | arguments[4][54][0].apply(exports,arguments) | ||
|  | },{"103":103,"112":112,"54":54}],115:[function(_dereq_,module,exports){ | ||
|  | arguments[4][55][0].apply(exports,arguments) | ||
|  | },{"103":103,"55":55}],116:[function(_dereq_,module,exports){ | ||
|  | arguments[4][56][0].apply(exports,arguments) | ||
|  | },{"26":26,"56":56}],117:[function(_dereq_,module,exports){ | ||
|  | arguments[4][57][0].apply(exports,arguments) | ||
|  | },{"104":104,"105":105,"106":106,"107":107,"108":108,"112":112,"114":114,"57":57}],118:[function(_dereq_,module,exports){ | ||
|  | (function (process){(function (){ | ||
|  | var Transform = _dereq_(117).Transform | ||
|  |   , inherits  = _dereq_(37) | ||
|  | 
 | ||
|  | function DestroyableTransform(opts) { | ||
|  |   Transform.call(this, opts) | ||
|  |   this._destroyed = false | ||
|  | } | ||
|  | 
 | ||
|  | inherits(DestroyableTransform, Transform) | ||
|  | 
 | ||
|  | DestroyableTransform.prototype.destroy = function(err) { | ||
|  |   if (this._destroyed) return | ||
|  |   this._destroyed = true | ||
|  |    | ||
|  |   var self = this | ||
|  |   process.nextTick(function() { | ||
|  |     if (err) | ||
|  |       self.emit('error', err) | ||
|  |     self.emit('close') | ||
|  |   }) | ||
|  | } | ||
|  | 
 | ||
|  | // a noop _transform function
 | ||
|  | function noop (chunk, enc, callback) { | ||
|  |   callback(null, chunk) | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | // create a new export function, used by both the main export and
 | ||
|  | // the .ctor export; it contains common logic for dealing with arguments
 | ||
|  | function through2 (construct) { | ||
|  |   return function (options, transform, flush) { | ||
|  |     if (typeof options == 'function') { | ||
|  |       flush     = transform | ||
|  |       transform = options | ||
|  |       options   = {} | ||
|  |     } | ||
|  | 
 | ||
|  |     if (typeof transform != 'function') | ||
|  |       transform = noop | ||
|  | 
 | ||
|  |     if (typeof flush != 'function') | ||
|  |       flush = null | ||
|  | 
 | ||
|  |     return construct(options, transform, flush) | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | 
 | ||
|  | // main export, just make me a transform stream!
 | ||
|  | module.exports = through2(function (options, transform, flush) { | ||
|  |   var t2 = new DestroyableTransform(options) | ||
|  | 
 | ||
|  |   t2._transform = transform | ||
|  | 
 | ||
|  |   if (flush) | ||
|  |     t2._flush = flush | ||
|  | 
 | ||
|  |   return t2 | ||
|  | }) | ||
|  | 
 | ||
|  | 
 | ||
|  | // make me a reusable prototype that I can `new`, or implicitly `new`
 | ||
|  | // with a constructor call
 | ||
|  | module.exports.ctor = through2(function (options, transform, flush) { | ||
|  |   function Through2 (override) { | ||
|  |     if (!(this instanceof Through2)) | ||
|  |       return new Through2(override) | ||
|  | 
 | ||
|  |     this.options = Object.assign({}, options, override) | ||
|  | 
 | ||
|  |     DestroyableTransform.call(this, this.options) | ||
|  |   } | ||
|  | 
 | ||
|  |   inherits(Through2, DestroyableTransform) | ||
|  | 
 | ||
|  |   Through2.prototype._transform = transform | ||
|  | 
 | ||
|  |   if (flush) | ||
|  |     Through2.prototype._flush = flush | ||
|  | 
 | ||
|  |   return Through2 | ||
|  | }) | ||
|  | 
 | ||
|  | 
 | ||
|  | module.exports.obj = through2(function (options, transform, flush) { | ||
|  |   var t2 = new DestroyableTransform(Object.assign({ objectMode: true, highWaterMark: 16 }, options)) | ||
|  | 
 | ||
|  |   t2._transform = transform | ||
|  | 
 | ||
|  |   if (flush) | ||
|  |     t2._flush = flush | ||
|  | 
 | ||
|  |   return t2 | ||
|  | }) | ||
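|  | // Usage sketch (the `upper` transform is illustrative): the main export builds
|  | // a Transform stream from a _transform function, .obj does the same in
|  | // objectMode, and .ctor returns a reusable constructor instead of an instance:
|  | //
|  | //   var upper = through2(function (chunk, enc, cb) {
|  | //     cb(null, String(chunk).toUpperCase())
|  | //   })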
|  | 
 | ||
|  | }).call(this)}).call(this,_dereq_(73)) | ||
|  | },{"117":117,"37":37,"73":73}],119:[function(_dereq_,module,exports){ | ||
|  | (function (setImmediate,clearImmediate){(function (){ | ||
|  | var nextTick = _dereq_(73).nextTick; | ||
|  | var apply = Function.prototype.apply; | ||
|  | var slice = Array.prototype.slice; | ||
|  | var immediateIds = {}; | ||
|  | var nextImmediateId = 0; | ||
|  | 
 | ||
|  | // DOM APIs, for completeness
 | ||
|  | 
 | ||
|  | exports.setTimeout = function() { | ||
|  |   return new Timeout(apply.call(setTimeout, window, arguments), clearTimeout); | ||
|  | }; | ||
|  | exports.setInterval = function() { | ||
|  |   return new Timeout(apply.call(setInterval, window, arguments), clearInterval); | ||
|  | }; | ||
|  | exports.clearTimeout = | ||
|  | exports.clearInterval = function(timeout) { timeout.close(); }; | ||
|  | 
 | ||
|  | function Timeout(id, clearFn) { | ||
|  |   this._id = id; | ||
|  |   this._clearFn = clearFn; | ||
|  | } | ||
|  | Timeout.prototype.unref = Timeout.prototype.ref = function() {}; | ||
|  | Timeout.prototype.close = function() { | ||
|  |   this._clearFn.call(window, this._id); | ||
|  | }; | ||
|  | 
 | ||
|  | // Does not start the timer, just sets up the members needed.
 | ||
|  | exports.enroll = function(item, msecs) { | ||
|  |   clearTimeout(item._idleTimeoutId); | ||
|  |   item._idleTimeout = msecs; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.unenroll = function(item) { | ||
|  |   clearTimeout(item._idleTimeoutId); | ||
|  |   item._idleTimeout = -1; | ||
|  | }; | ||
|  | 
 | ||
|  | exports._unrefActive = exports.active = function(item) { | ||
|  |   clearTimeout(item._idleTimeoutId); | ||
|  | 
 | ||
|  |   var msecs = item._idleTimeout; | ||
|  |   if (msecs >= 0) { | ||
|  |     item._idleTimeoutId = setTimeout(function onTimeout() { | ||
|  |       if (item._onTimeout) | ||
|  |         item._onTimeout(); | ||
|  |     }, msecs); | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | // That's not how Node.js implements it, but the exposed API is the same.
 | ||
|  | exports.setImmediate = typeof setImmediate === "function" ? setImmediate : function(fn) { | ||
|  |   var id = nextImmediateId++; | ||
|  |   var args = arguments.length < 2 ? false : slice.call(arguments, 1); | ||
|  | 
 | ||
|  |   immediateIds[id] = true; | ||
|  | 
 | ||
|  |   nextTick(function onNextTick() { | ||
|  |     if (immediateIds[id]) { | ||
|  |       // fn.call() is faster so we optimize for the common use-case
 | ||
|  |       // @see http://jsperf.com/call-apply-segu
 | ||
|  |       if (args) { | ||
|  |         fn.apply(null, args); | ||
|  |       } else { | ||
|  |         fn.call(null); | ||
|  |       } | ||
|  |       // Prevent ids from leaking
 | ||
|  |       exports.clearImmediate(id); | ||
|  |     } | ||
|  |   }); | ||
|  | 
 | ||
|  |   return id; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.clearImmediate = typeof clearImmediate === "function" ? clearImmediate : function(id) { | ||
|  |   delete immediateIds[id]; | ||
|  | }; | ||
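|  | // Note: the fallback setImmediate above queues the callback via nextTick and
|  | // records its id in immediateIds; clearImmediate only deletes that id, so a
|  | // cleared callback is simply skipped when the queued tick runs.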
|  | }).call(this)}).call(this,_dereq_(119).setImmediate,_dereq_(119).clearImmediate) | ||
|  | },{"119":119,"73":73}],120:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | // Simple FIFO queue implementation to avoid having to do shift()
 | ||
|  | // on an array, which is slow.
 | ||
|  | 
 | ||
|  | function Queue() { | ||
|  |   this.length = 0; | ||
|  | } | ||
|  | 
 | ||
|  | Queue.prototype.push = function (item) { | ||
|  |   var node = {item: item}; | ||
|  |   if (this.last) { | ||
|  |     this.last = this.last.next = node; | ||
|  |   } else { | ||
|  |     this.last = this.first = node; | ||
|  |   } | ||
|  |   this.length++; | ||
|  | }; | ||
|  | 
 | ||
|  | Queue.prototype.shift = function () { | ||
|  |   var node = this.first; | ||
|  |   if (node) { | ||
|  |     this.first = node.next; | ||
|  |     if (!(--this.length)) { | ||
|  |       this.last = undefined; | ||
|  |     } | ||
|  |     return node.item; | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | Queue.prototype.slice = function (start, end) { | ||
|  |   start = typeof start === 'undefined' ? 0 : start; | ||
|  |   end = typeof end === 'undefined' ? Infinity : end; | ||
|  | 
 | ||
|  |   var output = []; | ||
|  | 
 | ||
|  |   var i = 0; | ||
|  |   for (var node = this.first; node; node = node.next) { | ||
|  |     if (--end < 0) { | ||
|  |       break; | ||
|  |     } else if (++i > start) { | ||
|  |       output.push(node.item); | ||
|  |     } | ||
|  |   } | ||
|  |   return output; | ||
|  | }; | ||
|  | 
 | ||
|  | module.exports = Queue; | ||
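|  | // Usage sketch (`q` is illustrative): push() and shift() are O(1) because the
|  | // queue is a linked list, avoiding Array#shift's re-indexing cost:
|  | //
|  | //   var q = new Queue();
|  | //   q.push('a'); q.push('b');
|  | //   q.shift();   // -> 'a'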
|  | 
 | ||
|  | },{}],121:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 
 | ||
|  | /** | ||
|  |  * Module exports. | ||
|  |  */ | ||
|  | 
 | ||
|  | module.exports = deprecate; | ||
|  | 
 | ||
|  | /** | ||
|  |  * Mark that a method should not be used. | ||
|  |  * Returns a modified function which warns once by default. | ||
|  |  * | ||
|  |  * If `localStorage.noDeprecation = true` is set, then it is a no-op. | ||
|  |  * | ||
|  |  * If `localStorage.throwDeprecation = true` is set, then deprecated functions | ||
|  |  * will throw an Error when invoked. | ||
|  |  * | ||
|  |  * If `localStorage.traceDeprecation = true` is set, then deprecated functions | ||
|  |  * will invoke `console.trace()` instead of `console.error()`. | ||
|  |  * | ||
|  |  * @param {Function} fn - the function to deprecate | ||
|  |  * @param {String} msg - the string to print to the console when `fn` is invoked | ||
|  |  * @returns {Function} a new "deprecated" version of `fn` | ||
|  |  * @api public | ||
|  |  */ | ||
|  | 
 | ||
|  | function deprecate (fn, msg) { | ||
|  |   if (config('noDeprecation')) { | ||
|  |     return fn; | ||
|  |   } | ||
|  | 
 | ||
|  |   var warned = false; | ||
|  |   function deprecated() { | ||
|  |     if (!warned) { | ||
|  |       if (config('throwDeprecation')) { | ||
|  |         throw new Error(msg); | ||
|  |       } else if (config('traceDeprecation')) { | ||
|  |         console.trace(msg); | ||
|  |       } else { | ||
|  |         console.warn(msg); | ||
|  |       } | ||
|  |       warned = true; | ||
|  |     } | ||
|  |     return fn.apply(this, arguments); | ||
|  |   } | ||
|  | 
 | ||
|  |   return deprecated; | ||
|  | } | ||
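|  | // Usage sketch (`oldApi` is illustrative): wrap a function once and the first
|  | // call warns with the message; the localStorage flags documented above switch
|  | // that to throwing or console.trace():
|  | //
|  | //   var oldApi = deprecate(function () { /* ... */ }, 'oldApi() is deprecated');
|  | //   oldApi();   // warns once, then behaves like the wrapped function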
|  | 
 | ||
|  | /** | ||
|  |  * Checks `localStorage` for boolean values for the given `name`. | ||
|  |  * | ||
|  |  * @param {String} name | ||
|  |  * @returns {Boolean} | ||
|  |  * @api private | ||
|  |  */ | ||
|  | 
 | ||
|  | function config (name) { | ||
|  |   // accessing global.localStorage can trigger a DOMException in sandboxed iframes
 | ||
|  |   try { | ||
|  |     if (!global.localStorage) return false; | ||
|  |   } catch (_) { | ||
|  |     return false; | ||
|  |   } | ||
|  |   var val = global.localStorage[name]; | ||
|  |   if (null == val) return false; | ||
|  |   return String(val).toLowerCase() === 'true'; | ||
|  | } | ||
|  | 
 | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],122:[function(_dereq_,module,exports){ | ||
|  | arguments[4][7][0].apply(exports,arguments) | ||
|  | },{"7":7}],123:[function(_dereq_,module,exports){ | ||
|  | arguments[4][8][0].apply(exports,arguments) | ||
|  | },{"8":8}],124:[function(_dereq_,module,exports){ | ||
|  | arguments[4][9][0].apply(exports,arguments) | ||
|  | },{"122":122,"123":123,"73":73,"9":9}],125:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | Object.defineProperty(exports, "v1", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _v.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "v3", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _v2.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "v4", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _v3.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "v5", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _v4.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "NIL", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _nil.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "version", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _version.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "validate", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _validate.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "stringify", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _stringify.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "parse", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _parse.default; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | var _v = _interopRequireDefault(_dereq_(133)); | ||
|  | 
 | ||
|  | var _v2 = _interopRequireDefault(_dereq_(134)); | ||
|  | 
 | ||
|  | var _v3 = _interopRequireDefault(_dereq_(136)); | ||
|  | 
 | ||
|  | var _v4 = _interopRequireDefault(_dereq_(137)); | ||
|  | 
 | ||
|  | var _nil = _interopRequireDefault(_dereq_(127)); | ||
|  | 
 | ||
|  | var _version = _interopRequireDefault(_dereq_(139)); | ||
|  | 
 | ||
|  | var _validate = _interopRequireDefault(_dereq_(138)); | ||
|  | 
 | ||
|  | var _stringify = _interopRequireDefault(_dereq_(132)); | ||
|  | 
 | ||
|  | var _parse = _interopRequireDefault(_dereq_(128)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | },{"127":127,"128":128,"132":132,"133":133,"134":134,"136":136,"137":137,"138":138,"139":139}],126:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | /* | ||
|  |  * Browser-compatible JavaScript MD5 | ||
|  |  * | ||
|  |  * Modification of JavaScript MD5 | ||
|  |  * https://github.com/blueimp/JavaScript-MD5
 | ||
|  |  * | ||
|  |  * Copyright 2011, Sebastian Tschan | ||
|  |  * https://blueimp.net
 | ||
|  |  * | ||
|  |  * Licensed under the MIT license: | ||
|  |  * https://opensource.org/licenses/MIT
 | ||
|  |  * | ||
|  |  * Based on | ||
|  |  * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message | ||
|  |  * Digest Algorithm, as defined in RFC 1321. | ||
|  |  * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 | ||
|  |  * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet | ||
|  |  * Distributed under the BSD License | ||
|  |  * See http://pajhome.org.uk/crypt/md5 for more info.
 | ||
|  |  */ | ||
|  | function md5(bytes) { | ||
|  |   if (typeof bytes === 'string') { | ||
|  |     const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
 | ||
|  | 
 | ||
|  |     bytes = new Uint8Array(msg.length); | ||
|  | 
 | ||
|  |     for (let i = 0; i < msg.length; ++i) { | ||
|  |       bytes[i] = msg.charCodeAt(i); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); | ||
|  | } | ||
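|  | // Note: string input is first converted to UTF-8 bytes (via the
|  | // unescape(encodeURIComponent()) trick above), and despite the helper's name
|  | // the result is an Array of 16 byte values, not a hex string.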
|  | /* | ||
|  |  * Convert an array of little-endian words to an array of bytes | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
|  | function md5ToHexEncodedArray(input) { | ||
|  |   const output = []; | ||
|  |   const length32 = input.length * 32; | ||
|  |   const hexTab = '0123456789abcdef'; | ||
|  | 
 | ||
|  |   for (let i = 0; i < length32; i += 8) { | ||
|  |     const x = input[i >> 5] >>> i % 32 & 0xff; | ||
|  |     const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); | ||
|  |     output.push(hex); | ||
|  |   } | ||
|  | 
 | ||
|  |   return output; | ||
|  | } | ||
|  | /** | ||
|  |  * Calculate output length with padding and bit length | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
|  | function getOutputLength(inputLength8) { | ||
|  |   return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; | ||
|  | } | ||
|  | /* | ||
|  |  * Calculate the MD5 of an array of little-endian words, and a bit length. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
|  | function wordsToMd5(x, len) { | ||
|  |   /* append padding */ | ||
|  |   x[len >> 5] |= 0x80 << len % 32; | ||
|  |   x[getOutputLength(len) - 1] = len; | ||
|  |   let a = 1732584193; | ||
|  |   let b = -271733879; | ||
|  |   let c = -1732584194; | ||
|  |   let d = 271733878; | ||
|  | 
 | ||
|  |   for (let i = 0; i < x.length; i += 16) { | ||
|  |     const olda = a; | ||
|  |     const oldb = b; | ||
|  |     const oldc = c; | ||
|  |     const oldd = d; | ||
|  |     a = md5ff(a, b, c, d, x[i], 7, -680876936); | ||
|  |     d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); | ||
|  |     c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); | ||
|  |     b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); | ||
|  |     a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); | ||
|  |     d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); | ||
|  |     c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); | ||
|  |     b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); | ||
|  |     a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); | ||
|  |     d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); | ||
|  |     c = md5ff(c, d, a, b, x[i + 10], 17, -42063); | ||
|  |     b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); | ||
|  |     a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); | ||
|  |     d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); | ||
|  |     c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); | ||
|  |     b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); | ||
|  |     a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); | ||
|  |     d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); | ||
|  |     c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); | ||
|  |     b = md5gg(b, c, d, a, x[i], 20, -373897302); | ||
|  |     a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); | ||
|  |     d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); | ||
|  |     c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); | ||
|  |     b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); | ||
|  |     a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); | ||
|  |     d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); | ||
|  |     c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); | ||
|  |     b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); | ||
|  |     a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); | ||
|  |     d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); | ||
|  |     c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); | ||
|  |     b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); | ||
|  |     a = md5hh(a, b, c, d, x[i + 5], 4, -378558); | ||
|  |     d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); | ||
|  |     c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); | ||
|  |     b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); | ||
|  |     a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); | ||
|  |     d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); | ||
|  |     c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); | ||
|  |     b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); | ||
|  |     a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); | ||
|  |     d = md5hh(d, a, b, c, x[i], 11, -358537222); | ||
|  |     c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); | ||
|  |     b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); | ||
|  |     a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); | ||
|  |     d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); | ||
|  |     c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); | ||
|  |     b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); | ||
|  |     a = md5ii(a, b, c, d, x[i], 6, -198630844); | ||
|  |     d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); | ||
|  |     c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); | ||
|  |     b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); | ||
|  |     a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); | ||
|  |     d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); | ||
|  |     c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); | ||
|  |     b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); | ||
|  |     a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); | ||
|  |     d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); | ||
|  |     c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); | ||
|  |     b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); | ||
|  |     a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); | ||
|  |     d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); | ||
|  |     c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); | ||
|  |     b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); | ||
|  |     a = safeAdd(a, olda); | ||
|  |     b = safeAdd(b, oldb); | ||
|  |     c = safeAdd(c, oldc); | ||
|  |     d = safeAdd(d, oldd); | ||
|  |   } | ||
|  | 
 | ||
|  |   return [a, b, c, d]; | ||
|  | } | ||
|  | /* | ||
|  |  * Convert an array of bytes to an array of little-endian words | ||
|  |  * Characters >255 have their high-byte silently ignored. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
|  | function bytesToWords(input) { | ||
|  |   if (input.length === 0) { | ||
|  |     return []; | ||
|  |   } | ||
|  | 
 | ||
|  |   const length8 = input.length * 8; | ||
|  |   const output = new Uint32Array(getOutputLength(length8)); | ||
|  | 
 | ||
|  |   for (let i = 0; i < length8; i += 8) { | ||
|  |     output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; | ||
|  |   } | ||
|  | 
 | ||
|  |   return output; | ||
|  | } | ||
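|  | /* | ||
|  |  * Illustrative note (sketch): bytes are packed little-endian, four per 32-bit | ||
|  |  * word, into a buffer sized by getOutputLength (presumably defined earlier in | ||
|  |  * this module) to leave room for MD5 padding. For example, | ||
|  |  * bytesToWords([0x61, 0x62, 0x63])[0] should be 0x00636261. | ||
|  |  */ | ||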
|  | /* | ||
|  |  * Add integers, wrapping at 2^32. This uses 16-bit operations internally | ||
|  |  * to work around bugs in some JS interpreters. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
|  | function safeAdd(x, y) { | ||
|  |   const lsw = (x & 0xffff) + (y & 0xffff); | ||
|  |   const msw = (x >> 16) + (y >> 16) + (lsw >> 16); | ||
|  |   return msw << 16 | lsw & 0xffff; | ||
|  | } | ||
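|  | /* | ||
|  |  * Illustrative example (sketch): the 16-bit split makes the addition wrap | ||
|  |  * modulo 2^32 even on engines with quirky 32-bit math, | ||
|  |  * e.g. safeAdd(0xffffffff | 0, 1) === 0. | ||
|  |  */ | ||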
|  | /* | ||
|  |  * Bitwise rotate a 32-bit number to the left. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
|  | function bitRotateLeft(num, cnt) { | ||
|  |   return num << cnt | num >>> 32 - cnt; | ||
|  | } | ||
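|  | /* | ||
|  |  * Illustrative example (sketch): the high bit wraps around to the low bit, | ||
|  |  * e.g. bitRotateLeft(0x80000000 | 0, 1) === 1. | ||
|  |  */ | ||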
|  | /* | ||
|  |  * These functions implement the four basic operations the algorithm uses. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
|  | function md5cmn(q, a, b, x, s, t) { | ||
|  |   return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); | ||
|  | } | ||
|  | 
 | ||
|  | function md5ff(a, b, c, d, x, s, t) { | ||
|  |   return md5cmn(b & c | ~b & d, a, b, x, s, t); | ||
|  | } | ||
|  | 
 | ||
|  | function md5gg(a, b, c, d, x, s, t) { | ||
|  |   return md5cmn(b & d | c & ~d, a, b, x, s, t); | ||
|  | } | ||
|  | 
 | ||
|  | function md5hh(a, b, c, d, x, s, t) { | ||
|  |   return md5cmn(b ^ c ^ d, a, b, x, s, t); | ||
|  | } | ||
|  | 
 | ||
|  | function md5ii(a, b, c, d, x, s, t) { | ||
|  |   return md5cmn(c ^ (b | ~d), a, b, x, s, t); | ||
|  | } | ||
|  | 
 | ||
|  | var _default = md5; | ||
|  | exports.default = _default; | ||
|  | },{}],127:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | var _default = '00000000-0000-0000-0000-000000000000'; | ||
|  | exports.default = _default; | ||
|  | },{}],128:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _validate = _interopRequireDefault(_dereq_(138)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | function parse(uuid) { | ||
|  |   if (!(0, _validate.default)(uuid)) { | ||
|  |     throw TypeError('Invalid UUID'); | ||
|  |   } | ||
|  | 
 | ||
|  |   let v; | ||
|  |   const arr = new Uint8Array(16); // Parse ########-....-....-....-............
 | ||
|  | 
 | ||
|  |   arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; | ||
|  |   arr[1] = v >>> 16 & 0xff; | ||
|  |   arr[2] = v >>> 8 & 0xff; | ||
|  |   arr[3] = v & 0xff; // Parse ........-####-....-....-............
 | ||
|  | 
 | ||
|  |   arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; | ||
|  |   arr[5] = v & 0xff; // Parse ........-....-####-....-............
 | ||
|  | 
 | ||
|  |   arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; | ||
|  |   arr[7] = v & 0xff; // Parse ........-....-....-####-............
 | ||
|  | 
 | ||
|  |   arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; | ||
|  |   arr[9] = v & 0xff; // Parse ........-....-....-....-############
 | ||
|  |   // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
 | ||
|  | 
 | ||
|  |   arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; | ||
|  |   arr[11] = v / 0x100000000 & 0xff; | ||
|  |   arr[12] = v >>> 24 & 0xff; | ||
|  |   arr[13] = v >>> 16 & 0xff; | ||
|  |   arr[14] = v >>> 8 & 0xff; | ||
|  |   arr[15] = v & 0xff; | ||
|  |   return arr; | ||
|  | } | ||
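|  | /* | ||
|  |  * Illustrative example (sketch): | ||
|  |  * parse('6ba7b810-9dad-11d1-80b4-00c04fd430c8') should yield a 16-byte | ||
|  |  * Uint8Array starting 0x6b, 0xa7, 0xb8, 0x10, ... | ||
|  |  */ | ||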
|  | 
 | ||
|  | var _default = parse; | ||
|  | exports.default = _default; | ||
|  | },{"138":138}],129:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; | ||
|  | exports.default = _default; | ||
|  | },{}],130:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = rng; | ||
|  | // Unique ID creation requires a high quality random # generator. In the browser we therefore
 | ||
|  | // require the crypto API and do not support built-in fallback to lower quality random number
 | ||
|  | // generators (like Math.random()).
 | ||
|  | let getRandomValues; | ||
|  | const rnds8 = new Uint8Array(16); | ||
|  | 
 | ||
|  | function rng() { | ||
|  |   // lazy load so that environments that need to polyfill have a chance to do so
 | ||
|  |   if (!getRandomValues) { | ||
|  |     // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also,
 | ||
|  |     // find the complete implementation of crypto (msCrypto) on IE11.
 | ||
|  |     getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); | ||
|  | 
 | ||
|  |     if (!getRandomValues) { | ||
|  |       throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return getRandomValues(rnds8); | ||
|  | } | ||
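|  | /* | ||
|  |  * Usage note (sketch): rng() refills and returns the shared rnds8 buffer, so | ||
|  |  * callers such as v1()/v4() must consume the 16 bytes before the next call, | ||
|  |  * e.g. var bytes = rng(); // Uint8Array(16) of fresh random values | ||
|  |  */ | ||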
|  | },{}],131:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | // Adapted from Chris Veness' SHA1 code at
 | ||
|  | // http://www.movable-type.co.uk/scripts/sha1.html
 | ||
|  | function f(s, x, y, z) { | ||
|  |   switch (s) { | ||
|  |     case 0: | ||
|  |       return x & y ^ ~x & z; | ||
|  | 
 | ||
|  |     case 1: | ||
|  |       return x ^ y ^ z; | ||
|  | 
 | ||
|  |     case 2: | ||
|  |       return x & y ^ x & z ^ y & z; | ||
|  | 
 | ||
|  |     case 3: | ||
|  |       return x ^ y ^ z; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function ROTL(x, n) { | ||
|  |   return x << n | x >>> 32 - n; | ||
|  | } | ||
|  | 
 | ||
|  | function sha1(bytes) { | ||
|  |   const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; | ||
|  |   const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; | ||
|  | 
 | ||
|  |   if (typeof bytes === 'string') { | ||
|  |     const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
 | ||
|  | 
 | ||
|  |     bytes = []; | ||
|  | 
 | ||
|  |     for (let i = 0; i < msg.length; ++i) { | ||
|  |       bytes.push(msg.charCodeAt(i)); | ||
|  |     } | ||
|  |   } else if (!Array.isArray(bytes)) { | ||
|  |     // Convert Array-like to Array
 | ||
|  |     bytes = Array.prototype.slice.call(bytes); | ||
|  |   } | ||
|  | 
 | ||
|  |   bytes.push(0x80); | ||
|  |   const l = bytes.length / 4 + 2; | ||
|  |   const N = Math.ceil(l / 16); | ||
|  |   const M = new Array(N); | ||
|  | 
 | ||
|  |   for (let i = 0; i < N; ++i) { | ||
|  |     const arr = new Uint32Array(16); | ||
|  | 
 | ||
|  |     for (let j = 0; j < 16; ++j) { | ||
|  |       arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; | ||
|  |     } | ||
|  | 
 | ||
|  |     M[i] = arr; | ||
|  |   } | ||
|  | 
 | ||
|  |   M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); | ||
|  |   M[N - 1][14] = Math.floor(M[N - 1][14]); | ||
|  |   M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; | ||
|  | 
 | ||
|  |   for (let i = 0; i < N; ++i) { | ||
|  |     const W = new Uint32Array(80); | ||
|  | 
 | ||
|  |     for (let t = 0; t < 16; ++t) { | ||
|  |       W[t] = M[i][t]; | ||
|  |     } | ||
|  | 
 | ||
|  |     for (let t = 16; t < 80; ++t) { | ||
|  |       W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); | ||
|  |     } | ||
|  | 
 | ||
|  |     let a = H[0]; | ||
|  |     let b = H[1]; | ||
|  |     let c = H[2]; | ||
|  |     let d = H[3]; | ||
|  |     let e = H[4]; | ||
|  | 
 | ||
|  |     for (let t = 0; t < 80; ++t) { | ||
|  |       const s = Math.floor(t / 20); | ||
|  |       const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; | ||
|  |       e = d; | ||
|  |       d = c; | ||
|  |       c = ROTL(b, 30) >>> 0; | ||
|  |       b = a; | ||
|  |       a = T; | ||
|  |     } | ||
|  | 
 | ||
|  |     H[0] = H[0] + a >>> 0; | ||
|  |     H[1] = H[1] + b >>> 0; | ||
|  |     H[2] = H[2] + c >>> 0; | ||
|  |     H[3] = H[3] + d >>> 0; | ||
|  |     H[4] = H[4] + e >>> 0; | ||
|  |   } | ||
|  | 
 | ||
|  |   return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; | ||
|  | } | ||
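|  | /* | ||
|  |  * Illustrative check (sketch, standard SHA-1 test vector): sha1('abc') should | ||
|  |  * return the 20 bytes a9 99 3e 36 47 06 81 6a ba 3e 25 71 78 50 c2 6c 9c d0 d8 9d. | ||
|  |  */ | ||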
|  | 
 | ||
|  | var _default = sha1; | ||
|  | exports.default = _default; | ||
|  | },{}],132:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _validate = _interopRequireDefault(_dereq_(138)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | /** | ||
|  |  * Convert array of 16 byte values to UUID string format of the form: | ||
|  |  * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX | ||
|  |  */ | ||
|  | const byteToHex = []; | ||
|  | 
 | ||
|  | for (let i = 0; i < 256; ++i) { | ||
|  |   byteToHex.push((i + 0x100).toString(16).substr(1)); | ||
|  | } | ||
|  | 
 | ||
|  | function stringify(arr, offset = 0) { | ||
|  |   // Note: Be careful editing this code!  It's been tuned for performance
 | ||
|  |   // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
 | ||
|  |   const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID.  If this throws, it's likely due to one
 | ||
|  |   // of the following:
 | ||
|  |   // - One or more input array values don't map to a hex octet (leading to
 | ||
|  |   // "undefined" in the uuid)
 | ||
|  |   // - Invalid input values for the RFC `version` or `variant` fields
 | ||
|  | 
 | ||
|  |   if (!(0, _validate.default)(uuid)) { | ||
|  |     throw TypeError('Stringified UUID is invalid'); | ||
|  |   } | ||
|  | 
 | ||
|  |   return uuid; | ||
|  | } | ||
|  | 
 | ||
|  | var _default = stringify; | ||
|  | exports.default = _default; | ||
|  | },{"138":138}],133:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _rng = _interopRequireDefault(_dereq_(130)); | ||
|  | 
 | ||
|  | var _stringify = _interopRequireDefault(_dereq_(132)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | // **`v1()` - Generate time-based UUID**
 | ||
|  | //
 | ||
|  | // Inspired by https://github.com/LiosK/UUID.js
 | ||
|  | // and http://docs.python.org/library/uuid.html
 | ||
|  | let _nodeId; | ||
|  | 
 | ||
|  | let _clockseq; // Previous uuid creation time
 | ||
|  | 
 | ||
|  | 
 | ||
|  | let _lastMSecs = 0; | ||
|  | let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
 | ||
|  | 
 | ||
|  | function v1(options, buf, offset) { | ||
|  |   let i = buf && offset || 0; | ||
|  |   const b = buf || new Array(16); | ||
|  |   options = options || {}; | ||
|  |   let node = options.node || _nodeId; | ||
|  |   let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
 | ||
|  |   // specified.  We do this lazily to minimize issues related to insufficient
 | ||
|  |   // system entropy.  See #189
 | ||
|  | 
 | ||
|  |   if (node == null || clockseq == null) { | ||
|  |     const seedBytes = options.random || (options.rng || _rng.default)(); | ||
|  | 
 | ||
|  |     if (node == null) { | ||
|  |       // Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
 | ||
|  |       node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; | ||
|  |     } | ||
|  | 
 | ||
|  |     if (clockseq == null) { | ||
|  |       // Per 4.2.2, randomize (14 bit) clockseq
 | ||
|  |       clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; | ||
|  |     } | ||
|  |   } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
 | ||
|  |   // (1582-10-15 00:00).  JS numbers aren't precise enough for this, so
 | ||
|  |   // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
 | ||
|  |   // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
 | ||
|  |   // cycle to simulate higher resolution clock
 | ||
|  | 
 | ||
|  |   let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
 | ||
|  | 
 | ||
|  |   const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
 | ||
|  | 
 | ||
|  |   if (dt < 0 && options.clockseq === undefined) { | ||
|  |     clockseq = clockseq + 1 & 0x3fff; | ||
|  |   } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
 | ||
|  |   // time interval
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { | ||
|  |     nsecs = 0; | ||
|  |   } // Per 4.2.1.2 Throw error if too many uuids are requested
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   if (nsecs >= 10000) { | ||
|  |     throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); | ||
|  |   } | ||
|  | 
 | ||
|  |   _lastMSecs = msecs; | ||
|  |   _lastNSecs = nsecs; | ||
|  |   _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
 | ||
|  | 
 | ||
|  |   msecs += 12219292800000; // `time_low`
 | ||
|  | 
 | ||
|  |   const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; | ||
|  |   b[i++] = tl >>> 24 & 0xff; | ||
|  |   b[i++] = tl >>> 16 & 0xff; | ||
|  |   b[i++] = tl >>> 8 & 0xff; | ||
|  |   b[i++] = tl & 0xff; // `time_mid`
 | ||
|  | 
 | ||
|  |   const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; | ||
|  |   b[i++] = tmh >>> 8 & 0xff; | ||
|  |   b[i++] = tmh & 0xff; // `time_high_and_version`
 | ||
|  | 
 | ||
|  |   b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
 | ||
|  | 
 | ||
|  |   b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
 | ||
|  | 
 | ||
|  |   b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
 | ||
|  | 
 | ||
|  |   b[i++] = clockseq & 0xff; // `node`
 | ||
|  | 
 | ||
|  |   for (let n = 0; n < 6; ++n) { | ||
|  |     b[i + n] = node[n]; | ||
|  |   } | ||
|  | 
 | ||
|  |   return buf || (0, _stringify.default)(b); | ||
|  | } | ||
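|  | /* | ||
|  |  * Usage note (sketch): v1() returns a time-based UUID of the form | ||
|  |  * 'xxxxxxxx-xxxx-1xxx-yxxx-xxxxxxxxxxxx', where the version digit is '1' and | ||
|  |  * y encodes the RFC 4122 variant (8, 9, a or b). | ||
|  |  */ | ||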
|  | 
 | ||
|  | var _default = v1; | ||
|  | exports.default = _default; | ||
|  | },{"130":130,"132":132}],134:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _v = _interopRequireDefault(_dereq_(135)); | ||
|  | 
 | ||
|  | var _md = _interopRequireDefault(_dereq_(126)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | const v3 = (0, _v.default)('v3', 0x30, _md.default); | ||
|  | var _default = v3; | ||
|  | exports.default = _default; | ||
|  | },{"126":126,"135":135}],135:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = _default; | ||
|  | exports.URL = exports.DNS = void 0; | ||
|  | 
 | ||
|  | var _stringify = _interopRequireDefault(_dereq_(132)); | ||
|  | 
 | ||
|  | var _parse = _interopRequireDefault(_dereq_(128)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | function stringToBytes(str) { | ||
|  |   str = unescape(encodeURIComponent(str)); // UTF8 escape
 | ||
|  | 
 | ||
|  |   const bytes = []; | ||
|  | 
 | ||
|  |   for (let i = 0; i < str.length; ++i) { | ||
|  |     bytes.push(str.charCodeAt(i)); | ||
|  |   } | ||
|  | 
 | ||
|  |   return bytes; | ||
|  | } | ||
|  | 
 | ||
|  | const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; | ||
|  | exports.DNS = DNS; | ||
|  | const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; | ||
|  | exports.URL = URL; | ||
|  | 
 | ||
|  | function _default(name, version, hashfunc) { | ||
|  |   function generateUUID(value, namespace, buf, offset) { | ||
|  |     if (typeof value === 'string') { | ||
|  |       value = stringToBytes(value); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (typeof namespace === 'string') { | ||
|  |       namespace = (0, _parse.default)(namespace); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (namespace.length !== 16) { | ||
|  |       throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); | ||
|  |     } // Compute hash of namespace and value, Per 4.3
 | ||
|  |     // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
 | ||
|  |     // hashfunc([...namespace, ... value])`
 | ||
|  | 
 | ||
|  | 
 | ||
|  |     let bytes = new Uint8Array(16 + value.length); | ||
|  |     bytes.set(namespace); | ||
|  |     bytes.set(value, namespace.length); | ||
|  |     bytes = hashfunc(bytes); | ||
|  |     bytes[6] = bytes[6] & 0x0f | version; | ||
|  |     bytes[8] = bytes[8] & 0x3f | 0x80; | ||
|  | 
 | ||
|  |     if (buf) { | ||
|  |       offset = offset || 0; | ||
|  | 
 | ||
|  |       for (let i = 0; i < 16; ++i) { | ||
|  |         buf[offset + i] = bytes[i]; | ||
|  |       } | ||
|  | 
 | ||
|  |       return buf; | ||
|  |     } | ||
|  | 
 | ||
|  |     return (0, _stringify.default)(bytes); | ||
|  |   } // Function#name is not settable on some platforms (#270)
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   try { | ||
|  |     generateUUID.name = name; // eslint-disable-next-line no-empty
 | ||
|  |   } catch (err) {} // For CommonJS default export support
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   generateUUID.DNS = DNS; | ||
|  |   generateUUID.URL = URL; | ||
|  |   return generateUUID; | ||
|  | } | ||
|  | },{"128":128,"132":132}],136:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _rng = _interopRequireDefault(_dereq_(130)); | ||
|  | 
 | ||
|  | var _stringify = _interopRequireDefault(_dereq_(132)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | function v4(options, buf, offset) { | ||
|  |   options = options || {}; | ||
|  | 
 | ||
|  |   const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
 | ||
|  | 
 | ||
|  | 
 | ||
|  |   rnds[6] = rnds[6] & 0x0f | 0x40; | ||
|  |   rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
 | ||
|  | 
 | ||
|  |   if (buf) { | ||
|  |     offset = offset || 0; | ||
|  | 
 | ||
|  |     for (let i = 0; i < 16; ++i) { | ||
|  |       buf[offset + i] = rnds[i]; | ||
|  |     } | ||
|  | 
 | ||
|  |     return buf; | ||
|  |   } | ||
|  | 
 | ||
|  |   return (0, _stringify.default)(rnds); | ||
|  | } | ||
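|  | /* | ||
|  |  * Usage note (sketch): v4() returns a random UUID of the form | ||
|  |  * 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx', y being 8, 9, a or b per RFC 4122. | ||
|  |  */ | ||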
|  | 
 | ||
|  | var _default = v4; | ||
|  | exports.default = _default; | ||
|  | },{"130":130,"132":132}],137:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _v = _interopRequireDefault(_dereq_(135)); | ||
|  | 
 | ||
|  | var _sha = _interopRequireDefault(_dereq_(131)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | const v5 = (0, _v.default)('v5', 0x50, _sha.default); | ||
|  | var _default = v5; | ||
|  | exports.default = _default; | ||
|  | },{"131":131,"135":135}],138:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _regex = _interopRequireDefault(_dereq_(129)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | function validate(uuid) { | ||
|  |   return typeof uuid === 'string' && _regex.default.test(uuid); | ||
|  | } | ||
|  | 
 | ||
|  | var _default = validate; | ||
|  | exports.default = _default; | ||
|  | },{"129":129}],139:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _validate = _interopRequireDefault(_dereq_(138)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | function version(uuid) { | ||
|  |   if (!(0, _validate.default)(uuid)) { | ||
|  |     throw TypeError('Invalid UUID'); | ||
|  |   } | ||
|  | 
 | ||
|  |   return parseInt(uuid.substr(14, 1), 16); | ||
|  | } | ||
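|  | /* | ||
|  |  * Illustrative example (sketch): the version is just the 13th hex digit, | ||
|  |  * e.g. version('6ba7b810-9dad-11d1-80b4-00c04fd430c8') === 1. | ||
|  |  */ | ||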
|  | 
 | ||
|  | var _default = version; | ||
|  | exports.default = _default; | ||
|  | },{"138":138}],140:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | /** | ||
|  |  * Stringify/parse functions that don't operate | ||
|  |  * recursively, so they avoid call stack exceeded | ||
|  |  * errors. | ||
|  |  */ | ||
|  | exports.stringify = function stringify(input) { | ||
|  |   var queue = []; | ||
|  |   queue.push({obj: input}); | ||
|  | 
 | ||
|  |   var res = ''; | ||
|  |   var next, obj, prefix, val, i, arrayPrefix, keys, k, key, value, objPrefix; | ||
|  |   while ((next = queue.pop())) { | ||
|  |     obj = next.obj; | ||
|  |     prefix = next.prefix || ''; | ||
|  |     val = next.val || ''; | ||
|  |     res += prefix; | ||
|  |     if (val) { | ||
|  |       res += val; | ||
|  |     } else if (typeof obj !== 'object') { | ||
|  |       res += typeof obj === 'undefined' ? null : JSON.stringify(obj); | ||
|  |     } else if (obj === null) { | ||
|  |       res += 'null'; | ||
|  |     } else if (Array.isArray(obj)) { | ||
|  |       queue.push({val: ']'}); | ||
|  |       for (i = obj.length - 1; i >= 0; i--) { | ||
|  |         arrayPrefix = i === 0 ? '' : ','; | ||
|  |         queue.push({obj: obj[i], prefix: arrayPrefix}); | ||
|  |       } | ||
|  |       queue.push({val: '['}); | ||
|  |     } else { // object
 | ||
|  |       keys = []; | ||
|  |       for (k in obj) { | ||
|  |         if (obj.hasOwnProperty(k)) { | ||
|  |           keys.push(k); | ||
|  |         } | ||
|  |       } | ||
|  |       queue.push({val: '}'}); | ||
|  |       for (i = keys.length - 1; i >= 0; i--) { | ||
|  |         key = keys[i]; | ||
|  |         value = obj[key]; | ||
|  |         objPrefix = (i > 0 ? ',' : ''); | ||
|  |         objPrefix += JSON.stringify(key) + ':'; | ||
|  |         queue.push({obj: value, prefix: objPrefix}); | ||
|  |       } | ||
|  |       queue.push({val: '{'}); | ||
|  |     } | ||
|  |   } | ||
|  |   return res; | ||
|  | }; | ||
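|  | /* | ||
|  |  * Illustrative example (sketch): the queue-based walk produces plain JSON, | ||
|  |  * e.g. exports.stringify({a: [1, 2]}) === '{"a":[1,2]}', without recursing and | ||
|  |  * therefore without risking "Maximum call stack size exceeded" on deep input. | ||
|  |  */ | ||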
|  | 
 | ||
|  | // Convenience function for the parse function.
 | ||
|  | // This pop function is basically copied from
 | ||
|  | // pouchCollate.parseIndexableString
 | ||
|  | function pop(obj, stack, metaStack) { | ||
|  |   var lastMetaElement = metaStack[metaStack.length - 1]; | ||
|  |   if (obj === lastMetaElement.element) { | ||
|  |     // popping a meta-element, e.g. an object whose value is another object
 | ||
|  |     metaStack.pop(); | ||
|  |     lastMetaElement = metaStack[metaStack.length - 1]; | ||
|  |   } | ||
|  |   var element = lastMetaElement.element; | ||
|  |   var lastElementIndex = lastMetaElement.index; | ||
|  |   if (Array.isArray(element)) { | ||
|  |     element.push(obj); | ||
|  |   } else if (lastElementIndex === stack.length - 2) { // obj with key+value
 | ||
|  |     var key = stack.pop(); | ||
|  |     element[key] = obj; | ||
|  |   } else { | ||
|  |     stack.push(obj); // obj with key only
 | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | exports.parse = function (str) { | ||
|  |   var stack = []; | ||
|  |   var metaStack = []; // stack for arrays and objects
 | ||
|  |   var i = 0; | ||
|  |   var collationIndex,parsedNum,numChar; | ||
|  |   var parsedString,lastCh,numConsecutiveSlashes,ch; | ||
|  |   var arrayElement, objElement; | ||
|  |   while (true) { | ||
|  |     collationIndex = str[i++]; | ||
|  |     if (collationIndex === '}' || | ||
|  |         collationIndex === ']' || | ||
|  |         typeof collationIndex === 'undefined') { | ||
|  |       if (stack.length === 1) { | ||
|  |         return stack.pop(); | ||
|  |       } else { | ||
|  |         pop(stack.pop(), stack, metaStack); | ||
|  |         continue; | ||
|  |       } | ||
|  |     } | ||
|  |     switch (collationIndex) { | ||
|  |       case ' ': | ||
|  |       case '\t': | ||
|  |       case '\n': | ||
|  |       case ':': | ||
|  |       case ',': | ||
|  |         break; | ||
|  |       case 'n': | ||
|  |         i += 3; // 'ull'
 | ||
|  |         pop(null, stack, metaStack); | ||
|  |         break; | ||
|  |       case 't': | ||
|  |         i += 3; // 'rue'
 | ||
|  |         pop(true, stack, metaStack); | ||
|  |         break; | ||
|  |       case 'f': | ||
|  |         i += 4; // 'alse'
 | ||
|  |         pop(false, stack, metaStack); | ||
|  |         break; | ||
|  |       case '0': | ||
|  |       case '1': | ||
|  |       case '2': | ||
|  |       case '3': | ||
|  |       case '4': | ||
|  |       case '5': | ||
|  |       case '6': | ||
|  |       case '7': | ||
|  |       case '8': | ||
|  |       case '9': | ||
|  |       case '-': | ||
|  |         parsedNum = ''; | ||
|  |         i--; | ||
|  |         while (true) { | ||
|  |           numChar = str[i++]; | ||
|  |           if (/[\d\.\-e\+]/.test(numChar)) { | ||
|  |             parsedNum += numChar; | ||
|  |           } else { | ||
|  |             i--; | ||
|  |             break; | ||
|  |           } | ||
|  |         } | ||
|  |         pop(parseFloat(parsedNum), stack, metaStack); | ||
|  |         break; | ||
|  |       case '"': | ||
|  |         parsedString = ''; | ||
|  |         lastCh = void 0; | ||
|  |         numConsecutiveSlashes = 0; | ||
|  |         while (true) { | ||
|  |           ch = str[i++]; | ||
|  |           if (ch !== '"' || (lastCh === '\\' && | ||
|  |               numConsecutiveSlashes % 2 === 1)) { | ||
|  |             parsedString += ch; | ||
|  |             lastCh = ch; | ||
|  |             if (lastCh === '\\') { | ||
|  |               numConsecutiveSlashes++; | ||
|  |             } else { | ||
|  |               numConsecutiveSlashes = 0; | ||
|  |             } | ||
|  |           } else { | ||
|  |             break; | ||
|  |           } | ||
|  |         } | ||
|  |         pop(JSON.parse('"' + parsedString + '"'), stack, metaStack); | ||
|  |         break; | ||
|  |       case '[': | ||
|  |         arrayElement = { element: [], index: stack.length }; | ||
|  |         stack.push(arrayElement.element); | ||
|  |         metaStack.push(arrayElement); | ||
|  |         break; | ||
|  |       case '{': | ||
|  |         objElement = { element: {}, index: stack.length }; | ||
|  |         stack.push(objElement.element); | ||
|  |         metaStack.push(objElement); | ||
|  |         break; | ||
|  |       default: | ||
|  |         throw new Error( | ||
|  |           'unexpectedly reached end of input: ' + collationIndex); | ||
|  |     } | ||
|  |   } | ||
|  | }; | ||
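|  | /* | ||
|  |  * Illustrative example (sketch): parse() is the inverse of stringify() above, | ||
|  |  * e.g. exports.parse('{"a":[1,2]}') should deep-equal {a: [1, 2]}. | ||
|  |  */ | ||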
|  | 
 | ||
|  | },{}],141:[function(_dereq_,module,exports){ | ||
|  | module.exports = extend | ||
|  | 
 | ||
|  | var hasOwnProperty = Object.prototype.hasOwnProperty; | ||
|  | 
 | ||
|  | function extend() { | ||
|  |     var target = {} | ||
|  | 
 | ||
|  |     for (var i = 0; i < arguments.length; i++) { | ||
|  |         var source = arguments[i] | ||
|  | 
 | ||
|  |         for (var key in source) { | ||
|  |             if (hasOwnProperty.call(source, key)) { | ||
|  |                 target[key] = source[key] | ||
|  |             } | ||
|  |         } | ||
|  |     } | ||
|  | 
 | ||
|  |     return target | ||
|  | } | ||
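|  | /* | ||
|  |  * Illustrative example (sketch): this variant copies own properties into a new | ||
|  |  * object and leaves its arguments untouched, | ||
|  |  * e.g. extend({a: 1}, {b: 2}) returns {a: 1, b: 2}. | ||
|  |  */ | ||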
|  | 
 | ||
|  | },{}],142:[function(_dereq_,module,exports){ | ||
|  | module.exports = extend | ||
|  | 
 | ||
|  | var hasOwnProperty = Object.prototype.hasOwnProperty; | ||
|  | 
 | ||
|  | function extend(target) { | ||
|  |     for (var i = 1; i < arguments.length; i++) { | ||
|  |         var source = arguments[i] | ||
|  | 
 | ||
|  |         for (var key in source) { | ||
|  |             if (hasOwnProperty.call(source, key)) { | ||
|  |                 target[key] = source[key] | ||
|  |             } | ||
|  |         } | ||
|  |     } | ||
|  | 
 | ||
|  |     return target | ||
|  | } | ||
|  | 
 | ||
|  | },{}],143:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } | ||
|  | 
 | ||
|  | var immediate = _interopDefault(_dereq_(31)); | ||
|  | var uuid = _dereq_(125); | ||
|  | var Md5 = _interopDefault(_dereq_(84)); | ||
|  | var levelup = _interopDefault(_dereq_(61)); | ||
|  | var ltgt = _interopDefault(_dereq_(70)); | ||
|  | var EventEmitter = _interopDefault(_dereq_(26)); | ||
|  | var Codec = _interopDefault(_dereq_(39)); | ||
|  | var ReadableStreamCore = _interopDefault(_dereq_(82)); | ||
|  | var inherits = _interopDefault(_dereq_(37)); | ||
|  | var through2 = _dereq_(118); | ||
|  | var getArguments = _interopDefault(_dereq_(5)); | ||
|  | var Deque = _interopDefault(_dereq_(23)); | ||
|  | var bufferFrom = _interopDefault(_dereq_(12)); | ||
|  | var vuvuzela = _interopDefault(_dereq_(140)); | ||
|  | var localstoragedown = _interopDefault(_dereq_(63)); | ||
|  | 
 | ||
|  | function isBinaryObject(object) { | ||
|  |   return (typeof ArrayBuffer !== 'undefined' && object instanceof ArrayBuffer) || | ||
|  |     (typeof Blob !== 'undefined' && object instanceof Blob); | ||
|  | } | ||
|  | 
 | ||
|  | function cloneArrayBuffer(buff) { | ||
|  |   if (typeof buff.slice === 'function') { | ||
|  |     return buff.slice(0); | ||
|  |   } | ||
|  |   // IE10-11 slice() polyfill
 | ||
|  |   var target = new ArrayBuffer(buff.byteLength); | ||
|  |   var targetArray = new Uint8Array(target); | ||
|  |   var sourceArray = new Uint8Array(buff); | ||
|  |   targetArray.set(sourceArray); | ||
|  |   return target; | ||
|  | } | ||
|  | 
 | ||
|  | function cloneBinaryObject(object) { | ||
|  |   if (object instanceof ArrayBuffer) { | ||
|  |     return cloneArrayBuffer(object); | ||
|  |   } | ||
|  |   var size = object.size; | ||
|  |   var type = object.type; | ||
|  |   // Blob
 | ||
|  |   if (typeof object.slice === 'function') { | ||
|  |     return object.slice(0, size, type); | ||
|  |   } | ||
|  |   // PhantomJS slice() replacement
 | ||
|  |   return object.webkitSlice(0, size, type); | ||
|  | } | ||
|  | 
 | ||
|  | // most of this is borrowed from lodash.isPlainObject:
 | ||
|  | // https://github.com/fis-components/lodash.isplainobject/
 | ||
|  | // blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js
 | ||
|  | 
 | ||
|  | var funcToString = Function.prototype.toString; | ||
|  | var objectCtorString = funcToString.call(Object); | ||
|  | 
 | ||
|  | function isPlainObject(value) { | ||
|  |   var proto = Object.getPrototypeOf(value); | ||
|  |   /* istanbul ignore if */ | ||
|  |   if (proto === null) { // not sure when this happens, but I guess it can
 | ||
|  |     return true; | ||
|  |   } | ||
|  |   var Ctor = proto.constructor; | ||
|  |   return (typeof Ctor == 'function' && | ||
|  |     Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString); | ||
|  | } | ||
|  | 
 | ||
|  | function clone(object) { | ||
|  |   var newObject; | ||
|  |   var i; | ||
|  |   var len; | ||
|  | 
 | ||
|  |   if (!object || typeof object !== 'object') { | ||
|  |     return object; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (Array.isArray(object)) { | ||
|  |     newObject = []; | ||
|  |     for (i = 0, len = object.length; i < len; i++) { | ||
|  |       newObject[i] = clone(object[i]); | ||
|  |     } | ||
|  |     return newObject; | ||
|  |   } | ||
|  | 
 | ||
|  |   // special case: to avoid inconsistencies between IndexedDB
 | ||
|  |   // and other backends, we automatically stringify Dates
 | ||
|  |   if (object instanceof Date && isFinite(object)) { | ||
|  |     return object.toISOString(); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (isBinaryObject(object)) { | ||
|  |     return cloneBinaryObject(object); | ||
|  |   } | ||
|  | 
 | ||
|  |   if (!isPlainObject(object)) { | ||
|  |     return object; // don't clone objects like Workers
 | ||
|  |   } | ||
|  | 
 | ||
|  |   newObject = {}; | ||
|  |   for (i in object) { | ||
|  |     /* istanbul ignore else */ | ||
|  |     if (Object.prototype.hasOwnProperty.call(object, i)) { | ||
|  |       var value = clone(object[i]); | ||
|  |       if (typeof value !== 'undefined') { | ||
|  |         newObject[i] = value; | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  |   return newObject; | ||
|  | } | ||
|  | 
 | ||
|  | function mangle(key) { | ||
|  |   return '$' + key; | ||
|  | } | ||
|  | function unmangle(key) { | ||
|  |   return key.substring(1); | ||
|  | } | ||
|  | function Map$1() { | ||
|  |   this._store = {}; | ||
|  | } | ||
|  | Map$1.prototype.get = function (key) { | ||
|  |   var mangled = mangle(key); | ||
|  |   return this._store[mangled]; | ||
|  | }; | ||
|  | Map$1.prototype.set = function (key, value) { | ||
|  |   var mangled = mangle(key); | ||
|  |   this._store[mangled] = value; | ||
|  |   return true; | ||
|  | }; | ||
|  | Map$1.prototype.has = function (key) { | ||
|  |   var mangled = mangle(key); | ||
|  |   return mangled in this._store; | ||
|  | }; | ||
|  | Map$1.prototype.keys = function () { | ||
|  |   return Object.keys(this._store).map(k => unmangle(k)); | ||
|  | }; | ||
|  | Map$1.prototype["delete"] = function (key) { | ||
|  |   var mangled = mangle(key); | ||
|  |   var res = mangled in this._store; | ||
|  |   delete this._store[mangled]; | ||
|  |   return res; | ||
|  | }; | ||
|  | Map$1.prototype.forEach = function (cb) { | ||
|  |   var keys = Object.keys(this._store); | ||
|  |   for (var i = 0, len = keys.length; i < len; i++) { | ||
|  |     var key = keys[i]; | ||
|  |     var value = this._store[key]; | ||
|  |     key = unmangle(key); | ||
|  |     cb(value, key); | ||
|  |   } | ||
|  | }; | ||
|  | Object.defineProperty(Map$1.prototype, 'size', { | ||
|  |   get: function () { | ||
|  |     return Object.keys(this._store).length; | ||
|  |   } | ||
|  | }); | ||
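|  | /* | ||
|  |  * Illustrative note (sketch): keys are stored mangled with a '$' prefix so | ||
|  |  * they can never collide with Object.prototype names, e.g. | ||
|  |  * var m = new Map$1(); m.set('constructor', 1); m.get('constructor') === 1. | ||
|  |  */ | ||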
|  | 
 | ||
|  | function Set$1(array) { | ||
|  |   this._store = new Map$1(); | ||
|  | 
 | ||
|  |   // init with an array
 | ||
|  |   if (array && Array.isArray(array)) { | ||
|  |     for (var i = 0, len = array.length; i < len; i++) { | ||
|  |       this.add(array[i]); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | Set$1.prototype.add = function (key) { | ||
|  |   return this._store.set(key, true); | ||
|  | }; | ||
|  | Set$1.prototype.has = function (key) { | ||
|  |   return this._store.has(key); | ||
|  | }; | ||
|  | Set$1.prototype.forEach = function (cb) { | ||
|  |   this._store.forEach(function (value, key) { | ||
|  |     cb(key); | ||
|  |   }); | ||
|  | }; | ||
|  | Object.defineProperty(Set$1.prototype, 'size', { | ||
|  |   get: function () { | ||
|  |     return this._store.size; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | // Based on https://kangax.github.io/compat-table/es6/ we can sniff out
 | ||
|  | // incomplete Map/Set implementations which would otherwise cause our tests to fail.
 | ||
|  | // Notably they fail in IE11 and iOS 8.4, which this prevents.
 | ||
|  | function supportsMapAndSet() { | ||
|  |   if (typeof Symbol === 'undefined' || typeof Map === 'undefined' || typeof Set === 'undefined') { | ||
|  |     return false; | ||
|  |   } | ||
|  |   var prop = Object.getOwnPropertyDescriptor(Map, Symbol.species); | ||
|  |   return prop && 'get' in prop && Map[Symbol.species] === Map; | ||
|  | } | ||
|  | 
 | ||
|  | // based on https://github.com/montagejs/collections
 | ||
|  | 
 | ||
|  | var ExportedSet; | ||
|  | var ExportedMap; | ||
|  | 
 | ||
|  | { | ||
|  |   if (supportsMapAndSet()) { // prefer built-in Map/Set
 | ||
|  |     ExportedSet = Set; | ||
|  |     ExportedMap = Map; | ||
|  |   } else { // fall back to our polyfill
 | ||
|  |     ExportedSet = Set$1; | ||
|  |     ExportedMap = Map$1; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // like underscore/lodash _.pick()
 | ||
|  | function pick(obj, arr) { | ||
|  |   var res = {}; | ||
|  |   for (var i = 0, len = arr.length; i < len; i++) { | ||
|  |     var prop = arr[i]; | ||
|  |     if (prop in obj) { | ||
|  |       res[prop] = obj[prop]; | ||
|  |     } | ||
|  |   } | ||
|  |   return res; | ||
|  | } | ||
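|  | /* | ||
|  |  * Illustrative example (sketch): | ||
|  |  * pick({a: 1, b: 2, c: 3}, ['a', 'c']) returns {a: 1, c: 3}. | ||
|  |  */ | ||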
|  | 
 | ||
|  | var hasLocal; | ||
|  | 
 | ||
|  | try { | ||
|  |   localStorage.setItem('_pouch_check_localstorage', 1); | ||
|  |   hasLocal = !!localStorage.getItem('_pouch_check_localstorage'); | ||
|  | } catch (e) { | ||
|  |   hasLocal = false; | ||
|  | } | ||
|  | 
 | ||
|  | function hasLocalStorage() { | ||
|  |   return hasLocal; | ||
|  | } | ||
|  | 
 | ||
|  | // Custom nextTick() shim for browsers. In node, this will just be process.nextTick().
 | ||
|  | 
 | ||
|  | inherits(Changes, EventEmitter); | ||
|  | 
 | ||
|  | /* istanbul ignore next */ | ||
|  | function attachBrowserEvents(self) { | ||
|  |   if (hasLocalStorage()) { | ||
|  |     addEventListener("storage", function (e) { | ||
|  |       self.emit(e.key); | ||
|  |     }); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function Changes() { | ||
|  |   EventEmitter.call(this); | ||
|  |   this._listeners = {}; | ||
|  | 
 | ||
|  |   attachBrowserEvents(this); | ||
|  | } | ||
|  | Changes.prototype.addListener = function (dbName, id, db, opts) { | ||
|  |   /* istanbul ignore if */ | ||
|  |   if (this._listeners[id]) { | ||
|  |     return; | ||
|  |   } | ||
|  |   var self = this; | ||
|  |   var inprogress = false; | ||
|  |   function eventFunction() { | ||
|  |     /* istanbul ignore if */ | ||
|  |     if (!self._listeners[id]) { | ||
|  |       return; | ||
|  |     } | ||
|  |     if (inprogress) { | ||
|  |       inprogress = 'waiting'; | ||
|  |       return; | ||
|  |     } | ||
|  |     inprogress = true; | ||
|  |     var changesOpts = pick(opts, [ | ||
|  |       'style', 'include_docs', 'attachments', 'conflicts', 'filter', | ||
|  |       'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs' | ||
|  |     ]); | ||
|  | 
 | ||
|  |     /* istanbul ignore next */ | ||
|  |     function onError() { | ||
|  |       inprogress = false; | ||
|  |     } | ||
|  | 
 | ||
|  |     db.changes(changesOpts).on('change', function (c) { | ||
|  |       if (c.seq > opts.since && !opts.cancelled) { | ||
|  |         opts.since = c.seq; | ||
|  |         opts.onChange(c); | ||
|  |       } | ||
|  |     }).on('complete', function () { | ||
|  |       if (inprogress === 'waiting') { | ||
|  |         immediate(eventFunction); | ||
|  |       } | ||
|  |       inprogress = false; | ||
|  |     }).on('error', onError); | ||
|  |   } | ||
|  |   this._listeners[id] = eventFunction; | ||
|  |   this.on(dbName, eventFunction); | ||
|  | }; | ||
|  | 
 | ||
|  | Changes.prototype.removeListener = function (dbName, id) { | ||
|  |   /* istanbul ignore if */ | ||
|  |   if (!(id in this._listeners)) { | ||
|  |     return; | ||
|  |   } | ||
|  |   EventEmitter.prototype.removeListener.call(this, dbName, | ||
|  |     this._listeners[id]); | ||
|  |   delete this._listeners[id]; | ||
|  | }; | ||
|  | 
 | ||
|  | 
 | ||
|  | /* istanbul ignore next */ | ||
|  | Changes.prototype.notifyLocalWindows = function (dbName) { | ||
|  |   //do a useless change on a storage thing
 | ||
|  |   //in order to get other windows' listeners to activate
 | ||
|  |   if (hasLocalStorage()) { | ||
|  |     localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a"; | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | Changes.prototype.notify = function (dbName) { | ||
|  |   this.emit(dbName); | ||
|  |   this.notifyLocalWindows(dbName); | ||
|  | }; | ||
|  | 
 | ||
|  | function guardedConsole(method) { | ||
|  |   /* istanbul ignore else */ | ||
|  |   if (typeof console !== 'undefined' && typeof console[method] === 'function') { | ||
|  |     var args = Array.prototype.slice.call(arguments, 1); | ||
|  |     console[method].apply(console, args); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | var assign; | ||
|  | { | ||
|  |   if (typeof Object.assign === 'function') { | ||
|  |     assign = Object.assign; | ||
|  |   } else { | ||
|  |     // lite Object.assign polyfill based on
 | ||
|  |     // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign
 | ||
|  |     assign = function (target) { | ||
|  |       var to = Object(target); | ||
|  | 
 | ||
|  |       for (var index = 1; index < arguments.length; index++) { | ||
|  |         var nextSource = arguments[index]; | ||
|  | 
 | ||
|  |         if (nextSource != null) { // Skip over if undefined or null
 | ||
|  |           for (var nextKey in nextSource) { | ||
|  |             // Avoid bugs when hasOwnProperty is shadowed
 | ||
|  |             if (Object.prototype.hasOwnProperty.call(nextSource, nextKey)) { | ||
|  |               to[nextKey] = nextSource[nextKey]; | ||
|  |             } | ||
|  |           } | ||
|  |         } | ||
|  |       } | ||
|  |       return to; | ||
|  |     }; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | var $inject_Object_assign = assign; | ||
|  | 
 | ||
|  | inherits(PouchError, Error); | ||
|  | 
 | ||
|  | function PouchError(status, error, reason) { | ||
|  |   Error.call(this, reason); | ||
|  |   this.status = status; | ||
|  |   this.name = error; | ||
|  |   this.message = reason; | ||
|  |   this.error = true; | ||
|  | } | ||
|  | 
 | ||
|  | PouchError.prototype.toString = function () { | ||
|  |   return JSON.stringify({ | ||
|  |     status: this.status, | ||
|  |     name: this.name, | ||
|  |     message: this.message, | ||
|  |     reason: this.reason | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect."); | ||
|  | var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'"); | ||
|  | var MISSING_DOC = new PouchError(404, 'not_found', 'missing'); | ||
|  | var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict'); | ||
|  | var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string'); | ||
|  | var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts'); | ||
|  | var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.'); | ||
|  | var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open'); | ||
|  | var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error'); | ||
|  | var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid'); | ||
|  | var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid'); | ||
|  | var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid'); | ||
|  | var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member'); | ||
|  | var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request'); | ||
|  | var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object'); | ||
|  | var DB_MISSING = new PouchError(404, 'not_found', 'Database not found'); | ||
|  | var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown'); | ||
|  | var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown'); | ||
|  | var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown'); | ||
|  | var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function'); | ||
|  | var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format'); | ||
|  | var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.'); | ||
|  | var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found'); | ||
|  | var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid'); | ||
|  | 
 | ||
|  | function createError(error, reason) { | ||
|  |   function CustomPouchError(reason) { | ||
|  |     // inherit error properties from our parent error manually
 | ||
|  |     // so as to allow proper JSON parsing.
 | ||
|  |     /* jshint ignore:start */ | ||
|  |     var names = Object.getOwnPropertyNames(error); | ||
|  |     for (var i = 0, len = names.length; i < len; i++) { | ||
|  |       if (typeof error[names[i]] !== 'function') { | ||
|  |         this[names[i]] = error[names[i]]; | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     if (this.stack === undefined) { | ||
|  |       this.stack = (new Error()).stack; | ||
|  |     } | ||
|  | 
 | ||
|  |     /* jshint ignore:end */ | ||
|  |     if (reason !== undefined) { | ||
|  |       this.reason = reason; | ||
|  |     } | ||
|  |   } | ||
|  |   CustomPouchError.prototype = PouchError.prototype; | ||
|  |   return new CustomPouchError(reason); | ||
|  | } | ||
|  | 
 | ||
|  | function tryFilter(filter, doc, req) { | ||
|  |   try { | ||
|  |     return !filter(doc, req); | ||
|  |   } catch (err) { | ||
|  |     var msg = 'Filter function threw: ' + err.toString(); | ||
|  |     return createError(BAD_REQUEST, msg); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function filterChange(opts) { | ||
|  |   var req = {}; | ||
|  |   var hasFilter = opts.filter && typeof opts.filter === 'function'; | ||
|  |   req.query = opts.query_params; | ||
|  | 
 | ||
|  |   return function filter(change) { | ||
|  |     if (!change.doc) { | ||
|  |       // CSG sends events on the changes feed that don't have documents,
 | ||
|  |       // this hack makes a whole lot of existing code robust.
 | ||
|  |       change.doc = {}; | ||
|  |     } | ||
|  | 
 | ||
|  |     var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req); | ||
|  | 
 | ||
|  |     if (typeof filterReturn === 'object') { | ||
|  |       return filterReturn; | ||
|  |     } | ||
|  | 
 | ||
|  |     if (filterReturn) { | ||
|  |       return false; | ||
|  |     } | ||
|  | 
 | ||
|  |     if (!opts.include_docs) { | ||
|  |       delete change.doc; | ||
|  |     } else if (!opts.attachments) { | ||
|  |       for (var att in change.doc._attachments) { | ||
|  |         /* istanbul ignore else */ | ||
|  |         if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) { | ||
|  |           change.doc._attachments[att].stub = true; | ||
|  |         } | ||
|  |       } | ||
|  |     } | ||
|  |     return true; | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | // shim for Function.prototype.name,
 | ||
|  | // for browsers that don't support it like IE
 | ||
|  | 
 | ||
|  | /* istanbul ignore next */ | ||
|  | function f() {} | ||
|  | 
 | ||
|  | var hasName = f.name; | ||
|  | var res; | ||
|  | 
 | ||
|  | // We don't run coverage in IE
 | ||
|  | /* istanbul ignore else */ | ||
|  | if (hasName) { | ||
|  |   res = function (fun) { | ||
|  |     return fun.name; | ||
|  |   }; | ||
|  | } else { | ||
|  |   res = function (fun) { | ||
|  |     var match = fun.toString().match(/^\s*function\s*(?:(\S+)\s*)?\(/); | ||
|  |     if (match && match[1]) { | ||
|  |       return match[1]; | ||
|  |     } | ||
|  |     else { | ||
|  |       return ''; | ||
|  |     } | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | var functionName = res; | ||
|  | 
 | ||
|  | // Determine if an ID is valid
 | ||
|  | //   - invalid IDs begin with an underscore that does not begin with '_design' or
 | ||
|  | //     '_local'
 | ||
|  | //   - any other string value is a valid id
 | ||
|  | // Returns the specific error object for each case
 | ||
|  | function invalidIdError(id) { | ||
|  |   var err; | ||
|  |   if (!id) { | ||
|  |     err = createError(MISSING_ID); | ||
|  |   } else if (typeof id !== 'string') { | ||
|  |     err = createError(INVALID_ID); | ||
|  |   } else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) { | ||
|  |     err = createError(RESERVED_ID); | ||
|  |   } | ||
|  |   if (err) { | ||
|  |     throw err; | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // Checks if a PouchDB object is "remote" or not.
 | ||
|  | 
 | ||
|  | // originally parseUri 1.2.2, now patched by us
 | ||
|  | 
 | ||
|  | // Based on https://github.com/alexdavid/scope-eval v0.0.3
 | ||
|  | 
 | ||
|  | var thisAtob = function (str) { | ||
|  |   return atob(str); | ||
|  | }; | ||
|  | 
 | ||
|  | var thisBtoa = function (str) { | ||
|  |   return btoa(str); | ||
|  | }; | ||
|  | 
 | ||
|  | // Abstracts constructing a Blob object, so it also works in older
 | ||
|  | // browsers that don't support the native Blob constructor (e.g.
 | ||
|  | // old QtWebKit versions, Android < 4.4).
 | ||
|  | function createBlob(parts, properties) { | ||
|  |   /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */ | ||
|  |   parts = parts || []; | ||
|  |   properties = properties || {}; | ||
|  |   try { | ||
|  |     return new Blob(parts, properties); | ||
|  |   } catch (e) { | ||
|  |     if (e.name !== "TypeError") { | ||
|  |       throw e; | ||
|  |     } | ||
|  |     var Builder = typeof BlobBuilder !== 'undefined' ? BlobBuilder : | ||
|  |                   typeof MSBlobBuilder !== 'undefined' ? MSBlobBuilder : | ||
|  |                   typeof MozBlobBuilder !== 'undefined' ? MozBlobBuilder : | ||
|  |                   WebKitBlobBuilder; | ||
|  |     var builder = new Builder(); | ||
|  |     for (var i = 0; i < parts.length; i += 1) { | ||
|  |       builder.append(parts[i]); | ||
|  |     } | ||
|  |     return builder.getBlob(properties.type); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // From http://stackoverflow.com/questions/14967647/ (continues on next line)
 | ||
|  | // encode-decode-image-with-base64-breaks-image (2013-04-21)
 | ||
|  | function binaryStringToArrayBuffer(bin) { | ||
|  |   var length = bin.length; | ||
|  |   var buf = new ArrayBuffer(length); | ||
|  |   var arr = new Uint8Array(buf); | ||
|  |   for (var i = 0; i < length; i++) { | ||
|  |     arr[i] = bin.charCodeAt(i); | ||
|  |   } | ||
|  |   return buf; | ||
|  | } | ||
|  | 
 | ||
|  | function binStringToBluffer(binString, type) { | ||
|  |   return createBlob([binaryStringToArrayBuffer(binString)], {type: type}); | ||
|  | } | ||
|  | 
 | ||
|  | //Can't find original post, but this is close
 | ||
|  | //http://stackoverflow.com/questions/6965107/ (continues on next line)
 | ||
|  | //converting-between-strings-and-arraybuffers
 | ||
|  | function arrayBufferToBinaryString(buffer) { | ||
|  |   var binary = ''; | ||
|  |   var bytes = new Uint8Array(buffer); | ||
|  |   var length = bytes.byteLength; | ||
|  |   for (var i = 0; i < length; i++) { | ||
|  |     binary += String.fromCharCode(bytes[i]); | ||
|  |   } | ||
|  |   return binary; | ||
|  | } | ||
|  | 
 | ||
|  | // shim for browsers that don't support it
 | ||
|  | function readAsBinaryString(blob, callback) { | ||
|  |   var reader = new FileReader(); | ||
|  |   var hasBinaryString = typeof reader.readAsBinaryString === 'function'; | ||
|  |   reader.onloadend = function (e) { | ||
|  |     var result = e.target.result || ''; | ||
|  |     if (hasBinaryString) { | ||
|  |       return callback(result); | ||
|  |     } | ||
|  |     callback(arrayBufferToBinaryString(result)); | ||
|  |   }; | ||
|  |   if (hasBinaryString) { | ||
|  |     reader.readAsBinaryString(blob); | ||
|  |   } else { | ||
|  |     reader.readAsArrayBuffer(blob); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | // simplified API. universal browser support is assumed
 | ||
|  | function readAsArrayBuffer(blob, callback) { | ||
|  |   var reader = new FileReader(); | ||
|  |   reader.onloadend = function (e) { | ||
|  |     var result = e.target.result || new ArrayBuffer(0); | ||
|  |     callback(result); | ||
|  |   }; | ||
|  |   reader.readAsArrayBuffer(blob); | ||
|  | } | ||
|  | 
 | ||
|  | // this is not used in the browser
 | ||
|  | 
 | ||
|  | var setImmediateShim = self.setImmediate || self.setTimeout; | ||
|  | var MD5_CHUNK_SIZE = 32768; | ||
|  | 
 | ||
|  | function rawToBase64(raw) { | ||
|  |   return thisBtoa(raw); | ||
|  | } | ||
|  | 
 | ||
|  | function sliceBlob(blob, start, end) { | ||
|  |   if (blob.webkitSlice) { | ||
|  |     return blob.webkitSlice(start, end); | ||
|  |   } | ||
|  |   return blob.slice(start, end); | ||
|  | } | ||
|  | 
 | ||
|  | function appendBlob(buffer, blob, start, end, callback) { | ||
|  |   if (start > 0 || end < blob.size) { | ||
|  |     // only slice blob if we really need to
 | ||
|  |     blob = sliceBlob(blob, start, end); | ||
|  |   } | ||
|  |   readAsArrayBuffer(blob, function (arrayBuffer) { | ||
|  |     buffer.append(arrayBuffer); | ||
|  |     callback(); | ||
|  |   }); | ||
|  | } | ||
|  | 
 | ||
|  | function appendString(buffer, string, start, end, callback) { | ||
|  |   if (start > 0 || end < string.length) { | ||
|  |     // only create a substring if we really need to
 | ||
|  |     string = string.substring(start, end); | ||
|  |   } | ||
|  |   buffer.appendBinary(string); | ||
|  |   callback(); | ||
|  | } | ||
|  | 
 | ||
|  | function binaryMd5(data, callback) { | ||
|  |   var inputIsString = typeof data === 'string'; | ||
|  |   var len = inputIsString ? data.length : data.size; | ||
|  |   var chunkSize = Math.min(MD5_CHUNK_SIZE, len); | ||
|  |   var chunks = Math.ceil(len / chunkSize); | ||
|  |   var currentChunk = 0; | ||
|  |   var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer(); | ||
|  | 
 | ||
|  |   var append = inputIsString ? appendString : appendBlob; | ||
|  | 
 | ||
|  |   function next() { | ||
|  |     setImmediateShim(loadNextChunk); | ||
|  |   } | ||
|  | 
 | ||
|  |   function done() { | ||
|  |     var raw = buffer.end(true); | ||
|  |     var base64 = rawToBase64(raw); | ||
|  |     callback(base64); | ||
|  |     buffer.destroy(); | ||
|  |   } | ||
|  | 
 | ||
|  |   function loadNextChunk() { | ||
|  |     var start = currentChunk * chunkSize; | ||
|  |     var end = start + chunkSize; | ||
|  |     currentChunk++; | ||
|  |     if (currentChunk < chunks) { | ||
|  |       append(buffer, data, start, end, next); | ||
|  |     } else { | ||
|  |       append(buffer, data, start, end, done); | ||
|  |     } | ||
|  |   } | ||
|  |   loadNextChunk(); | ||
|  | } | ||
|  | 
 | ||
|  | function stringMd5(string) { | ||
|  |   return Md5.hash(string); | ||
|  | } | ||
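|  | 
 | ||
|  | // A small worked example (added for illustration, not part of the upstream | ||
|  | // code) of the two digest helpers above: stringMd5 returns a hex digest | ||
|  | // synchronously, while binaryMd5 chunks its input (a string or a Blob) and | ||
|  | // calls back with a base64 digest. Never invoked by the adapter. | ||
|  | function exampleMd5Helpers() { | ||
|  |   var hexDigest = stringMd5('hello world'); // synchronous, hex-encoded | ||
|  |   binaryMd5('hello world', function (base64Digest) { | ||
|  |     console.log(hexDigest, base64Digest); // same hash, two encodings | ||
|  |   }); | ||
|  | } | ||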
|  | 
 | ||
|  | /** | ||
|  |  * Creates a new revision string that does NOT include the revision height | ||
|  |  * For example '56649f1b0506c6ca9fda0746eb0cacdf' | ||
|  |  */ | ||
|  | function rev$$1(doc, deterministic_revs) { | ||
|  |   if (!deterministic_revs) { | ||
|  |     return uuid.v4().replace(/-/g, '').toLowerCase(); | ||
|  |   } | ||
|  | 
 | ||
|  |   var mutateableDoc = $inject_Object_assign({}, doc); | ||
|  |   delete mutateableDoc._rev_tree; | ||
|  |   return stringMd5(JSON.stringify(mutateableDoc)); | ||
|  | } | ||
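|  | 
 | ||
|  | // Illustrative sketch (added for clarity, not part of the upstream code) of how | ||
|  | // the revision-id helper above behaves. With deterministic_revs the id is the | ||
|  | // MD5 of the doc body (minus _rev_tree), so identical content always yields the | ||
|  | // same id; otherwise a random UUID-derived id is produced. Never invoked here. | ||
|  | function exampleRevIds(doc) { | ||
|  |   var randomRevId = rev$$1(doc, false);       // e.g. '56649f1b0506c6ca9fda0746eb0cacdf' | ||
|  |   var deterministicRevId = rev$$1(doc, true); // md5 of the serialized doc, minus _rev_tree | ||
|  |   return [randomRevId, deterministicRevId]; | ||
|  | } | ||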
|  | 
 | ||
|  | var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere
 | ||
|  | 
 | ||
|  | function isFunction(f) { | ||
|  |   return 'function' === typeof f; | ||
|  | } | ||
|  | 
 | ||
|  | function getPrefix(db) { | ||
|  |   if (isFunction(db.prefix)) { | ||
|  |     return db.prefix(); | ||
|  |   } | ||
|  |   return db; | ||
|  | } | ||
|  | 
 | ||
|  | function clone$1(_obj) { | ||
|  |   var obj = {}; | ||
|  |   for (var k in _obj) { | ||
|  |     obj[k] = _obj[k]; | ||
|  |   } | ||
|  |   return obj; | ||
|  | } | ||
|  | 
 | ||
|  | function nut(db, precodec, codec) { | ||
|  |   function encodePrefix(prefix, key, opts1, opts2) { | ||
|  |     return precodec.encode([ prefix, codec.encodeKey(key, opts1, opts2 ) ]); | ||
|  |   } | ||
|  | 
 | ||
|  |   function addEncodings(op, prefix) { | ||
|  |     if (prefix && prefix.options) { | ||
|  |       op.keyEncoding = | ||
|  |         op.keyEncoding || prefix.options.keyEncoding; | ||
|  |       op.valueEncoding = | ||
|  |         op.valueEncoding || prefix.options.valueEncoding; | ||
|  |     } | ||
|  |     return op; | ||
|  |   } | ||
|  | 
 | ||
|  |   db.open(function () { /* no-op */}); | ||
|  | 
 | ||
|  |   return { | ||
|  |     apply: function (ops, opts, cb) { | ||
|  |       opts = opts || {}; | ||
|  | 
 | ||
|  |       var batch = []; | ||
|  |       var i = -1; | ||
|  |       var len = ops.length; | ||
|  | 
 | ||
|  |       while (++i < len) { | ||
|  |         var op = ops[i]; | ||
|  |         addEncodings(op, op.prefix); | ||
|  |         op.prefix = getPrefix(op.prefix); | ||
|  |         batch.push({ | ||
|  |           key: encodePrefix(op.prefix, op.key, opts, op), | ||
|  |           value: op.type !== 'del' && codec.encodeValue(op.value, opts, op), | ||
|  |           type: op.type | ||
|  |         }); | ||
|  |       } | ||
|  |       db.db.batch(batch, opts, cb); | ||
|  |     }, | ||
|  |     get: function (key, prefix, opts, cb) { | ||
|  |       opts.asBuffer = codec.valueAsBuffer(opts); | ||
|  |       return db.db.get( | ||
|  |         encodePrefix(prefix, key, opts), | ||
|  |         opts, | ||
|  |         function (err, value) { | ||
|  |           if (err) { | ||
|  |             cb(err); | ||
|  |           } else { | ||
|  |             cb(null, codec.decodeValue(value, opts)); | ||
|  |           } | ||
|  |         } | ||
|  |       ); | ||
|  |     }, | ||
|  |     createDecoder: function (opts) { | ||
|  |       return function (key, value) { | ||
|  |         return { | ||
|  |           key: codec.decodeKey(precodec.decode(key)[1], opts), | ||
|  |           value: codec.decodeValue(value, opts) | ||
|  |         }; | ||
|  |       }; | ||
|  |     }, | ||
|  |     isClosed: function isClosed() { | ||
|  |       return db.isClosed(); | ||
|  |     }, | ||
|  |     close: function close(cb) { | ||
|  |       return db.close(cb); | ||
|  |     }, | ||
|  |     iterator: function (_opts) { | ||
|  |       var opts = clone$1(_opts || {}); | ||
|  |       var prefix = _opts.prefix || []; | ||
|  | 
 | ||
|  |       function encodeKey(key) { | ||
|  |         return encodePrefix(prefix, key, opts, {}); | ||
|  |       } | ||
|  | 
 | ||
|  |       ltgt.toLtgt(_opts, opts, encodeKey, precodec.lowerBound, precodec.upperBound); | ||
|  | 
 | ||
|  |       // if these legacy values are in the options, remove them
 | ||
|  | 
 | ||
|  |       opts.prefix = null; | ||
|  | 
 | ||
|  |       //************************************************
 | ||
|  |       //hard coded defaults, for now...
 | ||
|  |       //TODO: pull defaults and encoding out of levelup.
 | ||
|  |       opts.keyAsBuffer = opts.valueAsBuffer = false; | ||
|  |       //************************************************
 | ||
|  | 
 | ||
|  | 
 | ||
|  |       //this is vital, otherwise limit: undefined will
 | ||
|  |       //create an empty stream.
 | ||
|  |       /* istanbul ignore next */ | ||
|  |       if ('number' !== typeof opts.limit) { | ||
|  |         opts.limit = -1; | ||
|  |       } | ||
|  | 
 | ||
|  |       opts.keyAsBuffer = precodec.buffer; | ||
|  |       opts.valueAsBuffer = codec.valueAsBuffer(opts); | ||
|  | 
 | ||
|  |       function wrapIterator(iterator) { | ||
|  |         return { | ||
|  |           next: function (cb) { | ||
|  |             return iterator.next(cb); | ||
|  |           }, | ||
|  |           end: function (cb) { | ||
|  |             iterator.end(cb); | ||
|  |           } | ||
|  |         }; | ||
|  |       } | ||
|  | 
 | ||
|  |       return wrapIterator(db.db.iterator(opts)); | ||
|  |     } | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | function NotFoundError() { | ||
|  |   Error.call(this); | ||
|  | } | ||
|  | 
 | ||
|  | inherits(NotFoundError, Error); | ||
|  | 
 | ||
|  | NotFoundError.prototype.name = 'NotFoundError'; | ||
|  | 
 | ||
|  | var EventEmitter$1 = EventEmitter.EventEmitter; | ||
|  | var version = "6.5.4"; | ||
|  | 
 | ||
|  | var NOT_FOUND_ERROR = new NotFoundError(); | ||
|  | 
 | ||
|  | var sublevel = function (nut, prefix, createStream, options) { | ||
|  |   var emitter = new EventEmitter$1(); | ||
|  |   emitter.sublevels = {}; | ||
|  |   emitter.options = options; | ||
|  | 
 | ||
|  |   emitter.version = version; | ||
|  | 
 | ||
|  |   emitter.methods = {}; | ||
|  |   prefix = prefix || []; | ||
|  | 
 | ||
|  |   function mergeOpts(opts) { | ||
|  |     var o = {}; | ||
|  |     var k; | ||
|  |     if (options) { | ||
|  |       for (k in options) { | ||
|  |         if (typeof options[k] !== 'undefined') { | ||
|  |           o[k] = options[k]; | ||
|  |         } | ||
|  |       } | ||
|  |     } | ||
|  |     if (opts) { | ||
|  |       for (k in opts) { | ||
|  |         if (typeof opts[k] !== 'undefined') { | ||
|  |           o[k] = opts[k]; | ||
|  |         } | ||
|  |       } | ||
|  |     } | ||
|  |     return o; | ||
|  |   } | ||
|  | 
 | ||
|  |   emitter.put = function (key, value, opts, cb) { | ||
|  |     if ('function' === typeof opts) { | ||
|  |       cb = opts; | ||
|  |       opts = {}; | ||
|  |     } | ||
|  | 
 | ||
|  |     nut.apply([{ | ||
|  |       key: key, value: value, | ||
|  |       prefix: prefix.slice(), type: 'put' | ||
|  |     }], mergeOpts(opts), function (err) { | ||
|  |       /* istanbul ignore next */ | ||
|  |       if (err) { | ||
|  |         return cb(err); | ||
|  |       } | ||
|  |       emitter.emit('put', key, value); | ||
|  |       cb(null); | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   emitter.prefix = function () { | ||
|  |     return prefix.slice(); | ||
|  |   }; | ||
|  | 
 | ||
|  |   emitter.batch = function (ops, opts, cb) { | ||
|  |     if ('function' === typeof opts) { | ||
|  |       cb = opts; | ||
|  |       opts = {}; | ||
|  |     } | ||
|  | 
 | ||
|  |     ops = ops.map(function (op) { | ||
|  |       return { | ||
|  |         key: op.key, | ||
|  |         value: op.value, | ||
|  |         prefix: op.prefix || prefix, | ||
|  |         keyEncoding: op.keyEncoding,    // *
 | ||
|  |         valueEncoding: op.valueEncoding,  // * (TODO: encodings on sublevel)
 | ||
|  |         type: op.type | ||
|  |       }; | ||
|  |     }); | ||
|  | 
 | ||
|  |     nut.apply(ops, mergeOpts(opts), function (err) { | ||
|  |       /* istanbul ignore next */ | ||
|  |       if (err) { | ||
|  |         return cb(err); | ||
|  |       } | ||
|  |       emitter.emit('batch', ops); | ||
|  |       cb(null); | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   emitter.get = function (key, opts, cb) { | ||
|  |     /* istanbul ignore else */ | ||
|  |     if ('function' === typeof opts) { | ||
|  |       cb = opts; | ||
|  |       opts = {}; | ||
|  |     } | ||
|  |     nut.get(key, prefix, mergeOpts(opts), function (err, value) { | ||
|  |       if (err) { | ||
|  |         cb(NOT_FOUND_ERROR); | ||
|  |       } else { | ||
|  |         cb(null, value); | ||
|  |       } | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   emitter.sublevel = function (name, opts) { | ||
|  |     return emitter.sublevels[name] = | ||
|  |       emitter.sublevels[name] || sublevel(nut, prefix.concat(name), createStream, mergeOpts(opts)); | ||
|  |   }; | ||
|  | 
 | ||
|  |   emitter.readStream = emitter.createReadStream = function (opts) { | ||
|  |     opts = mergeOpts(opts); | ||
|  |     opts.prefix = prefix; | ||
|  |     var stream; | ||
|  |     var it = nut.iterator(opts); | ||
|  | 
 | ||
|  |     stream = createStream(opts, nut.createDecoder(opts)); | ||
|  |     stream.setIterator(it); | ||
|  | 
 | ||
|  |     return stream; | ||
|  |   }; | ||
|  | 
 | ||
|  |   emitter.close = function (cb) { | ||
|  |     nut.close(cb); | ||
|  |   }; | ||
|  | 
 | ||
|  |   emitter.isOpen = nut.isOpen; | ||
|  |   emitter.isClosed = nut.isClosed; | ||
|  | 
 | ||
|  |   return emitter; | ||
|  | }; | ||
|  | 
 | ||
|  | /* Copyright (c) 2012-2014 LevelUP contributors | ||
|  |  * See list at <https://github.com/rvagg/node-levelup#contributing>
 | ||
|  |  * MIT License <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
 | ||
|  |  */ | ||
|  | 
 | ||
|  | var Readable = ReadableStreamCore.Readable; | ||
|  | 
 | ||
|  | function ReadStream(options, makeData) { | ||
|  |   if (!(this instanceof ReadStream)) { | ||
|  |     return new ReadStream(options, makeData); | ||
|  |   } | ||
|  | 
 | ||
|  |   Readable.call(this, { objectMode: true, highWaterMark: options.highWaterMark }); | ||
|  | 
 | ||
|  |   // purely to keep `db` around until we're done so it's not GCed if the user doesn't keep a ref
 | ||
|  | 
 | ||
|  |   this._waiting = false; | ||
|  |   this._options = options; | ||
|  |   this._makeData = makeData; | ||
|  | } | ||
|  | 
 | ||
|  | inherits(ReadStream, Readable); | ||
|  | 
 | ||
|  | ReadStream.prototype.setIterator = function (it) { | ||
|  |   this._iterator = it; | ||
|  |   /* istanbul ignore if */ | ||
|  |   if (this._destroyed) { | ||
|  |     return it.end(function () {}); | ||
|  |   } | ||
|  |   /* istanbul ignore if */ | ||
|  |   if (this._waiting) { | ||
|  |     this._waiting = false; | ||
|  |     return this._read(); | ||
|  |   } | ||
|  |   return this; | ||
|  | }; | ||
|  | 
 | ||
|  | ReadStream.prototype._read = function read() { | ||
|  |   var self = this; | ||
|  |   /* istanbul ignore if */ | ||
|  |   if (self._destroyed) { | ||
|  |     return; | ||
|  |   } | ||
|  |   /* istanbul ignore if */ | ||
|  |   if (!self._iterator) { | ||
|  |     return this._waiting = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   self._iterator.next(function (err, key, value) { | ||
|  |     if (err || (key === undefined && value === undefined)) { | ||
|  |       if (!err && !self._destroyed) { | ||
|  |         self.push(null); | ||
|  |       } | ||
|  |       return self._cleanup(err); | ||
|  |     } | ||
|  | 
 | ||
|  | 
 | ||
|  |     value = self._makeData(key, value); | ||
|  |     if (!self._destroyed) { | ||
|  |       self.push(value); | ||
|  |     } | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | ReadStream.prototype._cleanup = function (err) { | ||
|  |   if (this._destroyed) { | ||
|  |     return; | ||
|  |   } | ||
|  | 
 | ||
|  |   this._destroyed = true; | ||
|  | 
 | ||
|  |   var self = this; | ||
|  |   /* istanbul ignore if */ | ||
|  |   if (err && err.message !== 'iterator has ended') { | ||
|  |     self.emit('error', err); | ||
|  |   } | ||
|  | 
 | ||
|  |   /* istanbul ignore else */ | ||
|  |   if (self._iterator) { | ||
|  |     self._iterator.end(function () { | ||
|  |       self._iterator = null; | ||
|  |       self.emit('close'); | ||
|  |     }); | ||
|  |   } else { | ||
|  |     self.emit('close'); | ||
|  |   } | ||
|  | }; | ||
|  | 
 | ||
|  | ReadStream.prototype.destroy = function () { | ||
|  |   this._cleanup(); | ||
|  | }; | ||
|  | 
 | ||
|  | var precodec = { | ||
|  |   encode: function (decodedKey) { | ||
|  |     return '\xff' + decodedKey[0] + '\xff' + decodedKey[1]; | ||
|  |   }, | ||
|  |   decode: function (encodedKeyAsBuffer) { | ||
|  |     var str = encodedKeyAsBuffer.toString(); | ||
|  |     var idx = str.indexOf('\xff', 1); | ||
|  |     return [str.substring(1, idx), str.substring(idx + 1)]; | ||
|  |   }, | ||
|  |   lowerBound: '\x00', | ||
|  |   upperBound: '\xff' | ||
|  | }; | ||
|  | 
 | ||
|  | var codec = new Codec(); | ||
|  | 
 | ||
|  | function sublevelPouch(db) { | ||
|  |   return sublevel(nut(db, precodec, codec), [], ReadStream, db.options); | ||
|  | } | ||
|  | 
 | ||
|  | function allDocsKeysQuery(api, opts) { | ||
|  |   var keys = opts.keys; | ||
|  |   var finalResults = { | ||
|  |     offset: opts.skip | ||
|  |   }; | ||
|  |   return Promise.all(keys.map(function (key) { | ||
|  |     var subOpts = $inject_Object_assign({key: key, deleted: 'ok'}, opts); | ||
|  |     ['limit', 'skip', 'keys'].forEach(function (optKey) { | ||
|  |       delete subOpts[optKey]; | ||
|  |     }); | ||
|  |     return new Promise(function (resolve, reject) { | ||
|  |       api._allDocs(subOpts, function (err, res) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           return reject(err); | ||
|  |         } | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (opts.update_seq && res.update_seq !== undefined) { | ||
|  |           finalResults.update_seq = res.update_seq; | ||
|  |         } | ||
|  |         finalResults.total_rows = res.total_rows; | ||
|  |         resolve(res.rows[0] || {key: key, error: 'not_found'}); | ||
|  |       }); | ||
|  |     }); | ||
|  |   })).then(function (results) { | ||
|  |     finalResults.rows = results; | ||
|  |     return finalResults; | ||
|  |   }); | ||
|  | } | ||
|  | 
 | ||
|  | function toObject(array) { | ||
|  |   return array.reduce(function (obj, item) { | ||
|  |     obj[item] = true; | ||
|  |     return obj; | ||
|  |   }, {}); | ||
|  | } | ||
|  | // List of top level reserved words for doc
 | ||
|  | var reservedWords = toObject([ | ||
|  |   '_id', | ||
|  |   '_rev', | ||
|  |   '_access', | ||
|  |   '_attachments', | ||
|  |   '_deleted', | ||
|  |   '_revisions', | ||
|  |   '_revs_info', | ||
|  |   '_conflicts', | ||
|  |   '_deleted_conflicts', | ||
|  |   '_local_seq', | ||
|  |   '_rev_tree', | ||
|  |   // replication documents
 | ||
|  |   '_replication_id', | ||
|  |   '_replication_state', | ||
|  |   '_replication_state_time', | ||
|  |   '_replication_state_reason', | ||
|  |   '_replication_stats', | ||
|  |   // Specific to Couchbase Sync Gateway
 | ||
|  |   '_removed' | ||
|  | ]); | ||
|  | 
 | ||
|  | // List of reserved words that should end up in the document
 | ||
|  | var dataWords = toObject([ | ||
|  |   '_access', | ||
|  |   '_attachments', | ||
|  |   // replication documents
 | ||
|  |   '_replication_id', | ||
|  |   '_replication_state', | ||
|  |   '_replication_state_time', | ||
|  |   '_replication_state_reason', | ||
|  |   '_replication_stats' | ||
|  | ]); | ||
|  | 
 | ||
|  | function parseRevisionInfo(rev) { | ||
|  |   if (!/^\d+-/.test(rev)) { | ||
|  |     return createError(INVALID_REV); | ||
|  |   } | ||
|  |   var idx = rev.indexOf('-'); | ||
|  |   var left = rev.substring(0, idx); | ||
|  |   var right = rev.substring(idx + 1); | ||
|  |   return { | ||
|  |     prefix: parseInt(left, 10), | ||
|  |     id: right | ||
|  |   }; | ||
|  | } | ||
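|  | 
 | ||
|  | // Worked example (added for illustration, not part of the upstream code): | ||
|  | // parseRevisionInfo splits a CouchDB-style rev string into its integer height | ||
|  | // and hash, and returns an invalid-rev error object for malformed input. | ||
|  | // Never invoked by the adapter. | ||
|  | function exampleParseRevisionInfo() { | ||
|  |   var ok = parseRevisionInfo('3-deadbeef'); // => {prefix: 3, id: 'deadbeef'} | ||
|  |   var bad = parseRevisionInfo('not-a-rev'); // => error created from INVALID_REV | ||
|  |   return [ok, bad]; | ||
|  | } | ||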
|  | 
 | ||
|  | function makeRevTreeFromRevisions(revisions, opts) { | ||
|  |   var pos = revisions.start - revisions.ids.length + 1; | ||
|  | 
 | ||
|  |   var revisionIds = revisions.ids; | ||
|  |   var ids = [revisionIds[0], opts, []]; | ||
|  | 
 | ||
|  |   for (var i = 1, len = revisionIds.length; i < len; i++) { | ||
|  |     ids = [revisionIds[i], {status: 'missing'}, [ids]]; | ||
|  |   } | ||
|  | 
 | ||
|  |   return [{ | ||
|  |     pos: pos, | ||
|  |     ids: ids | ||
|  |   }]; | ||
|  | } | ||
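|  | 
 | ||
|  | // Illustrative sketch (added for clarity, not part of the upstream code) of the | ||
|  | // tree shape built above: `_revisions.ids` is newest-first, only the newest rev | ||
|  | // keeps the given opts, and every ancestor becomes a 'missing' placeholder. | ||
|  | // Never invoked by the adapter. | ||
|  | function exampleRevTreeFromRevisions() { | ||
|  |   var tree = makeRevTreeFromRevisions( | ||
|  |     {start: 3, ids: ['ccc', 'bbb', 'aaa']}, | ||
|  |     {status: 'available'} | ||
|  |   ); | ||
|  |   // tree => [{pos: 1, | ||
|  |   //           ids: ['aaa', {status: 'missing'}, | ||
|  |   //                 [['bbb', {status: 'missing'}, | ||
|  |   //                   [['ccc', {status: 'available'}, []]]]]]}] | ||
|  |   return tree; | ||
|  | } | ||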
|  | 
 | ||
|  | // Preprocess documents, parse their revisions, assign an id and a
 | ||
|  | // revision for new writes that are missing them, etc
 | ||
|  | function parseDoc(doc, newEdits, dbOpts) { | ||
|  |   if (!dbOpts) { | ||
|  |     dbOpts = { | ||
|  |       deterministic_revs: true | ||
|  |     }; | ||
|  |   } | ||
|  | 
 | ||
|  |   var nRevNum; | ||
|  |   var newRevId; | ||
|  |   var revInfo; | ||
|  |   var opts = {status: 'available'}; | ||
|  |   if (doc._deleted) { | ||
|  |     opts.deleted = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   if (newEdits) { | ||
|  |     if (!doc._id) { | ||
|  |       doc._id = uuid$1(); | ||
|  |     } | ||
|  |     newRevId = rev$$1(doc, dbOpts.deterministic_revs); | ||
|  |     if (doc._rev) { | ||
|  |       revInfo = parseRevisionInfo(doc._rev); | ||
|  |       if (revInfo.error) { | ||
|  |         return revInfo; | ||
|  |       } | ||
|  |       doc._rev_tree = [{ | ||
|  |         pos: revInfo.prefix, | ||
|  |         ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]] | ||
|  |       }]; | ||
|  |       nRevNum = revInfo.prefix + 1; | ||
|  |     } else { | ||
|  |       doc._rev_tree = [{ | ||
|  |         pos: 1, | ||
|  |         ids : [newRevId, opts, []] | ||
|  |       }]; | ||
|  |       nRevNum = 1; | ||
|  |     } | ||
|  |   } else { | ||
|  |     if (doc._revisions) { | ||
|  |       doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts); | ||
|  |       nRevNum = doc._revisions.start; | ||
|  |       newRevId = doc._revisions.ids[0]; | ||
|  |     } | ||
|  |     if (!doc._rev_tree) { | ||
|  |       revInfo = parseRevisionInfo(doc._rev); | ||
|  |       if (revInfo.error) { | ||
|  |         return revInfo; | ||
|  |       } | ||
|  |       nRevNum = revInfo.prefix; | ||
|  |       newRevId = revInfo.id; | ||
|  |       doc._rev_tree = [{ | ||
|  |         pos: nRevNum, | ||
|  |         ids: [newRevId, opts, []] | ||
|  |       }]; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   invalidIdError(doc._id); | ||
|  | 
 | ||
|  |   doc._rev = nRevNum + '-' + newRevId; | ||
|  | 
 | ||
|  |   var result = {metadata : {}, data : {}}; | ||
|  |   for (var key in doc) { | ||
|  |     /* istanbul ignore else */ | ||
|  |     if (Object.prototype.hasOwnProperty.call(doc, key)) { | ||
|  |       var specialKey = key[0] === '_'; | ||
|  |       if (specialKey && !reservedWords[key]) { | ||
|  |         var error = createError(DOC_VALIDATION, key); | ||
|  |         error.message = DOC_VALIDATION.message + ': ' + key; | ||
|  |         throw error; | ||
|  |       } else if (specialKey && !dataWords[key]) { | ||
|  |         result.metadata[key.slice(1)] = doc[key]; | ||
|  |       } else { | ||
|  |         result.data[key] = doc[key]; | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  |   return result; | ||
|  | } | ||
|  | 
 | ||
|  | // We fetch all leaves of the revision tree and sort them based on tree length
 | ||
|  | // and on whether they were deleted; undeleted documents with the longest
 | ||
|  | // revision tree (most edits) win.
 | ||
|  | // The final sort algorithm is briefly documented in a sidebar here:
 | ||
|  | // http://guide.couchdb.org/draft/conflicts.html
 | ||
|  | function winningRev(metadata) { | ||
|  |   var winningId; | ||
|  |   var winningPos; | ||
|  |   var winningDeleted; | ||
|  |   var toVisit = metadata.rev_tree.slice(); | ||
|  |   var node; | ||
|  |   while ((node = toVisit.pop())) { | ||
|  |     var tree = node.ids; | ||
|  |     var branches = tree[2]; | ||
|  |     var pos = node.pos; | ||
|  |     if (branches.length) { // non-leaf
 | ||
|  |       for (var i = 0, len = branches.length; i < len; i++) { | ||
|  |         toVisit.push({pos: pos + 1, ids: branches[i]}); | ||
|  |       } | ||
|  |       continue; | ||
|  |     } | ||
|  |     var deleted = !!tree[1].deleted; | ||
|  |     var id = tree[0]; | ||
|  |     // sort by deleted, then pos, then id
 | ||
|  |     if (!winningId || (winningDeleted !== deleted ? winningDeleted : | ||
|  |         winningPos !== pos ? winningPos < pos : winningId < id)) { | ||
|  |       winningId = id; | ||
|  |       winningPos = pos; | ||
|  |       winningDeleted = deleted; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return winningPos + '-' + winningId; | ||
|  | } | ||
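|  | 
 | ||
|  | // Illustrative sketch (added for clarity, not part of the upstream code): with | ||
|  | // two non-deleted leaves at the same depth, the lexicographically larger hash | ||
|  | // wins, matching the deleted-then-pos-then-id ordering noted in the code above. | ||
|  | // Never invoked by the adapter. | ||
|  | function exampleWinningRev() { | ||
|  |   var metadata = { | ||
|  |     rev_tree: [{pos: 1, ids: ['aaa', {}, [ | ||
|  |       ['bbb', {}, []], | ||
|  |       ['ccc', {}, []] | ||
|  |     ]]}] | ||
|  |   }; | ||
|  |   return winningRev(metadata); // => '2-ccc' | ||
|  | } | ||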
|  | 
 | ||
|  | // Pretty much all below can be combined into a higher order function to
 | ||
|  | // traverse revisions
 | ||
|  | // The return value from the callback will be passed as context to all
 | ||
|  | // children of that node
 | ||
|  | function traverseRevTree(revs, callback) { | ||
|  |   var toVisit = revs.slice(); | ||
|  | 
 | ||
|  |   var node; | ||
|  |   while ((node = toVisit.pop())) { | ||
|  |     var pos = node.pos; | ||
|  |     var tree = node.ids; | ||
|  |     var branches = tree[2]; | ||
|  |     var newCtx = | ||
|  |       callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]); | ||
|  |     for (var i = 0, len = branches.length; i < len; i++) { | ||
|  |       toVisit.push({pos: pos + 1, ids: branches[i], ctx: newCtx}); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function sortByPos(a, b) { | ||
|  |   return a.pos - b.pos; | ||
|  | } | ||
|  | 
 | ||
|  | function collectLeaves(revs) { | ||
|  |   var leaves = []; | ||
|  |   traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) { | ||
|  |     if (isLeaf) { | ||
|  |       leaves.push({rev: pos + "-" + id, pos: pos, opts: opts}); | ||
|  |     } | ||
|  |   }); | ||
|  |   leaves.sort(sortByPos).reverse(); | ||
|  |   for (var i = 0, len = leaves.length; i < len; i++) { | ||
|  |     delete leaves[i].pos; | ||
|  |   } | ||
|  |   return leaves; | ||
|  | } | ||
|  | 
 | ||
|  | // returns revs of all conflicting leaves, i.e. leaves that
 | ||
|  | // 1. are not deleted and
 | ||
|  | // 2. differ from the winning revision
 | ||
|  | function collectConflicts(metadata) { | ||
|  |   var win = winningRev(metadata); | ||
|  |   var leaves = collectLeaves(metadata.rev_tree); | ||
|  |   var conflicts = []; | ||
|  |   for (var i = 0, len = leaves.length; i < len; i++) { | ||
|  |     var leaf = leaves[i]; | ||
|  |     if (leaf.rev !== win && !leaf.opts.deleted) { | ||
|  |       conflicts.push(leaf.rev); | ||
|  |     } | ||
|  |   } | ||
|  |   return conflicts; | ||
|  | } | ||
|  | 
 | ||
|  | // compact a tree by marking its non-leaves as missing,
 | ||
|  | // and return a list of revs to delete
 | ||
|  | function compactTree(metadata) { | ||
|  |   var revs = []; | ||
|  |   traverseRevTree(metadata.rev_tree, function (isLeaf, pos, | ||
|  |                                                revHash, ctx, opts) { | ||
|  |     if (opts.status === 'available' && !isLeaf) { | ||
|  |       revs.push(pos + '-' + revHash); | ||
|  |       opts.status = 'missing'; | ||
|  |     } | ||
|  |   }); | ||
|  |   return revs; | ||
|  | } | ||
|  | 
 | ||
|  | // build up a list of all the paths to the leaves in this revision tree
 | ||
|  | function rootToLeaf(revs) { | ||
|  |   var paths = []; | ||
|  |   var toVisit = revs.slice(); | ||
|  |   var node; | ||
|  |   while ((node = toVisit.pop())) { | ||
|  |     var pos = node.pos; | ||
|  |     var tree = node.ids; | ||
|  |     var id = tree[0]; | ||
|  |     var opts = tree[1]; | ||
|  |     var branches = tree[2]; | ||
|  |     var isLeaf = branches.length === 0; | ||
|  | 
 | ||
|  |     var history = node.history ? node.history.slice() : []; | ||
|  |     history.push({id: id, opts: opts}); | ||
|  |     if (isLeaf) { | ||
|  |       paths.push({pos: (pos + 1 - history.length), ids: history}); | ||
|  |     } | ||
|  |     for (var i = 0, len = branches.length; i < len; i++) { | ||
|  |       toVisit.push({pos: pos + 1, ids: branches[i], history: history}); | ||
|  |     } | ||
|  |   } | ||
|  |   return paths.reverse(); | ||
|  | } | ||
|  | 
 | ||
|  | // for a better overview of what this is doing, read:
 | ||
|  | 
 | ||
|  | function sortByPos$1(a, b) { | ||
|  |   return a.pos - b.pos; | ||
|  | } | ||
|  | 
 | ||
|  | // classic binary search
 | ||
|  | function binarySearch(arr, item, comparator) { | ||
|  |   var low = 0; | ||
|  |   var high = arr.length; | ||
|  |   var mid; | ||
|  |   while (low < high) { | ||
|  |     mid = (low + high) >>> 1; | ||
|  |     if (comparator(arr[mid], item) < 0) { | ||
|  |       low = mid + 1; | ||
|  |     } else { | ||
|  |       high = mid; | ||
|  |     } | ||
|  |   } | ||
|  |   return low; | ||
|  | } | ||
|  | 
 | ||
|  | // assuming the arr is sorted, insert the item in the proper place
 | ||
|  | function insertSorted(arr, item, comparator) { | ||
|  |   var idx = binarySearch(arr, item, comparator); | ||
|  |   arr.splice(idx, 0, item); | ||
|  | } | ||
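|  | 
 | ||
|  | // Worked example (added for illustration, not part of the upstream code): | ||
|  | // insertSorted keeps an already-sorted array sorted by splicing the new item | ||
|  | // in at the position found by the binary search above. Never invoked here. | ||
|  | function exampleInsertSorted() { | ||
|  |   var sorted = [1, 3, 5]; | ||
|  |   insertSorted(sorted, 4, function (a, b) { return a - b; }); | ||
|  |   return sorted; // => [1, 3, 4, 5] | ||
|  | } | ||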
|  | 
 | ||
|  | // Turn a path as a flat array into a tree with a single branch.
 | ||
|  | // If any should be stemmed from the beginning of the array, that's passed
 | ||
|  | // in as the second argument
 | ||
|  | function pathToTree(path, numStemmed) { | ||
|  |   var root; | ||
|  |   var leaf; | ||
|  |   for (var i = numStemmed, len = path.length; i < len; i++) { | ||
|  |     var node = path[i]; | ||
|  |     var currentLeaf = [node.id, node.opts, []]; | ||
|  |     if (leaf) { | ||
|  |       leaf[2].push(currentLeaf); | ||
|  |       leaf = currentLeaf; | ||
|  |     } else { | ||
|  |       root = leaf = currentLeaf; | ||
|  |     } | ||
|  |   } | ||
|  |   return root; | ||
|  | } | ||
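|  | 
 | ||
|  | // Worked example (added for illustration, not part of the upstream code): a | ||
|  | // flat root-to-leaf path becomes a single-branch tree; passing 1 as numStemmed | ||
|  | // would drop 'aaa' from the front instead. Never invoked by the adapter. | ||
|  | function examplePathToTree() { | ||
|  |   var path = [{id: 'aaa', opts: {}}, {id: 'bbb', opts: {}}]; | ||
|  |   return pathToTree(path, 0); // => ['aaa', {}, [['bbb', {}, []]]] | ||
|  | } | ||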
|  | 
 | ||
|  | // compare the IDs of two trees
 | ||
|  | function compareTree(a, b) { | ||
|  |   return a[0] < b[0] ? -1 : 1; | ||
|  | } | ||
|  | 
 | ||
|  | // Merge two trees together
 | ||
|  | // The roots of tree1 and tree2 must be the same revision
 | ||
|  | function mergeTree(in_tree1, in_tree2) { | ||
|  |   var queue = [{tree1: in_tree1, tree2: in_tree2}]; | ||
|  |   var conflicts = false; | ||
|  |   while (queue.length > 0) { | ||
|  |     var item = queue.pop(); | ||
|  |     var tree1 = item.tree1; | ||
|  |     var tree2 = item.tree2; | ||
|  | 
 | ||
|  |     if (tree1[1].status || tree2[1].status) { | ||
|  |       tree1[1].status = | ||
|  |         (tree1[1].status ===  'available' || | ||
|  |         tree2[1].status === 'available') ? 'available' : 'missing'; | ||
|  |     } | ||
|  | 
 | ||
|  |     for (var i = 0; i < tree2[2].length; i++) { | ||
|  |       if (!tree1[2][0]) { | ||
|  |         conflicts = 'new_leaf'; | ||
|  |         tree1[2][0] = tree2[2][i]; | ||
|  |         continue; | ||
|  |       } | ||
|  | 
 | ||
|  |       var merged = false; | ||
|  |       for (var j = 0; j < tree1[2].length; j++) { | ||
|  |         if (tree1[2][j][0] === tree2[2][i][0]) { | ||
|  |           queue.push({tree1: tree1[2][j], tree2: tree2[2][i]}); | ||
|  |           merged = true; | ||
|  |         } | ||
|  |       } | ||
|  |       if (!merged) { | ||
|  |         conflicts = 'new_branch'; | ||
|  |         insertSorted(tree1[2], tree2[2][i], compareTree); | ||
|  |       } | ||
|  |     } | ||
|  |   } | ||
|  |   return {conflicts: conflicts, tree: in_tree1}; | ||
|  | } | ||
|  | 
 | ||
|  | function doMerge(tree, path, dontExpand) { | ||
|  |   var restree = []; | ||
|  |   var conflicts = false; | ||
|  |   var merged = false; | ||
|  |   var res; | ||
|  | 
 | ||
|  |   if (!tree.length) { | ||
|  |     return {tree: [path], conflicts: 'new_leaf'}; | ||
|  |   } | ||
|  | 
 | ||
|  |   for (var i = 0, len = tree.length; i < len; i++) { | ||
|  |     var branch = tree[i]; | ||
|  |     if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) { | ||
|  |       // Paths start at the same position and have the same root, so they need
 | ||
|  |       // to be merged
 | ||
|  |       res = mergeTree(branch.ids, path.ids); | ||
|  |       restree.push({pos: branch.pos, ids: res.tree}); | ||
|  |       conflicts = conflicts || res.conflicts; | ||
|  |       merged = true; | ||
|  |     } else if (dontExpand !== true) { | ||
|  |       // The paths start at a different position, take the earliest path and
 | ||
|  |       // traverse up until it is at the same point from root as the path we
 | ||
|  |       // want to merge. If the keys match we return the longer path with the
 | ||
|  |       // other merged. After stemming we don't want to expand the trees.
 | ||
|  | 
 | ||
|  |       var t1 = branch.pos < path.pos ? branch : path; | ||
|  |       var t2 = branch.pos < path.pos ? path : branch; | ||
|  |       var diff = t2.pos - t1.pos; | ||
|  | 
 | ||
|  |       var candidateParents = []; | ||
|  | 
 | ||
|  |       var trees = []; | ||
|  |       trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null}); | ||
|  |       while (trees.length > 0) { | ||
|  |         var item = trees.pop(); | ||
|  |         if (item.diff === 0) { | ||
|  |           if (item.ids[0] === t2.ids[0]) { | ||
|  |             candidateParents.push(item); | ||
|  |           } | ||
|  |           continue; | ||
|  |         } | ||
|  |         var elements = item.ids[2]; | ||
|  |         for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) { | ||
|  |           trees.push({ | ||
|  |             ids: elements[j], | ||
|  |             diff: item.diff - 1, | ||
|  |             parent: item.ids, | ||
|  |             parentIdx: j | ||
|  |           }); | ||
|  |         } | ||
|  |       } | ||
|  | 
 | ||
|  |       var el = candidateParents[0]; | ||
|  | 
 | ||
|  |       if (!el) { | ||
|  |         restree.push(branch); | ||
|  |       } else { | ||
|  |         res = mergeTree(el.ids, t2.ids); | ||
|  |         el.parent[2][el.parentIdx] = res.tree; | ||
|  |         restree.push({pos: t1.pos, ids: t1.ids}); | ||
|  |         conflicts = conflicts || res.conflicts; | ||
|  |         merged = true; | ||
|  |       } | ||
|  |     } else { | ||
|  |       restree.push(branch); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   // We didn't find a branch to merge with, so add the path as a new branch
 | ||
|  |   if (!merged) { | ||
|  |     restree.push(path); | ||
|  |   } | ||
|  | 
 | ||
|  |   restree.sort(sortByPos$1); | ||
|  | 
 | ||
|  |   return { | ||
|  |     tree: restree, | ||
|  |     conflicts: conflicts || 'internal_node' | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | // To ensure we dont grow the revision tree infinitely, we stem old revisions
 | ||
|  | function stem(tree, depth) { | ||
|  |   // First we break out the tree into a complete list of root to leaf paths
 | ||
|  |   var paths = rootToLeaf(tree); | ||
|  |   var stemmedRevs; | ||
|  | 
 | ||
|  |   var result; | ||
|  |   for (var i = 0, len = paths.length; i < len; i++) { | ||
|  |     // Then for each path, we cut off the start of the path based on the
 | ||
|  |     // `depth` to stem to, and generate a new set of flat trees
 | ||
|  |     var path = paths[i]; | ||
|  |     var stemmed = path.ids; | ||
|  |     var node; | ||
|  |     if (stemmed.length > depth) { | ||
|  |       // only do the stemming work if we actually need to stem
 | ||
|  |       if (!stemmedRevs) { | ||
|  |         stemmedRevs = {}; // avoid allocating this object unnecessarily
 | ||
|  |       } | ||
|  |       var numStemmed = stemmed.length - depth; | ||
|  |       node = { | ||
|  |         pos: path.pos + numStemmed, | ||
|  |         ids: pathToTree(stemmed, numStemmed) | ||
|  |       }; | ||
|  | 
 | ||
|  |       for (var s = 0; s < numStemmed; s++) { | ||
|  |         var rev = (path.pos + s) + '-' + stemmed[s].id; | ||
|  |         stemmedRevs[rev] = true; | ||
|  |       } | ||
|  |     } else { // no need to actually stem
 | ||
|  |       node = { | ||
|  |         pos: path.pos, | ||
|  |         ids: pathToTree(stemmed, 0) | ||
|  |       }; | ||
|  |     } | ||
|  | 
 | ||
|  |     // Then we remerge all those flat trees together, ensuring that we dont
 | ||
|  |     // connect trees that would go beyond the depth limit
 | ||
|  |     if (result) { | ||
|  |       result = doMerge(result, node, true).tree; | ||
|  |     } else { | ||
|  |       result = [node]; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
 | ||
|  |   if (stemmedRevs) { | ||
|  |     traverseRevTree(result, function (isLeaf, pos, revHash) { | ||
|  |       // some revisions may have been removed in a branch but not in another
 | ||
|  |       delete stemmedRevs[pos + '-' + revHash]; | ||
|  |     }); | ||
|  |   } | ||
|  | 
 | ||
|  |   return { | ||
|  |     tree: result, | ||
|  |     revs: stemmedRevs ? Object.keys(stemmedRevs) : [] | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | function merge(tree, path, depth) { | ||
|  |   var newTree = doMerge(tree, path); | ||
|  |   var stemmed = stem(newTree.tree, depth); | ||
|  |   return { | ||
|  |     tree: stemmed.tree, | ||
|  |     stemmedRevs: stemmed.revs, | ||
|  |     conflicts: newTree.conflicts | ||
|  |   }; | ||
|  | } | ||
|  | 
 | ||
|  | // return true if a rev exists in the rev tree, false otherwise
 | ||
|  | function revExists(revs, rev) { | ||
|  |   var toVisit = revs.slice(); | ||
|  |   var splitRev = rev.split('-'); | ||
|  |   var targetPos = parseInt(splitRev[0], 10); | ||
|  |   var targetId = splitRev[1]; | ||
|  | 
 | ||
|  |   var node; | ||
|  |   while ((node = toVisit.pop())) { | ||
|  |     if (node.pos === targetPos && node.ids[0] === targetId) { | ||
|  |       return true; | ||
|  |     } | ||
|  |     var branches = node.ids[2]; | ||
|  |     for (var i = 0, len = branches.length; i < len; i++) { | ||
|  |       toVisit.push({pos: node.pos + 1, ids: branches[i]}); | ||
|  |     } | ||
|  |   } | ||
|  |   return false; | ||
|  | } | ||
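|  | 
 | ||
|  | // Worked example (added for illustration, not part of the upstream code): | ||
|  | // revExists walks the nested-array tree looking for a node whose position and | ||
|  | // hash both match the queried rev string. Never invoked by the adapter. | ||
|  | function exampleRevExists() { | ||
|  |   var revTree = [{pos: 1, ids: ['aaa', {}, [['bbb', {}, []]]]}]; | ||
|  |   return [ | ||
|  |     revExists(revTree, '2-bbb'), // => true | ||
|  |     revExists(revTree, '2-zzz')  // => false | ||
|  |   ]; | ||
|  | } | ||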
|  | 
 | ||
|  | function getTrees(node) { | ||
|  |   return node.ids; | ||
|  | } | ||
|  | 
 | ||
|  | // check if a specific revision of a doc has been deleted
 | ||
|  | //  - metadata: the metadata object from the doc store
 | ||
|  | //  - rev: (optional) the revision to check. defaults to winning revision
 | ||
|  | function isDeleted(metadata, rev) { | ||
|  |   if (!rev) { | ||
|  |     rev = winningRev(metadata); | ||
|  |   } | ||
|  |   var id = rev.substring(rev.indexOf('-') + 1); | ||
|  |   var toVisit = metadata.rev_tree.map(getTrees); | ||
|  | 
 | ||
|  |   var tree; | ||
|  |   while ((tree = toVisit.pop())) { | ||
|  |     if (tree[0] === id) { | ||
|  |       return !!tree[1].deleted; | ||
|  |     } | ||
|  |     toVisit = toVisit.concat(tree[2]); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function isLocalId(id) { | ||
|  |   return (/^_local/).test(id); | ||
|  | } | ||
|  | 
 | ||
|  | // returns the current leaf node for a given revision
 | ||
|  | function latest(rev, metadata) { | ||
|  |   var toVisit = metadata.rev_tree.slice(); | ||
|  |   var node; | ||
|  |   while ((node = toVisit.pop())) { | ||
|  |     var pos = node.pos; | ||
|  |     var tree = node.ids; | ||
|  |     var id = tree[0]; | ||
|  |     var opts = tree[1]; | ||
|  |     var branches = tree[2]; | ||
|  |     var isLeaf = branches.length === 0; | ||
|  | 
 | ||
|  |     var history = node.history ? node.history.slice() : []; | ||
|  |     history.push({id: id, pos: pos, opts: opts}); | ||
|  | 
 | ||
|  |     if (isLeaf) { | ||
|  |       for (var i = 0, len = history.length; i < len; i++) { | ||
|  |         var historyNode = history[i]; | ||
|  |         var historyRev = historyNode.pos + '-' + historyNode.id; | ||
|  | 
 | ||
|  |         if (historyRev === rev) { | ||
|  |           // return the rev of this leaf
 | ||
|  |           return pos + '-' + id; | ||
|  |         } | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     for (var j = 0, l = branches.length; j < l; j++) { | ||
|  |       toVisit.push({pos: pos + 1, ids: branches[j], history: history}); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   /* istanbul ignore next */ | ||
|  |   throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev); | ||
|  | } | ||
|  | 
 | ||
|  | function updateDoc(revLimit, prev, docInfo, results, | ||
|  |                    i, cb, writeDoc, newEdits) { | ||
|  | 
 | ||
|  |   if (revExists(prev.rev_tree, docInfo.metadata.rev) && !newEdits) { | ||
|  |     results[i] = docInfo; | ||
|  |     return cb(); | ||
|  |   } | ||
|  | 
 | ||
|  |   // sometimes this is pre-calculated. historically not always
 | ||
|  |   var previousWinningRev = prev.winningRev || winningRev(prev); | ||
|  |   var previouslyDeleted = 'deleted' in prev ? prev.deleted : | ||
|  |     isDeleted(prev, previousWinningRev); | ||
|  |   var deleted = 'deleted' in docInfo.metadata ? docInfo.metadata.deleted : | ||
|  |     isDeleted(docInfo.metadata); | ||
|  |   var isRoot = /^1-/.test(docInfo.metadata.rev); | ||
|  | 
 | ||
|  |   if (previouslyDeleted && !deleted && newEdits && isRoot) { | ||
|  |     var newDoc = docInfo.data; | ||
|  |     newDoc._rev = previousWinningRev; | ||
|  |     newDoc._id = docInfo.metadata.id; | ||
|  |     docInfo = parseDoc(newDoc, newEdits); | ||
|  |   } | ||
|  | 
 | ||
|  |   var merged = merge(prev.rev_tree, docInfo.metadata.rev_tree[0], revLimit); | ||
|  | 
 | ||
|  |   var inConflict = newEdits && (( | ||
|  |     (previouslyDeleted && deleted && merged.conflicts !== 'new_leaf') || | ||
|  |     (!previouslyDeleted && merged.conflicts !== 'new_leaf') || | ||
|  |     (previouslyDeleted && !deleted && merged.conflicts === 'new_branch'))); | ||
|  | 
 | ||
|  |   if (inConflict) { | ||
|  |     var err = createError(REV_CONFLICT); | ||
|  |     results[i] = err; | ||
|  |     return cb(); | ||
|  |   } | ||
|  | 
 | ||
|  |   var newRev = docInfo.metadata.rev; | ||
|  |   docInfo.metadata.rev_tree = merged.tree; | ||
|  |   docInfo.stemmedRevs = merged.stemmedRevs || []; | ||
|  |   /* istanbul ignore else */ | ||
|  |   if (prev.rev_map) { | ||
|  |     docInfo.metadata.rev_map = prev.rev_map; // used only by leveldb
 | ||
|  |   } | ||
|  | 
 | ||
|  |   // recalculate
 | ||
|  |   var winningRev$$1 = winningRev(docInfo.metadata); | ||
|  |   var winningRevIsDeleted = isDeleted(docInfo.metadata, winningRev$$1); | ||
|  | 
 | ||
|  |   // calculate the total number of documents that were added/removed,
 | ||
|  |   // from the perspective of total_rows/doc_count
 | ||
|  |   var delta = (previouslyDeleted === winningRevIsDeleted) ? 0 : | ||
|  |     previouslyDeleted < winningRevIsDeleted ? -1 : 1; | ||
|  | 
 | ||
|  |   var newRevIsDeleted; | ||
|  |   if (newRev === winningRev$$1) { | ||
|  |     // if the new rev is the same as the winning rev, we can reuse that value
 | ||
|  |     newRevIsDeleted = winningRevIsDeleted; | ||
|  |   } else { | ||
|  |     // if they're not the same, then we need to recalculate
 | ||
|  |     newRevIsDeleted = isDeleted(docInfo.metadata, newRev); | ||
|  |   } | ||
|  | 
 | ||
|  |   writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted, | ||
|  |     true, delta, i, cb); | ||
|  | } | ||
|  | 
 | ||
|  | function rootIsMissing(docInfo) { | ||
|  |   return docInfo.metadata.rev_tree[0].ids[1].status === 'missing'; | ||
|  | } | ||
|  | 
 | ||
|  | function processDocs(revLimit, docInfos, api, fetchedDocs, tx, results, | ||
|  |                      writeDoc, opts, overallCallback) { | ||
|  | 
 | ||
|  |   // Default to 1000 locally
 | ||
|  |   revLimit = revLimit || 1000; | ||
|  | 
 | ||
|  |   function insertDoc(docInfo, resultsIdx, callback) { | ||
|  |     // Can't insert new deleted documents
 | ||
|  |     var winningRev$$1 = winningRev(docInfo.metadata); | ||
|  |     var deleted = isDeleted(docInfo.metadata, winningRev$$1); | ||
|  |     if ('was_delete' in opts && deleted) { | ||
|  |       results[resultsIdx] = createError(MISSING_DOC, 'deleted'); | ||
|  |       return callback(); | ||
|  |     } | ||
|  | 
 | ||
|  |     // 4712 - detect whether a new document was inserted with a _rev
 | ||
|  |     var inConflict = newEdits && rootIsMissing(docInfo); | ||
|  | 
 | ||
|  |     if (inConflict) { | ||
|  |       var err = createError(REV_CONFLICT); | ||
|  |       results[resultsIdx] = err; | ||
|  |       return callback(); | ||
|  |     } | ||
|  | 
 | ||
|  |     var delta = deleted ? 0 : 1; | ||
|  | 
 | ||
|  |     writeDoc(docInfo, winningRev$$1, deleted, deleted, false, | ||
|  |       delta, resultsIdx, callback); | ||
|  |   } | ||
|  | 
 | ||
|  |   var newEdits = opts.new_edits; | ||
|  |   var idsToDocs = new ExportedMap(); | ||
|  | 
 | ||
|  |   var docsDone = 0; | ||
|  |   var docsToDo = docInfos.length; | ||
|  | 
 | ||
|  |   function checkAllDocsDone() { | ||
|  |     if (++docsDone === docsToDo && overallCallback) { | ||
|  |       overallCallback(); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   docInfos.forEach(function (currentDoc, resultsIdx) { | ||
|  | 
 | ||
|  |     if (currentDoc._id && isLocalId(currentDoc._id)) { | ||
|  |       var fun = currentDoc._deleted ? '_removeLocal' : '_putLocal'; | ||
|  |       api[fun](currentDoc, {ctx: tx}, function (err, res) { | ||
|  |         results[resultsIdx] = err || res; | ||
|  |         checkAllDocsDone(); | ||
|  |       }); | ||
|  |       return; | ||
|  |     } | ||
|  | 
 | ||
|  |     var id = currentDoc.metadata.id; | ||
|  |     if (idsToDocs.has(id)) { | ||
|  |       docsToDo--; // duplicate
 | ||
|  |       idsToDocs.get(id).push([currentDoc, resultsIdx]); | ||
|  |     } else { | ||
|  |       idsToDocs.set(id, [[currentDoc, resultsIdx]]); | ||
|  |     } | ||
|  |   }); | ||
|  | 
 | ||
|  |   // in the case of new_edits, the user can provide multiple docs
 | ||
|  |   // with the same id. these need to be processed sequentially
 | ||
|  |   idsToDocs.forEach(function (docs, id) { | ||
|  |     var numDone = 0; | ||
|  | 
 | ||
|  |     function docWritten() { | ||
|  |       if (++numDone < docs.length) { | ||
|  |         nextDoc(); | ||
|  |       } else { | ||
|  |         checkAllDocsDone(); | ||
|  |       } | ||
|  |     } | ||
|  |     function nextDoc() { | ||
|  |       var value = docs[numDone]; | ||
|  |       var currentDoc = value[0]; | ||
|  |       var resultsIdx = value[1]; | ||
|  | 
 | ||
|  |       if (fetchedDocs.has(id)) { | ||
|  |         updateDoc(revLimit, fetchedDocs.get(id), currentDoc, results, | ||
|  |           resultsIdx, docWritten, writeDoc, newEdits); | ||
|  |       } else { | ||
|  |         // Ensure stemming applies to new writes as well
 | ||
|  |         var merged = merge([], currentDoc.metadata.rev_tree[0], revLimit); | ||
|  |         currentDoc.metadata.rev_tree = merged.tree; | ||
|  |         currentDoc.stemmedRevs = merged.stemmedRevs || []; | ||
|  |         insertDoc(currentDoc, resultsIdx, docWritten); | ||
|  |       } | ||
|  |     } | ||
|  |     nextDoc(); | ||
|  |   }); | ||
|  | } | ||
|  | 
 | ||
|  | function safeJsonParse(str) { | ||
|  |   // This try/catch guards against stack overflow errors.
 | ||
|  |   // JSON.parse() is faster than vuvuzela.parse() but vuvuzela
 | ||
|  |   // cannot overflow.
 | ||
|  |   try { | ||
|  |     return JSON.parse(str); | ||
|  |   } catch (e) { | ||
|  |     /* istanbul ignore next */ | ||
|  |     return vuvuzela.parse(str); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function safeJsonStringify(json) { | ||
|  |   try { | ||
|  |     return JSON.stringify(json); | ||
|  |   } catch (e) { | ||
|  |     /* istanbul ignore next */ | ||
|  |     return vuvuzela.stringify(json); | ||
|  |   } | ||
|  | } | ||
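|  | 
 | ||
|  | // Illustrative sketch (added for clarity, not part of the upstream code): for | ||
|  | // ordinary documents these helpers round-trip exactly like JSON.parse and | ||
|  | // JSON.stringify; the vuvuzela fallback only matters for structures deep enough | ||
|  | // to overflow the stack. Never invoked by the adapter. | ||
|  | function exampleSafeJsonRoundTrip() { | ||
|  |   var roundTripped = safeJsonParse(safeJsonStringify({_id: 'mydoc', count: 1})); | ||
|  |   return roundTripped; // => {_id: 'mydoc', count: 1} | ||
|  | } | ||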
|  | 
 | ||
|  | function readAsBlobOrBuffer(storedObject, type) { | ||
|  |   // In the browser, we've stored a binary string. This now comes back as a
 | ||
|  |   // browserified Node-style Buffer (implemented as a typed array),
 | ||
|  |   // but we want a Blob instead.
 | ||
|  |   var byteArray = new Uint8Array(storedObject); | ||
|  |   return createBlob([byteArray], {type: type}); | ||
|  | } | ||
|  | 
 | ||
|  | // In the browser, we store a binary string
 | ||
|  | function prepareAttachmentForStorage(attData, cb) { | ||
|  |   readAsBinaryString(attData, cb); | ||
|  | } | ||
|  | 
 | ||
|  | function createEmptyBlobOrBuffer(type) { | ||
|  |   return createBlob([''], {type: type}); | ||
|  | } | ||
|  | 
 | ||
|  | function getCacheFor(transaction, store) { | ||
|  |   var prefix = store.prefix()[0]; | ||
|  |   var cache = transaction._cache; | ||
|  |   var subCache = cache.get(prefix); | ||
|  |   if (!subCache) { | ||
|  |     subCache = new ExportedMap(); | ||
|  |     cache.set(prefix, subCache); | ||
|  |   } | ||
|  |   return subCache; | ||
|  | } | ||
|  | 
 | ||
|  | function LevelTransaction() { | ||
|  |   this._batch = []; | ||
|  |   this._cache = new ExportedMap(); | ||
|  | } | ||
|  | 
 | ||
|  | LevelTransaction.prototype.get = function (store, key, callback) { | ||
|  |   var cache = getCacheFor(this, store); | ||
|  |   var exists = cache.get(key); | ||
|  |   if (exists) { | ||
|  |     return immediate(function () { | ||
|  |       callback(null, exists); | ||
|  |     }); | ||
|  |   } else if (exists === null) { // deleted marker
 | ||
|  |     /* istanbul ignore next */ | ||
|  |     return immediate(function () { | ||
|  |       callback({name: 'NotFoundError'}); | ||
|  |     }); | ||
|  |   } | ||
|  |   store.get(key, function (err, res) { | ||
|  |     if (err) { | ||
|  |       /* istanbul ignore else */ | ||
|  |       if (err.name === 'NotFoundError') { | ||
|  |         cache.set(key, null); | ||
|  |       } | ||
|  |       return callback(err); | ||
|  |     } | ||
|  |     cache.set(key, res); | ||
|  |     callback(null, res); | ||
|  |   }); | ||
|  | }; | ||
|  | 
 | ||
|  | LevelTransaction.prototype.batch = function (batch) { | ||
|  |   for (var i = 0, len = batch.length; i < len; i++) { | ||
|  |     var operation = batch[i]; | ||
|  | 
 | ||
|  |     var cache = getCacheFor(this, operation.prefix); | ||
|  | 
 | ||
|  |     if (operation.type === 'put') { | ||
|  |       cache.set(operation.key, operation.value); | ||
|  |     } else { | ||
|  |       cache.set(operation.key, null); | ||
|  |     } | ||
|  |   } | ||
|  |   this._batch = this._batch.concat(batch); | ||
|  | }; | ||
|  | 
 | ||
|  | LevelTransaction.prototype.execute = function (db, callback) { | ||
|  | 
 | ||
|  |   var keys = new ExportedSet(); | ||
|  |   var uniqBatches = []; | ||
|  | 
 | ||
|  |   // remove duplicates; last one wins
 | ||
|  |   for (var i = this._batch.length - 1; i >= 0; i--) { | ||
|  |     var operation = this._batch[i]; | ||
|  |     var lookupKey = operation.prefix.prefix()[0] + '\xff' + operation.key; | ||
|  |     if (keys.has(lookupKey)) { | ||
|  |       continue; | ||
|  |     } | ||
|  |     keys.add(lookupKey); | ||
|  |     uniqBatches.push(operation); | ||
|  |   } | ||
|  | 
 | ||
|  |   db.batch(uniqBatches, callback); | ||
|  | }; | ||
|  | 
 | ||
|  | var DOC_STORE = 'document-store'; | ||
|  | var BY_SEQ_STORE = 'by-sequence'; | ||
|  | var ATTACHMENT_STORE = 'attach-store'; | ||
|  | var BINARY_STORE = 'attach-binary-store'; | ||
|  | var LOCAL_STORE = 'local-store'; | ||
|  | var META_STORE = 'meta-store'; | ||
|  | 
 | ||
|  | // leveldb barks if we try to open a db multiple times
 | ||
|  | // so we cache opened connections here for initstore()
 | ||
|  | var dbStores = new ExportedMap(); | ||
|  | 
 | ||
|  | // store the value of update_seq in the by-sequence store; the key name will
 | ||
|  | // never conflict, since the keys in the by-sequence store are integers
 | ||
|  | var UPDATE_SEQ_KEY = '_local_last_update_seq'; | ||
|  | var DOC_COUNT_KEY = '_local_doc_count'; | ||
|  | var UUID_KEY = '_local_uuid'; | ||
|  | 
 | ||
|  | var MD5_PREFIX = 'md5-'; | ||
|  | 
 | ||
|  | var safeJsonEncoding = { | ||
|  |   encode: safeJsonStringify, | ||
|  |   decode: safeJsonParse, | ||
|  |   buffer: false, | ||
|  |   type: 'cheap-json' | ||
|  | }; | ||
|  | 
 | ||
|  | var levelChanges = new Changes(); | ||
|  | 
 | ||
|  | // winningRev and deleted are performance-killers, but
 | ||
|  | // in newer versions of PouchDB, they are cached on the metadata
 | ||
|  | function getWinningRev(metadata) { | ||
|  |   return 'winningRev' in metadata ? | ||
|  |     metadata.winningRev : winningRev(metadata); | ||
|  | } | ||
|  | 
 | ||
|  | function getIsDeleted(metadata, winningRev$$1) { | ||
|  |   return 'deleted' in metadata ? | ||
|  |     metadata.deleted : isDeleted(metadata, winningRev$$1); | ||
|  | } | ||
|  | 
 | ||
|  | function fetchAttachment(att, stores, opts) { | ||
|  |   var type = att.content_type; | ||
|  |   return new Promise(function (resolve, reject) { | ||
|  |     stores.binaryStore.get(att.digest, function (err, buffer) { | ||
|  |       var data; | ||
|  |       if (err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err.name !== 'NotFoundError') { | ||
|  |           return reject(err); | ||
|  |         } else { | ||
|  |           // empty
 | ||
|  |           if (!opts.binary) { | ||
|  |             data = ''; | ||
|  |           } else { | ||
|  |             data = binStringToBluffer('', type); | ||
|  |           } | ||
|  |         } | ||
|  |       } else { // non-empty
 | ||
|  |         if (opts.binary) { | ||
|  |           data = readAsBlobOrBuffer(buffer, type); | ||
|  |         } else { | ||
|  |           data = buffer.toString('base64'); | ||
|  |         } | ||
|  |       } | ||
|  |       delete att.stub; | ||
|  |       delete att.length; | ||
|  |       att.data = data; | ||
|  |       resolve(); | ||
|  |     }); | ||
|  |   }); | ||
|  | } | ||
|  | 
 | ||
|  | function fetchAttachments(results, stores, opts) { | ||
|  |   var atts = []; | ||
|  |   results.forEach(function (row) { | ||
|  |     if (!(row.doc && row.doc._attachments)) { | ||
|  |       return; | ||
|  |     } | ||
|  |     var attNames = Object.keys(row.doc._attachments); | ||
|  |     attNames.forEach(function (attName) { | ||
|  |       var att = row.doc._attachments[attName]; | ||
|  |       if (!('data' in att)) { | ||
|  |         atts.push(att); | ||
|  |       } | ||
|  |     }); | ||
|  |   }); | ||
|  | 
 | ||
|  |   return Promise.all(atts.map(function (att) { | ||
|  |     return fetchAttachment(att, stores, opts); | ||
|  |   })); | ||
|  | } | ||
|  | 
 | ||
|  | function LevelPouch(opts, callback) { | ||
|  |   opts = clone(opts); | ||
|  |   var api = this; | ||
|  |   var instanceId; | ||
|  |   var stores = {}; | ||
|  |   var revLimit = opts.revs_limit; | ||
|  |   var db; | ||
|  |   var name = opts.name; | ||
|  |   // TODO: this is undocumented and probably unused
 | ||
|  |   /* istanbul ignore else */ | ||
|  |   if (typeof opts.createIfMissing === 'undefined') { | ||
|  |     opts.createIfMissing = true; | ||
|  |   } | ||
|  | 
 | ||
|  |   var leveldown = opts.db; | ||
|  | 
 | ||
|  |   var dbStore; | ||
|  |   var leveldownName = functionName(leveldown); | ||
|  |   if (dbStores.has(leveldownName)) { | ||
|  |     dbStore = dbStores.get(leveldownName); | ||
|  |   } else { | ||
|  |     dbStore = new ExportedMap(); | ||
|  |     dbStores.set(leveldownName, dbStore); | ||
|  |   } | ||
|  |   if (dbStore.has(name)) { | ||
|  |     db = dbStore.get(name); | ||
|  |     afterDBCreated(); | ||
|  |   } else { | ||
|  |     dbStore.set(name, sublevelPouch(levelup(leveldown(name), opts, function (err) { | ||
|  |       /* istanbul ignore if */ | ||
|  |       if (err) { | ||
|  |         dbStore["delete"](name); | ||
|  |         return callback(err); | ||
|  |       } | ||
|  |       db = dbStore.get(name); | ||
|  |       db._docCount  = -1; | ||
|  |       db._queue = new Deque(); | ||
|  |       /* istanbul ignore else */ | ||
|  |       if (typeof opts.migrate === 'object') { // migration for leveldown
 | ||
|  |         opts.migrate.doMigrationOne(name, db, afterDBCreated); | ||
|  |       } else { | ||
|  |         afterDBCreated(); | ||
|  |       } | ||
|  |     }))); | ||
|  |   } | ||
|  | 
 | ||
|  |   function afterDBCreated() { | ||
|  |     stores.docStore = db.sublevel(DOC_STORE, {valueEncoding: safeJsonEncoding}); | ||
|  |     stores.bySeqStore = db.sublevel(BY_SEQ_STORE, {valueEncoding: 'json'}); | ||
|  |     stores.attachmentStore = | ||
|  |       db.sublevel(ATTACHMENT_STORE, {valueEncoding: 'json'}); | ||
|  |     stores.binaryStore = db.sublevel(BINARY_STORE, {valueEncoding: 'binary'}); | ||
|  |     stores.localStore = db.sublevel(LOCAL_STORE, {valueEncoding: 'json'}); | ||
|  |     stores.metaStore = db.sublevel(META_STORE, {valueEncoding: 'json'}); | ||
|  |     /* istanbul ignore else */ | ||
|  |     if (typeof opts.migrate === 'object') { // migration for leveldown
 | ||
|  |       opts.migrate.doMigrationTwo(db, stores, afterLastMigration); | ||
|  |     } else { | ||
|  |       afterLastMigration(); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   function afterLastMigration() { | ||
|  |     stores.metaStore.get(UPDATE_SEQ_KEY, function (err, value) { | ||
|  |       if (typeof db._updateSeq === 'undefined') { | ||
|  |         db._updateSeq = value || 0; | ||
|  |       } | ||
|  |       stores.metaStore.get(DOC_COUNT_KEY, function (err, value) { | ||
|  |         db._docCount = !err ? value : 0; | ||
|  |         stores.metaStore.get(UUID_KEY, function (err, value) { | ||
|  |           instanceId = !err ? value : uuid$1(); | ||
|  |           stores.metaStore.put(UUID_KEY, instanceId, function () { | ||
|  |             immediate(function () { | ||
|  |               callback(null, api); | ||
|  |             }); | ||
|  |           }); | ||
|  |         }); | ||
|  |       }); | ||
|  |     }); | ||
|  |   } | ||
|  | 
 | ||
|  |   function countDocs(callback) { | ||
|  |     /* istanbul ignore if */ | ||
|  |     if (db.isClosed()) { | ||
|  |       return callback(new Error('database is closed')); | ||
|  |     } | ||
|  |     return callback(null, db._docCount); // use cached value
 | ||
|  |   } | ||
|  | 
 | ||
|  |   api._remote = false; | ||
|  |   /* istanbul ignore next */ | ||
|  |   api.type = function () { | ||
|  |     return 'leveldb'; | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._id = function (callback) { | ||
|  |     callback(null, instanceId); | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._info = function (callback) { | ||
|  |     var res = { | ||
|  |       doc_count: db._docCount, | ||
|  |       update_seq: db._updateSeq, | ||
|  |       backend_adapter: functionName(leveldown) | ||
|  |     }; | ||
|  |     return immediate(function () { | ||
|  |       callback(null, res); | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   function tryCode(fun, args) { | ||
|  |     try { | ||
|  |       fun.apply(null, args); | ||
|  |     } catch (err) { | ||
|  |       args[args.length - 1](err); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   function executeNext() { | ||
|  |     var firstTask = db._queue.peekFront(); | ||
|  | 
 | ||
|  |     if (firstTask.type === 'read') { | ||
|  |       runReadOperation(firstTask); | ||
|  |     } else { // write, only do one at a time
 | ||
|  |       runWriteOperation(firstTask); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   function runReadOperation(firstTask) { | ||
|  |     // do multiple reads at once, because it's safe
 | ||
|  | 
 | ||
|  |     var readTasks = [firstTask]; | ||
|  |     var i = 1; | ||
|  |     var nextTask = db._queue.get(i); | ||
|  |     while (typeof nextTask !== 'undefined' && nextTask.type === 'read') { | ||
|  |       readTasks.push(nextTask); | ||
|  |       i++; | ||
|  |       nextTask = db._queue.get(i); | ||
|  |     } | ||
|  | 
 | ||
|  |     var numDone = 0; | ||
|  | 
 | ||
|  |     readTasks.forEach(function (readTask) { | ||
|  |       var args = readTask.args; | ||
|  |       var callback = args[args.length - 1]; | ||
|  |       args[args.length - 1] = getArguments(function (cbArgs) { | ||
|  |         callback.apply(null, cbArgs); | ||
|  |         if (++numDone === readTasks.length) { | ||
|  |           immediate(function () { | ||
|  |             // all read tasks have finished
 | ||
|  |             readTasks.forEach(function () { | ||
|  |               db._queue.shift(); | ||
|  |             }); | ||
|  |             if (db._queue.length) { | ||
|  |               executeNext(); | ||
|  |             } | ||
|  |           }); | ||
|  |         } | ||
|  |       }); | ||
|  |       tryCode(readTask.fun, args); | ||
|  |     }); | ||
|  |   } | ||
|  | 
 | ||
|  |   function runWriteOperation(firstTask) { | ||
|  |     var args = firstTask.args; | ||
|  |     var callback = args[args.length - 1]; | ||
|  |     args[args.length - 1] = getArguments(function (cbArgs) { | ||
|  |       callback.apply(null, cbArgs); | ||
|  |       immediate(function () { | ||
|  |         db._queue.shift(); | ||
|  |         if (db._queue.length) { | ||
|  |           executeNext(); | ||
|  |         } | ||
|  |       }); | ||
|  |     }); | ||
|  |     tryCode(firstTask.fun, args); | ||
|  |   } | ||
|  | 
 | ||
|  |   // all read/write operations to the database are done in a queue,
 | ||
|  |   // similar to how websql/idb works. this avoids problems such
 | ||
|  |   // as e.g. compaction needing to have a lock on the database while
 | ||
|  |   // it updates stuff. in the future we can revisit this.
 | ||
|  |   function writeLock(fun) { | ||
|  |     return getArguments(function (args) { | ||
|  |       db._queue.push({ | ||
|  |         fun: fun, | ||
|  |         args: args, | ||
|  |         type: 'write' | ||
|  |       }); | ||
|  | 
 | ||
|  |       if (db._queue.length === 1) { | ||
|  |         immediate(executeNext); | ||
|  |       } | ||
|  |     }); | ||
|  |   } | ||
|  | 
 | ||
|  |   // same as the writelock, but multiple can run at once
 | ||
|  |   function readLock(fun) { | ||
|  |     return getArguments(function (args) { | ||
|  |       db._queue.push({ | ||
|  |         fun: fun, | ||
|  |         args: args, | ||
|  |         type: 'read' | ||
|  |       }); | ||
|  | 
 | ||
|  |       if (db._queue.length === 1) { | ||
|  |         immediate(executeNext); | ||
|  |       } | ||
|  |     }); | ||
|  |   } | ||
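|  |   /* How the queue plays out in practice: handlers wrapped with writeLock | ||
|  |    * run one at a time, while a run of readLock handlers at the front of | ||
|  |    * the queue is started together (see runReadOperation above). Later in | ||
|  |    * this adapter _bulkDocs is wrapped with writeLock and _get with | ||
|  |    * readLock, so concurrent reads are fine but writes stay serialized. | ||
|  |    * A wrapped handler keeps its original signature, e.g. (hypothetical): | ||
|  |    *   var guardedPut = writeLock(function (doc, cb) { cb(null, doc); }); | ||
|  |    *   guardedPut({_id: 'a'}, function (err, doc) {}); | ||
|  |    */ | ||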
|  | 
 | ||
|  |   function formatSeq(n) { | ||
|  |     return ('0000000000000000' + n).slice(-16); | ||
|  |   } | ||
|  | 
 | ||
|  |   function parseSeq(s) { | ||
|  |     return parseInt(s, 10); | ||
|  |   } | ||
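|  |   /* Worked example: sequence numbers are zero-padded to 16 characters so | ||
|  |    * that bySeqStore keys sort lexicographically in numeric order: | ||
|  |    *   formatSeq(42)                === '0000000000000042' | ||
|  |    *   parseSeq('0000000000000042') === 42 | ||
|  |    * parseInt with radix 10 simply ignores the leading zeros on the way | ||
|  |    * back. | ||
|  |    */ | ||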
|  | 
 | ||
|  |   api._get = readLock(function (id, opts, callback) { | ||
|  |     opts = clone(opts); | ||
|  | 
 | ||
|  |     stores.docStore.get(id, function (err, metadata) { | ||
|  | 
 | ||
|  |       if (err || !metadata) { | ||
|  |         return callback(createError(MISSING_DOC, 'missing')); | ||
|  |       } | ||
|  | 
 | ||
|  |       var rev; | ||
|  |       if (!opts.rev) { | ||
|  |         rev = getWinningRev(metadata); | ||
|  |         var deleted = getIsDeleted(metadata, rev); | ||
|  |         if (deleted) { | ||
|  |           return callback(createError(MISSING_DOC, 'deleted')); | ||
|  |         } | ||
|  |       } else { | ||
|  |         rev = opts.latest ? latest(opts.rev, metadata) : opts.rev; | ||
|  |       } | ||
|  | 
 | ||
|  |       var seq = metadata.rev_map[rev]; | ||
|  | 
 | ||
|  |       stores.bySeqStore.get(formatSeq(seq), function (err, doc) { | ||
|  |         if (!doc) { | ||
|  |           return callback(createError(MISSING_DOC)); | ||
|  |         } | ||
|  |         /* istanbul ignore if */ | ||
|  |         if ('_id' in doc && doc._id !== metadata.id) { | ||
|  |           // this failing implies something very wrong
 | ||
|  |           return callback(new Error('wrong doc returned')); | ||
|  |         } | ||
|  |         doc._id = metadata.id; | ||
|  |         if ('_rev' in doc) { | ||
|  |           /* istanbul ignore if */ | ||
|  |           if (doc._rev !== rev) { | ||
|  |             // this failing implies something very wrong
 | ||
|  |             return callback(new Error('wrong doc returned')); | ||
|  |           } | ||
|  |         } else { | ||
|  |           // we didn't always store this
 | ||
|  |           doc._rev = rev; | ||
|  |         } | ||
|  |         return callback(null, {doc: doc, metadata: metadata}); | ||
|  |       }); | ||
|  |     }); | ||
|  |   }); | ||
|  | 
 | ||
|  |   // not technically part of the spec, but if putAttachment has its own
 | ||
|  |   // method, getAttachment should have one too
 | ||
|  |   api._getAttachment = function (docId, attachId, attachment, opts, callback) { | ||
|  |     var digest = attachment.digest; | ||
|  |     var type = attachment.content_type; | ||
|  | 
 | ||
|  |     stores.binaryStore.get(digest, function (err, attach) { | ||
|  |       if (err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err.name !== 'NotFoundError') { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         // Empty attachment
 | ||
|  |         return callback(null, opts.binary ? createEmptyBlobOrBuffer(type) : ''); | ||
|  |       } | ||
|  | 
 | ||
|  |       if (opts.binary) { | ||
|  |         callback(null, readAsBlobOrBuffer(attach, type)); | ||
|  |       } else { | ||
|  |         callback(null, attach.toString('base64')); | ||
|  |       } | ||
|  |     }); | ||
|  |   }; | ||
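|  |   /* In short: with opts.binary the attachment body comes back as a Blob | ||
|  |    * (browser) or Buffer (Node), otherwise as a base64 string, and a digest | ||
|  |    * with no stored body is treated as an empty attachment. Through the | ||
|  |    * public API this roughly corresponds to the usual PouchDB pattern of | ||
|  |    *   db.get('doc', {attachments: true})               for base64, and | ||
|  |    *   db.get('doc', {attachments: true, binary: true}) for Blobs/Buffers | ||
|  |    * (a sketch of typical usage, not something specific to this adapter). | ||
|  |    */ | ||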
|  | 
 | ||
|  |   api._bulkDocs = writeLock(function (req, opts, callback) { | ||
|  |     var newEdits = opts.new_edits; | ||
|  |     var results = new Array(req.docs.length); | ||
|  |     var fetchedDocs = new ExportedMap(); | ||
|  |     var stemmedRevs = new ExportedMap(); | ||
|  | 
 | ||
|  |     var txn = new LevelTransaction(); | ||
|  |     var docCountDelta = 0; | ||
|  |     var newUpdateSeq = db._updateSeq; | ||
|  | 
 | ||
|  |     // parse the docs and give each a sequence number
 | ||
|  |     var userDocs = req.docs; | ||
|  |     var docInfos = userDocs.map(function (doc) { | ||
|  |       if (doc._id && isLocalId(doc._id)) { | ||
|  |         return doc; | ||
|  |       } | ||
|  |       var newDoc = parseDoc(doc, newEdits, api.__opts); | ||
|  | 
 | ||
|  |       if (newDoc.metadata && !newDoc.metadata.rev_map) { | ||
|  |         newDoc.metadata.rev_map = {}; | ||
|  |       } | ||
|  | 
 | ||
|  |       return newDoc; | ||
|  |     }); | ||
|  |     var infoErrors = docInfos.filter(function (doc) { | ||
|  |       return doc.error; | ||
|  |     }); | ||
|  | 
 | ||
|  |     if (infoErrors.length) { | ||
|  |       return callback(infoErrors[0]); | ||
|  |     } | ||
|  | 
 | ||
|  |     // verify any stub attachments as a precondition test
 | ||
|  | 
 | ||
|  |     function verifyAttachment(digest, callback) { | ||
|  |       txn.get(stores.attachmentStore, digest, function (levelErr) { | ||
|  |         if (levelErr) { | ||
|  |           var err = createError(MISSING_STUB, | ||
|  |                                 'unknown stub attachment with digest ' + | ||
|  |                                 digest); | ||
|  |           callback(err); | ||
|  |         } else { | ||
|  |           callback(); | ||
|  |         } | ||
|  |       }); | ||
|  |     } | ||
|  | 
 | ||
|  |     function verifyAttachments(finish) { | ||
|  |       var digests = []; | ||
|  |       userDocs.forEach(function (doc) { | ||
|  |         if (doc && doc._attachments) { | ||
|  |           Object.keys(doc._attachments).forEach(function (filename) { | ||
|  |             var att = doc._attachments[filename]; | ||
|  |             if (att.stub) { | ||
|  |               digests.push(att.digest); | ||
|  |             } | ||
|  |           }); | ||
|  |         } | ||
|  |       }); | ||
|  |       if (!digests.length) { | ||
|  |         return finish(); | ||
|  |       } | ||
|  |       var numDone = 0; | ||
|  |       var err; | ||
|  | 
 | ||
|  |       digests.forEach(function (digest) { | ||
|  |         verifyAttachment(digest, function (attErr) { | ||
|  |           if (attErr && !err) { | ||
|  |             err = attErr; | ||
|  |           } | ||
|  | 
 | ||
|  |           if (++numDone === digests.length) { | ||
|  |             finish(err); | ||
|  |           } | ||
|  |         }); | ||
|  |       }); | ||
|  |     } | ||
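|  |     /* What this precondition guards against: a doc may arrive (typically | ||
|  |      * via replication with new_edits: false) carrying only attachment | ||
|  |      * stubs, roughly of the shape | ||
|  |      *   _attachments: { 'logo.png': { stub: true, digest: 'md5-...' } } | ||
|  |      * and each such digest must already exist in attachmentStore, | ||
|  |      * otherwise the whole bulkDocs call fails early with MISSING_STUB. | ||
|  |      */ | ||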
|  | 
 | ||
|  |     function fetchExistingDocs(finish) { | ||
|  |       var numDone = 0; | ||
|  |       var overallErr; | ||
|  |       function checkDone() { | ||
|  |         if (++numDone === userDocs.length) { | ||
|  |           return finish(overallErr); | ||
|  |         } | ||
|  |       } | ||
|  | 
 | ||
|  |       userDocs.forEach(function (doc) { | ||
|  |         if (doc._id && isLocalId(doc._id)) { | ||
|  |           // skip local docs
 | ||
|  |           return checkDone(); | ||
|  |         } | ||
|  |         txn.get(stores.docStore, doc._id, function (err, info) { | ||
|  |           if (err) { | ||
|  |             /* istanbul ignore if */ | ||
|  |             if (err.name !== 'NotFoundError') { | ||
|  |               overallErr = err; | ||
|  |             } | ||
|  |           } else { | ||
|  |             fetchedDocs.set(doc._id, info); | ||
|  |           } | ||
|  |           checkDone(); | ||
|  |         }); | ||
|  |       }); | ||
|  |     } | ||
|  | 
 | ||
|  |     function compact(revsMap, callback) { | ||
|  |       var promise = Promise.resolve(); | ||
|  |       revsMap.forEach(function (revs, docId) { | ||
|  |         // TODO: parallelize, for now need to be sequential to
 | ||
|  |         // pass orphaned attachment tests
 | ||
|  |         promise = promise.then(function () { | ||
|  |           return new Promise(function (resolve, reject) { | ||
|  |             api._doCompactionNoLock(docId, revs, {ctx: txn}, function (err) { | ||
|  |               /* istanbul ignore if */ | ||
|  |               if (err) { | ||
|  |                 return reject(err); | ||
|  |               } | ||
|  |               resolve(); | ||
|  |             }); | ||
|  |           }); | ||
|  |         }); | ||
|  |       }); | ||
|  | 
 | ||
|  |       promise.then(function () { | ||
|  |         callback(); | ||
|  |       }, callback); | ||
|  |     } | ||
|  | 
 | ||
|  |     function autoCompact(callback) { | ||
|  |       var revsMap = new ExportedMap(); | ||
|  |       fetchedDocs.forEach(function (metadata, docId) { | ||
|  |         revsMap.set(docId, compactTree(metadata)); | ||
|  |       }); | ||
|  |       compact(revsMap, callback); | ||
|  |     } | ||
|  | 
 | ||
|  |     function finish() { | ||
|  |       compact(stemmedRevs, function (error) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (error) { | ||
|  |           return complete(error); | ||
|  |         } | ||
|  |         if (api.auto_compaction) { | ||
|  |           return autoCompact(complete); | ||
|  |         } | ||
|  |         complete(); | ||
|  |       }); | ||
|  |     } | ||
|  | 
 | ||
|  |     function writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted, | ||
|  |                       isUpdate, delta, resultsIdx, callback2) { | ||
|  |       docCountDelta += delta; | ||
|  | 
 | ||
|  |       var err = null; | ||
|  |       var recv = 0; | ||
|  | 
 | ||
|  |       docInfo.metadata.winningRev = winningRev$$1; | ||
|  |       docInfo.metadata.deleted = winningRevIsDeleted; | ||
|  | 
 | ||
|  |       docInfo.data._id = docInfo.metadata.id; | ||
|  |       docInfo.data._rev = docInfo.metadata.rev; | ||
|  | 
 | ||
|  |       if (newRevIsDeleted) { | ||
|  |         docInfo.data._deleted = true; | ||
|  |       } | ||
|  | 
 | ||
|  |       if (docInfo.stemmedRevs.length) { | ||
|  |         stemmedRevs.set(docInfo.metadata.id, docInfo.stemmedRevs); | ||
|  |       } | ||
|  | 
 | ||
|  |       var attachments = docInfo.data._attachments ? | ||
|  |         Object.keys(docInfo.data._attachments) : | ||
|  |         []; | ||
|  | 
 | ||
|  |       function attachmentSaved(attachmentErr) { | ||
|  |         recv++; | ||
|  |         if (!err) { | ||
|  |           /* istanbul ignore if */ | ||
|  |           if (attachmentErr) { | ||
|  |             err = attachmentErr; | ||
|  |             callback2(err); | ||
|  |           } else if (recv === attachments.length) { | ||
|  |             finish(); | ||
|  |           } | ||
|  |         } | ||
|  |       } | ||
|  | 
 | ||
|  |       function onMD5Load(doc, key, data, attachmentSaved) { | ||
|  |         return function (result) { | ||
|  |           saveAttachment(doc, MD5_PREFIX + result, key, data, attachmentSaved); | ||
|  |         }; | ||
|  |       } | ||
|  | 
 | ||
|  |       function doMD5(doc, key, attachmentSaved) { | ||
|  |         return function (data) { | ||
|  |           binaryMd5(data, onMD5Load(doc, key, data, attachmentSaved)); | ||
|  |         }; | ||
|  |       } | ||
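|  |       /* Digests are derived from the attachment body itself: binaryMd5 | ||
|  |        * yields a base64 MD5 which is then prefixed with 'md5-' (the | ||
|  |        * MD5_PREFIX used above), so a stored digest looks like | ||
|  |        *   'md5-1B2M2Y8AsgTpgAmY7PhCfg==' | ||
|  |        * (that particular value being the digest of an empty body). | ||
|  |        * Identical bodies therefore share a single binaryStore entry no | ||
|  |        * matter how many docs reference them. | ||
|  |        */ | ||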
|  | 
 | ||
|  |       for (var i = 0; i < attachments.length; i++) { | ||
|  |         var key = attachments[i]; | ||
|  |         var att = docInfo.data._attachments[key]; | ||
|  | 
 | ||
|  |         if (att.stub) { | ||
|  |           // still need to update the refs mapping
 | ||
|  |           var id = docInfo.data._id; | ||
|  |           var rev = docInfo.data._rev; | ||
|  |           saveAttachmentRefs(id, rev, att.digest, attachmentSaved); | ||
|  |           continue; | ||
|  |         } | ||
|  |         var data; | ||
|  |         if (typeof att.data === 'string') { | ||
|  |           // input is assumed to be a base64 string
 | ||
|  |           try { | ||
|  |             data = thisAtob(att.data); | ||
|  |           } catch (e) { | ||
|  |             callback(createError(BAD_ARG, | ||
|  |                      'Attachment is not a valid base64 string')); | ||
|  |             return; | ||
|  |           } | ||
|  |           doMD5(docInfo, key, attachmentSaved)(data); | ||
|  |         } else { | ||
|  |           prepareAttachmentForStorage(att.data, | ||
|  |             doMD5(docInfo, key, attachmentSaved)); | ||
|  |         } | ||
|  |       } | ||
|  | 
 | ||
|  |       function finish() { | ||
|  |         var seq = docInfo.metadata.rev_map[docInfo.metadata.rev]; | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (seq) { | ||
|  |           // check that there aren't any existing revisions with the same
 | ||
|  |           // revision id, else we shouldn't do anything
 | ||
|  |           return callback2(); | ||
|  |         } | ||
|  |         seq = ++newUpdateSeq; | ||
|  |         docInfo.metadata.rev_map[docInfo.metadata.rev] = | ||
|  |           docInfo.metadata.seq = seq; | ||
|  |         var seqKey = formatSeq(seq); | ||
|  |         var batch = [{ | ||
|  |           key: seqKey, | ||
|  |           value: docInfo.data, | ||
|  |           prefix: stores.bySeqStore, | ||
|  |           type: 'put' | ||
|  |         }, { | ||
|  |           key: docInfo.metadata.id, | ||
|  |           value: docInfo.metadata, | ||
|  |           prefix: stores.docStore, | ||
|  |           type: 'put' | ||
|  |         }]; | ||
|  |         txn.batch(batch); | ||
|  |         results[resultsIdx] = { | ||
|  |           ok: true, | ||
|  |           id: docInfo.metadata.id, | ||
|  |           rev: docInfo.metadata.rev | ||
|  |         }; | ||
|  |         fetchedDocs.set(docInfo.metadata.id, docInfo.metadata); | ||
|  |         callback2(); | ||
|  |       } | ||
|  | 
 | ||
|  |       if (!attachments.length) { | ||
|  |         finish(); | ||
|  |       } | ||
|  |     } | ||
|  | 
 | ||
|  |     // attachments are queued per-digest, otherwise the refs could be
 | ||
|  |     // overwritten by concurrent writes in the same bulkDocs session
 | ||
|  |     var attachmentQueues = {}; | ||
|  | 
 | ||
|  |     function saveAttachmentRefs(id, rev, digest, callback) { | ||
|  | 
 | ||
|  |       function fetchAtt() { | ||
|  |         return new Promise(function (resolve, reject) { | ||
|  |           txn.get(stores.attachmentStore, digest, function (err, oldAtt) { | ||
|  |             /* istanbul ignore if */ | ||
|  |             if (err && err.name !== 'NotFoundError') { | ||
|  |               return reject(err); | ||
|  |             } | ||
|  |             resolve(oldAtt); | ||
|  |           }); | ||
|  |         }); | ||
|  |       } | ||
|  | 
 | ||
|  |       function saveAtt(oldAtt) { | ||
|  |         var ref = [id, rev].join('@'); | ||
|  |         var newAtt = {}; | ||
|  | 
 | ||
|  |         if (oldAtt) { | ||
|  |           if (oldAtt.refs) { | ||
|  |             // only update references if this attachment already has them
 | ||
|  |             // since we cannot migrate old style attachments here without
 | ||
|  |             // doing a full db scan for references
 | ||
|  |             newAtt.refs = oldAtt.refs; | ||
|  |             newAtt.refs[ref] = true; | ||
|  |           } | ||
|  |         } else { | ||
|  |           newAtt.refs = {}; | ||
|  |           newAtt.refs[ref] = true; | ||
|  |         } | ||
|  | 
 | ||
|  |         return new Promise(function (resolve) { | ||
|  |           txn.batch([{ | ||
|  |             type: 'put', | ||
|  |             prefix: stores.attachmentStore, | ||
|  |             key: digest, | ||
|  |             value: newAtt | ||
|  |           }]); | ||
|  |           resolve(!oldAtt); | ||
|  |         }); | ||
|  |       } | ||
|  | 
 | ||
|  |       // put attachments in a per-digest queue, to avoid two docs with the same
 | ||
|  |       // attachment overwriting each other
 | ||
|  |       var queue = attachmentQueues[digest] || Promise.resolve(); | ||
|  |       attachmentQueues[digest] = queue.then(function () { | ||
|  |         return fetchAtt().then(saveAtt).then(function (isNewAttachment) { | ||
|  |           callback(null, isNewAttachment); | ||
|  |         }, callback); | ||
|  |       }); | ||
|  |     } | ||
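|  |     /* The queue above is plain promise chaining keyed by digest: each new | ||
|  |      * writer for the same digest is appended with .then(), so reads and | ||
|  |      * writes of that attachment's refs never interleave, while different | ||
|  |      * digests still proceed independently. A minimal standalone sketch of | ||
|  |      * the same idea (names here are illustrative only): | ||
|  |      *   var queues = {}; | ||
|  |      *   function enqueue(key, task) { | ||
|  |      *     var tail = queues[key] || Promise.resolve(); | ||
|  |      *     queues[key] = tail.then(task, task); | ||
|  |      *     return queues[key]; | ||
|  |      *   } | ||
|  |      */ | ||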
|  | 
 | ||
|  |     function saveAttachment(docInfo, digest, key, data, callback) { | ||
|  |       var att = docInfo.data._attachments[key]; | ||
|  |       delete att.data; | ||
|  |       att.digest = digest; | ||
|  |       att.length = data.length; | ||
|  |       var id = docInfo.metadata.id; | ||
|  |       var rev = docInfo.metadata.rev; | ||
|  |       att.revpos = parseInt(rev, 10); | ||
|  | 
 | ||
|  |       saveAttachmentRefs(id, rev, digest, function (err, isNewAttachment) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         // do not try to store empty attachments
 | ||
|  |         if (data.length === 0) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         if (!isNewAttachment) { | ||
|  |           // small optimization - don't bother writing it again
 | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         txn.batch([{ | ||
|  |           type: 'put', | ||
|  |           prefix: stores.binaryStore, | ||
|  |           key: digest, | ||
|  |           value: bufferFrom(data, 'binary') | ||
|  |         }]); | ||
|  |         callback(); | ||
|  |       }); | ||
|  |     } | ||
|  | 
 | ||
|  |     function complete(err) { | ||
|  |       /* istanbul ignore if */ | ||
|  |       if (err) { | ||
|  |         return immediate(function () { | ||
|  |           callback(err); | ||
|  |         }); | ||
|  |       } | ||
|  |       txn.batch([ | ||
|  |         { | ||
|  |           prefix: stores.metaStore, | ||
|  |           type: 'put', | ||
|  |           key: UPDATE_SEQ_KEY, | ||
|  |           value: newUpdateSeq | ||
|  |         }, | ||
|  |         { | ||
|  |           prefix: stores.metaStore, | ||
|  |           type: 'put', | ||
|  |           key: DOC_COUNT_KEY, | ||
|  |           value: db._docCount + docCountDelta | ||
|  |         } | ||
|  |       ]); | ||
|  |       txn.execute(db, function (err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         db._docCount += docCountDelta; | ||
|  |         db._updateSeq = newUpdateSeq; | ||
|  |         levelChanges.notify(name); | ||
|  |         immediate(function () { | ||
|  |           callback(null, results); | ||
|  |         }); | ||
|  |       }); | ||
|  |     } | ||
|  | 
 | ||
|  |     if (!docInfos.length) { | ||
|  |       return callback(null, []); | ||
|  |     } | ||
|  | 
 | ||
|  |     verifyAttachments(function (err) { | ||
|  |       if (err) { | ||
|  |         return callback(err); | ||
|  |       } | ||
|  |       fetchExistingDocs(function (err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         processDocs(revLimit, docInfos, api, fetchedDocs, txn, results, | ||
|  |                     writeDoc, opts, finish); | ||
|  |       }); | ||
|  |     }); | ||
|  |   }); | ||
|  |   api._allDocs = function (opts, callback) { | ||
|  |     if ('keys' in opts) { | ||
|  |       return allDocsKeysQuery(this, opts); | ||
|  |     } | ||
|  |     return readLock(function (opts, callback) { | ||
|  |       opts = clone(opts); | ||
|  |       countDocs(function (err, docCount) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         var readstreamOpts = {}; | ||
|  |         var skip = opts.skip || 0; | ||
|  |         if (opts.startkey) { | ||
|  |           readstreamOpts.gte = opts.startkey; | ||
|  |         } | ||
|  |         if (opts.endkey) { | ||
|  |           readstreamOpts.lte = opts.endkey; | ||
|  |         } | ||
|  |         if (opts.key) { | ||
|  |           readstreamOpts.gte = readstreamOpts.lte = opts.key; | ||
|  |         } | ||
|  |         if (opts.descending) { | ||
|  |           readstreamOpts.reverse = true; | ||
|  |           // swap start and end
 | ||
|  |           var tmp = readstreamOpts.lte; | ||
|  |           readstreamOpts.lte = readstreamOpts.gte; | ||
|  |           readstreamOpts.gte = tmp; | ||
|  |         } | ||
|  |         var limit; | ||
|  |         if (typeof opts.limit === 'number') { | ||
|  |           limit = opts.limit; | ||
|  |         } | ||
|  |         if (limit === 0 || | ||
|  |             ('gte' in readstreamOpts && 'lte' in readstreamOpts && | ||
|  |             readstreamOpts.gte > readstreamOpts.lte)) { | ||
|  |           // should return 0 results when start is greater than end.
 | ||
|  |           // normally level would "fix" this for us by reversing the order,
 | ||
|  |           // so short-circuit instead
 | ||
|  |           var returnVal = { | ||
|  |             total_rows: docCount, | ||
|  |             offset: opts.skip, | ||
|  |             rows: [] | ||
|  |           }; | ||
|  |           /* istanbul ignore if */ | ||
|  |           if (opts.update_seq) { | ||
|  |             returnVal.update_seq = db._updateSeq; | ||
|  |           } | ||
|  |           return callback(null, returnVal); | ||
|  |         } | ||
|  |         var results = []; | ||
|  |         var docstream = stores.docStore.readStream(readstreamOpts); | ||
|  | 
 | ||
|  |         var throughStream = through2.obj(function (entry, _, next) { | ||
|  |           var metadata = entry.value; | ||
|  |           // winningRev and deleted are performance-killers, but
 | ||
|  |           // in newer versions of PouchDB, they are cached on the metadata
 | ||
|  |           var winningRev$$1 = getWinningRev(metadata); | ||
|  |           var deleted = getIsDeleted(metadata, winningRev$$1); | ||
|  |           if (!deleted) { | ||
|  |             if (skip-- > 0) { | ||
|  |               next(); | ||
|  |               return; | ||
|  |             } else if (typeof limit === 'number' && limit-- <= 0) { | ||
|  |               docstream.unpipe(); | ||
|  |               docstream.destroy(); | ||
|  |               next(); | ||
|  |               return; | ||
|  |             } | ||
|  |           } else if (opts.deleted !== 'ok') { | ||
|  |             next(); | ||
|  |             return; | ||
|  |           } | ||
|  |           function allDocsInner(data) { | ||
|  |             var doc = { | ||
|  |               id: metadata.id, | ||
|  |               key: metadata.id, | ||
|  |               value: { | ||
|  |                 rev: winningRev$$1 | ||
|  |               } | ||
|  |             }; | ||
|  |             if (opts.include_docs) { | ||
|  |               doc.doc = data; | ||
|  |               doc.doc._rev = doc.value.rev; | ||
|  |               if (opts.conflicts) { | ||
|  |                 var conflicts = collectConflicts(metadata); | ||
|  |                 if (conflicts.length) { | ||
|  |                   doc.doc._conflicts = conflicts; | ||
|  |                 } | ||
|  |               } | ||
|  |               for (var att in doc.doc._attachments) { | ||
|  |                 if (Object.prototype.hasOwnProperty.call(doc.doc._attachments, att)) { | ||
|  |                   doc.doc._attachments[att].stub = true; | ||
|  |                 } | ||
|  |               } | ||
|  |             } | ||
|  |             if (opts.inclusive_end === false && metadata.id === opts.endkey) { | ||
|  |               return next(); | ||
|  |             } else if (deleted) { | ||
|  |               if (opts.deleted === 'ok') { | ||
|  |                 doc.value.deleted = true; | ||
|  |                 doc.doc = null; | ||
|  |               } else { | ||
|  |                 /* istanbul ignore next */ | ||
|  |                 return next(); | ||
|  |               } | ||
|  |             } | ||
|  |             results.push(doc); | ||
|  |             next(); | ||
|  |           } | ||
|  |           if (opts.include_docs) { | ||
|  |             var seq = metadata.rev_map[winningRev$$1]; | ||
|  |             stores.bySeqStore.get(formatSeq(seq), function (err, data) { | ||
|  |               allDocsInner(data); | ||
|  |             }); | ||
|  |           } | ||
|  |           else { | ||
|  |             allDocsInner(); | ||
|  |           } | ||
|  |         }, function (next) { | ||
|  |           Promise.resolve().then(function () { | ||
|  |             if (opts.include_docs && opts.attachments) { | ||
|  |               return fetchAttachments(results, stores, opts); | ||
|  |             } | ||
|  |           }).then(function () { | ||
|  |             var returnVal = { | ||
|  |               total_rows: docCount, | ||
|  |               offset: opts.skip, | ||
|  |               rows: results | ||
|  |             }; | ||
|  | 
 | ||
|  |             /* istanbul ignore if */ | ||
|  |             if (opts.update_seq) { | ||
|  |               returnVal.update_seq = db._updateSeq; | ||
|  |             } | ||
|  |             callback(null, returnVal); | ||
|  |           }, callback); | ||
|  |           next(); | ||
|  |         }).on('unpipe', function () { | ||
|  |           throughStream.end(); | ||
|  |         }); | ||
|  | 
 | ||
|  |         docstream.on('error', callback); | ||
|  | 
 | ||
|  |         docstream.pipe(throughStream); | ||
|  |       }); | ||
|  |     })(opts, callback); | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._changes = function (opts) { | ||
|  |     opts = clone(opts); | ||
|  | 
 | ||
|  |     if (opts.continuous) { | ||
|  |       var id = name + ':' + uuid$1(); | ||
|  |       levelChanges.addListener(name, id, api, opts); | ||
|  |       levelChanges.notify(name); | ||
|  |       return { | ||
|  |         cancel: function () { | ||
|  |           levelChanges.removeListener(name, id); | ||
|  |         } | ||
|  |       }; | ||
|  |     } | ||
|  | 
 | ||
|  |     var descending = opts.descending; | ||
|  |     var results = []; | ||
|  |     var lastSeq = opts.since || 0; | ||
|  |     var called = 0; | ||
|  |     var streamOpts = { | ||
|  |       reverse: descending | ||
|  |     }; | ||
|  |     var limit; | ||
|  |     if ('limit' in opts && opts.limit > 0) { | ||
|  |       limit = opts.limit; | ||
|  |     } | ||
|  |     if (!streamOpts.reverse) { | ||
|  |       streamOpts.start = formatSeq(opts.since || 0); | ||
|  |     } | ||
|  | 
 | ||
|  |     var docIds = opts.doc_ids && new ExportedSet(opts.doc_ids); | ||
|  |     var filter = filterChange(opts); | ||
|  |     var docIdsToMetadata = new ExportedMap(); | ||
|  | 
 | ||
|  |     function complete() { | ||
|  |       opts.done = true; | ||
|  |       if (opts.return_docs && opts.limit) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (opts.limit < results.length) { | ||
|  |           results.length = opts.limit; | ||
|  |         } | ||
|  |       } | ||
|  |       changeStream.unpipe(throughStream); | ||
|  |       changeStream.destroy(); | ||
|  |       if (!opts.continuous && !opts.cancelled) { | ||
|  |         if (opts.include_docs && opts.attachments && opts.return_docs) { | ||
|  |           fetchAttachments(results, stores, opts).then(function () { | ||
|  |             opts.complete(null, {results: results, last_seq: lastSeq}); | ||
|  |           }); | ||
|  |         } else { | ||
|  |           opts.complete(null, {results: results, last_seq: lastSeq}); | ||
|  |         } | ||
|  |       } | ||
|  |     } | ||
|  |     var changeStream = stores.bySeqStore.readStream(streamOpts); | ||
|  |     var throughStream = through2.obj(function (data, _, next) { | ||
|  |       if (limit && called >= limit) { | ||
|  |         complete(); | ||
|  |         return next(); | ||
|  |       } | ||
|  |       if (opts.cancelled || opts.done) { | ||
|  |         return next(); | ||
|  |       } | ||
|  | 
 | ||
|  |       var seq = parseSeq(data.key); | ||
|  |       var doc = data.value; | ||
|  | 
 | ||
|  |       if (seq === opts.since && !descending) { | ||
|  |         // couchdb ignores `since` if descending=true
 | ||
|  |         return next(); | ||
|  |       } | ||
|  | 
 | ||
|  |       if (docIds && !docIds.has(doc._id)) { | ||
|  |         return next(); | ||
|  |       } | ||
|  | 
 | ||
|  |       var metadata; | ||
|  | 
 | ||
|  |       function onGetMetadata(metadata) { | ||
|  |         var winningRev$$1 = getWinningRev(metadata); | ||
|  | 
 | ||
|  |         function onGetWinningDoc(winningDoc) { | ||
|  | 
 | ||
|  |           var change = opts.processChange(winningDoc, metadata, opts); | ||
|  |           change.seq = metadata.seq; | ||
|  | 
 | ||
|  |           var filtered = filter(change); | ||
|  |           if (typeof filtered === 'object') { | ||
|  |             return opts.complete(filtered); | ||
|  |           } | ||
|  | 
 | ||
|  |           if (filtered) { | ||
|  |             called++; | ||
|  | 
 | ||
|  |             if (opts.attachments && opts.include_docs) { | ||
|  |               // fetch attachment immediately for the benefit
 | ||
|  |               // of live listeners
 | ||
|  |               fetchAttachments([change], stores, opts).then(function () { | ||
|  |                 opts.onChange(change); | ||
|  |               }); | ||
|  |             } else { | ||
|  |               opts.onChange(change); | ||
|  |             } | ||
|  | 
 | ||
|  |             if (opts.return_docs) { | ||
|  |               results.push(change); | ||
|  |             } | ||
|  |           } | ||
|  |           next(); | ||
|  |         } | ||
|  | 
 | ||
|  |         if (metadata.seq !== seq) { | ||
|  |           // some other seq is later
 | ||
|  |           return next(); | ||
|  |         } | ||
|  | 
 | ||
|  |         lastSeq = seq; | ||
|  | 
 | ||
|  |         if (winningRev$$1 === doc._rev) { | ||
|  |           return onGetWinningDoc(doc); | ||
|  |         } | ||
|  | 
 | ||
|  |         // fetch the winner
 | ||
|  | 
 | ||
|  |         var winningSeq = metadata.rev_map[winningRev$$1]; | ||
|  | 
 | ||
|  |         stores.bySeqStore.get(formatSeq(winningSeq), function (err, doc) { | ||
|  |           onGetWinningDoc(doc); | ||
|  |         }); | ||
|  |       } | ||
|  | 
 | ||
|  |       metadata = docIdsToMetadata.get(doc._id); | ||
|  |       if (metadata) { // cached
 | ||
|  |         return onGetMetadata(metadata); | ||
|  |       } | ||
|  |       // metadata not cached, have to go fetch it
 | ||
|  |       stores.docStore.get(doc._id, function (err, metadata) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (opts.cancelled || opts.done || db.isClosed() || | ||
|  |           isLocalId(metadata.id)) { | ||
|  |           return next(); | ||
|  |         } | ||
|  |         docIdsToMetadata.set(doc._id, metadata); | ||
|  |         onGetMetadata(metadata); | ||
|  |       }); | ||
|  |     }, function (next) { | ||
|  |       if (opts.cancelled) { | ||
|  |         return next(); | ||
|  |       } | ||
|  |       if (opts.return_docs && opts.limit) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (opts.limit < results.length) { | ||
|  |           results.length = opts.limit; | ||
|  |         } | ||
|  |       } | ||
|  | 
 | ||
|  |       next(); | ||
|  |     }).on('unpipe', function () { | ||
|  |       throughStream.end(); | ||
|  |       complete(); | ||
|  |     }); | ||
|  |     changeStream.pipe(throughStream); | ||
|  |     return { | ||
|  |       cancel: function () { | ||
|  |         opts.cancelled = true; | ||
|  |         complete(); | ||
|  |       } | ||
|  |     }; | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._close = function (callback) { | ||
|  |     /* istanbul ignore if */ | ||
|  |     if (db.isClosed()) { | ||
|  |       return callback(createError(NOT_OPEN)); | ||
|  |     } | ||
|  |     db.close(function (err) { | ||
|  |       /* istanbul ignore if */ | ||
|  |       if (err) { | ||
|  |         callback(err); | ||
|  |       } else { | ||
|  |         dbStore["delete"](name); | ||
|  | 
 | ||
|  |         var adapterName = functionName(leveldown); | ||
|  |         var adapterStore = dbStores.get(adapterName); | ||
|  |         var keys = [...adapterStore.keys()].filter(k => k.includes("-mrview-")); | ||
|  |         keys.forEach(key => { | ||
|  |           var eventEmitter = adapterStore.get(key); | ||
|  |           eventEmitter.removeAllListeners(); | ||
|  |           eventEmitter.close(); | ||
|  |           adapterStore["delete"](key); | ||
|  |         }); | ||
|  | 
 | ||
|  |         callback(); | ||
|  |       } | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._getRevisionTree = function (docId, callback) { | ||
|  |     stores.docStore.get(docId, function (err, metadata) { | ||
|  |       if (err) { | ||
|  |         callback(createError(MISSING_DOC)); | ||
|  |       } else { | ||
|  |         callback(null, metadata.rev_tree); | ||
|  |       } | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._doCompaction = writeLock(function (docId, revs, opts, callback) { | ||
|  |     api._doCompactionNoLock(docId, revs, opts, callback); | ||
|  |   }); | ||
|  | 
 | ||
|  |   // the NoLock version is for use by bulkDocs
 | ||
|  |   api._doCompactionNoLock = function (docId, revs, opts, callback) { | ||
|  |     if (typeof opts === 'function') { | ||
|  |       callback = opts; | ||
|  |       opts = {}; | ||
|  |     } | ||
|  | 
 | ||
|  |     if (!revs.length) { | ||
|  |       return callback(); | ||
|  |     } | ||
|  |     var txn = opts.ctx || new LevelTransaction(); | ||
|  | 
 | ||
|  |     txn.get(stores.docStore, docId, function (err, metadata) { | ||
|  |       /* istanbul ignore if */ | ||
|  |       if (err) { | ||
|  |         return callback(err); | ||
|  |       } | ||
|  |       var seqs = revs.map(function (rev) { | ||
|  |         var seq = metadata.rev_map[rev]; | ||
|  |         delete metadata.rev_map[rev]; | ||
|  |         return seq; | ||
|  |       }); | ||
|  |       traverseRevTree(metadata.rev_tree, function (isLeaf, pos, | ||
|  |                                                          revHash, ctx, opts) { | ||
|  |         var rev = pos + '-' + revHash; | ||
|  |         if (revs.indexOf(rev) !== -1) { | ||
|  |           opts.status = 'missing'; | ||
|  |         } | ||
|  |       }); | ||
|  | 
 | ||
|  |       var batch = []; | ||
|  |       batch.push({ | ||
|  |         key: metadata.id, | ||
|  |         value: metadata, | ||
|  |         type: 'put', | ||
|  |         prefix: stores.docStore | ||
|  |       }); | ||
|  | 
 | ||
|  |       var digestMap = {}; | ||
|  |       var numDone = 0; | ||
|  |       var overallErr; | ||
|  |       function checkDone(err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           overallErr = err; | ||
|  |         } | ||
|  |         if (++numDone === revs.length) { // done
 | ||
|  |           /* istanbul ignore if */ | ||
|  |           if (overallErr) { | ||
|  |             return callback(overallErr); | ||
|  |           } | ||
|  |           deleteOrphanedAttachments(); | ||
|  |         } | ||
|  |       } | ||
|  | 
 | ||
|  |       function finish(err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         txn.batch(batch); | ||
|  |         if (opts.ctx) { | ||
|  |           // don't execute immediately
 | ||
|  |           return callback(); | ||
|  |         } | ||
|  |         txn.execute(db, callback); | ||
|  |       } | ||
|  | 
 | ||
|  |       function deleteOrphanedAttachments() { | ||
|  |         var possiblyOrphanedAttachments = Object.keys(digestMap); | ||
|  |         if (!possiblyOrphanedAttachments.length) { | ||
|  |           return finish(); | ||
|  |         } | ||
|  |         var numDone = 0; | ||
|  |         var overallErr; | ||
|  |         function checkDone(err) { | ||
|  |           /* istanbul ignore if */ | ||
|  |           if (err) { | ||
|  |             overallErr = err; | ||
|  |           } | ||
|  |           if (++numDone === possiblyOrphanedAttachments.length) { | ||
|  |             finish(overallErr); | ||
|  |           } | ||
|  |         } | ||
|  |         var refsToDelete = new ExportedMap(); | ||
|  |         revs.forEach(function (rev) { | ||
|  |           refsToDelete.set(docId + '@' + rev, true); | ||
|  |         }); | ||
|  |         possiblyOrphanedAttachments.forEach(function (digest) { | ||
|  |           txn.get(stores.attachmentStore, digest, function (err, attData) { | ||
|  |             /* istanbul ignore if */ | ||
|  |             if (err) { | ||
|  |               if (err.name === 'NotFoundError') { | ||
|  |                 return checkDone(); | ||
|  |               } else { | ||
|  |                 return checkDone(err); | ||
|  |               } | ||
|  |             } | ||
|  |             var refs = Object.keys(attData.refs || {}).filter(function (ref) { | ||
|  |               return !refsToDelete.has(ref); | ||
|  |             }); | ||
|  |             var newRefs = {}; | ||
|  |             refs.forEach(function (ref) { | ||
|  |               newRefs[ref] = true; | ||
|  |             }); | ||
|  |             if (refs.length) { // not orphaned
 | ||
|  |               batch.push({ | ||
|  |                 key: digest, | ||
|  |                 type: 'put', | ||
|  |                 value: {refs: newRefs}, | ||
|  |                 prefix: stores.attachmentStore | ||
|  |               }); | ||
|  |             } else { // orphaned, can safely delete
 | ||
|  |               batch = batch.concat([{ | ||
|  |                 key: digest, | ||
|  |                 type: 'del', | ||
|  |                 prefix: stores.attachmentStore | ||
|  |               }, { | ||
|  |                 key: digest, | ||
|  |                 type: 'del', | ||
|  |                 prefix: stores.binaryStore | ||
|  |               }]); | ||
|  |             } | ||
|  |             checkDone(); | ||
|  |           }); | ||
|  |         }); | ||
|  |       } | ||
|  | 
 | ||
|  |       seqs.forEach(function (seq) { | ||
|  |         batch.push({ | ||
|  |           key: formatSeq(seq), | ||
|  |           type: 'del', | ||
|  |           prefix: stores.bySeqStore | ||
|  |         }); | ||
|  |         txn.get(stores.bySeqStore, formatSeq(seq), function (err, doc) { | ||
|  |           /* istanbul ignore if */ | ||
|  |           if (err) { | ||
|  |             if (err.name === 'NotFoundError') { | ||
|  |               return checkDone(); | ||
|  |             } else { | ||
|  |               return checkDone(err); | ||
|  |             } | ||
|  |           } | ||
|  |           var atts = Object.keys(doc._attachments || {}); | ||
|  |           atts.forEach(function (attName) { | ||
|  |             var digest = doc._attachments[attName].digest; | ||
|  |             digestMap[digest] = true; | ||
|  |           }); | ||
|  |           checkDone(); | ||
|  |         }); | ||
|  |       }); | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._getLocal = function (id, callback) { | ||
|  |     stores.localStore.get(id, function (err, doc) { | ||
|  |       if (err) { | ||
|  |         callback(createError(MISSING_DOC)); | ||
|  |       } else { | ||
|  |         callback(null, doc); | ||
|  |       } | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._putLocal = function (doc, opts, callback) { | ||
|  |     if (typeof opts === 'function') { | ||
|  |       callback = opts; | ||
|  |       opts = {}; | ||
|  |     } | ||
|  |     if (opts.ctx) { | ||
|  |       api._putLocalNoLock(doc, opts, callback); | ||
|  |     } else { | ||
|  |       api._putLocalWithLock(doc, opts, callback); | ||
|  |     } | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._putLocalWithLock = writeLock(function (doc, opts, callback) { | ||
|  |     api._putLocalNoLock(doc, opts, callback); | ||
|  |   }); | ||
|  | 
 | ||
|  |   // the NoLock version is for use by bulkDocs
 | ||
|  |   api._putLocalNoLock = function (doc, opts, callback) { | ||
|  |     delete doc._revisions; // ignore this, trust the rev
 | ||
|  |     var oldRev = doc._rev; | ||
|  |     var id = doc._id; | ||
|  | 
 | ||
|  |     var txn = opts.ctx || new LevelTransaction(); | ||
|  | 
 | ||
|  |     txn.get(stores.localStore, id, function (err, resp) { | ||
|  |       if (err && oldRev) { | ||
|  |         return callback(createError(REV_CONFLICT)); | ||
|  |       } | ||
|  |       if (resp && resp._rev !== oldRev) { | ||
|  |         return callback(createError(REV_CONFLICT)); | ||
|  |       } | ||
|  |       doc._rev = | ||
|  |           oldRev ? '0-' + (parseInt(oldRev.split('-')[1], 10) + 1) : '0-1'; | ||
|  |       var batch = [ | ||
|  |         { | ||
|  |           type: 'put', | ||
|  |           prefix: stores.localStore, | ||
|  |           key: id, | ||
|  |           value: doc | ||
|  |         } | ||
|  |       ]; | ||
|  | 
 | ||
|  |       txn.batch(batch); | ||
|  |       var ret = {ok: true, id: doc._id, rev: doc._rev}; | ||
|  | 
 | ||
|  |       if (opts.ctx) { | ||
|  |         // don't execute immediately
 | ||
|  |         return callback(null, ret); | ||
|  |       } | ||
|  |       txn.execute(db, function (err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         callback(null, ret); | ||
|  |       }); | ||
|  |     }); | ||
|  |   }; | ||
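|  |   /* Local docs (ids starting with '_local/') never enter the revision | ||
|  |    * tree; their _rev is just a counter bumped on every write, so repeated | ||
|  |    * puts of the same local doc yield revs '0-1', '0-2', '0-3', ... and a | ||
|  |    * put whose _rev does not match the stored one fails with REV_CONFLICT. | ||
|  |    */ | ||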
|  | 
 | ||
|  |   api._removeLocal = function (doc, opts, callback) { | ||
|  |     if (typeof opts === 'function') { | ||
|  |       callback = opts; | ||
|  |       opts = {}; | ||
|  |     } | ||
|  |     if (opts.ctx) { | ||
|  |       api._removeLocalNoLock(doc, opts, callback); | ||
|  |     } else { | ||
|  |       api._removeLocalWithLock(doc, opts, callback); | ||
|  |     } | ||
|  |   }; | ||
|  | 
 | ||
|  |   api._removeLocalWithLock = writeLock(function (doc, opts, callback) { | ||
|  |     api._removeLocalNoLock(doc, opts, callback); | ||
|  |   }); | ||
|  | 
 | ||
|  |   // the NoLock version is for use by bulkDocs
 | ||
|  |   api._removeLocalNoLock = function (doc, opts, callback) { | ||
|  |     var txn = opts.ctx || new LevelTransaction(); | ||
|  |     txn.get(stores.localStore, doc._id, function (err, resp) { | ||
|  |       if (err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err.name !== 'NotFoundError') { | ||
|  |           return callback(err); | ||
|  |         } else { | ||
|  |           return callback(createError(MISSING_DOC)); | ||
|  |         } | ||
|  |       } | ||
|  |       if (resp._rev !== doc._rev) { | ||
|  |         return callback(createError(REV_CONFLICT)); | ||
|  |       } | ||
|  |       txn.batch([{ | ||
|  |         prefix: stores.localStore, | ||
|  |         type: 'del', | ||
|  |         key: doc._id | ||
|  |       }]); | ||
|  |       var ret = {ok: true, id: doc._id, rev: '0-0'}; | ||
|  |       if (opts.ctx) { | ||
|  |         // don't execute immediately
 | ||
|  |         return callback(null, ret); | ||
|  |       } | ||
|  |       txn.execute(db, function (err) { | ||
|  |         /* istanbul ignore if */ | ||
|  |         if (err) { | ||
|  |           return callback(err); | ||
|  |         } | ||
|  |         callback(null, ret); | ||
|  |       }); | ||
|  |     }); | ||
|  |   }; | ||
|  | 
 | ||
|  |   // close and delete open leveldb stores
 | ||
|  |   api._destroy = function (opts, callback) { | ||
|  |     var dbStore; | ||
|  |     var leveldownName = functionName(leveldown); | ||
|  |     /* istanbul ignore else */ | ||
|  |     if (dbStores.has(leveldownName)) { | ||
|  |       dbStore = dbStores.get(leveldownName); | ||
|  |     } else { | ||
|  |       return callDestroy(name, callback); | ||
|  |     } | ||
|  | 
 | ||
|  |     /* istanbul ignore else */ | ||
|  |     if (dbStore.has(name)) { | ||
|  |       levelChanges.removeAllListeners(name); | ||
|  | 
 | ||
|  |       dbStore.get(name).close(function () { | ||
|  |         dbStore["delete"](name); | ||
|  |         callDestroy(name, callback); | ||
|  |       }); | ||
|  |     } else { | ||
|  |       callDestroy(name, callback); | ||
|  |     } | ||
|  |   }; | ||
|  |   function callDestroy(name, cb) { | ||
|  |     // May not exist if leveldown is backed by memory adapter
 | ||
|  |     /* istanbul ignore else */ | ||
|  |     if ('destroy' in leveldown) { | ||
|  |       leveldown.destroy(name, cb); | ||
|  |     } else { | ||
|  |       cb(null); | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | function LocalStoragePouch(opts, callback) { | ||
|  |   var _opts = $inject_Object_assign({ | ||
|  |     db: localstoragedown | ||
|  |   }, opts); | ||
|  | 
 | ||
|  |   LevelPouch.call(this, _opts, callback); | ||
|  | } | ||
|  | 
 | ||
|  | // overrides for normal LevelDB behavior on Node
 | ||
|  | LocalStoragePouch.valid = function () { | ||
|  |   return typeof localStorage !== 'undefined'; | ||
|  | }; | ||
|  | LocalStoragePouch.use_prefix = true; | ||
|  | 
 | ||
|  | function LocalStoragePouchPlugin (PouchDB) { | ||
|  |   PouchDB.adapter('localstorage', LocalStoragePouch, true); | ||
|  | } | ||
|  | 
 | ||
|  | // this code only runs in the browser, as its own dist/ script
 | ||
|  | 
 | ||
|  | if (typeof PouchDB === 'undefined') { | ||
|  |   guardedConsole('error', 'localstorage adapter plugin error: ' + | ||
|  |     'Cannot find global "PouchDB" object! ' + | ||
|  |     'Did you remember to include pouchdb.js?'); | ||
|  | } else { | ||
|  |   PouchDB.plugin(LocalStoragePouchPlugin); | ||
|  | } | ||
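|  | /* Typical usage once this script has been loaded after pouchdb.js (a | ||
|  |  * sketch of the standard adapter-selection pattern; nothing in this file | ||
|  |  * executes it): | ||
|  |  *   var db = new PouchDB('mydb', {adapter: 'localstorage'}); | ||
|  |  *   db.info().then(function (info) { console.log(info.adapter); }); | ||
|  |  * which should log 'localstorage'. | ||
|  |  */ | ||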
|  | 
 | ||
|  | },{"118":118,"12":12,"125":125,"140":140,"23":23,"26":26,"31":31,"37":37,"39":39,"5":5,"61":61,"63":63,"70":70,"82":82,"84":84}]},{},[143]); |