mirror of
				https://github.com/flynx/pWiki.git
				synced 2025-10-31 11:00:08 +00:00 
			
		
		
		
	
		
			
	
	
		
			5153 lines
		
	
	
		
			145 KiB
		
	
	
	
		
			JavaScript
		
	
	
	
	
	
		
		
			
		
	
	
			5153 lines
		
	
	
		
			145 KiB
		
	
	
	
		
			JavaScript
		
	
	
	
	
	
|  | // PouchDB indexeddb plugin 7.3.0
 | ||
// NOTE(review): the minified line below is the standard browserify module
// loader prelude: r(modules, cache, entryIds) resolves numeric module ids
// from the bundled map, falling back to an external `require` when one is
// available, and throws a MODULE_NOT_FOUND error for unknown ids. The
// trailing `({1:[function(_dereq_,module,exports){` opens bundled module 1
// (the `argsarray` package). Keep byte-identical — generated code.
|  | (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | module.exports = argsArray; | ||
|  | 
 | ||
// Wrap `fun` so that however the wrapper is invoked, `fun` receives a
// single real-Array argument holding every value the caller passed
// (an empty array for zero arguments). `this` is forwarded unchanged.
function argsArray(fun) {
  return function () {
    var collected = [];
    for (var idx = 0; idx < arguments.length; idx++) {
      collected.push(arguments[idx]);
    }
    return fun.call(this, collected);
  };
}
|  | },{}],2:[function(_dereq_,module,exports){ | ||
|  | 
 | ||
|  | },{}],3:[function(_dereq_,module,exports){ | ||
|  | // Copyright Joyent, Inc. and other Node contributors.
 | ||
|  | //
 | ||
|  | // Permission is hereby granted, free of charge, to any person obtaining a
 | ||
|  | // copy of this software and associated documentation files (the
 | ||
|  | // "Software"), to deal in the Software without restriction, including
 | ||
|  | // without limitation the rights to use, copy, modify, merge, publish,
 | ||
|  | // distribute, sublicense, and/or sell copies of the Software, and to permit
 | ||
|  | // persons to whom the Software is furnished to do so, subject to the
 | ||
|  | // following conditions:
 | ||
|  | //
 | ||
|  | // The above copyright notice and this permission notice shall be included
 | ||
|  | // in all copies or substantial portions of the Software.
 | ||
|  | //
 | ||
|  | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | ||
|  | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | ||
|  | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | ||
|  | // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | ||
|  | // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | ||
|  | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | ||
|  | // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | ||
|  | 
 | ||
// Prefer native implementations; fall back to the polyfills defined near
// the bottom of this module for very old engines.
var objectCreate = Object.create || objectCreatePolyfill;
var objectKeys = Object.keys || objectKeysPolyfill;
var bind = Function.prototype.bind || functionBindPolyfill;

// Constructor: lazily (re)initialises per-instance listener storage. The
// own-property check matters because `_events` also exists on the
// prototype (set to undefined below); an inherited value must be shadowed
// by a fresh prototype-less map.
function EventEmitter() {
  var hasOwnEvents = Object.prototype.hasOwnProperty.call(this, '_events');
  if (!hasOwnEvents || !this._events) {
    this._events = objectCreate(null);
    this._eventsCount = 0;
  }
  this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;

// Backwards-compat with node 0.10.x, where the constructor was reached as
// require('events').EventEmitter.
EventEmitter.EventEmitter = EventEmitter;

// Prototype defaults; real per-instance storage is created by the
// constructor (see the own-property check there).
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;

// By default EventEmitters will print a warning if more than 10 listeners
// are added to a single event — a useful default for finding leaks.
var defaultMaxListeners = 10;
|  | 
 | ||
// Feature-detect a working Object.defineProperty (some old engines expose
// a broken version), then expose defaultMaxListeners as a validating
// accessor where possible.
var hasDefineProperty;
try {
  var probe = {};
  if (Object.defineProperty) {
    Object.defineProperty(probe, 'x', { value: 0 });
  }
  hasDefineProperty = probe.x === 0;
} catch (err) {
  hasDefineProperty = false;
}

if (hasDefineProperty) {
  Object.defineProperty(EventEmitter, 'defaultMaxListeners', {
    enumerable: true,
    get: function () {
      return defaultMaxListeners;
    },
    set: function (arg) {
      // Accept only numbers >= 0 that are not NaN (arg !== arg).
      if (typeof arg !== 'number' || arg < 0 || arg !== arg)
        throw new TypeError('"defaultMaxListeners" must be a positive number');
      defaultMaxListeners = arg;
    }
  });
} else {
  // No defineProperty: fall back to a plain (non-validating) property.
  EventEmitter.defaultMaxListeners = defaultMaxListeners;
}
|  | 
 | ||
|  | // Obviously not all Emitters should be limited to 10. This function allows
 | ||
|  | // that to be increased. Set to zero for unlimited.
 | ||
// Per-emitter override of the leak-warning threshold; 0 means unlimited.
EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
  var invalid = typeof n !== 'number' || n < 0 || isNaN(n);
  if (invalid) {
    throw new TypeError('"n" argument must be a positive number');
  }
  this._maxListeners = n;
  return this;
};

// Resolve the effective limit for an emitter: the per-instance value when
// set, otherwise the module-wide default.
function $getMaxListeners(that) {
  return that._maxListeners === undefined
    ? EventEmitter.defaultMaxListeners
    : that._maxListeners;
}

EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
  return $getMaxListeners(this);
};
|  | 
 | ||
|  | // These standalone emit* functions are used to optimize calling of event
 | ||
|  | // handlers for fast cases because emit() itself often has a variable number of
 | ||
|  | // arguments and can be deoptimized because of that. These functions always have
 | ||
|  | // the same number of arguments and thus do not get deoptimized, so the code
 | ||
|  | // inside them can execute faster.
 | ||
// Fixed-arity emit helpers: emit() dispatches the common argument counts
// here so the hot path never spreads `arguments` (historically a V8
// deoptimisation). Each helper either invokes a single function handler
// directly or snapshots the listener array first (arrayClone), so
// handlers that mutate the listener list mid-emit do not affect the
// emission in progress.
function emitNone(handler, isFn, self) {
  if (isFn) {
    handler.call(self);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].call(self);
  }
}
function emitOne(handler, isFn, self, arg1) {
  if (isFn) {
    handler.call(self, arg1);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].call(self, arg1);
  }
}
function emitTwo(handler, isFn, self, arg1, arg2) {
  if (isFn) {
    handler.call(self, arg1, arg2);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].call(self, arg1, arg2);
  }
}
function emitThree(handler, isFn, self, arg1, arg2, arg3) {
  if (isFn) {
    handler.call(self, arg1, arg2, arg3);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].call(self, arg1, arg2, arg3);
  }
}

// Generic fallback for 4+ emit arguments.
function emitMany(handler, isFn, self, args) {
  if (isFn) {
    handler.apply(self, args);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].apply(self, args);
  }
}
|  | 
 | ||
// Emit `type` with any additional arguments. Returns true when the event
// had listeners, false otherwise. An 'error' event with no registered
// listener is thrown: the Error payload is rethrown as-is, anything else
// is wrapped in a new Error with `.context` set.
// Fix: removed the `return false;` that followed the unconditional
// throw in the doError branch — it was unreachable dead code.
EventEmitter.prototype.emit = function emit(type) {
  var er, handler, len, args, i, events;
  var doError = (type === 'error');

  events = this._events;
  if (events)
    doError = (doError && events.error == null);
  else if (!doError)
    return false;

  // If there is no 'error' event listener then throw.
  if (doError) {
    if (arguments.length > 1)
      er = arguments[1];
    if (er instanceof Error) {
      throw er; // Unhandled 'error' event
    }
    // At least give some kind of context to the user
    var err = new Error('Unhandled "error" event. (' + er + ')');
    err.context = er;
    throw err;
  }

  handler = events[type];

  if (!handler)
    return false;

  var isFn = typeof handler === 'function';
  len = arguments.length;
  switch (len) {
      // fast cases: fixed-arity helpers avoid spreading `arguments`
    case 1:
      emitNone(handler, isFn, this);
      break;
    case 2:
      emitOne(handler, isFn, this, arguments[1]);
      break;
    case 3:
      emitTwo(handler, isFn, this, arguments[1], arguments[2]);
      break;
    case 4:
      emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]);
      break;
      // slower: copy arguments into a real array and apply
    default:
      args = new Array(len - 1);
      for (i = 1; i < len; i++)
        args[i - 1] = arguments[i];
      emitMany(handler, isFn, this, args);
  }

  return true;
};
|  | 
 | ||
// Shared implementation behind addListener/prependListener. Handles the
// three storage shapes of events[type]: absent, a single function, or an
// array of functions. 'newListener' is emitted BEFORE the listener is
// stored (avoiding recursion when type === 'newListener'), and a one-shot
// leak warning is attached once an array grows past the max-listener
// limit.
function _addListener(target, type, listener, prepend) {
  var limit;
  var events = target._events;
  var current;

  if (typeof listener !== 'function')
    throw new TypeError('"listener" argument must be a function');

  if (!events) {
    events = target._events = objectCreate(null);
    target._eventsCount = 0;
  } else {
    if (events.newListener) {
      target.emit('newListener', type,
          listener.listener ? listener.listener : listener);
      // A 'newListener' handler may have replaced target._events; re-read.
      events = target._events;
    }
    current = events[type];
  }

  if (!current) {
    // First listener for this type: store the bare function (no array).
    events[type] = listener;
    ++target._eventsCount;
    return target;
  }

  if (typeof current === 'function') {
    // Second listener: promote the single function to an array.
    current = events[type] =
        prepend ? [listener, current] : [current, listener];
  } else if (prepend) {
    current.unshift(listener);
  } else {
    current.push(listener);
  }

  // Check for a listener leak; warn only once per listener array.
  if (!current.warned) {
    limit = $getMaxListeners(target);
    if (limit && limit > 0 && current.length > limit) {
      current.warned = true;
      var w = new Error('Possible EventEmitter memory leak detected. ' +
          current.length + ' "' + String(type) + '" listeners ' +
          'added. Use emitter.setMaxListeners() to ' +
          'increase limit.');
      w.name = 'MaxListenersExceededWarning';
      w.emitter = target;
      w.type = type;
      w.count = current.length;
      if (typeof console === 'object' && console.warn) {
        console.warn('%s: %s', w.name, w.message);
      }
    }
  }

  return target;
}
|  | 
 | ||
// Public registration API: append (addListener / its alias on) or prepend
// a listener; all three delegate to _addListener.
EventEmitter.prototype.addListener = function addListener(type, listener) {
  return _addListener(this, type, listener, false);
};

EventEmitter.prototype.on = EventEmitter.prototype.addListener;

EventEmitter.prototype.prependListener =
    function prependListener(type, listener) {
      return _addListener(this, type, listener, true);
    };
|  | 
 | ||
// Trampoline used by once(): on its first invocation it unregisters
// itself, marks the shared state fired, and forwards the call (with
// direct-call fast paths for 0-3 arguments) to the real listener.
// Subsequent invocations are no-ops.
function onceWrapper() {
  if (this.fired) return;
  this.target.removeListener(this.type, this.wrapFn);
  this.fired = true;
  switch (arguments.length) {
    case 0:
      return this.listener.call(this.target);
    case 1:
      return this.listener.call(this.target, arguments[0]);
    case 2:
      return this.listener.call(this.target, arguments[0], arguments[1]);
    case 3:
      return this.listener.call(this.target, arguments[0], arguments[1],
          arguments[2]);
    default:
      var forwarded = new Array(arguments.length);
      for (var i = 0; i < forwarded.length; ++i)
        forwarded[i] = arguments[i];
      this.listener.apply(this.target, forwarded);
  }
}

// Build the self-removing wrapper for once()/prependOnceListener. The
// original listener is exposed on `wrapped.listener` so removeListener
// can match it.
function _onceWrap(target, type, listener) {
  var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener };
  var wrapped = bind.call(onceWrapper, state);
  wrapped.listener = listener;
  state.wrapFn = wrapped;
  return wrapped;
}
|  | 
 | ||
// Register a listener that is removed after its first invocation.
EventEmitter.prototype.once = function once(type, listener) {
  if (typeof listener !== 'function')
    throw new TypeError('"listener" argument must be a function');
  this.on(type, _onceWrap(this, type, listener));
  return this;
};

// Same contract as once(), but the wrapper goes to the front of the list.
EventEmitter.prototype.prependOnceListener =
    function prependOnceListener(type, listener) {
      if (typeof listener !== 'function')
        throw new TypeError('"listener" argument must be a function');
      this.prependListener(type, _onceWrap(this, type, listener));
      return this;
    };
|  | 
 | ||
|  | // Emits a 'removeListener' event if and only if the listener was removed.
 | ||
// Remove `listener` for `type`. Emits 'removeListener' if and only if a
// listener was actually removed (and, per the original logic, not when
// removing the last remaining listener of the single-function shape
// resets the whole _events map).
EventEmitter.prototype.removeListener =
    function removeListener(type, listener) {
      var list, events, position, i, originalListener;

      if (typeof listener !== 'function')
        throw new TypeError('"listener" argument must be a function');

      events = this._events;
      if (!events)
        return this;

      list = events[type];
      if (!list)
        return this;

      // Single-function shape (possibly a once() wrapper carrying the
      // user's listener on `.listener`).
      if (list === listener || list.listener === listener) {
        if (--this._eventsCount === 0) {
          this._events = objectCreate(null);
        } else {
          delete events[type];
          if (events.removeListener)
            this.emit('removeListener', type, list.listener || listener);
        }
        return this;
      }

      // A non-matching single function: nothing to remove.
      if (typeof list === 'function')
        return this;

      // Array shape: scan from the tail for the listener or its wrapper.
      position = -1;
      for (i = list.length - 1; i >= 0; i--) {
        if (list[i] === listener || list[i].listener === listener) {
          originalListener = list[i].listener;
          position = i;
          break;
        }
      }

      if (position < 0)
        return this;

      if (position === 0)
        list.shift();
      else
        spliceOne(list, position);

      // Collapse a one-element array back to the bare-function shape.
      if (list.length === 1)
        events[type] = list[0];

      if (events.removeListener)
        this.emit('removeListener', type, originalListener || listener);

      return this;
    };
|  | 
 | ||
// Remove every listener, or every listener of one type. When someone is
// listening for 'removeListener', each removal is announced individually
// (LIFO), with 'removeListener' listeners themselves removed last.
EventEmitter.prototype.removeAllListeners =
    function removeAllListeners(type) {
      var listeners, events, i;

      events = this._events;
      if (!events)
        return this;

      if (!events.removeListener) {
        // Fast path: nobody observes removals, so just drop the storage.
        if (arguments.length === 0) {
          this._events = objectCreate(null);
          this._eventsCount = 0;
        } else if (events[type]) {
          if (--this._eventsCount === 0)
            this._events = objectCreate(null);
          else
            delete events[type];
        }
        return this;
      }

      if (arguments.length === 0) {
        // Emit 'removeListener' for all listeners of all other events
        // first, then for 'removeListener' itself.
        var keys = objectKeys(events);
        var key;
        for (i = 0; i < keys.length; ++i) {
          key = keys[i];
          if (key === 'removeListener') continue;
          this.removeAllListeners(key);
        }
        this.removeAllListeners('removeListener');
        this._events = objectCreate(null);
        this._eventsCount = 0;
        return this;
      }

      listeners = events[type];

      if (typeof listeners === 'function') {
        this.removeListener(type, listeners);
      } else if (listeners) {
        // LIFO order.
        for (i = listeners.length - 1; i >= 0; i--)
          this.removeListener(type, listeners[i]);
      }

      return this;
    };
|  | 
 | ||
// Shared lookup for listeners()/rawListeners(). Returns a fresh array of
// the listeners registered for `type`; with `unwrap` set, once() wrappers
// are replaced by the original listener they carry on `.listener`.
function _listeners(target, type, unwrap) {
  var events = target._events;
  if (!events)
    return [];

  var stored = events[type];
  if (!stored)
    return [];

  if (typeof stored === 'function')
    return unwrap ? [stored.listener || stored] : [stored];

  return unwrap
    ? unwrapListeners(stored)
    : arrayClone(stored, stored.length);
}
|  | 
 | ||
// Listeners with once() wrappers unwrapped to the user-supplied function.
EventEmitter.prototype.listeners = function listeners(type) {
  return _listeners(this, type, true);
};

// Listeners exactly as stored (wrappers included).
EventEmitter.prototype.rawListeners = function rawListeners(type) {
  return _listeners(this, type, false);
};
|  | 
 | ||
// Static helper kept for backwards compatibility; prefers the emitter's
// own listenerCount method when it has one.
EventEmitter.listenerCount = function (emitter, type) {
  if (typeof emitter.listenerCount === 'function')
    return emitter.listenerCount(type);
  return listenerCount.call(emitter, type);
};

EventEmitter.prototype.listenerCount = listenerCount;

// Count listeners for `type`: 0 (none), 1 (bare-function shape), or the
// array length.
function listenerCount(type) {
  var events = this._events;
  if (!events)
    return 0;

  var stored = events[type];
  if (typeof stored === 'function')
    return 1;
  return stored ? stored.length : 0;
}
|  | 
 | ||
// All event names with at least one listener; Reflect.ownKeys also picks
// up Symbol-keyed events.
EventEmitter.prototype.eventNames = function eventNames() {
  if (this._eventsCount > 0) {
    return Reflect.ownKeys(this._events);
  }
  return [];
};
|  | 
 | ||
|  | // About 1.5x faster than the two-arg version of Array#splice().
 | ||
// Remove list[index] by shifting the tail left one slot, then popping —
// about 1.5x faster than the two-arg Array#splice for this case.
function spliceOne(list, index) {
  var stop = list.length - 1;
  for (var i = index; i < stop; i += 1)
    list[i] = list[i + 1];
  list.pop();
}
|  | 
 | ||
// Copy the first n elements of arr into a new dense array.
function arrayClone(arr, n) {
  var out = new Array(n);
  var i = n;
  while (i--)
    out[i] = arr[i];
  return out;
}
|  | 
 | ||
// Map an array of stored listeners to their user-facing form: a once()
// wrapper is replaced by the original listener stashed on `.listener`.
function unwrapListeners(arr) {
  var out = new Array(arr.length);
  for (var i = 0; i < out.length; ++i) {
    var entry = arr[i];
    out[i] = entry.listener || entry;
  }
  return out;
}
|  | 
 | ||
// Minimal Object.create fallback: a throwaway constructor whose prototype
// is `proto` (no property-descriptor support — not needed here).
function objectCreatePolyfill(proto) {
  var Surrogate = function () {};
  Surrogate.prototype = proto;
  return new Surrogate();
}
// Minimal Object.keys fallback: collect own enumerable string keys.
// Fix: the original returned the loop variable `k` (i.e. the LAST key
// visited, a string) instead of the accumulated `keys` array.
function objectKeysPolyfill(obj) {
  var keys = [];
  for (var k in obj) {
    if (Object.prototype.hasOwnProperty.call(obj, k)) {
      keys.push(k);
    }
  }
  return keys;
}
// Minimal Function.prototype.bind fallback: binds `this` only, with no
// partial application (all this module requires).
function functionBindPolyfill(context) {
  var self = this;
  return function () {
    return self.apply(context, arguments);
  };
}
|  | 
 | ||
|  | },{}],4:[function(_dereq_,module,exports){ | ||
'use strict';
// Candidate async schedulers in preference order: module 2 (empty in this
// bundle, so it never passes test()), queueMicrotask (7), MutationObserver
// (6), MessageChannel (5), <script> readystatechange (8), setTimeout (9).
// The first whose test() passes is installed further down.
var types = [
  _dereq_(2),
  _dereq_(7),
  _dereq_(6),
  _dereq_(5),
  _dereq_(8),
  _dereq_(9)
];
// Drain-loop state shared between immediate(), nextTick() and the
// recovery hook below.
var draining;
var currentQueue;
var queueIndex = -1;
var queue = [];
var scheduled = false;

// Recovery hook: if a queued task throws, the setTimeout armed inside
// nextTick() fires this, re-queuing the unfinished tasks and resuming the
// drain.
function cleanUpNextTick() {
  if (!draining || !currentQueue) {
    return;
  }
  draining = false;
  if (currentQueue.length) {
    queue = currentQueue.concat(queue);
  } else {
    queueIndex = -1;
  }
  if (queue.length) {
    nextTick();
  }
}
|  | 
 | ||
|  | //named nextTick for less confusing stack traces
 | ||
// Drain the task queue; named nextTick for less confusing stack traces.
// The setTimeout(cleanUpNextTick) acts like a try/finally: if a task
// throws, the timer later fires and resumes draining; on a clean exit the
// timer is cancelled.
function nextTick() {
  if (draining) {
    return;
  }
  scheduled = false;
  draining = true;
  var pending = queue.length;
  var recoveryTimer = setTimeout(cleanUpNextTick);
  while (pending) {
    currentQueue = queue;
    queue = [];
    while (currentQueue && ++queueIndex < pending) {
      currentQueue[queueIndex].run();
    }
    queueIndex = -1;
    pending = queue.length;
  }
  currentQueue = null;
  queueIndex = -1;
  draining = false;
  clearTimeout(recoveryTimer);
}

// Pick the first scheduler whose feature test passes and install it with
// nextTick as the drain callback.
var scheduleDrain;
var i = -1;
var len = types.length;
while (++i < len) {
  if (types[i] && types[i].test && types[i].test()) {
    scheduleDrain = types[i].install(nextTick);
    break;
  }
}
|  | // v8 likes predictible objects
 | ||
// Task record: a function plus its pre-collected arguments. A dedicated
// constructor keeps the object shape predictable for the engine.
function Item(fun, array) {
  this.fun = fun;
  this.array = array;
}

// Invoke the stored function, with direct-call fast paths for 0-3
// arguments and a generic apply fallback.
Item.prototype.run = function () {
  var fn = this.fun;
  var argv = this.array;
  switch (argv.length) {
  case 0:
    return fn();
  case 1:
    return fn(argv[0]);
  case 2:
    return fn(argv[0], argv[1]);
  case 3:
    return fn(argv[0], argv[1], argv[2]);
  default:
    return fn.apply(null, argv);
  }
};
module.exports = immediate;
// Queue `task` (with any extra arguments) to run asynchronously; arms the
// installed scheduler unless a drain is already scheduled or in progress.
function immediate(task) {
  var extra = new Array(arguments.length - 1);
  if (arguments.length > 1) {
    for (var j = 1; j < arguments.length; j++) {
      extra[j - 1] = arguments[j];
    }
  }
  queue.push(new Item(task, extra));
  if (!scheduled && !draining) {
    scheduled = true;
    scheduleDrain();
  }
}
|  | 
 | ||
|  | },{"2":2,"5":5,"6":6,"7":7,"8":8,"9":9}],5:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | exports.test = function () { | ||
|  |   if (global.setImmediate) { | ||
|  |     // we can only get here in IE10
 | ||
|  |     // which doesn't handel postMessage well
 | ||
|  |     return false; | ||
|  |   } | ||
|  |   return typeof global.MessageChannel !== 'undefined'; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (func) { | ||
|  |   var channel = new global.MessageChannel(); | ||
|  |   channel.port1.onmessage = func; | ||
|  |   return function () { | ||
|  |     channel.port2.postMessage(0); | ||
|  |   }; | ||
|  | }; | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],6:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 'use strict'; | ||
|  | //based off rsvp https://github.com/tildeio/rsvp.js
 | ||
|  | //license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
 | ||
|  | //https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js
 | ||
|  | 
 | ||
|  | var Mutation = global.MutationObserver || global.WebKitMutationObserver; | ||
|  | 
 | ||
|  | exports.test = function () { | ||
|  |   return Mutation; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (handle) { | ||
|  |   var called = 0; | ||
|  |   var observer = new Mutation(handle); | ||
|  |   var element = global.document.createTextNode(''); | ||
|  |   observer.observe(element, { | ||
|  |     characterData: true | ||
|  |   }); | ||
|  |   return function () { | ||
|  |     element.data = (called = ++called % 2); | ||
|  |   }; | ||
|  | }; | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],7:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 'use strict'; | ||
|  | exports.test = function () { | ||
|  |   return typeof global.queueMicrotask === 'function'; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (func) { | ||
|  |   return function () { | ||
|  |     global.queueMicrotask(func); | ||
|  |   }; | ||
|  | }; | ||
|  | 
 | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],8:[function(_dereq_,module,exports){ | ||
|  | (function (global){(function (){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
|  | exports.test = function () { | ||
|  |   return 'document' in global && 'onreadystatechange' in global.document.createElement('script'); | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (handle) { | ||
|  |   return function () { | ||
|  | 
 | ||
|  |     // Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
 | ||
|  |     // into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
 | ||
|  |     var scriptEl = global.document.createElement('script'); | ||
|  |     scriptEl.onreadystatechange = function () { | ||
|  |       handle(); | ||
|  | 
 | ||
|  |       scriptEl.onreadystatechange = null; | ||
|  |       scriptEl.parentNode.removeChild(scriptEl); | ||
|  |       scriptEl = null; | ||
|  |     }; | ||
|  |     global.document.documentElement.appendChild(scriptEl); | ||
|  | 
 | ||
|  |     return handle; | ||
|  |   }; | ||
|  | }; | ||
|  | }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | ||
|  | },{}],9:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | exports.test = function () { | ||
|  |   return true; | ||
|  | }; | ||
|  | 
 | ||
|  | exports.install = function (t) { | ||
|  |   return function () { | ||
|  |     setTimeout(t, 0); | ||
|  |   }; | ||
|  | }; | ||
|  | },{}],10:[function(_dereq_,module,exports){ | ||
|  | if (typeof Object.create === 'function') { | ||
|  |   // implementation from standard node.js 'util' module
 | ||
|  |   module.exports = function inherits(ctor, superCtor) { | ||
|  |     if (superCtor) { | ||
|  |       ctor.super_ = superCtor | ||
|  |       ctor.prototype = Object.create(superCtor.prototype, { | ||
|  |         constructor: { | ||
|  |           value: ctor, | ||
|  |           enumerable: false, | ||
|  |           writable: true, | ||
|  |           configurable: true | ||
|  |         } | ||
|  |       }) | ||
|  |     } | ||
|  |   }; | ||
|  | } else { | ||
|  |   // old school shim for old browsers
 | ||
|  |   module.exports = function inherits(ctor, superCtor) { | ||
|  |     if (superCtor) { | ||
|  |       ctor.super_ = superCtor | ||
|  |       var TempCtor = function () {} | ||
|  |       TempCtor.prototype = superCtor.prototype | ||
|  |       ctor.prototype = new TempCtor() | ||
|  |       ctor.prototype.constructor = ctor | ||
|  |     } | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
|  | },{}],11:[function(_dereq_,module,exports){ | ||
|  | (function (factory) { | ||
|  |     if (typeof exports === 'object') { | ||
|  |         // Node/CommonJS
 | ||
|  |         module.exports = factory(); | ||
|  |     } else if (typeof define === 'function' && define.amd) { | ||
|  |         // AMD
 | ||
|  |         define(factory); | ||
|  |     } else { | ||
|  |         // Browser globals (with support for web workers)
 | ||
|  |         var glob; | ||
|  | 
 | ||
|  |         try { | ||
|  |             glob = window; | ||
|  |         } catch (e) { | ||
|  |             glob = self; | ||
|  |         } | ||
|  | 
 | ||
|  |         glob.SparkMD5 = factory(); | ||
|  |     } | ||
|  | }(function (undefined) { | ||
|  | 
 | ||
|  |     'use strict'; | ||
|  | 
 | ||
|  |     /* | ||
|  |      * Fastest md5 implementation around (JKM md5). | ||
|  |      * Credits: Joseph Myers | ||
|  |      * | ||
|  |      * @see http://www.myersdaily.org/joseph/javascript/md5-text.html
 | ||
|  |      * @see http://jsperf.com/md5-shootout/7
 | ||
|  |      */ | ||
|  | 
 | ||
|  |     /* this function is much faster, | ||
|  |       so if possible we use it. Some IEs | ||
|  |       are the only ones I know of that | ||
|  |       need the idiotic second function, | ||
|  |       generated by an if clause.  */ | ||
|  |     var add32 = function (a, b) { | ||
|  |         return (a + b) & 0xFFFFFFFF; | ||
|  |     }, | ||
|  |         hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f']; | ||
|  | 
 | ||
|  | 
 | ||
|  |     function cmn(q, a, b, x, s, t) { | ||
|  |         a = add32(add32(a, q), add32(x, t)); | ||
|  |         return add32((a << s) | (a >>> (32 - s)), b); | ||
|  |     } | ||
|  | 
 | ||
    // Core MD5 compression function: mixes one 64-byte block `k` (sixteen
    // little-endian 32-bit words) into the running state `x` ([a, b, c, d]),
    // updating `x` in place. The four groups of sixteen unrolled steps are
    // MD5's rounds 1-4 with their per-step constants and rotation amounts
    // (RFC 1321); `| 0` truncates every intermediate to a signed 32-bit int.
    function md5cycle(x, k) {
        var a = x[0],
            b = x[1],
            c = x[2],
            d = x[3];

        // Round 1: F(b, c, d) = (b & c) | (~b & d), rotations 7/12/17/22
        a += (b & c | ~b & d) + k[0] - 680876936 | 0;
        a  = (a << 7 | a >>> 25) + b | 0;
        d += (a & b | ~a & c) + k[1] - 389564586 | 0;
        d  = (d << 12 | d >>> 20) + a | 0;
        c += (d & a | ~d & b) + k[2] + 606105819 | 0;
        c  = (c << 17 | c >>> 15) + d | 0;
        b += (c & d | ~c & a) + k[3] - 1044525330 | 0;
        b  = (b << 22 | b >>> 10) + c | 0;
        a += (b & c | ~b & d) + k[4] - 176418897 | 0;
        a  = (a << 7 | a >>> 25) + b | 0;
        d += (a & b | ~a & c) + k[5] + 1200080426 | 0;
        d  = (d << 12 | d >>> 20) + a | 0;
        c += (d & a | ~d & b) + k[6] - 1473231341 | 0;
        c  = (c << 17 | c >>> 15) + d | 0;
        b += (c & d | ~c & a) + k[7] - 45705983 | 0;
        b  = (b << 22 | b >>> 10) + c | 0;
        a += (b & c | ~b & d) + k[8] + 1770035416 | 0;
        a  = (a << 7 | a >>> 25) + b | 0;
        d += (a & b | ~a & c) + k[9] - 1958414417 | 0;
        d  = (d << 12 | d >>> 20) + a | 0;
        c += (d & a | ~d & b) + k[10] - 42063 | 0;
        c  = (c << 17 | c >>> 15) + d | 0;
        b += (c & d | ~c & a) + k[11] - 1990404162 | 0;
        b  = (b << 22 | b >>> 10) + c | 0;
        a += (b & c | ~b & d) + k[12] + 1804603682 | 0;
        a  = (a << 7 | a >>> 25) + b | 0;
        d += (a & b | ~a & c) + k[13] - 40341101 | 0;
        d  = (d << 12 | d >>> 20) + a | 0;
        c += (d & a | ~d & b) + k[14] - 1502002290 | 0;
        c  = (c << 17 | c >>> 15) + d | 0;
        b += (c & d | ~c & a) + k[15] + 1236535329 | 0;
        b  = (b << 22 | b >>> 10) + c | 0;

        // Round 2: G(b, c, d) = (b & d) | (c & ~d), rotations 5/9/14/20
        a += (b & d | c & ~d) + k[1] - 165796510 | 0;
        a  = (a << 5 | a >>> 27) + b | 0;
        d += (a & c | b & ~c) + k[6] - 1069501632 | 0;
        d  = (d << 9 | d >>> 23) + a | 0;
        c += (d & b | a & ~b) + k[11] + 643717713 | 0;
        c  = (c << 14 | c >>> 18) + d | 0;
        b += (c & a | d & ~a) + k[0] - 373897302 | 0;
        b  = (b << 20 | b >>> 12) + c | 0;
        a += (b & d | c & ~d) + k[5] - 701558691 | 0;
        a  = (a << 5 | a >>> 27) + b | 0;
        d += (a & c | b & ~c) + k[10] + 38016083 | 0;
        d  = (d << 9 | d >>> 23) + a | 0;
        c += (d & b | a & ~b) + k[15] - 660478335 | 0;
        c  = (c << 14 | c >>> 18) + d | 0;
        b += (c & a | d & ~a) + k[4] - 405537848 | 0;
        b  = (b << 20 | b >>> 12) + c | 0;
        a += (b & d | c & ~d) + k[9] + 568446438 | 0;
        a  = (a << 5 | a >>> 27) + b | 0;
        d += (a & c | b & ~c) + k[14] - 1019803690 | 0;
        d  = (d << 9 | d >>> 23) + a | 0;
        c += (d & b | a & ~b) + k[3] - 187363961 | 0;
        c  = (c << 14 | c >>> 18) + d | 0;
        b += (c & a | d & ~a) + k[8] + 1163531501 | 0;
        b  = (b << 20 | b >>> 12) + c | 0;
        a += (b & d | c & ~d) + k[13] - 1444681467 | 0;
        a  = (a << 5 | a >>> 27) + b | 0;
        d += (a & c | b & ~c) + k[2] - 51403784 | 0;
        d  = (d << 9 | d >>> 23) + a | 0;
        c += (d & b | a & ~b) + k[7] + 1735328473 | 0;
        c  = (c << 14 | c >>> 18) + d | 0;
        b += (c & a | d & ~a) + k[12] - 1926607734 | 0;
        b  = (b << 20 | b >>> 12) + c | 0;

        // Round 3: H(b, c, d) = b ^ c ^ d, rotations 4/11/16/23
        a += (b ^ c ^ d) + k[5] - 378558 | 0;
        a  = (a << 4 | a >>> 28) + b | 0;
        d += (a ^ b ^ c) + k[8] - 2022574463 | 0;
        d  = (d << 11 | d >>> 21) + a | 0;
        c += (d ^ a ^ b) + k[11] + 1839030562 | 0;
        c  = (c << 16 | c >>> 16) + d | 0;
        b += (c ^ d ^ a) + k[14] - 35309556 | 0;
        b  = (b << 23 | b >>> 9) + c | 0;
        a += (b ^ c ^ d) + k[1] - 1530992060 | 0;
        a  = (a << 4 | a >>> 28) + b | 0;
        d += (a ^ b ^ c) + k[4] + 1272893353 | 0;
        d  = (d << 11 | d >>> 21) + a | 0;
        c += (d ^ a ^ b) + k[7] - 155497632 | 0;
        c  = (c << 16 | c >>> 16) + d | 0;
        b += (c ^ d ^ a) + k[10] - 1094730640 | 0;
        b  = (b << 23 | b >>> 9) + c | 0;
        a += (b ^ c ^ d) + k[13] + 681279174 | 0;
        a  = (a << 4 | a >>> 28) + b | 0;
        d += (a ^ b ^ c) + k[0] - 358537222 | 0;
        d  = (d << 11 | d >>> 21) + a | 0;
        c += (d ^ a ^ b) + k[3] - 722521979 | 0;
        c  = (c << 16 | c >>> 16) + d | 0;
        b += (c ^ d ^ a) + k[6] + 76029189 | 0;
        b  = (b << 23 | b >>> 9) + c | 0;
        a += (b ^ c ^ d) + k[9] - 640364487 | 0;
        a  = (a << 4 | a >>> 28) + b | 0;
        d += (a ^ b ^ c) + k[12] - 421815835 | 0;
        d  = (d << 11 | d >>> 21) + a | 0;
        c += (d ^ a ^ b) + k[15] + 530742520 | 0;
        c  = (c << 16 | c >>> 16) + d | 0;
        b += (c ^ d ^ a) + k[2] - 995338651 | 0;
        b  = (b << 23 | b >>> 9) + c | 0;

        // Round 4: I(b, c, d) = c ^ (b | ~d), rotations 6/10/15/21
        a += (c ^ (b | ~d)) + k[0] - 198630844 | 0;
        a  = (a << 6 | a >>> 26) + b | 0;
        d += (b ^ (a | ~c)) + k[7] + 1126891415 | 0;
        d  = (d << 10 | d >>> 22) + a | 0;
        c += (a ^ (d | ~b)) + k[14] - 1416354905 | 0;
        c  = (c << 15 | c >>> 17) + d | 0;
        b += (d ^ (c | ~a)) + k[5] - 57434055 | 0;
        b  = (b << 21 |b >>> 11) + c | 0;
        a += (c ^ (b | ~d)) + k[12] + 1700485571 | 0;
        a  = (a << 6 | a >>> 26) + b | 0;
        d += (b ^ (a | ~c)) + k[3] - 1894986606 | 0;
        d  = (d << 10 | d >>> 22) + a | 0;
        c += (a ^ (d | ~b)) + k[10] - 1051523 | 0;
        c  = (c << 15 | c >>> 17) + d | 0;
        b += (d ^ (c | ~a)) + k[1] - 2054922799 | 0;
        b  = (b << 21 |b >>> 11) + c | 0;
        a += (c ^ (b | ~d)) + k[8] + 1873313359 | 0;
        a  = (a << 6 | a >>> 26) + b | 0;
        d += (b ^ (a | ~c)) + k[15] - 30611744 | 0;
        d  = (d << 10 | d >>> 22) + a | 0;
        c += (a ^ (d | ~b)) + k[6] - 1560198380 | 0;
        c  = (c << 15 | c >>> 17) + d | 0;
        b += (d ^ (c | ~a)) + k[13] + 1309151649 | 0;
        b  = (b << 21 |b >>> 11) + c | 0;
        a += (c ^ (b | ~d)) + k[4] - 145523070 | 0;
        a  = (a << 6 | a >>> 26) + b | 0;
        d += (b ^ (a | ~c)) + k[11] - 1120210379 | 0;
        d  = (d << 10 | d >>> 22) + a | 0;
        c += (a ^ (d | ~b)) + k[2] + 718787259 | 0;
        c  = (c << 15 | c >>> 17) + d | 0;
        b += (d ^ (c | ~a)) + k[9] - 343485551 | 0;
        b  = (b << 21 | b >>> 11) + c | 0;

        // Fold this block's result back into the chaining state.
        x[0] = a + x[0] | 0;
        x[1] = b + x[1] | 0;
        x[2] = c + x[2] | 0;
        x[3] = d + x[3] | 0;
    }
|  | 
 | ||
|  |     function md5blk(s) { | ||
|  |         var md5blks = [], | ||
|  |             i; /* Andy King said do it this way. */ | ||
|  | 
 | ||
|  |         for (i = 0; i < 64; i += 4) { | ||
|  |             md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24); | ||
|  |         } | ||
|  |         return md5blks; | ||
|  |     } | ||
|  | 
 | ||
|  |     function md5blk_array(a) { | ||
|  |         var md5blks = [], | ||
|  |             i; /* Andy King said do it this way. */ | ||
|  | 
 | ||
|  |         for (i = 0; i < 64; i += 4) { | ||
|  |             md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24); | ||
|  |         } | ||
|  |         return md5blks; | ||
|  |     } | ||
|  | 
 | ||
    // One-shot MD5 over a complete binary string `s` (one byte per char).
    // Returns the four-word state array; callers run it through hex() to
    // obtain the digest string.
    function md51(s) {
        var n = s.length,
            // Standard MD5 initialization vector (A, B, C, D).
            state = [1732584193, -271733879, -1732584194, 271733878],
            i,
            length,
            tail,
            tmp,
            lo,
            hi;

        // Hash every complete 64-byte block.
        for (i = 64; i <= n; i += 64) {
            md5cycle(state, md5blk(s.substring(i - 64, i)));
        }
        // `i` has overshot by one block here, so this keeps the <64-byte tail.
        s = s.substring(i - 64);
        length = s.length;
        tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
        // Pack the remaining bytes into 16 little-endian words.
        for (i = 0; i < length; i += 1) {
            tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3);
        }
        // Append the mandatory 0x80 padding byte; if it lands past byte 55
        // there is no room left for the 64-bit length in words 14-15, so
        // flush this block and pad a fresh one.
        tail[i >> 2] |= 0x80 << ((i % 4) << 3);
        if (i > 55) {
            md5cycle(state, tail);
            for (i = 0; i < 16; i += 1) {
                tail[i] = 0;
            }
        }

        // Beware that the final length might not fit in 32 bits so we take care of that
        // (split the bit count into low/high 32-bit halves via a hex string).
        tmp = n * 8;
        tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
        lo = parseInt(tmp[2], 16);
        hi = parseInt(tmp[1], 16) || 0;

        tail[14] = lo;
        tail[15] = hi;

        md5cycle(state, tail);
        return state;
    }
|  | 
 | ||
    // One-shot MD5 over a complete Uint8Array. Mirrors md51() but reads
    // bytes instead of character codes. Returns the four-word state array.
    function md51_array(a) {
        var n = a.length,
            // Standard MD5 initialization vector (A, B, C, D).
            state = [1732584193, -271733879, -1732584194, 271733878],
            i,
            length,
            tail,
            tmp,
            lo,
            hi;

        // Hash every complete 64-byte block.
        for (i = 64; i <= n; i += 64) {
            md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
        }

        // Not sure if it is a bug, however IE10 will always produce a sub array of length 1
        // containing the last element of the parent array if the sub array specified starts
        // beyond the length of the parent array - weird.
        // https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue
        a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0);

        length = a.length;
        tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
        // Pack the remaining bytes into 16 little-endian words.
        for (i = 0; i < length; i += 1) {
            tail[i >> 2] |= a[i] << ((i % 4) << 3);
        }

        // 0x80 padding byte; flush and restart the block if the 64-bit
        // length no longer fits in words 14-15.
        tail[i >> 2] |= 0x80 << ((i % 4) << 3);
        if (i > 55) {
            md5cycle(state, tail);
            for (i = 0; i < 16; i += 1) {
                tail[i] = 0;
            }
        }

        // Beware that the final length might not fit in 32 bits so we take care of that
        tmp = n * 8;
        tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
        lo = parseInt(tmp[2], 16);
        hi = parseInt(tmp[1], 16) || 0;

        tail[14] = lo;
        tail[15] = hi;

        md5cycle(state, tail);

        return state;
    }
|  | 
 | ||
|  |     function rhex(n) { | ||
|  |         var s = '', | ||
|  |             j; | ||
|  |         for (j = 0; j < 4; j += 1) { | ||
|  |             s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F]; | ||
|  |         } | ||
|  |         return s; | ||
|  |     } | ||
|  | 
 | ||
|  |     function hex(x) { | ||
|  |         var i; | ||
|  |         for (i = 0; i < x.length; i += 1) { | ||
|  |             x[i] = rhex(x[i]); | ||
|  |         } | ||
|  |         return x.join(''); | ||
|  |     } | ||
|  | 
 | ||
    // Self-test: if the fast masked add32 produced a wrong digest for a
    // known test vector (per the comment above, some IE engines need this),
    // swap in a carry-propagating implementation that adds in 16-bit halves.
    if (hex(md51('hello')) !== '5d41402abc4b2a76b9719d911017c592') {
        add32 = function (x, y) {
            var lsw = (x & 0xFFFF) + (y & 0xFFFF),
                msw = (x >> 16) + (y >> 16) + (lsw >> 16);
            return (msw << 16) | (lsw & 0xFFFF);
        };
    }
|  | 
 | ||
|  |     // ---------------------------------------------------
 | ||
|  | 
 | ||
    /**
     * ArrayBuffer slice polyfill, installed only on engines that lack a
     * native ArrayBuffer.prototype.slice.
     *
     * @see https://github.com/ttaubert/node-arraybuffer-slice
     */

    if (typeof ArrayBuffer !== 'undefined' && !ArrayBuffer.prototype.slice) {
        (function () {
            // Clamp an offset into [0, length]; negative offsets are
            // resolved relative to the end, as Array#slice does.
            function clamp(val, length) {
                val = (val | 0) || 0;

                if (val < 0) {
                    return Math.max(val + length, 0);
                }

                return Math.min(val, length);
            }

            ArrayBuffer.prototype.slice = function (from, to) {
                var length = this.byteLength,
                    begin = clamp(from, length),
                    end = length,
                    num,
                    target,
                    targetArray,
                    sourceArray;

                // `to` defaults to the buffer end when omitted.
                if (to !== undefined) {
                    end = clamp(to, length);
                }

                // Inverted ranges yield an empty buffer, not an error.
                if (begin > end) {
                    return new ArrayBuffer(0);
                }

                num = end - begin;
                target = new ArrayBuffer(num);
                targetArray = new Uint8Array(target);

                // Copy the selected byte range into the fresh buffer.
                sourceArray = new Uint8Array(this, begin, num);
                targetArray.set(sourceArray);

                return target;
            };
        })();
    }
|  | 
 | ||
|  |     // ---------------------------------------------------
 | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Helpers. | ||
|  |      */ | ||
|  | 
 | ||
|  |     function toUtf8(str) { | ||
|  |         if (/[\u0080-\uFFFF]/.test(str)) { | ||
|  |             str = unescape(encodeURIComponent(str)); | ||
|  |         } | ||
|  | 
 | ||
|  |         return str; | ||
|  |     } | ||
|  | 
 | ||
|  |     function utf8Str2ArrayBuffer(str, returnUInt8Array) { | ||
|  |         var length = str.length, | ||
|  |            buff = new ArrayBuffer(length), | ||
|  |            arr = new Uint8Array(buff), | ||
|  |            i; | ||
|  | 
 | ||
|  |         for (i = 0; i < length; i += 1) { | ||
|  |             arr[i] = str.charCodeAt(i); | ||
|  |         } | ||
|  | 
 | ||
|  |         return returnUInt8Array ? arr : buff; | ||
|  |     } | ||
|  | 
 | ||
|  |     function arrayBuffer2Utf8Str(buff) { | ||
|  |         return String.fromCharCode.apply(null, new Uint8Array(buff)); | ||
|  |     } | ||
|  | 
 | ||
|  |     function concatenateArrayBuffers(first, second, returnUInt8Array) { | ||
|  |         var result = new Uint8Array(first.byteLength + second.byteLength); | ||
|  | 
 | ||
|  |         result.set(new Uint8Array(first)); | ||
|  |         result.set(new Uint8Array(second), first.byteLength); | ||
|  | 
 | ||
|  |         return returnUInt8Array ? result : result.buffer; | ||
|  |     } | ||
|  | 
 | ||
|  |     function hexToBinaryString(hex) { | ||
|  |         var bytes = [], | ||
|  |             length = hex.length, | ||
|  |             x; | ||
|  | 
 | ||
|  |         for (x = 0; x < length - 1; x += 2) { | ||
|  |             bytes.push(parseInt(hex.substr(x, 2), 16)); | ||
|  |         } | ||
|  | 
 | ||
|  |         return String.fromCharCode.apply(String, bytes); | ||
|  |     } | ||
|  | 
 | ||
|  |     // ---------------------------------------------------
 | ||
|  | 
 | ||
    /**
     * SparkMD5 OOP implementation.
     *
     * Use this class to perform an incremental md5, otherwise use the
     * static methods instead.
     *
     * @constructor
     */

    function SparkMD5() {
        // call reset to init the instance (sets _buff, _length, _hash)
        this.reset();
    }
|  | 
 | ||
    /**
     * Appends a string.
     * A conversion will be applied if an utf8 string is detected.
     *
     * @param {String} str The string to be appended
     *
     * @return {SparkMD5} The instance itself (chainable)
     */
    SparkMD5.prototype.append = function (str) {
        // Converts the string to utf8 bytes if necessary,
        // then appends as binary via appendBinary().
        this.appendBinary(toUtf8(str));

        return this;
    };
|  | 
 | ||
|  |     /** | ||
|  |      * Appends a binary string. | ||
|  |      * | ||
|  |      * @param {String} contents The binary string to be appended | ||
|  |      * | ||
|  |      * @return {SparkMD5} The instance itself | ||
|  |      */ | ||
|  |     SparkMD5.prototype.appendBinary = function (contents) { | ||
|  |         this._buff += contents; | ||
|  |         this._length += contents.length; | ||
|  | 
 | ||
|  |         var length = this._buff.length, | ||
|  |             i; | ||
|  | 
 | ||
|  |         for (i = 64; i <= length; i += 64) { | ||
|  |             md5cycle(this._hash, md5blk(this._buff.substring(i - 64, i))); | ||
|  |         } | ||
|  | 
 | ||
|  |         this._buff = this._buff.substring(i - 64); | ||
|  | 
 | ||
|  |         return this; | ||
|  |     }; | ||
|  | 
 | ||
|  |     /** | ||
|  |      * Finishes the incremental computation, reseting the internal state and | ||
|  |      * returning the result. | ||
|  |      * | ||
|  |      * @param {Boolean} raw True to get the raw string, false to get the hex string | ||
|  |      * | ||
|  |      * @return {String} The result | ||
|  |      */ | ||
|  |     SparkMD5.prototype.end = function (raw) { | ||
|  |         var buff = this._buff, | ||
|  |             length = buff.length, | ||
|  |             i, | ||
|  |             tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], | ||
|  |             ret; | ||
|  | 
 | ||
|  |         for (i = 0; i < length; i += 1) { | ||
|  |             tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3); | ||
|  |         } | ||
|  | 
 | ||
|  |         this._finish(tail, length); | ||
|  |         ret = hex(this._hash); | ||
|  | 
 | ||
|  |         if (raw) { | ||
|  |             ret = hexToBinaryString(ret); | ||
|  |         } | ||
|  | 
 | ||
|  |         this.reset(); | ||
|  | 
 | ||
|  |         return ret; | ||
|  |     }; | ||
|  | 
 | ||
    /**
     * Resets the internal state of the computation.
     *
     * @return {SparkMD5} The instance itself (chainable)
     */
    SparkMD5.prototype.reset = function () {
        // _buff holds the <64-byte tail not yet hashed; _hash is the
        // standard MD5 initialization vector (A, B, C, D).
        this._buff = '';
        this._length = 0;
        this._hash = [1732584193, -271733879, -1732584194, 271733878];

        return this;
    };
|  | 
 | ||
    /**
     * Gets the internal state of the computation.
     *
     * @return {Object} The state (buff/length/hash snapshot)
     */
    SparkMD5.prototype.getState = function () {
        return {
            buff: this._buff,
            length: this._length,
            // slice() so later updates do not mutate the snapshot's words
            hash: this._hash.slice()
        };
    };
|  | 
 | ||
    /**
     * Sets the internal state of the computation
     * (restores a snapshot produced by getState).
     *
     * @param {Object} state The state
     *
     * @return {SparkMD5} The instance itself (chainable)
     */
    SparkMD5.prototype.setState = function (state) {
        // NOTE(review): the hash array is adopted by reference, not copied —
        // later hashing mutates the caller's `state.hash`; confirm callers
        // do not reuse the snapshot.
        this._buff = state.buff;
        this._length = state.length;
        this._hash = state.hash;

        return this;
    };
|  | 
 | ||
    /**
     * Releases memory used by the incremental buffer and other additional
     * resources. If you plan to use the instance again, use reset instead.
     */
    SparkMD5.prototype.destroy = function () {
        // Drop all instance state; the instance is unusable until reset().
        delete this._hash;
        delete this._buff;
        delete this._length;
    };
|  | 
 | ||
    /**
     * Finish the final calculation based on the tail.
     *
     * @param {Array}  tail   The tail (will be modified)
     * @param {Number} length The length of the remaining buffer
     */
    SparkMD5.prototype._finish = function (tail, length) {
        var i = length,
            tmp,
            lo,
            hi;

        // Append the mandatory 0x80 padding byte right after the data; if it
        // lands past byte 55 there is no room for the 64-bit bit-length in
        // words 14-15, so flush this block and pad a fresh one.
        tail[i >> 2] |= 0x80 << ((i % 4) << 3);
        if (i > 55) {
            md5cycle(this._hash, tail);
            for (i = 0; i < 16; i += 1) {
                tail[i] = 0;
            }
        }

        // Do the final computation based on the tail and length
        // Beware that the final length may not fit in 32 bits so we take care of that
        // (split the bit count into low/high halves via a hex string).
        tmp = this._length * 8;
        tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
        lo = parseInt(tmp[2], 16);
        hi = parseInt(tmp[1], 16) || 0;

        tail[14] = lo;
        tail[15] = hi;
        md5cycle(this._hash, tail);
    };
|  | 
 | ||
    /**
     * Performs the md5 hash on a string (one-shot convenience).
     * A conversion will be applied if utf8 string is detected.
     *
     * @param {String}  str The string
     * @param {Boolean} [raw] True to get the raw string, false to get the hex string
     *
     * @return {String} The result
     */
    SparkMD5.hash = function (str, raw) {
        // Converts the string to utf8 bytes if necessary,
        // then computes it using the binary function.
        return SparkMD5.hashBinary(toUtf8(str), raw);
    };
|  | 
 | ||
|  |     /** | ||
|  |      * Performs the md5 hash on a binary string. | ||
|  |      * | ||
|  |      * @param {String}  content The binary string | ||
|  |      * @param {Boolean} [raw]     True to get the raw string, false to get the hex string | ||
|  |      * | ||
|  |      * @return {String} The result | ||
|  |      */ | ||
|  |     SparkMD5.hashBinary = function (content, raw) { | ||
|  |         var hash = md51(content), | ||
|  |             ret = hex(hash); | ||
|  | 
 | ||
|  |         return raw ? hexToBinaryString(ret) : ret; | ||
|  |     }; | ||
|  | 
 | ||
|  |     // ---------------------------------------------------
 | ||
|  | 
 | ||
    /**
     * SparkMD5 OOP implementation for array buffers.
     *
     * Use this class to perform an incremental md5 ONLY for array buffers.
     *
     * @constructor
     */
    SparkMD5.ArrayBuffer = function () {
        // call reset to init the instance (sets _buff, _length, _hash)
        this.reset();
    };
|  | 
 | ||
    /**
     * Appends an array buffer.
     *
     * @param {ArrayBuffer} arr The array to be appended
     *
     * @return {SparkMD5.ArrayBuffer} The instance itself (chainable)
     */
    SparkMD5.ArrayBuffer.prototype.append = function (arr) {
        // Join the pending (<64 byte) remainder with the new chunk.
        var buff = concatenateArrayBuffers(this._buff.buffer, arr, true),
            length = buff.length,
            i;

        this._length += arr.byteLength;

        // Hash every complete 64-byte block.
        for (i = 64; i <= length; i += 64) {
            md5cycle(this._hash, md5blk_array(buff.subarray(i - 64, i)));
        }

        // `i` has overshot by one block here; keep the unprocessed tail
        // (copied into its own buffer) for the next append()/end() call.
        this._buff = (i - 64) < length ? new Uint8Array(buff.buffer.slice(i - 64)) : new Uint8Array(0);

        return this;
    };
|  | 
 | ||
    /**
     * Finishes the incremental computation, resetting the internal state and
     * returning the result.
     *
     * @param {Boolean} raw True to get the raw string, false to get the hex string
     *
     * @return {String} The result
     */
    SparkMD5.ArrayBuffer.prototype.end = function (raw) {
        var buff = this._buff,
            length = buff.length,
            tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            i,
            ret;

        // Pack the pending (<64) bytes into 16 little-endian words.
        for (i = 0; i < length; i += 1) {
            tail[i >> 2] |= buff[i] << ((i % 4) << 3);
        }

        this._finish(tail, length);
        ret = hex(this._hash);

        if (raw) {
            ret = hexToBinaryString(ret);
        }

        // end() is terminal for this message; start fresh for the next one.
        this.reset();

        return ret;
    };
|  | 
 | ||
    /**
     * Resets the internal state of the computation.
     *
     * @return {SparkMD5.ArrayBuffer} The instance itself (chainable)
     */
    SparkMD5.ArrayBuffer.prototype.reset = function () {
        // _buff holds the <64-byte tail not yet hashed; _hash is the
        // standard MD5 initialization vector (A, B, C, D).
        this._buff = new Uint8Array(0);
        this._length = 0;
        this._hash = [1732584193, -271733879, -1732584194, 271733878];

        return this;
    };
|  | 
 | ||
    /**
     * Gets the internal state of the computation.
     *
     * @return {Object} The state (buff serialized as a string)
     */
    SparkMD5.ArrayBuffer.prototype.getState = function () {
        // Reuse the string-based implementation for the common fields.
        var state = SparkMD5.prototype.getState.call(this);

        // Convert buffer to a string (one char per byte) so the returned
        // state holds only primitives.
        state.buff = arrayBuffer2Utf8Str(state.buff);

        return state;
    };
|  | 
 | ||
    /**
     * Sets the internal state of the computation
     * (restores a snapshot produced by getState).
     *
     * @param {Object} state The state
     *
     * @return {SparkMD5.ArrayBuffer} The instance itself (chainable)
     */
    SparkMD5.ArrayBuffer.prototype.setState = function (state) {
        // Convert the serialized string back to a Uint8Array.
        // NOTE(review): this mutates the caller's `state` object in place —
        // confirm no caller reuses the snapshot before changing this.
        state.buff = utf8Str2ArrayBuffer(state.buff, true);

        return SparkMD5.prototype.setState.call(this, state);
    };
|  | 
 | ||
    // The array-buffer variant shares destroy() and _finish() with the
    // string-based implementation — both operate only on _buff/_length/_hash.
    SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy;

    SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish;
|  | 
 | ||
    /**
     * Performs the md5 hash on an array buffer (one-shot convenience,
     * no instance state involved).
     *
     * @param {ArrayBuffer} arr The array buffer
     * @param {Boolean}     [raw] True to get the raw string, false to get the hex one
     *
     * @return {String} The result
     */
    SparkMD5.ArrayBuffer.hash = function (arr, raw) {
        var hash = md51_array(new Uint8Array(arr)),
            ret = hex(hash);

        return raw ? hexToBinaryString(ret) : ret;
    };
|  | 
 | ||
|  |     return SparkMD5; | ||
|  | })); | ||
|  | 
 | ||
|  | },{}],12:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | Object.defineProperty(exports, "v1", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _v.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "v3", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _v2.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "v4", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _v3.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "v5", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _v4.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "NIL", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _nil.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "version", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _version.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "validate", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _validate.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "stringify", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _stringify.default; | ||
|  |   } | ||
|  | }); | ||
|  | Object.defineProperty(exports, "parse", { | ||
|  |   enumerable: true, | ||
|  |   get: function () { | ||
|  |     return _parse.default; | ||
|  |   } | ||
|  | }); | ||
|  | 
 | ||
|  | var _v = _interopRequireDefault(_dereq_(20)); | ||
|  | 
 | ||
|  | var _v2 = _interopRequireDefault(_dereq_(21)); | ||
|  | 
 | ||
|  | var _v3 = _interopRequireDefault(_dereq_(23)); | ||
|  | 
 | ||
|  | var _v4 = _interopRequireDefault(_dereq_(24)); | ||
|  | 
 | ||
|  | var _nil = _interopRequireDefault(_dereq_(14)); | ||
|  | 
 | ||
|  | var _version = _interopRequireDefault(_dereq_(26)); | ||
|  | 
 | ||
|  | var _validate = _interopRequireDefault(_dereq_(25)); | ||
|  | 
 | ||
|  | var _stringify = _interopRequireDefault(_dereq_(19)); | ||
|  | 
 | ||
|  | var _parse = _interopRequireDefault(_dereq_(15)); | ||
|  | 
 | ||
// Babel interop helper: transpiled ES modules (marked with __esModule)
// already expose `.default`; wrap plain CommonJS exports so both kinds can
// be accessed uniformly through `.default`.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
|  | },{"14":14,"15":15,"19":19,"20":20,"21":21,"23":23,"24":24,"25":25,"26":26}],13:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | /* | ||
|  |  * Browser-compatible JavaScript MD5 | ||
|  |  * | ||
|  |  * Modification of JavaScript MD5 | ||
|  |  * https://github.com/blueimp/JavaScript-MD5
 | ||
|  |  * | ||
|  |  * Copyright 2011, Sebastian Tschan | ||
|  |  * https://blueimp.net
 | ||
|  |  * | ||
|  |  * Licensed under the MIT license: | ||
|  |  * https://opensource.org/licenses/MIT
 | ||
|  |  * | ||
|  |  * Based on | ||
|  |  * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message | ||
|  |  * Digest Algorithm, as defined in RFC 1321. | ||
|  |  * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 | ||
|  |  * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet | ||
|  |  * Distributed under the BSD License | ||
|  |  * See http://pajhome.org.uk/crypt/md5 for more info.
 | ||
|  |  */ | ||
/*
 * Compute the MD5 digest of `bytes` and return it as an array of byte
 * values. A string input is first UTF-8 encoded; otherwise the input is
 * used as-is (any indexable byte sequence).
 */
function md5(bytes) {
  if (typeof bytes === 'string') {
    // UTF-8 escape, then read raw byte values out of the escaped string.
    const msg = unescape(encodeURIComponent(bytes));
    const buf = new Uint8Array(msg.length);
    for (let i = 0; i < msg.length; ++i) {
      buf[i] = msg.charCodeAt(i);
    }
    bytes = buf;
  }
  const words = bytesToWords(bytes);
  return md5ToHexEncodedArray(wordsToMd5(words, bytes.length * 8));
}
|  | /* | ||
|  |  * Convert an array of little-endian words to an array of bytes | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
/*
 * Convert an array of little-endian 32-bit words to an array of byte
 * values (one entry per byte, least-significant byte of each word first).
 */
function md5ToHexEncodedArray(input) {
  const hexTab = '0123456789abcdef';
  const totalBits = input.length * 32;
  const output = [];
  for (let bit = 0; bit < totalBits; bit += 8) {
    // Extract the byte at `bit`, then round-trip through hex digits
    // (matches the original implementation exactly).
    const x = input[bit >> 5] >>> bit % 32 & 0xff;
    output.push(parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16));
  }
  return output;
}
|  | /** | ||
|  |  * Calculate output length with padding and bit length | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
|  | function getOutputLength(inputLength8) { | ||
|  |   return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; | ||
|  | } | ||
|  | /* | ||
|  |  * Calculate the MD5 of an array of little-endian words, and a bit length. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
/*
 * Calculate the MD5 of an array of little-endian words, and a bit length.
 * `x` is mutated in place to append the RFC 1321 padding; returns the
 * final state [a, b, c, d] as signed 32-bit integers.
 */
function wordsToMd5(x, len) {
  /* append padding: the 0x80 terminator bit, then the bit length in the
     last word (high 32 bits of the length are left as-is / zero) */
  x[len >> 5] |= 0x80 << len % 32;
  x[getOutputLength(len) - 1] = len;
  // RFC 1321 initial chaining values A..D (as signed 32-bit constants).
  let a = 1732584193;
  let b = -271733879;
  let c = -1732584194;
  let d = 271733878;

  // Process the message in 16-word (512-bit) chunks.
  for (let i = 0; i < x.length; i += 16) {
    const olda = a;
    const oldb = b;
    const oldc = c;
    const oldd = d;
    // 64 steps: rounds 1-4 (F, G, H, I). Shift amounts and additive
    // constants are the fixed per-step values from RFC 1321.
    a = md5ff(a, b, c, d, x[i], 7, -680876936);
    d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
    c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
    b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
    a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
    d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
    c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
    b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
    a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
    d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
    c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
    b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
    a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
    d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
    c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
    b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
    a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
    d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
    c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
    b = md5gg(b, c, d, a, x[i], 20, -373897302);
    a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
    d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
    c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
    b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
    a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
    d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
    c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
    b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
    a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
    d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
    c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
    b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
    a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
    d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
    c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
    b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
    a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
    d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
    c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
    b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
    a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
    d = md5hh(d, a, b, c, x[i], 11, -358537222);
    c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
    b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
    a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
    d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
    c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
    b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
    a = md5ii(a, b, c, d, x[i], 6, -198630844);
    d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
    c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
    b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
    a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
    d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
    c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
    b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
    a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
    d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
    c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
    b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
    a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
    d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
    c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
    b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
    // Chain this chunk's result into the running state (mod 2^32).
    a = safeAdd(a, olda);
    b = safeAdd(b, oldb);
    c = safeAdd(c, oldc);
    d = safeAdd(d, oldd);
  }

  return [a, b, c, d];
}
|  | /* | ||
|  |  * Convert an array bytes to an array of little-endian words | ||
|  |  * Characters >255 have their high-byte silently ignored. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
/*
 * Convert an array of bytes to an array of little-endian 32-bit words,
 * sized (via getOutputLength) to hold the MD5 padding as well.
 * Characters >255 have their high byte silently ignored.
 */
function bytesToWords(input) {
  if (input.length === 0) {
    return [];
  }
  const totalBits = input.length * 8;
  const words = new Uint32Array(getOutputLength(totalBits));
  for (let bit = 0; bit < totalBits; bit += 8) {
    words[bit >> 5] |= (input[bit / 8] & 0xff) << bit % 32;
  }
  return words;
}
|  | /* | ||
|  |  * Add integers, wrapping at 2^32. This uses 16-bit operations internally | ||
|  |  * to work around bugs in some JS interpreters. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
/*
 * Add two integers, wrapping at 2^32. Works on 16-bit halves to sidestep
 * precision bugs in some older JS interpreters.
 */
function safeAdd(x, y) {
  const low = (x & 0xffff) + (y & 0xffff);
  const high = (x >> 16) + (y >> 16) + (low >> 16);
  return (high << 16) | (low & 0xffff);
}
|  | /* | ||
|  |  * Bitwise rotate a 32-bit number to the left. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
/*
 * Rotate a 32-bit number left by `cnt` bits (result as a signed int32).
 */
function bitRotateLeft(num, cnt) {
  return (num << cnt) | (num >>> (32 - cnt));
}
|  | /* | ||
|  |  * These functions implement the four basic operations the algorithm uses. | ||
|  |  */ | ||
|  | 
 | ||
|  | 
 | ||
// Common core of every MD5 step: a + f(b,c,d) + x + t, rotated left by s,
// then added to b — with all additions wrapping at 2^32 via safeAdd.
function md5cmn(q, a, b, x, s, t) {
  return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b);
}

// Round 1: F(b,c,d) = (b AND c) OR (NOT b AND d)
function md5ff(a, b, c, d, x, s, t) {
  return md5cmn(b & c | ~b & d, a, b, x, s, t);
}

// Round 2: G(b,c,d) = (b AND d) OR (c AND NOT d)
function md5gg(a, b, c, d, x, s, t) {
  return md5cmn(b & d | c & ~d, a, b, x, s, t);
}

// Round 3: H(b,c,d) = b XOR c XOR d
function md5hh(a, b, c, d, x, s, t) {
  return md5cmn(b ^ c ^ d, a, b, x, s, t);
}

// Round 4: I(b,c,d) = c XOR (b OR NOT d)
function md5ii(a, b, c, d, x, s, t) {
  return md5cmn(c ^ (b | ~d), a, b, x, s, t);
}
|  | 
 | ||
|  | var _default = md5; | ||
|  | exports.default = _default; | ||
|  | },{}],14:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
// The RFC 4122 "nil" UUID: all 128 bits zero.
var _default = '00000000-0000-0000-0000-000000000000';
exports.default = _default;
|  | },{}],15:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _validate = _interopRequireDefault(_dereq_(25)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
/*
 * Parse a UUID string into its 16 raw bytes (big-endian Uint8Array).
 * Throws TypeError if the string fails validation.
 */
function parse(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }

  let v;
  const arr = new Uint8Array(16);

  // Parse ########-....-....-....-............ (first 4 bytes)
  arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
  arr[1] = v >>> 16 & 0xff;
  arr[2] = v >>> 8 & 0xff;
  arr[3] = v & 0xff;

  // Parse ........-####-....-....-............ (bytes 4-5)
  arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
  arr[5] = v & 0xff;

  // Parse ........-....-####-....-............ (bytes 6-7, incl. version)
  arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
  arr[7] = v & 0xff;

  // Parse ........-....-....-####-............ (bytes 8-9, incl. variant)
  arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
  arr[9] = v & 0xff;

  // Parse ........-....-....-....-############ (last 6 bytes)
  // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
  arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
  arr[11] = v / 0x100000000 & 0xff;
  arr[12] = v >>> 24 & 0xff;
  arr[13] = v >>> 16 & 0xff;
  arr[14] = v >>> 8 & 0xff;
  arr[15] = v & 0xff;
  return arr;
}
|  | 
 | ||
|  | var _default = parse; | ||
|  | exports.default = _default; | ||
|  | },{"25":25}],16:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
// Matches RFC 4122 UUIDs (version 1-5, variant bits 8/9/a/b) and,
// as a special case, the all-zero nil UUID. Case-insensitive.
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
exports.default = _default;
|  | },{}],17:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = rng; | ||
|  | // Unique ID creation requires a high quality random # generator. In the browser we therefore
 | ||
|  | // require the crypto API and do not support built-in fallback to lower quality random number
 | ||
|  | // generators (like Math.random()).
 | ||
|  | let getRandomValues; | ||
|  | const rnds8 = new Uint8Array(16); | ||
|  | 
 | ||
|  | function rng() { | ||
|  |   // lazy load so that environments that need to polyfill have a chance to do so
 | ||
|  |   if (!getRandomValues) { | ||
|  |     // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also,
 | ||
|  |     // find the complete implementation of crypto (msCrypto) on IE11.
 | ||
|  |     getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); | ||
|  | 
 | ||
|  |     if (!getRandomValues) { | ||
|  |       throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   return getRandomValues(rnds8); | ||
|  | } | ||
|  | },{}],18:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | // Adapted from Chris Veness' SHA1 code at
 | ||
|  | // http://www.movable-type.co.uk/scripts/sha1.html
 | ||
// SHA-1, adapted from Chris Veness' implementation
// (http://www.movable-type.co.uk/scripts/sha1.html).

// Per-stage boolean mixing function; `s` selects the 20-round stage.
function f(s, x, y, z) {
  switch (s) {
    case 0:
      return x & y ^ ~x & z;
    case 1:
      return x ^ y ^ z;
    case 2:
      return x & y ^ x & z ^ y & z;
    case 3:
      return x ^ y ^ z;
  }
}

// 32-bit left rotation.
function ROTL(x, n) {
  return x << n | x >>> 32 - n;
}

/*
 * Compute the SHA-1 digest of `bytes` (string or byte sequence) and
 * return it as an array of 20 byte values.
 */
function sha1(bytes) {
  const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6];
  const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];

  if (typeof bytes === 'string') {
    // UTF-8 escape, then collect the raw byte values.
    const msg = unescape(encodeURIComponent(bytes));
    bytes = [];
    for (let i = 0; i < msg.length; ++i) {
      bytes.push(msg.charCodeAt(i));
    }
  } else if (!Array.isArray(bytes)) {
    // Convert array-likes (e.g. Uint8Array) to a real Array.
    bytes = Array.prototype.slice.call(bytes);
  }

  // Append the mandatory 0x80 terminator, then pack into 512-bit blocks
  // of sixteen big-endian 32-bit words.
  bytes.push(0x80);
  const l = bytes.length / 4 + 2;
  const N = Math.ceil(l / 16);
  const M = new Array(N);

  for (let block = 0; block < N; ++block) {
    const words = new Uint32Array(16);
    for (let w = 0; w < 16; ++w) {
      const base = block * 64 + w * 4;
      words[w] = bytes[base] << 24 | bytes[base + 1] << 16 | bytes[base + 2] << 8 | bytes[base + 3];
    }
    M[block] = words;
  }

  // Write the original bit length into the final two words
  // (high word via division since shifts truncate to 32 bits).
  M[N - 1][14] = Math.floor((bytes.length - 1) * 8 / Math.pow(2, 32));
  M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff;

  for (let block = 0; block < N; ++block) {
    // Message schedule: 16 block words expanded to 80.
    const W = new Uint32Array(80);
    for (let t = 0; t < 16; ++t) {
      W[t] = M[block][t];
    }
    for (let t = 16; t < 80; ++t) {
      W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1);
    }

    let a = H[0];
    let b = H[1];
    let c = H[2];
    let d = H[3];
    let e = H[4];

    for (let t = 0; t < 80; ++t) {
      const s = Math.floor(t / 20);
      const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0;
      e = d;
      d = c;
      c = ROTL(b, 30) >>> 0;
      b = a;
      a = T;
    }

    H[0] = H[0] + a >>> 0;
    H[1] = H[1] + b >>> 0;
    H[2] = H[2] + c >>> 0;
    H[3] = H[3] + d >>> 0;
    H[4] = H[4] + e >>> 0;
  }

  // Serialize the five state words big-endian into 20 bytes.
  const digest = [];
  for (let w = 0; w < 5; ++w) {
    digest.push(H[w] >> 24 & 0xff, H[w] >> 16 & 0xff, H[w] >> 8 & 0xff, H[w] & 0xff);
  }
  return digest;
}
|  | 
 | ||
|  | var _default = sha1; | ||
|  | exports.default = _default; | ||
|  | },{}],19:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _validate = _interopRequireDefault(_dereq_(25)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | /** | ||
|  |  * Convert array of 16 byte values to UUID string format of the form: | ||
|  |  * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX | ||
|  |  */ | ||
// Precomputed two-character lowercase hex string for every byte 0x00-0xff.
const byteToHex = [];

for (let i = 0; i < 256; ++i) {
  byteToHex.push((i + 0x100).toString(16).substr(1));
}

/*
 * Convert array of 16 byte values to UUID string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 * `offset` is the index of the first byte to read (default 0).
 */
function stringify(arr, offset = 0) {
  // Note: Be careful editing this code!  It's been tuned for performance
  // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
  const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase();
  // Consistency check for valid UUID.  If this throws, it's likely due to one
  // of the following:
  // - One or more input array values don't map to a hex octet (leading to
  //   "undefined" in the uuid)
  // - Invalid input values for the RFC `version` or `variant` fields
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Stringified UUID is invalid');
  }

  return uuid;
}
|  | 
 | ||
|  | var _default = stringify; | ||
|  | exports.default = _default; | ||
|  | },{"25":25}],20:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _rng = _interopRequireDefault(_dereq_(17)); | ||
|  | 
 | ||
|  | var _stringify = _interopRequireDefault(_dereq_(19)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
|  | // **`v1()` - Generate time-based UUID**
 | ||
|  | //
 | ||
|  | // Inspired by https://github.com/LiosK/UUID.js
 | ||
|  | // and http://docs.python.org/library/uuid.html
 | ||
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html

// Cached 48-bit node id, lazily seeded from the RNG on first use.
let _nodeId;

// Cached 14-bit clock sequence, also lazily seeded.
let _clockseq;

// Previous uuid creation time, split into integer milliseconds and a
// 100ns-unit remainder (see comment inside v1 below).
let _lastMSecs = 0;
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details

/*
 * Generate a time-based (version 1) UUID. With no `buf`, returns the
 * UUID as a string; with `buf`, writes the 16 bytes at `offset` and
 * returns `buf`. `options` may supply node, clockseq, msecs, nsecs,
 * random or rng overrides.
 */
function v1(options, buf, offset) {
  let i = buf && offset || 0;
  const b = buf || new Array(16);
  options = options || {};
  let node = options.node || _nodeId;
  let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq;

  // node and clockseq need to be initialized to random values if they're not
  // specified.  We do this lazily to minimize issues related to insufficient
  // system entropy.  See #189
  if (node == null || clockseq == null) {
    const seedBytes = options.random || (options.rng || _rng.default)();

    if (node == null) {
      // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
      node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
    }

    if (clockseq == null) {
      // Per 4.2.2, randomize (14 bit) clockseq
      clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
    }
  }

  // UUID timestamps are 100 nano-second units since the Gregorian epoch,
  // (1582-10-15 00:00).  JSNumbers aren't precise enough for this, so
  // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
  // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
  let msecs = options.msecs !== undefined ? options.msecs : Date.now();

  // Per 4.2.1.2, use count of uuid's generated during the current clock
  // cycle to simulate higher resolution clock
  let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1;

  // Time since last uuid creation (in msecs)
  const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000;

  // Per 4.2.1.2, Bump clockseq on clock regression
  if (dt < 0 && options.clockseq === undefined) {
    clockseq = clockseq + 1 & 0x3fff;
  }

  // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
  // time interval
  if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
    nsecs = 0;
  }

  // Per 4.2.1.2 Throw error if too many uuids are requested
  if (nsecs >= 10000) {
    throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
  }

  _lastMSecs = msecs;
  _lastNSecs = nsecs;
  _clockseq = clockseq;

  // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
  msecs += 12219292800000;

  // `time_low`
  const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
  b[i++] = tl >>> 24 & 0xff;
  b[i++] = tl >>> 16 & 0xff;
  b[i++] = tl >>> 8 & 0xff;
  b[i++] = tl & 0xff;

  // `time_mid` (high bits via division — shifts would truncate to 32 bits)
  const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
  b[i++] = tmh >>> 8 & 0xff;
  b[i++] = tmh & 0xff;

  // `time_high_and_version`
  b[i++] = tmh >>> 24 & 0xf | 0x10; // include version

  b[i++] = tmh >>> 16 & 0xff;

  // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
  b[i++] = clockseq >>> 8 | 0x80;

  // `clock_seq_low`
  b[i++] = clockseq & 0xff;

  // `node`
  for (let n = 0; n < 6; ++n) {
    b[i + n] = node[n];
  }

  return buf || (0, _stringify.default)(b);
}

var _default = v1;
exports.default = _default;
|  | 
 | ||
|  | var _default = v1; | ||
|  | exports.default = _default; | ||
|  | },{"17":17,"19":19}],21:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _v = _interopRequireDefault(_dereq_(22)); | ||
|  | 
 | ||
|  | var _md = _interopRequireDefault(_dereq_(13)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
// v3: name-based UUIDs hashed with MD5; 0x30 is the version nibble
// stamped into byte 6 by the shared v3/v5 factory.
const v3 = (0, _v.default)('v3', 0x30, _md.default);
var _default = v3;
exports.default = _default;
|  | },{"13":13,"22":22}],22:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = _default; | ||
|  | exports.URL = exports.DNS = void 0; | ||
|  | 
 | ||
|  | var _stringify = _interopRequireDefault(_dereq_(19)); | ||
|  | 
 | ||
|  | var _parse = _interopRequireDefault(_dereq_(15)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
/*
 * UTF-8 encode a string and return the resulting bytes as a plain array.
 */
function stringToBytes(str) {
  const escaped = unescape(encodeURIComponent(str)); // UTF8 escape
  const bytes = [];
  for (let i = 0; i < escaped.length; ++i) {
    bytes.push(escaped.charCodeAt(i));
  }
  return bytes;
}
|  | 
 | ||
// RFC 4122 Appendix C pre-defined namespace IDs for name-based UUIDs.
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
exports.DNS = DNS;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
exports.URL = URL;

/*
 * Factory shared by v3 (MD5) and v5 (SHA-1): builds a generator that
 * hashes namespace+name per RFC 4122 §4.3 and stamps the given
 * `version` nibble into the result.
 */
function _default(name, version, hashfunc) {
  function generateUUID(value, namespace, buf, offset) {
    if (typeof value === 'string') {
      value = stringToBytes(value);
    }

    if (typeof namespace === 'string') {
      namespace = (0, _parse.default)(namespace);
    }

    if (namespace.length !== 16) {
      throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
    }

    // Compute hash of namespace and value, Per 4.3
    // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
    // hashfunc([...namespace, ... value])`
    let bytes = new Uint8Array(16 + value.length);
    bytes.set(namespace);
    bytes.set(value, namespace.length);
    bytes = hashfunc(bytes);
    // Stamp the version nibble (byte 6) and the RFC variant bits (byte 8).
    bytes[6] = bytes[6] & 0x0f | version;
    bytes[8] = bytes[8] & 0x3f | 0x80;

    if (buf) {
      offset = offset || 0;

      for (let i = 0; i < 16; ++i) {
        buf[offset + i] = bytes[i];
      }

      return buf;
    }

    return (0, _stringify.default)(bytes);
  }

  // Function#name is not settable on some platforms (#270)
  try {
    generateUUID.name = name; // eslint-disable-next-line no-empty
  } catch (err) {}

  // For CommonJS default export support
  generateUUID.DNS = DNS;
  generateUUID.URL = URL;
  return generateUUID;
}
|  | },{"15":15,"19":19}],23:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _rng = _interopRequireDefault(_dereq_(17)); | ||
|  | 
 | ||
|  | var _stringify = _interopRequireDefault(_dereq_(19)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
/*
 * Generate a random (version 4) UUID. With no `buf`, returns a string;
 * with `buf`, writes the 16 bytes at `offset` and returns `buf`.
 * `options.random` supplies the 16 random bytes directly, or
 * `options.rng` supplies the generator (defaults to the module RNG).
 */
function v4(options, buf, offset) {
  options = options || {};
  const rnds = options.random || (options.rng || _rng.default)();

  // Per RFC 4122 §4.4: stamp version (0100) and variant (10xx) bits.
  rnds[6] = rnds[6] & 0x0f | 0x40;
  rnds[8] = rnds[8] & 0x3f | 0x80;

  // Copy bytes to buffer, if provided.
  if (buf) {
    offset = offset || 0;
    for (let idx = 0; idx < 16; ++idx) {
      buf[offset + idx] = rnds[idx];
    }
    return buf;
  }

  return (0, _stringify.default)(rnds);
}
|  | 
 | ||
|  | var _default = v4; | ||
|  | exports.default = _default; | ||
|  | },{"17":17,"19":19}],24:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _v = _interopRequireDefault(_dereq_(22)); | ||
|  | 
 | ||
|  | var _sha = _interopRequireDefault(_dereq_(18)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
// v5: name-based UUIDs hashed with SHA-1; 0x50 is the version nibble
// stamped into byte 6 by the shared v3/v5 factory.
const v5 = (0, _v.default)('v5', 0x50, _sha.default);
var _default = v5;
exports.default = _default;
|  | },{"18":18,"22":22}],25:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _regex = _interopRequireDefault(_dereq_(16)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
// True iff `uuid` is a string matching the RFC 4122 format (or the nil UUID).
function validate(uuid) {
  return typeof uuid === 'string' && _regex.default.test(uuid);
}
|  | 
 | ||
|  | var _default = validate; | ||
|  | exports.default = _default; | ||
|  | },{"16":16}],26:[function(_dereq_,module,exports){ | ||
|  | "use strict"; | ||
|  | 
 | ||
|  | Object.defineProperty(exports, "__esModule", { | ||
|  |   value: true | ||
|  | }); | ||
|  | exports.default = void 0; | ||
|  | 
 | ||
|  | var _validate = _interopRequireDefault(_dereq_(25)); | ||
|  | 
 | ||
|  | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
|  | 
 | ||
/*
 * Extract the RFC 4122 version nibble from a UUID string.
 * Returns 1-5 (or 0 for the nil UUID); throws TypeError when `uuid`
 * does not pass validation.
 */
function version(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }

  // Character 14 is the first digit of the third group — the version
  // nibble. (slice replaces the deprecated Annex-B String#substr.)
  return parseInt(uuid.slice(14, 15), 16);
}
|  | 
 | ||
|  | var _default = version; | ||
|  | exports.default = _default; | ||
|  | },{"25":25}],27:[function(_dereq_,module,exports){ | ||
|  | 'use strict'; | ||
|  | 
 | ||
// Unwrap a transpiled ES module namespace object to its default export;
// any other value (including functions and primitives) passes through.
function _interopDefault(ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
|  | 
 | ||
|  | _dereq_(1); | ||
|  | var immediate = _interopDefault(_dereq_(4)); | ||
|  | var inherits = _interopDefault(_dereq_(10)); | ||
|  | var EventEmitter = _interopDefault(_dereq_(3)); | ||
|  | var uuid = _dereq_(12); | ||
|  | var Md5 = _interopDefault(_dereq_(11)); | ||
|  | 
 | ||
|  | // most of this is borrowed from lodash.isPlainObject:
 | ||
|  | // https://github.com/fis-components/lodash.isplainobject/
 | ||
|  | // blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js
 | ||
|  | 
 | ||
|  | var funcToString = Function.prototype.toString; | ||
|  | var objectCtorString = funcToString.call(Object); | ||
|  | 
 | ||
// Prefix every user key with '$' so stored keys can never collide with
// Object.prototype properties of the plain-object backing store.
function mangle(key) {
  return '$' + key;
}
// Inverse of mangle: strip the '$' prefix.
function unmangle(key) {
  return key.substring(1);
}

// Minimal Map substitute backed by a plain object (string keys only).
function Map$1() {
  this._store = {};
}
Map$1.prototype.get = function (key) {
  return this._store[mangle(key)];
};
Map$1.prototype.set = function (key, value) {
  this._store[mangle(key)] = value;
  // Always reports success, mirroring the historical collection API.
  return true;
};
Map$1.prototype.has = function (key) {
  return mangle(key) in this._store;
};
Map$1.prototype.keys = function () {
  return Object.keys(this._store).map(k => unmangle(k));
};
Map$1.prototype["delete"] = function (key) {
  const mangled = mangle(key);
  const existed = mangled in this._store;
  delete this._store[mangled];
  return existed;
};
Map$1.prototype.forEach = function (cb) {
  // Snapshot the keys, then call back with (value, unmangled key).
  const mangledKeys = Object.keys(this._store);
  for (let idx = 0, len = mangledKeys.length; idx < len; idx++) {
    const mangled = mangledKeys[idx];
    cb(this._store[mangled], unmangle(mangled));
  }
};
Object.defineProperty(Map$1.prototype, 'size', {
  get: function () {
    return Object.keys(this._store).length;
  }
});
|  | 
 | ||
// Minimal Set polyfill layered on Map$1; the map's values are always `true`.
function Set$1(array) {
  this._store = new Map$1();
  // Optionally seed from an array of initial members.
  if (array && Array.isArray(array)) {
    for (var i = 0; i < array.length; i++) {
      this.add(array[i]);
    }
  }
}
Set$1.prototype.add = function (key) {
  return this._store.set(key, true);
};
Set$1.prototype.has = function (key) {
  return this._store.has(key);
};
Set$1.prototype.forEach = function (cb) {
  // Only the key is exposed; the stored value is a placeholder.
  this._store.forEach(function (_value, key) {
    cb(key);
  });
};
Object.defineProperty(Set$1.prototype, 'size', {
  get: function () {
    return this._store.size;
  }
});
|  | 
 | ||
|  | // Based on https://kangax.github.io/compat-table/es6/ we can sniff out
 | ||
|  | 
 | ||
|  | // based on https://github.com/montagejs/collections
 | ||
|  | 
 | ||
|  | // like underscore/lodash _.pick()
 | ||
// Shallow-copy only the listed properties (like underscore/lodash _.pick).
// Properties absent from `obj` are silently skipped.
function pick(obj, arr) {
  var picked = {};
  arr.forEach(function (prop) {
    if (prop in obj) {
      picked[prop] = obj[prop];
    }
  });
  return picked;
}
|  | 
 | ||
// Feature-detect localStorage once at load time: private-browsing modes
// and some embedded webviews expose the API but throw on any write.
var hasLocal = false;
try {
  localStorage.setItem('_pouch_check_localstorage', 1);
  hasLocal = !!localStorage.getItem('_pouch_check_localstorage');
} catch (e) {
  // Not available or not usable -- hasLocal stays false.
}

// Report the result of the one-time probe above.
function hasLocalStorage() {
  return hasLocal;
}
|  | 
 | ||
|  | // Custom nextTick() shim for browsers. In node, this will just be process.nextTick(). We
 | ||
|  | 
 | ||
inherits(Changes, EventEmitter);

/* istanbul ignore next */
// Cross-tab plumbing: other windows toggle a localStorage key (see
// notifyLocalWindows below), which fires a "storage" event here; re-emit
// the key so listeners for that database name re-poll.
function attachBrowserEvents(changes) {
  if (hasLocalStorage()) {
    addEventListener("storage", function (e) {
      changes.emit(e.key);
    });
  }
}

// Event hub that fans database change notifications out to registered
// polling listeners, both inside this window and across browser tabs.
function Changes() {
  EventEmitter.call(this);
  this._listeners = {};
  attachBrowserEvents(this);
}

// Register a polling listener: whenever `dbName` is notified, run a
// changes() request against `db` from opts.since and forward each row to
// opts.onChange. Notifications arriving mid-request are coalesced into a
// single follow-up poll.
Changes.prototype.addListener = function (dbName, id, db, opts) {
  /* istanbul ignore if */
  if (this._listeners[id]) {
    return;
  }
  var emitter = this;
  // false -> idle, true -> request in flight, 'waiting' -> poll again after.
  var state = false;

  function poll() {
    /* istanbul ignore if */
    if (!emitter._listeners[id]) {
      return;
    }
    if (state) {
      state = 'waiting';
      return;
    }
    state = true;

    var changesOpts = pick(opts, [
      'style', 'include_docs', 'attachments', 'conflicts', 'filter',
      'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
    ]);

    /* istanbul ignore next */
    function onError() {
      state = false;
    }

    db.changes(changesOpts).on('change', function (c) {
      if (c.seq > opts.since && !opts.cancelled) {
        opts.since = c.seq;
        opts.onChange(c);
      }
    }).on('complete', function () {
      if (state === 'waiting') {
        immediate(poll);
      }
      state = false;
    }).on('error', onError);
  }

  this._listeners[id] = poll;
  this.on(dbName, poll);
};

// Detach the listener registered under `id` for `dbName`.
Changes.prototype.removeListener = function (dbName, id) {
  /* istanbul ignore if */
  if (!(id in this._listeners)) {
    return;
  }
  EventEmitter.prototype.removeListener.call(this, dbName, this._listeners[id]);
  delete this._listeners[id];
};

/* istanbul ignore next */
// Toggle a localStorage value so *other* windows receive a "storage"
// event and re-poll their own listeners for this database.
Changes.prototype.notifyLocalWindows = function (dbName) {
  if (hasLocalStorage()) {
    localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
  }
};

// Notify listeners in this window (emit) and in other windows (storage).
Changes.prototype.notify = function (dbName) {
  this.emit(dbName);
  this.notifyLocalWindows(dbName);
};
|  | 
 | ||
// Invoke console[method](...) defensively: some environments lack a
// console entirely, or lack the requested method, in which case this is
// a silent no-op.
function guardedConsole(method) {
  /* istanbul ignore else */
  if (typeof console !== 'undefined' && typeof console[method] === 'function') {
    console[method].apply(console, Array.prototype.slice.call(arguments, 1));
  }
}
|  | 
 | ||
var assign;
{
  if (typeof Object.assign === 'function') {
    assign = Object.assign;
  } else {
    // lite Object.assign polyfill based on
    // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign
    assign = function (target) {
      var to = Object(target);

      for (var i = 1; i < arguments.length; i++) {
        var source = arguments[i];
        if (source == null) {
          continue; // skip undefined/null sources
        }
        for (var key in source) {
          // Guard against a shadowed hasOwnProperty.
          if (Object.prototype.hasOwnProperty.call(source, key)) {
            to[key] = source[key];
          }
        }
      }
      return to;
    };
  }
}

var $inject_Object_assign = assign;
|  | 
 | ||
inherits(PouchError, Error);

// Error subclass carrying a CouchDB-style {status, name, message} triple.
function PouchError(status, error, reason) {
  Error.call(this, reason);
  this.status = status;
  this.name = error;
  this.message = reason;
  this.error = true;
}

// Serialize as JSON so errors survive JSON/postMessage round-trips.
// NOTE(review): `reason` is only attached by createError(), never by this
// constructor, so it may be absent (and is then dropped by stringify).
PouchError.prototype.toString = function () {
  return JSON.stringify({
    status: this.status,
    name: this.name,
    message: this.message,
    reason: this.reason
  });
};

// Canonical error singletons shared across the adapter; createError()
// clones one of these when a concrete error is raised.
var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
|  | 
 | ||
// Clone one of the canonical PouchError singletons into a fresh object,
// optionally attaching a `reason`, while keeping it JSON-serializable.
function createError(error, reason) {
  function CustomPouchError(reason) {
    // Copy the template's data (non-function) properties manually so the
    // clone JSON-parses with the same fields.
    var names = Object.getOwnPropertyNames(error);
    for (var i = 0; i < names.length; i++) {
      var prop = names[i];
      if (typeof error[prop] !== 'function') {
        this[prop] = error[prop];
      }
    }

    // Singletons have no useful stack; capture one at the call site.
    if (this.stack === undefined) {
      this.stack = (new Error()).stack;
    }

    if (reason !== undefined) {
      this.reason = reason;
    }
  }
  CustomPouchError.prototype = PouchError.prototype;
  return new CustomPouchError(reason);
}
|  | 
 | ||
// Run a user-supplied changes filter. Note the inversion: the filter
// returns "keep this doc", we return "skip this doc". A throwing filter
// is reported as a bad_request error object instead of propagating.
function tryFilter(filter, doc, req) {
  try {
    return !filter(doc, req);
  } catch (err) {
    return createError(BAD_REQUEST, 'Filter function threw: ' + err.toString());
  }
}
|  | 
 | ||
// Build a predicate for the changes feed: applies the user filter,
// strips docs when include_docs is off, and stubs out attachment bodies
// unless attachments were explicitly requested.
function filterChange(opts) {
  var req = { query: opts.query_params };
  var hasFilter = opts.filter && typeof opts.filter === 'function';

  return function filter(change) {
    if (!change.doc) {
      // CSG sends events on the changes feed that don't have documents,
      // this hack makes a whole lot of existing code robust.
      change.doc = {};
    }

    var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req);

    // An object means the filter threw: propagate the error object as-is.
    if (typeof filterReturn === 'object') {
      return filterReturn;
    }

    // Truthy means the filter rejected this change.
    if (filterReturn) {
      return false;
    }

    if (!opts.include_docs) {
      delete change.doc;
    } else if (!opts.attachments) {
      // Replace full attachment bodies with stubs.
      for (var att in change.doc._attachments) {
        /* istanbul ignore else */
        if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) {
          change.doc._attachments[att].stub = true;
        }
      }
    }
    return true;
  };
}
|  | 
 | ||
|  | // shim for Function.prototype.name,
 | ||
|  | 
 | ||
|  | // Determine if an ID is valid
|  | //   - invalid IDs begin with an underscore that does not begin '_design' or
|  | //     '_local'
|  | //   - any other string value is a valid id
|  | // Returns the specific error object for each case
 | ||
// Validate a document id, throwing the matching error when invalid:
//  - missing/empty       -> MISSING_ID
//  - non-string          -> INVALID_ID
//  - reserved underscore -> RESERVED_ID (only _design/_local may lead
//    with an underscore)
function invalidIdError(id) {
  if (!id) {
    throw createError(MISSING_ID);
  }
  if (typeof id !== 'string') {
    throw createError(INVALID_ID);
  }
  if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
    throw createError(RESERVED_ID);
  }
}
|  | 
 | ||
|  | // Checks if a PouchDB object is "remote" or not. This is
 | ||
|  | 
 | ||
|  | // originally parseUri 1.2.2, now patched by us
 | ||
|  | 
 | ||
|  | // Based on https://github.com/alexdavid/scope-eval v0.0.3
 | ||
|  | 
 | ||
// Wrap the global btoa so it is always invoked as a plain function
// (avoids "Illegal invocation" when passed around detached).
var thisBtoa = function (binaryString) {
  return btoa(binaryString);
};
|  | 
 | ||
|  | // Abstracts constructing a Blob object, so it also works in older
 | ||
|  | // browsers that don't support the native Blob constructor (e.g.
 | ||
|  | // old QtWebKit versions, Android < 4.4).
 | ||
// Construct a Blob, falling back to the deprecated BlobBuilder family
// for old engines without the native Blob constructor (old QtWebKit,
// Android < 4.4).
function createBlob(parts, properties) {
  /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */
  parts = parts || [];
  properties = properties || {};
  try {
    return new Blob(parts, properties);
  } catch (e) {
    // Only a TypeError indicates "Blob is not constructible here".
    if (e.name !== "TypeError") {
      throw e;
    }
    var Builder =
      typeof BlobBuilder !== 'undefined' ? BlobBuilder :
      typeof MSBlobBuilder !== 'undefined' ? MSBlobBuilder :
      typeof MozBlobBuilder !== 'undefined' ? MozBlobBuilder :
      WebKitBlobBuilder;
    var builder = new Builder();
    parts.forEach(function (part) {
      builder.append(part);
    });
    return builder.getBlob(properties.type);
  }
}
|  | 
 | ||
|  | // From http://stackoverflow.com/questions/14967647/ (continues on next line)
 | ||
|  | // encode-decode-image-with-base64-breaks-image (2013-04-21)
 | ||
// Decode a "binary string" (one character per byte) into an ArrayBuffer.
// From http://stackoverflow.com/questions/14967647/ (continues on next line)
// encode-decode-image-with-base64-breaks-image (2013-04-21)
function binaryStringToArrayBuffer(bin) {
  var bytes = new Uint8Array(bin.length);
  for (var i = 0; i < bin.length; i++) {
    bytes[i] = bin.charCodeAt(i);
  }
  return bytes.buffer;
}
|  | 
 | ||
// Convert a binary string into a Blob of the given MIME type.
function binStringToBluffer(binString, type) {
  var buffer = binaryStringToArrayBuffer(binString);
  return createBlob([buffer], {type: type});
}
|  | 
 | ||
|  | //Can't find original post, but this is close
 | ||
|  | //http://stackoverflow.com/questions/6965107/ (continues on next line)
 | ||
|  | //converting-between-strings-and-arraybuffers
 | ||
// Encode an ArrayBuffer as a "binary string" (one character per byte).
// Can't find original post, but this is close:
// http://stackoverflow.com/questions/6965107/ (continues on next line)
// converting-between-strings-and-arraybuffers
function arrayBufferToBinaryString(buffer) {
  var bytes = new Uint8Array(buffer);
  var chars = [];
  for (var i = 0; i < bytes.byteLength; i++) {
    chars.push(String.fromCharCode(bytes[i]));
  }
  return chars.join('');
}
|  | 
 | ||
|  | // shim for browsers that don't support it
 | ||
// Read a Blob as a binary string, shimming through an ArrayBuffer read
// for browsers that lack FileReader#readAsBinaryString.
function readAsBinaryString(blob, callback) {
  var reader = new FileReader();
  var nativeSupport = typeof reader.readAsBinaryString === 'function';
  reader.onloadend = function (e) {
    var result = e.target.result || '';
    callback(nativeSupport ? result : arrayBufferToBinaryString(result));
  };
  if (nativeSupport) {
    reader.readAsBinaryString(blob);
  } else {
    reader.readAsArrayBuffer(blob);
  }
}
|  | 
 | ||
|  | // simplified API. universal browser support is assumed
 | ||
// Read a Blob into an ArrayBuffer; universal FileReader support assumed.
function readAsArrayBuffer(blob, callback) {
  var reader = new FileReader();
  reader.onloadend = function (e) {
    callback(e.target.result || new ArrayBuffer(0));
  };
  reader.readAsArrayBuffer(blob);
}
|  | 
 | ||
|  | // this is not used in the browser
 | ||
|  | 
 | ||
// Browser-safe setImmediate: fall back to setTimeout where unavailable.
var setImmediateShim = self.setImmediate || self.setTimeout;
// Strings/Blobs are fed to the MD5 hasher in chunks of this many bytes.
var MD5_CHUNK_SIZE = 32768;
|  | 
 | ||
// Base64-encode a raw binary digest string.
function rawToBase64(raw) {
  return thisBtoa(raw);
}
|  | 
 | ||
// Slice a Blob, preferring the vendor-prefixed webkitSlice on old WebKit.
function sliceBlob(blob, start, end) {
  var slice = blob.webkitSlice || blob.slice;
  return slice.call(blob, start, end);
}
|  | 
 | ||
// Append the [start, end) byte range of a Blob to the MD5 buffer, then
// invoke callback. Slices only when the range is a strict sub-range.
function appendBlob(buffer, blob, start, end, callback) {
  var chunk = (start > 0 || end < blob.size)
    ? sliceBlob(blob, start, end)
    : blob;
  readAsArrayBuffer(chunk, function (arrayBuffer) {
    buffer.append(arrayBuffer);
    callback();
  });
}
|  | 
 | ||
// Append the [start, end) range of a string to the MD5 buffer, then
// invoke callback. Substrings only when the range is a strict sub-range.
function appendString(buffer, string, start, end, callback) {
  var chunk = (start > 0 || end < string.length)
    ? string.substring(start, end)
    : string;
  buffer.appendBinary(chunk);
  callback();
}
|  | 
 | ||
// Compute a base64 MD5 digest of a string or Blob, feeding the hasher
// MD5_CHUNK_SIZE pieces at a time and yielding to the event loop between
// chunks so large attachments don't block the UI.
function binaryMd5(data, callback) {
  var inputIsString = typeof data === 'string';
  var len = inputIsString ? data.length : data.size;
  var chunkSize = Math.min(MD5_CHUNK_SIZE, len);
  var totalChunks = Math.ceil(len / chunkSize);
  var chunkIndex = 0;
  var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer();
  var append = inputIsString ? appendString : appendBlob;

  function scheduleNext() {
    setImmediateShim(loadNextChunk);
  }

  function finish() {
    var raw = buffer.end(true);
    callback(rawToBase64(raw));
    buffer.destroy();
  }

  function loadNextChunk() {
    var start = chunkIndex * chunkSize;
    var end = start + chunkSize;
    chunkIndex += 1;
    // NB: keep this exact comparison -- for empty input totalChunks is
    // NaN and the comparison is false, which routes straight to finish().
    if (chunkIndex < totalChunks) {
      append(buffer, data, start, end, scheduleNext);
    } else {
      append(buffer, data, start, end, finish);
    }
  }

  loadNextChunk();
}
|  | 
 | ||
// Hex MD5 of a plain string (used for deterministic rev generation).
function stringMd5(string) {
  return Md5.hash(string);
}
|  | 
 | ||
/**
 * Creates a new revision string that does NOT include the revision height
 * For example '56649f1b0506c6ca9fda0746eb0cacdf'
 *
 * With deterministic_revs the rev is the MD5 of the doc content (minus
 * its rev tree); otherwise it is a fresh random UUID.
 */
function rev$$1(doc, deterministic_revs) {
  if (!deterministic_revs) {
    return uuid.v4().replace(/-/g, '').toLowerCase();
  }
  // Hash a shallow copy so the caller's doc keeps its _rev_tree.
  var docForHashing = $inject_Object_assign({}, doc);
  delete docForHashing._rev_tree;
  return stringMd5(JSON.stringify(docForHashing));
}
|  | 
 | ||
|  | var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere
 | ||
|  | 
 | ||
// Sentinel numbers used to encode JSON values IndexedDB cannot index
// natively (null and booleans). They sit at the very bottom of the safe
// integer range so they sort before any realistic user-supplied number.
var IDB_NULL = Number.MIN_SAFE_INTEGER;
var IDB_FALSE = IDB_NULL + 1;
var IDB_TRUE = IDB_NULL + 2;
|  | 
 | ||
|  | // These are the same as below but without the global flag.
|  | // We want to use RegExp.test because it's really fast, but the global flag
|  | // makes the regex stateful (seriously), as it advances lastIndex across calls.
 | ||
var TEST_KEY_INVALID = /^[^a-zA-Z_$]|[^a-zA-Z0-9_$]+/;
var TEST_PATH_INVALID = /\\.|(^|\.)[^a-zA-Z_$]|[^a-zA-Z0-9_$.]+/;

// Does this field name (or dotted path, when isPath) contain characters
// that are not valid in an IndexedDB index keyPath?
function needsSanitise(name, isPath) {
  var pattern = isPath ? TEST_PATH_INVALID : TEST_KEY_INVALID;
  return pattern.test(name);
}
|  | 
 | ||
|  | //
 | ||
|  | // IndexedDB only allows valid JS names in its index paths, whereas JSON allows
 | ||
|  | // for any string at all. This converts invalid JS names to valid ones, to allow
 | ||
|  | // for them to be indexed.
 | ||
|  | //
 | ||
|  | // For example, "foo-bar" is a valid JSON key, but cannot be a valid JS name
 | ||
|  | // (because that would be read as foo minus bar).
 | ||
|  | //
 | ||
|  | // Very high level rules for valid JS names are:
 | ||
|  | //  - First character cannot start with a number
 | ||
|  | //  - Otherwise all characters must be be a-z, A-Z, 0-9, $ or _.
 | ||
|  | //  - We allow . unless the name represents a single field, as that represents
 | ||
|  | //    a deep index path.
 | ||
|  | //
 | ||
|  | // This is more aggressive than it needs to be, but also simpler.
 | ||
|  | //
 | ||
|  | var KEY_INVALID = new RegExp(TEST_KEY_INVALID.source, 'g'); | ||
|  | var PATH_INVALID = new RegExp(TEST_PATH_INVALID.source, 'g'); | ||
|  | var SLASH = '\\'.charCodeAt(0); | ||
|  | const IS_DOT = '.'.charCodeAt(0); | ||
|  | 
 | ||
|  | function sanitise(name, isPath) { | ||
|  |   var correctCharacters = function (match) { | ||
|  |     var good = ''; | ||
|  |     for (var i = 0; i < match.length; i++) { | ||
|  |       var code = match.charCodeAt(i); | ||
|  |       // If you're sanitising a path, a slash character is there to be interpreted
 | ||
|  |       // by whatever parses the path later as "escape the next thing".
 | ||
|  |       //
 | ||
|  |       // e.g., if you want to index THIS string:
 | ||
|  |       //   {"foo": {"bar.baz": "THIS"}}
 | ||
|  |       // Your index path would be "foo.bar\.baz".
 | ||
|  | 
 | ||
|  |       if (code === IS_DOT && isPath && i === 0) { | ||
|  |         good += '.'; | ||
|  |       } else if (code === SLASH && isPath) { | ||
|  |         continue; | ||
|  |       } else { | ||
|  |         good += '_c' + code + '_'; | ||
|  |       } | ||
|  |     } | ||
|  |     return good; | ||
|  |   }; | ||
|  | 
 | ||
|  |   if (isPath) { | ||
|  |     return name.replace(PATH_INVALID, correctCharacters); | ||
|  |   } else { | ||
|  |     return name.replace(KEY_INVALID, correctCharacters); | ||
|  |   } | ||
|  | } | ||
|  | 
 | ||
// Does this value need rewrite() before storage in IndexedDB?
// True when any key (at any depth) needs sanitising, or any value is
// null/boolean (stored as sentinel numbers -- see IDB_NULL/IDB_FALSE/
// IDB_TRUE). Returns undefined (falsy) when no rewrite is required.
function needsRewrite(data) {
  for (var key of Object.keys(data)) {
    if (needsSanitise(key)) {
      return true;
    } else if (data[key] === null || typeof data[key] === 'boolean') {
      return true;
    } else if (typeof data[key] === 'object') {
      // BUG FIX: this used to `return needsRewrite(data[key])` directly,
      // which skipped every remaining sibling key as soon as the first
      // nested object turned out clean. Only short-circuit on a positive
      // answer; keep scanning otherwise.
      if (needsRewrite(data[key])) {
        return true;
      }
    }
  }
}
|  | 
 | ||
// Deep-copy `data` into an IndexedDB-safe shape: object keys are
// sanitised and null/boolean values become numeric sentinels. Returns
// false when no rewriting is needed, so callers can store the original.
// NOTE(review): a nested object that itself needs no rewriting comes
// back as `false` from the recursive call and is stored as such --
// behavior preserved as-is here; verify against the read-side decoder.
function rewrite(data) {
  if (!needsRewrite(data)) {
    return false;
  }

  var isArray = Array.isArray(data);
  var clone = isArray ? [] : {};

  Object.keys(data).forEach(function (key) {
    var safeKey = isArray ? key : sanitise(key);
    var value = data[key];

    if (value === null) {
      clone[safeKey] = IDB_NULL;
    } else if (typeof value === 'boolean') {
      clone[safeKey] = value ? IDB_TRUE : IDB_FALSE;
    } else if (typeof value === 'object') {
      clone[safeKey] = rewrite(value);
    } else {
      clone[safeKey] = value;
    }
  });

  return clone;
}
|  | 
 | ||
// Object store names used by this adapter.
var DOC_STORE = 'docs';
var META_STORE = 'meta';

// Wrap a callback as an IndexedDB error-event handler, translating the
// DOM error event into a PouchDB IDB_ERROR.
function idbError(callback) {
  return function (evt) {
    var message = 'unknown_error';
    var domError = evt.target && evt.target.error;
    if (domError) {
      message = domError.name || domError.message;
    }
    callback(createError(IDB_ERROR, message, evt.type));
  };
}
|  | 
 | ||
// Inline an attachment body onto `doc` from the row's attachment
// payload. When isBinary the raw data is attached directly; otherwise it
// is base64-encoded (and the now-meaningless `length` is dropped).
// Returns a promise that resolves once the attachment is attached.
function processAttachment(name, src, doc, isBinary) {
  var att = doc._attachments[name];
  delete att.stub;

  var data = src.attachments[att.digest].data;

  if (isBinary) {
    att.data = data;
    return Promise.resolve();
  }

  return new Promise(function (resolve) {
    readAsBinaryString(data, function (binString) {
      att.data = thisBtoa(binString);
      delete att.length;
      resolve();
    });
  });
}
|  | 
 | ||
// Pull the raw field list out of a pouchdb-find design-doc view. Each
// entry is either a plain field name ('foo') or a single-key direction
// object ({foo: 'desc'}); only the name is kept.
function rawIndexFields(ddoc, viewName) {
  var view = ddoc.views[viewName];
  var fields = (view.options && view.options.def && view.options.def.fields) || [];

  return fields.map(function (field) {
    return typeof field === 'string' ? field : Object.keys(field)[0];
  });
}
|  | 
 | ||
// Deterministic native-index name for a field list,
// e.g. ['foo','bar'] -> "_find_idx/foo/bar".
function naturalIndexName(fields) {
  return '_find_idx/' + fields.join('/');
}
|  | 
 | ||
/**
 * Convert the fields the user gave us in the view and convert them to work for
 * indexeddb.
 *
 * fields is an array of field strings. A field string could be one field:
 *   'foo'
 * Or it could be a json path:
 *   'foo.bar'
 */
function correctIndexFields(fields) {
  // Fields whose values live at the top level of the stored row, without
  // their underscore prefix.
  var topLevelFields = ['_id', '_rev', '_deleted', '_attachments'];
  // Every index has to have deleted at the front, because when we do a query
  // we need to filter out deleted documents.
  return ['deleted'].concat(
    fields.map(function (field) {
      // BUG FIX: this previously used `field in [...]`, which tests array
      // *indices* ('0'..'3'), never these names -- so _id/_rev/_deleted/
      // _attachments were wrongly indexed under "data.". Use membership.
      if (topLevelFields.indexOf(field) !== -1) {
        // These properties are stored at the top level without the underscore
        return field.substr(1);
      } else {
        // The custom document fields are inside the `data` property
        return 'data.' + sanitise(field, true);
      }
    })
  );
}
|  | 
 | ||
//
// Core PouchDB schema version. Increment this if we, as a library, want to make
// schema changes in indexeddb. See upgradePouchDbSchema()
//
var POUCHDB_IDB_VERSION = 1;

//
// The IndexedDB version number carries two values at once:
//
//   idbVersion = versionMultiplier * POUCHDB_IDB_VERSION + <user millis>
//
// The high digits encode PouchDB-library schema migrations; the low 13
// digits are a timestamp bumped whenever design documents add or remove
// native indexes ("user migrations"). Given that
// Number.MAX_SAFE_INTEGER = 9007199254740991, a 10^13 multiplier leaves
// room for 900 system migrations before the timestamp overflows in 2198:
//
// 9007199254740991 <- MAX_SAFE_INTEGER
//   10000000000000 <- 10^13
//    7199254740991 <- 2198-02-18T16:59:00.991Z
//
// If this code still exists after 2198 someone send my descendants a
// Spacebook message congratulating them on their impressive genes.
//
var versionMultiplier = Math.pow(10, 13);

// Fresh combined version: current system schema + "now" for user changes.
function createIdbVersion() {
  return (versionMultiplier * POUCHDB_IDB_VERSION) + new Date().getTime();
}

// Recover the PouchDB schema component from a combined idb version.
function getPouchDbVersion(version) {
  return Math.floor(version / versionMultiplier);
}
|  | 
 | ||
// Reconcile the doc store's native indexes with what the current
// (undeleted) design documents declare: drop indexes no design doc wants
// and create any newly-declared ones. Must run inside the version-change
// transaction of `openReq`; `reject` is called if index creation fails.
function maintainNativeIndexes(openReq, reject) {
  var docStore = openReq.transaction.objectStore(DOC_STORE);
  var ddocsReq = docStore.getAll(IDBKeyRange.bound('_design/', '_design/\uffff'));

  ddocsReq.onsuccess = function (e) {
    var ddocRows = e.target.result;
    var existingIndexNames = Array.from(docStore.indexNames);

    // NB: the only thing we're supporting here is the declared indexing
    // fields, nothing more.
    var expectedIndexes = {};
    ddocRows.forEach(function (row) {
      if (row.deleted !== 0) {
        return;
      }
      var ddocData = row.revs[row.rev].data;
      if (!ddocData.views) {
        return;
      }
      Object.keys(ddocData.views).forEach(function (viewName) {
        var fields = rawIndexFields(ddocData, viewName);
        if (fields && fields.length > 0) {
          expectedIndexes[naturalIndexName(fields)] = correctIndexFields(fields);
        }
      });
    });

    var expectedIndexNames = Object.keys(expectedIndexes);

    // Delete anything that is neither a system index nor expected.
    var systemIndexNames = ['seq'];
    existingIndexNames.forEach(function (name) {
      if (systemIndexNames.indexOf(name) === -1 && expectedIndexNames.indexOf(name) === -1) {
        docStore.deleteIndex(name);
      }
    });

    // Create whatever is expected but missing.
    try {
      expectedIndexNames.forEach(function (name) {
        if (existingIndexNames.indexOf(name) === -1) {
          docStore.createIndex(name, expectedIndexes[name]);
        }
      });
    } catch (err) {
      reject(err);
    }
  };
}
|  | 
 | ||
// Apply PouchDB-level (not user-level) schema migrations. `pouchdbVersion`
// is the schema version the on-disk database was created with (0 = new).
function upgradePouchDbSchema(db, pouchdbVersion) {
  if (pouchdbVersion < 1) {
    // v1: the main document store (keyed by id, with a unique by-seq
    // index) plus a small metadata store.
    var docStore = db.createObjectStore(DOC_STORE, {keyPath : 'id'});
    docStore.createIndex('seq', 'seq', {unique: true});
    db.createObjectStore(META_STORE, {keyPath: 'id'});
  }

  // Declare more PouchDB schema changes here
  // if (pouchdbVersion < 2) { .. }
}
|  | 
 | ||
// Open (or create) the IndexedDB database named `opts.name`, install the
// global failure/staleness handlers on the connection, and make sure the
// metadata record exists. Resolves with {idb, metadata}; rejects on open
// errors or failed native-index maintenance.
// NOTE(review): when reopening after a version change we open without a
// version number so IndexedDB reuses whatever version is on disk —
// presumably to avoid fighting other tabs over the version; confirm
// against createIdbVersion().
function openDatabase(openDatabases, api, opts, resolve, reject) {
  var openReq = opts.versionchanged ?
    indexedDB.open(opts.name) :
    indexedDB.open(opts.name, createIdbVersion());

  openReq.onupgradeneeded = function (e) {
    // Versions below versionMultiplier belong to the older "idb" adapter's
    // numbering, so a positive version under that threshold means this DB
    // on disk was created by a different adapter.
    if (e.oldVersion > 0 && e.oldVersion < versionMultiplier) {
      // This DB was created with the "idb" adapter, **not** this one.
      // For now we're going to just error out here: users must manually
      // migrate between the two. In the future, dependent on performance tests,
      // we might silently migrate
      throw new Error('Incorrect adapter: you should specify the "idb" adapter to open this DB');
    } else if (e.oldVersion === 0 && e.newVersion < versionMultiplier) {
      // Firefox still creates the database with version=1 even if we throw,
      // so we need to be sure to destroy the empty database before throwing
      indexedDB.deleteDatabase(opts.name);
      throw new Error('Database was deleted while open');
    }

    var db = e.target.result;

    // Run schema migrations for the PouchDB portion of the version number,
    // then (re)build any user-defined native indexes.
    var pouchdbVersion = getPouchDbVersion(e.oldVersion);
    upgradePouchDbSchema(db, pouchdbVersion);
    maintainNativeIndexes(openReq, reject);
  };

  openReq.onblocked = function (e) {
      // AFAICT this only occurs if, after sending `onversionchange` events to
      // all other open DBs (ie in different tabs), there are still open
      // connections to the DB. In this code we should never see this because we
      // close our DBs on these events, and all DB interactions are wrapped in
      // safely re-opening the DB.
      console.error('onblocked, this should never happen', e);
  };

  openReq.onsuccess = function (e) {
    var idb = e.target.result;

    // A global abort means the connection is unusable: drop it from the
    // cache so the next operation reopens it.
    idb.onabort = function (e) {
      console.error('Database has a global failure', e.target.error);
      delete openDatabases[opts.name];
      idb.close();
    };

    // Another connection (e.g. another tab) is upgrading or deleting the
    // DB: flag the cached entry stale so setup() reopens on next use.
    idb.onversionchange = function () {
      console.log('Database was made stale, closing handle');
      openDatabases[opts.name].versionchanged = true;
      idb.close();
    };

    idb.onclose = function () {
      console.log('Database was made stale, closing handle');
      if (opts.name in openDatabases) {
        openDatabases[opts.name].versionchanged = true;
      }
    };

    // Seed any metadata fields missing from the stored record (fresh DB,
    // or one written by an older schema).
    var metadata = {id: META_STORE};
    var txn = idb.transaction([META_STORE], 'readwrite');

    txn.oncomplete = function () {
      resolve({idb: idb, metadata: metadata});
    };

    var metaStore = txn.objectStore(META_STORE);
    metaStore.get(META_STORE).onsuccess = function (e) {
      metadata = e.target.result || metadata;
      var changed = false;

      if (!('doc_count' in metadata)) {
        changed = true;
        metadata.doc_count = 0;
      }

      if (!('seq' in metadata)) {
        changed = true;
        metadata.seq = 0;
      }

      if (!('db_uuid' in metadata)) {
        changed = true;
        metadata.db_uuid = uuid$1();
      }

      if (changed) {
        metaStore.put(metadata);
      }
    };
  };

  openReq.onerror = function (e) {
    reject(e.target.error);
  };
}
|  | 
 | ||
// Return the cached open-database promise for opts.name, creating it (or
// transparently reopening a handle invalidated by a version change) on
// demand.
function setup (openDatabases, api, opts) {
  var cached = openDatabases[opts.name];
  if (cached && !cached.versionchanged) {
    return cached;
  }

  // Remember whether this is a reopen of a stale handle; openDatabase
  // skips the explicit version number in that case.
  opts.versionchanged = cached && cached.versionchanged;

  openDatabases[opts.name] = new Promise(function (resolve, reject) {
    openDatabase(openDatabases, api, opts, resolve, reject);
  });
  return openDatabases[opts.name];
}
|  | 
 | ||
// Report the database's totals to `callback` in PouchDB's info() shape.
function info (metadata, callback) {
  var summary = {
    doc_count: metadata.doc_count,
    update_seq: metadata.seq
  };
  callback(null, summary);
}
|  | 
 | ||
// We fetch all leaves of the revision tree and sort them based on tree
// length and whether they were deleted; undeleted documents with the
// longest revision tree (most edits) win.
// The final sort algorithm is partially documented in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
 | ||
function winningRev(metadata) {
  var bestId;
  var bestPos;
  var bestDeleted;
  var stack = metadata.rev_tree.slice();

  for (var node = stack.pop(); node; node = stack.pop()) {
    var ids = node.ids;
    var children = ids[2];
    var pos = node.pos;

    if (children.length) {
      // interior node: descend into each branch
      children.forEach(function (child) {
        stack.push({pos: pos + 1, ids: child});
      });
      continue;
    }

    // leaf: compare against the current best by deleted, then pos, then id
    var leafDeleted = Boolean(ids[1].deleted);
    var leafId = ids[0];
    if (!bestId || (bestDeleted !== leafDeleted ? bestDeleted :
        bestPos !== pos ? bestPos < pos : bestId < leafId)) {
      bestId = leafId;
      bestPos = pos;
      bestDeleted = leafDeleted;
    }
  }

  return bestPos + '-' + bestId;
}
|  | 
 | ||
|  | // Pretty much all below can be combined into a higher order function to
 | ||
|  | // traverse revisions
 | ||
|  | // The return value from the callback will be passed as context to all
 | ||
|  | // children of that node
 | ||
// Visit every node of a rev tree, invoking
// `callback(isLeaf, pos, revHash, ctx, opts)` once per node. Whatever the
// callback returns is handed to that node's children as `ctx`.
function traverseRevTree(revs, callback) {
  var stack = revs.slice();

  while (stack.length) {
    var node = stack.pop();
    var pos = node.pos;
    var tree = node.ids;
    var children = tree[2];
    var childCtx =
      callback(children.length === 0, pos, tree[0], node.ctx, tree[1]);
    children.forEach(function (child) {
      stack.push({pos: pos + 1, ids: child, ctx: childCtx});
    });
  }
}
|  | 
 | ||
// Comparator ordering tree nodes by ascending `pos` (depth in the tree).
function sortByPos(a, b) {
  var left = a.pos;
  var right = b.pos;
  return left - right;
}
|  | 
 | ||
// Gather every leaf of a rev tree as {rev, opts}, ordered from the
// deepest (longest path) leaf to the shallowest.
function collectLeaves(revs) {
  var leaves = [];
  traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
    if (!isLeaf) {
      return;
    }
    leaves.push({rev: pos + "-" + id, pos: pos, opts: opts});
  });
  leaves.sort(sortByPos).reverse();
  // `pos` was only needed for sorting; strip it from the result
  leaves.forEach(function (leaf) {
    delete leaf.pos;
  });
  return leaves;
}
|  | 
 | ||
// returns the revs of all conflicting leaves, i.e. leaves that
// 1. are not deleted, and
// 2. differ from the winning revision
 | ||
function collectConflicts(metadata) {
  var winner = winningRev(metadata);
  // every non-deleted leaf other than the winner is a conflict
  return collectLeaves(metadata.rev_tree).filter(function (leaf) {
    return leaf.rev !== winner && !leaf.opts.deleted;
  }).map(function (leaf) {
    return leaf.rev;
  });
}
|  | 
 | ||
|  | // compact a tree by marking its non-leafs as missing,
 | ||
|  | // and return a list of revs to delete
 | ||
function compactTree(metadata) {
  var removable = [];
  traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
                                               revHash, ctx, opts) {
    // mark every non-leaf rev that still has data as missing and
    // remember it so the caller can delete its data
    if (isLeaf || opts.status !== 'available') {
      return;
    }
    removable.push(pos + '-' + revHash);
    opts.status = 'missing';
  });
  return removable;
}
|  | 
 | ||
|  | // build up a list of all the paths to the leafs in this revision tree
 | ||
// Expand a rev tree into the list of all its root-to-leaf paths, each as
// {pos: <pos of the path's first rev>, ids: [{id, opts}, ...]}.
function rootToLeaf(revs) {
  var paths = [];
  var stack = revs.slice();

  while (stack.length) {
    var node = stack.pop();
    var pos = node.pos;
    var tree = node.ids;
    var children = tree[2];

    var trail = node.history ? node.history.slice() : [];
    trail.push({id: tree[0], opts: tree[1]});

    if (children.length === 0) {
      // reached a leaf: record the whole path down to it
      paths.push({pos: (pos + 1 - trail.length), ids: trail});
    }
    for (var i = 0; i < children.length; i++) {
      stack.push({pos: pos + 1, ids: children[i], history: trail});
    }
  }
  return paths.reverse();
}
|  | 
 | ||
// For a better overview of what the merge/stem code below is doing, see the
// CouchDB documentation on revision trees and replication conflicts.
 | ||
// Comparator ordering branches by ascending `pos`.
function sortByPos$1(a, b) {
  var diff = a.pos - b.pos;
  return diff;
}
|  | 
 | ||
|  | // classic binary search
 | ||
// Classic binary search: the lowest index at which `item` could be
// inserted into the sorted array `arr` while keeping it ordered.
function binarySearch(arr, item, comparator) {
  var lo = 0;
  var hi = arr.length;
  while (lo < hi) {
    var mid = (lo + hi) >>> 1;
    if (comparator(arr[mid], item) < 0) {
      lo = mid + 1;
    } else {
      hi = mid;
    }
  }
  return lo;
}
|  | 
 | ||
|  | // assuming the arr is sorted, insert the item in the proper place
 | ||
// Insert `item` into the sorted array `arr` at the position that keeps it
// sorted (mutates `arr` in place).
function insertSorted(arr, item, comparator) {
  var position = binarySearch(arr, item, comparator);
  arr.splice(position, 0, item);
}
|  | 
 | ||
|  | // Turn a path as a flat array into a tree with a single branch.
 | ||
|  | // If any should be stemmed from the beginning of the array, that's passed
 | ||
|  | // in as the second argument
 | ||
// Turn a flat root-to-leaf path into a single-branch tree, dropping the
// first `numStemmed` entries from the start of the path.
function pathToTree(path, numStemmed) {
  var root;
  var tail;
  for (var i = numStemmed; i < path.length; i++) {
    var entry = [path[i].id, path[i].opts, []];
    if (tail) {
      tail[2].push(entry);
    } else {
      root = entry;
    }
    tail = entry;
  }
  return root;
}
|  | 
 | ||
|  | // compare the IDs of two trees
 | ||
// Compare two tree nodes by their rev hash. Note this never returns 0:
// equal hashes compare as "greater", matching the original ordering.
function compareTree(a, b) {
  if (a[0] < b[0]) {
    return -1;
  }
  return 1;
}
|  | 
 | ||
|  | // Merge two trees together
 | ||
|  | // The roots of tree1 and tree2 must be the same revision
 | ||
// Merge two rev trees whose roots are the same revision. `in_tree1` is
// mutated in place and returned; `conflicts` reports whether the merge
// added a new leaf ('new_leaf'), a new branch ('new_branch'), or nothing
// new (false).
function mergeTree(in_tree1, in_tree2) {
  var work = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;

  while (work.length > 0) {
    var pair = work.pop();
    var ours = pair.tree1;
    var theirs = pair.tree2;

    // A revision's data is 'available' if either side still has it.
    if (ours[1].status || theirs[1].status) {
      var available = ours[1].status ===  'available' ||
        theirs[1].status === 'available';
      ours[1].status = available ? 'available' : 'missing';
    }

    for (var i = 0; i < theirs[2].length; i++) {
      if (!ours[2][0]) {
        // our side has no children at all: adopt theirs wholesale
        conflicts = 'new_leaf';
        ours[2][0] = theirs[2][i];
        continue;
      }

      var matched = false;
      for (var j = 0; j < ours[2].length; j++) {
        if (ours[2][j][0] === theirs[2][i][0]) {
          // the same child revision exists on both sides: recurse into it
          work.push({tree1: ours[2][j], tree2: theirs[2][i]});
          matched = true;
        }
      }
      if (!matched) {
        conflicts = 'new_branch';
        insertSorted(ours[2], theirs[2][i], compareTree);
      }
    }
  }
  return {conflicts: conflicts, tree: in_tree1};
}
|  | 
 | ||
// Merge the single branch `path` into the forest `tree` (a list of
// {pos, ids} branches). When `dontExpand` is true, branches rooted at a
// different depth than `path` are left untouched (used after stemming).
// Returns {tree, conflicts} where conflicts is 'new_leaf', 'new_branch',
// or 'internal_node' (nothing new was added).
function doMerge(tree, path, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;

  // Empty tree: the path becomes the whole tree.
  if (!tree.length) {
    return {tree: [path], conflicts: 'new_leaf'};
  }

  for (var i = 0, len = tree.length; i < len; i++) {
    var branch = tree[i];
    if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
      // Both start at the same position with the same root revision, so
      // they can be merged directly.
      res = mergeTree(branch.ids, path.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths start at different positions: take the earlier-rooted
      // one (t1) and walk down `diff` levels looking for a node whose
      // revision matches the later one's root (t2). If found, graft the
      // merge of the two in at that point. After stemming we don't want
      // to expand the trees this way (dontExpand above).

      var t1 = branch.pos < path.pos ? branch : path;
      var t2 = branch.pos < path.pos ? path : branch;
      var diff = t2.pos - t1.pos;

      var candidateParents = [];

      var trees = [];
      trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null});
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          // reached t2's depth: keep it only if the revisions match
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        var elements = item.ids[2];
        for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
          trees.push({
            ids: elements[j],
            diff: item.diff - 1,
            parent: item.ids,
            parentIdx: j
          });
        }
      }

      var el = candidateParents[0];

      if (!el) {
        // no attachment point found; keep the branch unchanged
        restree.push(branch);
      } else {
        res = mergeTree(el.ids, t2.ids);
        // splice the merged subtree back into its parent in place
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  }

  // We didn't find anywhere to merge `path`, so it becomes its own branch.
  if (!merged) {
    restree.push(path);
  }

  restree.sort(sortByPos$1);

  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}
|  | 
 | ||
|  | // To ensure we dont grow the revision tree infinitely, we stem old revisions
 | ||
// Stem `tree` so no root-to-leaf path is longer than `depth` revisions.
// Returns {tree, revs} where `revs` lists the "pos-hash" revisions that
// were removed (and whose data may now be deleted).
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths
  var paths = rootToLeaf(tree);
  var stemmedRevs;

  var result;
  for (var i = 0, len = paths.length; i < len; i++) {
    // Then for each path, we cut off the start of the path based on the
    // `depth` to stem to, and generate a new set of flat trees
    var path = paths[i];
    var stemmed = path.ids;
    var node;
    if (stemmed.length > depth) {
      // only do the stemming work if we actually need to stem
      if (!stemmedRevs) {
        stemmedRevs = {}; // avoid allocating this object unnecessarily
      }
      var numStemmed = stemmed.length - depth;
      node = {
        pos: path.pos + numStemmed,
        ids: pathToTree(stemmed, numStemmed)
      };

      // record every revision cut off the front of this path
      for (var s = 0; s < numStemmed; s++) {
        var rev = (path.pos + s) + '-' + stemmed[s].id;
        stemmedRevs[rev] = true;
      }
    } else { // no need to actually stem
      node = {
        pos: path.pos,
        ids: pathToTree(stemmed, 0)
      };
    }

    // Then we remerge all those flat trees together, ensuring that we dont
    // connect trees that would go beyond the depth limit
    if (result) {
      result = doMerge(result, node, true).tree;
    } else {
      result = [node];
    }
  }

  // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
  if (stemmedRevs) {
    traverseRevTree(result, function (isLeaf, pos, revHash) {
      // some revisions may have been removed in a branch but not in another,
      // so only report revs that survive in no branch of the final tree
      delete stemmedRevs[pos + '-' + revHash];
    });
  }

  return {
    tree: result,
    revs: stemmedRevs ? Object.keys(stemmedRevs) : []
  };
}
|  | 
 | ||
// Merge `path` into `tree`, then stem the result down to `depth` revisions
// per branch. Returns the new tree, the revs stemmed away, and the merge's
// conflict classification.
function merge(tree, path, depth) {
  var mergeResult = doMerge(tree, path);
  var stemResult = stem(mergeResult.tree, depth);
  return {
    tree: stemResult.tree,
    stemmedRevs: stemResult.revs,
    conflicts: mergeResult.conflicts
  };
}
|  | 
 | ||
|  | // return true if a rev exists in the rev tree, false otherwise
 | ||
|  | 
 | ||
|  | // returns the current leaf node for a given revision
 | ||
// Given any revision of a document, return the rev string of the leaf
// that descends from it (i.e. the current tip of its branch).
function latest(rev, metadata) {
  var stack = metadata.rev_tree.slice();

  while (stack.length) {
    var node = stack.pop();
    var pos = node.pos;
    var tree = node.ids;
    var children = tree[2];

    var trail = node.history ? node.history.slice() : [];
    trail.push({id: tree[0], pos: pos, opts: tree[1]});

    if (children.length === 0) {
      // does the requested rev lie anywhere on the path to this leaf?
      for (var i = 0; i < trail.length; i++) {
        var ancestor = trail[i];
        if (ancestor.pos + '-' + ancestor.id === rev) {
          // return the rev of this leaf
          return pos + '-' + tree[0];
        }
      }
    }

    for (var j = 0; j < children.length; j++) {
      stack.push({pos: pos + 1, ids: children[j], history: trail});
    }
  }

  /* istanbul ignore next */
  throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
}
|  | 
 | ||
// Read a single document (optionally at a specific revision) out of the
// doc store within an existing transaction, calling back with
// {doc, metadata, ctx} or a MISSING_DOC error.
function get (txn, id, opts, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  txn.txn.objectStore(DOC_STORE).get(id).onsuccess = function (e) {
    var doc = e.target.result;
    var rev;
    if (opts.rev) {
      // opts.latest means "the leaf descended from opts.rev"
      rev = opts.latest ? latest(opts.rev, doc) : opts.rev;
    } else {
      rev = (doc && doc.rev);
    }

    var missing = !doc || (doc.deleted && !opts.rev) || !(rev in doc.revs);
    if (missing) {
      callback(createError(MISSING_DOC, 'missing'));
      return;
    }

    var result = doc.revs[rev].data;
    result._id = doc.id;
    result._rev = rev;

    // WARNING: expecting possible old format
    // TODO: why are we passing the transaction in the context?
    //       It's not clear we ever thread these txns usefully
    callback(null, {
      doc: result,
      metadata: doc,
      ctx: txn
    });
  };
}
|  | 
 | ||
// Hand an attachment to `cb` either untouched (binary mode) or encoded
// as a base64 string.
function parseAttachment(attachment, opts, cb) {
  if (opts.binary) {
    return cb(null, attachment);
  }
  readAsBinaryString(attachment, function (binString) {
    cb(null, thisBtoa(binString));
  });
}
|  | 
 | ||
// Fetch one attachment of a document (at opts.rev or the winning rev)
// and deliver it via parseAttachment once the transaction completes.
function getAttachment(txn, docId, attachId, _, opts, cb) {
  if (txn.error) {
    return cb(txn.error);
  }

  var attachment;

  txn.txn.objectStore(DOC_STORE).get(docId).onsuccess = function (e) {
    var doc = e.target.result;
    var revData = doc.revs[opts.rev || doc.rev].data;
    var digest = revData._attachments[attachId].digest;
    // attachment bodies are stored once per digest on the doc record
    attachment = doc.attachments[digest].data;
  };

  txn.txn.oncomplete = function () {
    parseAttachment(attachment, opts, cb);
  };

  txn.txn.onabort = cb;
}
|  | 
 | ||
// Build a truthy lookup table ({item: true, ...}) from an array of keys.
function toObject(array) {
  var lookup = {};
  array.forEach(function (key) {
    lookup[key] = true;
  });
  return lookup;
}
// Top-level "_"-prefixed keys a document may legally contain; any other
// key starting with "_" fails validation (see parseDoc).
var reservedWords = toObject([
  '_id',
  '_rev',
  '_access',
  '_attachments',
  '_deleted',
  '_revisions',
  '_revs_info',
  '_conflicts',
  '_deleted_conflicts',
  '_local_seq',
  '_rev_tree',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats',
  // Specific to Couchbase Sync Gateway
  '_removed'
]);

// Subset of reserved words that stay in the document body (data) rather
// than being split out into metadata by parseDoc.
var dataWords = toObject([
  '_access',
  '_attachments',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
|  | 
 | ||
// Split an "N-hash" revision string into {prefix: N, id: hash}. Returns
// an INVALID_REV error object when the string is malformed.
function parseRevisionInfo(rev) {
  if (!/^\d+-/.test(rev)) {
    return createError(INVALID_REV);
  }
  var dash = rev.indexOf('-');
  return {
    prefix: parseInt(rev.substring(0, dash), 10),
    id: rev.substring(dash + 1)
  };
}
|  | 
 | ||
// Build a single-branch rev tree from a _revisions object
// ({start, ids: [newest, ..., oldest]}). Only the newest revision carries
// `opts`; every ancestor is marked 'missing'.
function makeRevTreeFromRevisions(revisions, opts) {
  var startPos = revisions.start - revisions.ids.length + 1;

  var branch = [revisions.ids[0], opts, []];
  revisions.ids.slice(1).forEach(function (ancestorId) {
    branch = [ancestorId, {status: 'missing'}, [branch]];
  });

  return [{
    pos: startPos,
    ids: branch
  }];
}
|  | 
 | ||
|  | // Preprocess documents, parse their revisions, assign an id and a
 | ||
|  | // revision for new writes that are missing them, etc
 | ||
// Preprocess a document for writing: parse/validate its revision info,
// assign a generated _id/_rev to new edits that lack them, build the
// _rev_tree, and split top-level fields into {metadata, data}. Reserved
// "_" keys move to metadata unless whitelisted in dataWords; unknown "_"
// keys throw DOC_VALIDATION. May also return an error object from
// parseRevisionInfo instead of throwing.
function parseDoc(doc, newEdits, dbOpts) {
  if (!dbOpts) {
    dbOpts = {
      deterministic_revs: true
    };
  }

  var nRevNum;
  var newRevId;
  var revInfo;
  var opts = {status: 'available'};
  if (doc._deleted) {
    opts.deleted = true;
  }

  if (newEdits) {
    // Server-assigned edit: generate the id/rev ourselves.
    if (!doc._id) {
      doc._id = uuid$1();
    }
    newRevId = rev$$1(doc, dbOpts.deterministic_revs);
    if (doc._rev) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      // The claimed parent rev becomes a 'missing' root with the new rev
      // as its only child.
      doc._rev_tree = [{
        pos: revInfo.prefix,
        ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]]
      }];
      nRevNum = revInfo.prefix + 1;
    } else {
      doc._rev_tree = [{
        pos: 1,
        ids : [newRevId, opts, []]
      }];
      nRevNum = 1;
    }
  } else {
    // Replicated write: trust the rev information supplied by the caller.
    if (doc._revisions) {
      doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts);
      nRevNum = doc._revisions.start;
      newRevId = doc._revisions.ids[0];
    }
    if (!doc._rev_tree) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      nRevNum = revInfo.prefix;
      newRevId = revInfo.id;
      doc._rev_tree = [{
        pos: nRevNum,
        ids: [newRevId, opts, []]
      }];
    }
  }

  // Throws if the id is missing/reserved/not a string.
  invalidIdError(doc._id);

  doc._rev = nRevNum + '-' + newRevId;

  // Partition top-level keys: unknown "_" keys are rejected, reserved "_"
  // keys (minus the dataWords whitelist) go to metadata without their
  // underscore, everything else is document data.
  var result = {metadata : {}, data : {}};
  for (var key in doc) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(doc, key)) {
      var specialKey = key[0] === '_';
      if (specialKey && !reservedWords[key]) {
        var error = createError(DOC_VALIDATION, key);
        error.message = DOC_VALIDATION.message + ': ' + key;
        throw error;
      } else if (specialKey && !dataWords[key]) {
        result.metadata[key.slice(1)] = doc[key];
      } else {
        result.data[key] = doc[key];
      }
    }
  }
  return result;
}
|  | 
 | ||
|  | function bulkDocs (api, req, opts, metadata, dbOpts, idbChanges, callback) { | ||
|  | 
 | ||
|  |   var txn; | ||
|  | 
 | ||
|  |   // TODO: I would prefer to get rid of these globals
 | ||
|  |   var error; | ||
|  |   var results = []; | ||
|  |   var docs = []; | ||
|  |   var lastWriteIndex; | ||
|  | 
 | ||
|  |   var revsLimit = dbOpts.revs_limit || 1000; | ||
|  |   var rewriteEnabled = dbOpts.name.indexOf("-mrview-") === -1; | ||
|  |   const autoCompaction = dbOpts.auto_compaction; | ||
|  | 
 | ||
|  |   // We only need to track 1 revision for local documents
 | ||
|  |   function docsRevsLimit(doc) { | ||
|  |     return /^_local/.test(doc.id) ? 1 : revsLimit; | ||
|  |   } | ||
|  | 
 | ||
|  |   function rootIsMissing(doc) { | ||
|  |     return doc.rev_tree[0].ids[1].status === 'missing'; | ||
|  |   } | ||
|  | 
 | ||
|  |   function parseBase64(data) { | ||
|  |     try { | ||
|  |       return atob(data); | ||
|  |     } catch (e) { | ||
|  |       return { | ||
|  |         error: createError(BAD_ARG, 'Attachment is not a valid base64 string') | ||
|  |       }; | ||
|  |     } | ||
|  |   } | ||
|  | 
 | ||
|  |   // Reads the original doc from the store if available
 | ||
|  |   // As in allDocs with keys option using multiple get calls is the fastest way
 | ||
  // Read the currently-stored version of every doc in the batch, then hand
  // control to processDocs$$1 once all reads complete. As in allDocs with
  // the keys option, issuing one get() per doc is the fastest approach.
  function fetchExistingDocs(txn, docs) {
    var fetched = 0;
    var oldDocs = {};

    function readDone(e) {
      if (e.target.result) {
        oldDocs[e.target.result.id] = e.target.result;
      }
      // IndexedDB fires these callbacks in order, so the count alone tells
      // us when the last read has landed.
      if (++fetched === docs.length) {
        processDocs$$1(txn, docs, oldDocs);
      }
    }

    docs.forEach(function (doc) {
      txn.objectStore(DOC_STORE).get(doc.id).onsuccess = readDone;
    });
  }
|  | 
 | ||
|  |   function revHasAttachment(doc, rev, digest) { | ||
|  |     return doc.revs[rev] && | ||
|  |       doc.revs[rev].data._attachments && | ||
|  |       Object.values(doc.revs[rev].data._attachments).find(function (att) { | ||
|  |         return att.digest === digest; | ||
|  |       }); | ||
|  |   } | ||
|  | 
 | ||
  // Classify each incoming doc against what is already stored (`oldDocs`)
  // and either record an error in `results` or hand the merged doc to
  // write(). oldDocs is updated as we go so later docs in the same batch
  // see earlier writes to the same id.
  function processDocs$$1(txn, docs, oldDocs) {

    docs.forEach(function (doc, i) {
      var newDoc;

      // The first document write cannot be a deletion
      if ('was_delete' in opts && !(Object.prototype.hasOwnProperty.call(oldDocs, doc.id))) {
        newDoc = createError(MISSING_DOC, 'deleted');

      // The first write of a document cannot specify a revision
      } else if (opts.new_edits &&
                 !Object.prototype.hasOwnProperty.call(oldDocs, doc.id) &&
                 rootIsMissing(doc)) {
        newDoc = createError(REV_CONFLICT);

      // Update the existing document
      } else if (Object.prototype.hasOwnProperty.call(oldDocs, doc.id)) {
        newDoc = update(txn, doc, oldDocs[doc.id]);
        // The update can be rejected if it is an update to an existing
        // revision, if so skip it
        // NOTE(review): loose == matches update()'s `return false`; error
        // and doc objects never compare == false here.
        if (newDoc == false) {
          return;
        }

      // New document
      } else {
        // Ensure new documents are also stemmed
        var merged = merge([], doc.rev_tree[0], docsRevsLimit(doc));
        doc.rev_tree = merged.tree;
        doc.stemmedRevs = merged.stemmedRevs;
        newDoc = doc;
        newDoc.isNewDoc = true;
        newDoc.wasDeleted = doc.revs[doc.rev].deleted ? 1 : 0;
      }

      if (newDoc.error) {
        results[i] = newDoc;
      } else {
        oldDocs[newDoc.id] = newDoc;
        lastWriteIndex = i;
        write(txn, newDoc, i);
      }
    });
  }
|  | 
 | ||
|  |   // Converts from the format returned by parseDoc into the new format
 | ||
|  |   // we use to store
 | ||
|  |   function convertDocFormat(doc) { | ||
|  | 
 | ||
|  |     var newDoc = { | ||
|  |       id: doc.metadata.id, | ||
|  |       rev: doc.metadata.rev, | ||
|  |       rev_tree: doc.metadata.rev_tree, | ||
|  |       revs: doc.metadata.revs || {} | ||
|  |     }; | ||
|  | 
 | ||
|  |     newDoc.revs[newDoc.rev] = { | ||
|  |       data: doc.data, | ||
|  |       deleted: doc.metadata.deleted | ||
|  |     }; | ||
|  | 
 | ||
|  |     return newDoc; | ||
|  |   } | ||
|  | 
 | ||
  // Merge an incoming revision of `doc` into the already-stored `oldDoc`.
  // Returns the merged record ready for write(), `false` when the write
  // should be silently skipped (replicated rev already present), or a
  // REV_CONFLICT error object.
  function update(txn, doc, oldDoc) {

    // Ignore updates to existing revisions
    if ((doc.rev in oldDoc.revs) && !opts.new_edits) {
      return false;
    }

    var isRoot = /^1-/.test(doc.rev);

    // Reattach first writes after a deletion to last deleted tree
    if (oldDoc.deleted && !doc.deleted && opts.new_edits && isRoot) {
      var tmp = doc.revs[doc.rev].data;
      tmp._rev = oldDoc.rev;
      tmp._id = oldDoc.id;
      // Re-parse so the new write becomes a child of the deleted leaf
      // instead of starting a new "1-" root
      doc = convertDocFormat(parseDoc(tmp, opts.new_edits, dbOpts));
    }

    // Merge the incoming rev path into the stored tree, stemming down to
    // the doc's revs_limit
    var merged = merge(oldDoc.rev_tree, doc.rev_tree[0], docsRevsLimit(doc));
    doc.stemmedRevs = merged.stemmedRevs;
    doc.rev_tree = merged.tree;

    // Merge the old and new rev data
    var revs = oldDoc.revs;
    revs[doc.rev] = doc.revs[doc.rev];
    doc.revs = revs;

    doc.attachments = oldDoc.attachments;

    // With new_edits, anything other than extending a live leaf (or
    // reviving a deleted doc via a new branch) counts as a conflict
    var inConflict = opts.new_edits && (((oldDoc.deleted && doc.deleted) ||
       (!oldDoc.deleted && merged.conflicts !== 'new_leaf') ||
       (oldDoc.deleted && !doc.deleted && merged.conflicts === 'new_branch') ||
       (oldDoc.rev === doc.rev)));

    if (inConflict) {
      return createError(REV_CONFLICT);
    }

    // Remember the previous tombstone state so write() can adjust doc_count
    doc.wasDeleted = oldDoc.deleted;

    return doc;
  }
|  | 
 | ||
  // Persist one merged document record. Computes the winning revision,
  // optionally rewrites the indexed data, bumps seq / doc_count metadata,
  // prunes stemmed (and, with auto_compaction, compacted) revisions,
  // de-duplicates attachments by digest, and issues the put/delete against
  // DOC_STORE. `i` is the index of this doc in the batch, used to slot the
  // per-doc result into `results`.
  function write(txn, doc, i) {

    // We copy the data from the winning revision into the root
    // of the document so that it can be indexed
    var winningRev$$1 = winningRev(doc);
    // rev of new doc for attachments and to return it
    var writtenRev = doc.rev;
    var isLocal = /^_local/.test(doc.id);

    var theDoc = doc.revs[winningRev$$1].data;

    const isNewDoc = doc.isNewDoc;

    if (rewriteEnabled) {
      // doc.data is what we index, so we need to clone and rewrite it, and clean
      // it up for indexability
      var result = rewrite(theDoc);
      if (result) {
        doc.data = result;
        delete doc.data._attachments;
      } else {
        doc.data = theDoc;
      }
    } else {
      doc.data = theDoc;
    }

    doc.rev = winningRev$$1;
    // .deleted needs to be an int for indexing
    doc.deleted = doc.revs[winningRev$$1].deleted ? 1 : 0;

    // Bump the seq for every new (non local) revision written
    // TODO: index expects a unique seq, not sure if ignoring local will
    // work
    if (!isLocal) {
      doc.seq = ++metadata.seq;

      var delta = 0;
      // If its a new document, we wont decrement if deleted
      if (doc.isNewDoc) {
        delta = doc.deleted ? 0 : 1;
      } else if (doc.wasDeleted !== doc.deleted) {
        delta = doc.deleted ? -1 : 1;
      }
      metadata.doc_count += delta;
    }
    // These flags were bookkeeping only; never persist them
    delete doc.isNewDoc;
    delete doc.wasDeleted;

    // If there have been revisions stemmed when merging trees,
    // delete their data
    let revsToDelete = doc.stemmedRevs || [];

    if (autoCompaction && !isNewDoc) {
      const result = compactTree(doc);
      if (result.length) {
        revsToDelete = revsToDelete.concat(result);
      }
    }

    if (revsToDelete.length) {
      revsToDelete.forEach(function (rev) { delete doc.revs[rev]; });
    }

    delete doc.stemmedRevs;

    if (!('attachments' in doc)) {
      doc.attachments = {};
    }

    // Attachments are stored once per digest with a revs map recording
    // which revisions reference them; the per-rev doc keeps only a stub.
    if (theDoc._attachments) {
      for (var k in theDoc._attachments) {
        var attachment = theDoc._attachments[k];
        if (attachment.stub) {
          if (!(attachment.digest in doc.attachments)) {
            // Stub refers to a digest we do not have; `error` is a closure
            // variable read by the txn.onabort handler
            error = createError(MISSING_STUB);
            // TODO: Not sure how safe this manual abort is, seeing
            // console issues
            txn.abort();
            return;
          }

          if (revHasAttachment(doc, writtenRev, attachment.digest)) {
            doc.attachments[attachment.digest].revs[writtenRev] = true;
          }

        } else {

          doc.attachments[attachment.digest] = attachment;
          doc.attachments[attachment.digest].revs = {};
          doc.attachments[attachment.digest].revs[writtenRev] = true;

          // Replace the inline attachment body with a stub in the rev data
          theDoc._attachments[k] = {
            stub: true,
            digest: attachment.digest,
            content_type: attachment.content_type,
            length: attachment.length,
            revpos: parseInt(writtenRev, 10)
          };
        }
      }
    }

    // Local documents have different revision handling
    if (isLocal && doc.deleted) {
      txn.objectStore(DOC_STORE)["delete"](doc.id).onsuccess = function () {
        results[i] = {
          ok: true,
          id: doc.id,
          rev: '0-0'
        };
      };
      updateSeq(i);
      return;
    }

    txn.objectStore(DOC_STORE).put(doc).onsuccess = function () {
      results[i] = {
        ok: true,
        id: doc.id,
        rev: writtenRev
      };
      updateSeq(i);
    };
  }
|  | 
 | ||
  // Persist the updated metadata (seq counter / doc_count) exactly once per
  // batch, when the last successfully-written doc's callback fires.
  function updateSeq(i) {
    if (i === lastWriteIndex) {
      txn.objectStore(META_STORE).put(metadata);
    }
  }
|  | 
 | ||
|  |   function preProcessAttachment(attachment) { | ||
|  |     if (attachment.stub) { | ||
|  |       return Promise.resolve(attachment); | ||
|  |     } | ||
|  | 
 | ||
|  |     var binData; | ||
|  |     if (typeof attachment.data === 'string') { | ||
|  |       binData = parseBase64(attachment.data); | ||
|  |       if (binData.error) { | ||
|  |         return Promise.reject(binData.error); | ||
|  |       } | ||
|  |       attachment.data = binStringToBluffer(binData, attachment.content_type); | ||
|  |     } else { | ||
|  |       binData = attachment.data; | ||
|  |     } | ||
|  | 
 | ||
|  |     return new Promise(function (resolve) { | ||
|  |       binaryMd5(binData, function (result) { | ||
|  |         attachment.digest = 'md5-' + result; | ||
|  |         attachment.length = binData.size || binData.length || 0; | ||
|  |         resolve(attachment); | ||
|  |       }); | ||
|  |     }); | ||
|  |   } | ||
|  | 
 | ||
|  |   function preProcessAttachments() { | ||
|  |     var promises = docs.map(function (doc) { | ||
|  |       var data = doc.revs[doc.rev].data; | ||
|  |       if (!data._attachments) { | ||
|  |         return Promise.resolve(data); | ||
|  |       } | ||
|  |       var attachments = Object.keys(data._attachments).map(function (k) { | ||
|  |         data._attachments[k].name = k; | ||
|  |         return preProcessAttachment(data._attachments[k]); | ||
|  |       }); | ||
|  | 
 | ||
|  |       return Promise.all(attachments).then(function (newAttachments) { | ||
|  |         var processed = {}; | ||
|  |         newAttachments.forEach(function (attachment) { | ||
|  |           processed[attachment.name] = attachment; | ||
|  |           delete attachment.name; | ||
|  |         }); | ||
|  |         data._attachments = processed; | ||
|  |         return data; | ||
|  |       }); | ||
|  |     }); | ||
|  |     return Promise.all(promises); | ||
|  |   } | ||
|  | 
 | ||
  // Parse and validate every incoming doc up front; any parse failure
  // aborts the whole bulkDocs call with that error.
  for (var i = 0, len = req.docs.length; i < len; i++) {
    var result;
    // TODO: We should get rid of throwing for invalid docs, also not sure
    // why this is needed in idb-next and not idb
    try {
      result = parseDoc(req.docs[i], opts.new_edits, dbOpts);
    } catch (err) {
      result = err;
    }
    if (result.error) {
      return callback(result);
    }

    // Ideally parseDoc would return data in this format, but it is currently
    // shared so we need to convert
    docs.push(convertDocFormat(result));
  }

  // Compute attachment digests before opening the write transaction, then
  // run the actual writes inside a single readwrite txn over both stores.
  preProcessAttachments().then(function () {
    api._openTransactionSafely([DOC_STORE, META_STORE], 'readwrite', function (err, _txn) {
      if (err) {
        return callback(err);
      }

      txn = _txn;

      // `error` may have been set by write() (e.g. MISSING_STUB) before it
      // called txn.abort()
      txn.onabort = function () {
        callback(error || createError(UNKNOWN_ERROR, 'transaction was aborted'));
      };
      txn.ontimeout = idbError(callback);

      txn.oncomplete = function () {
        idbChanges.notify(dbOpts.name);
        callback(null, results);
      };

      // We would like to use promises here, but idb sucks
      fetchExistingDocs(txn, docs);
    });
  })["catch"](function (err) {
    callback(err);
  });
|  | } | ||
|  | 
 | ||
// Fetch a specific list of keys from the doc store. IndexedDB gives no
// ordering guarantee across independent get() requests, so results are
// slotted into a preallocated array by index and only handed to
// allDocsInner (in key order) once every request has answered. Missing
// keys are reported as {key, error: 'not_found'}.
function allDocsKeys(keys, docStore, allDocsInner) {
  var valuesBatch = new Array(keys.length);
  var remaining = keys.length;
  keys.forEach(function (key, index) {
    docStore.get(key).onsuccess = function (event) {
      var found = event.target.result;
      valuesBatch[index] = found ? found : {key: key, error: 'not_found'};
      remaining--;
      if (remaining === 0) {
        valuesBatch.forEach(function (doc) {
          allDocsInner(doc);
        });
      }
    };
  });
}
|  | 
 | ||
// Build an IDBKeyRange for an allDocs scan. Descending scans swap the
// bound order because IDB ranges are always expressed low -> high.
// Returns null when no constraint applies, or {error} when IDBKeyRange
// rejects the combination (e.g. start after end).
function createKeyRange(start, end, inclusiveEnd, key, descending) {
  try {
    if (start && end) {
      return descending ?
        IDBKeyRange.bound(end, start, !inclusiveEnd, false) :
        IDBKeyRange.bound(start, end, false, !inclusiveEnd);
    }
    if (start) {
      return descending ?
        IDBKeyRange.upperBound(start) :
        IDBKeyRange.lowerBound(start);
    }
    if (end) {
      return descending ?
        IDBKeyRange.lowerBound(end, !inclusiveEnd) :
        IDBKeyRange.upperBound(end, !inclusiveEnd);
    }
    if (key) {
      return IDBKeyRange.only(key);
    }
  } catch (e) {
    return {error: e};
  }
  return null;
}
|  | 
 | ||
// An IDBKeyRange DataError with code 0 means the requested range is
// inverted (start after end); CouchDB semantics call for an empty result
// set rather than an error in that case. Any other failure is surfaced
// to the callback as an IDB_ERROR.
function handleKeyRangeError(opts, metadata, err, callback) {
  var isInvertedRange = err.name === "DataError" && err.code === 0;
  if (!isInvertedRange) {
    return callback(createError(IDB_ERROR, err.name, err.message));
  }

  var emptyResult = {
    total_rows: metadata.doc_count,
    offset: opts.skip,
    rows: []
  };
  /* istanbul ignore if */
  if (opts.update_seq) {
    emptyResult.update_seq = metadata.seq;
  }
  callback(null, emptyResult);
}
|  | 
 | ||
// _allDocs implementation: either resolves an explicit `keys` list via
// allDocsKeys, or walks DOC_STORE with a cursor over the computed key
// range, honouring skip/limit/descending/include_docs/conflicts/
// attachments options. The final response is assembled in the
// transaction's oncomplete handler, after attachment processing settles.
function allDocs (txn, metadata, opts, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  // TODO: Weird hack, I dont like it
  if (opts.limit === 0) {
    var returnVal = {
      total_rows: metadata.doc_count,
      offset: opts.skip,
      rows: []
    };

    /* istanbul ignore if */
    if (opts.update_seq) {
      returnVal.update_seq = metadata.seq;
    }
    return callback(null, returnVal);
  }

  var results = [];
  // Promises for in-flight attachment reads; awaited before responding
  var processing = [];

  // Normalise query options; `false` means "option not supplied"
  var start = 'startkey' in opts ? opts.startkey : false;
  var end = 'endkey' in opts ? opts.endkey : false;
  var key = 'key' in opts ? opts.key : false;
  var keys = 'keys' in opts ? opts.keys : false;
  var skip = opts.skip || 0;
  var limit = typeof opts.limit === 'number' ? opts.limit : -1;
  var inclusiveEnd = opts.inclusive_end !== false;
  var descending = 'descending' in opts && opts.descending ? 'prev' : null;

  var keyRange;
  if (!keys) {
    keyRange = createKeyRange(start, end, inclusiveEnd, key, descending);
    if (keyRange && keyRange.error) {
      return handleKeyRangeError(opts, metadata, keyRange.error, callback);
    }
  }

  var docStore = txn.txn.objectStore(DOC_STORE);

  txn.txn.oncomplete = onTxnComplete;

  if (keys) {
    return allDocsKeys(opts.keys, docStore, allDocsInner);
  }

  // Attach the winning-rev data (and optionally conflicts/attachments)
  // to an already-built row
  function include_doc(row, doc) {
    var docData = doc.revs[doc.rev].data;

    row.doc = docData;
    row.doc._id = doc.id;
    row.doc._rev = doc.rev;
    if (opts.conflicts) {
      var conflicts = collectConflicts(doc);
      if (conflicts.length) {
        row.doc._conflicts = conflicts;
      }
    }
    if (opts.attachments && docData._attachments) {
      for (var name in docData._attachments) {
        processing.push(processAttachment(name, doc, row.doc, opts.binary));
      }
    }
  }

  // Process one record; returns false to stop the cursor (limit reached)
  function allDocsInner(doc) {
    if (doc.error && keys) {
      // key was not found with "keys" requests
      results.push(doc);
      return true;
    }

    var row = {
      id: doc.id,
      key: doc.id,
      value: {
        rev: doc.rev
      }
    };

    // Deleted docs only appear in explicit "keys" requests
    var deleted = doc.deleted;
    if (deleted) {
      if (keys) {
        results.push(row);
        row.value.deleted = true;
        row.doc = null;
      }
    } else if (skip-- <= 0) {
      results.push(row);
      if (opts.include_docs) {
        include_doc(row, doc);
      }
      if (--limit === 0) {
        return false;
      }
    }
    return true;
  }

  function onTxnComplete() {
    Promise.all(processing).then(function () {
      var returnVal = {
        total_rows: metadata.doc_count,
        offset: 0,
        rows: results
      };

      /* istanbul ignore if */
      if (opts.update_seq) {
        returnVal.update_seq = metadata.seq;
      }
      callback(null, returnVal);
    });
  }

  var cursor = descending ?
    docStore.openCursor(keyRange, descending) :
    docStore.openCursor(keyRange);

  cursor.onsuccess = function (e) {

    var doc = e.target.result && e.target.result.value;

    // Happens if opts does not have limit,
    // because cursor will end normally then,
    // when all docs are retrieved.
    // Would not be needed, if getAll() optimization was used like in #6059
    if (!doc) { return; }

    // Skip local docs
    if (/^_local/.test(doc.id)) {
      return e.target.result["continue"]();
    }

    var continueCursor = allDocsInner(doc);
    if (continueCursor) {
      e.target.result["continue"]();
    }
  };

}
|  | 
 | ||
// Implements the changes feed. Continuous mode registers a listener on
// the shared change emitter and returns a cancel handle; one-shot mode
// scans the `seq` index from opts.since onwards, filtering and emitting
// each change, and completes when the read transaction finishes.
function changes (txn, idbChanges, api, dbOpts, opts) {
  if (txn.error) {
    return opts.complete(txn.error);
  }

  if (opts.continuous) {
    // Continuous feeds are driven by notify() calls from writers
    var id = dbOpts.name + ':' + uuid$1();
    idbChanges.addListener(dbOpts.name, id, api, opts);
    idbChanges.notify(dbOpts.name);
    return {
      cancel: function () {
        idbChanges.removeListener(dbOpts.name, id);
      }
    };
  }

  // limit: 0 is treated as 1, matching CouchDB; -1 means unlimited
  var limit = 'limit' in opts ? opts.limit : -1;
  if (limit === 0) {
    limit = 1;
  }

  var store = txn.txn.objectStore(DOC_STORE).index('seq');

  var filter = filterChange(opts);
  var received = 0;

  var lastSeq = opts.since || 0;
  var results = [];

  // Attachment reads that must settle before opts.complete fires
  var processing = [];

  function onReqSuccess(e) {
    if (!e.target.result) { return; }
    var cursor = e.target.result;
    var doc = cursor.value;
    // Overwrite doc.data, which may have been rewritten (see rewrite.js) with
    // the clean version for that rev
    doc.data = doc.revs[doc.rev].data;
    doc.data._id = doc.id;
    doc.data._rev = doc.rev;
    if (doc.deleted) {
      doc.data._deleted = true;
    }

    if (opts.doc_ids && opts.doc_ids.indexOf(doc.id) === -1) {
      return cursor["continue"]();
    }

    // WARNING: expecting possible old format
    var change = opts.processChange(doc.data, doc, opts);
    change.seq = doc.seq;
    lastSeq = doc.seq;
    var filtered = filter(change);

    // If its an error
    if (typeof filtered === 'object') {
      return opts.complete(filtered);
    }

    if (filtered) {
      received++;
      if (opts.return_docs) {
        results.push(change);
      }

      if (opts.include_docs && opts.attachments && doc.data._attachments) {
        var promises = [];
        for (var name in doc.data._attachments) {
          var p = processAttachment(name, doc, change.doc, opts.binary);
          // We add the processing promise to 2 arrays, one tracks all
          // the promises needed before we fire onChange, the other
          // ensure we process all attachments before onComplete
          promises.push(p);
          processing.push(p);
        }

        Promise.all(promises).then(function () {
          opts.onChange(change);
        });
      } else {
        opts.onChange(change);
      }
    }
    if (received !== limit) {
      cursor["continue"]();
    }
  }

  function onTxnComplete() {
    Promise.all(processing).then(function () {
      opts.complete(null, {
        results: results,
        last_seq: lastSeq
      });
    });
  }

  var req;
  if (opts.descending) {
    req = store.openCursor(null, 'prev');
  } else {
    // Exclusive lower bound: changes strictly after opts.since
    req = store.openCursor(IDBKeyRange.lowerBound(opts.since, true));
  }

  txn.txn.oncomplete = onTxnComplete;
  req.onsuccess = onReqSuccess;
}
|  | 
 | ||
// Look up a document's revision tree by id. Errors carried on the
// caller's transaction are forwarded; a missing document is reported
// as MISSING_DOC.
function getRevisionTree(txn, id, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  var req = txn.txn.objectStore(DOC_STORE).get(id);
  req.onsuccess = function (e) {
    var record = e.target.result;
    if (record) {
      callback(null, record.rev_tree);
    } else {
      callback(createError(MISSING_DOC));
    }
  };
}
|  | 
 | ||
// Compact a document: mark the given revisions missing in the rev tree,
// drop their stored data, and garbage-collect any attachments that are
// no longer referenced by a surviving revision. Calls back (with no
// arguments) once the transaction completes.
function doCompaction (txn, id, revs, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  var docStore = txn.txn.objectStore(DOC_STORE);

  docStore.get(id).onsuccess = function (e) {
    var doc = e.target.result;

    // Flag each compacted rev as 'missing' in the tree rather than
    // removing the node, preserving tree structure
    traverseRevTree(doc.rev_tree, function (isLeaf, pos, revHash, ctx, opts) {
      var rev = pos + '-' + revHash;
      if (revs.indexOf(rev) !== -1) {
        opts.status = 'missing';
      }
    });

    var attachments = [];

    revs.forEach(function (rev) {
      if (rev in doc.revs) {
        // Make a list of attachments that are used by the revisions being
        // deleted
        if (doc.revs[rev].data._attachments) {
          for (var k in doc.revs[rev].data._attachments) {
            attachments.push(doc.revs[rev].data._attachments[k].digest);
          }
        }
        delete doc.revs[rev];
      }
    });

    // Attachments have a list of revisions that are using them, when
    // that list becomes empty we can delete the attachment.
    attachments.forEach(function (digest) {
      revs.forEach(function (rev) {
        delete doc.attachments[digest].revs[rev];
      });
      if (!Object.keys(doc.attachments[digest].revs).length) {
        delete doc.attachments[digest];
      }
    });

    docStore.put(doc);
  };

  txn.txn.oncomplete = function () {
    callback();
  };
}
|  | 
 | ||
// Tear down a database: drop its change listeners, close the IDB handle
// if one is currently open (an open connection would block deletion),
// then delete the underlying IndexedDB database and report {ok: true}.
function destroy (dbOpts, openDatabases, idbChanges, callback) {
  idbChanges.removeAllListeners(dbOpts.name);

  function doDestroy() {
    var req = indexedDB.deleteDatabase(dbOpts.name);
    req.onsuccess = function () {
      delete openDatabases[dbOpts.name];
      callback(null, {ok: true});
    };
  }

  if (!(dbOpts.name in openDatabases)) {
    doDestroy();
  } else {
    // If the database is open we need to close it first
    openDatabases[dbOpts.name].then(function (res) {
      res.idb.close();
      doDestroy();
    });
  }
}
|  | 
 | ||
// CouchDB collation endpoints, and their IndexedDB equivalents, used by
// generateKeyRange() below to translate ranged mango queries.
// Adapted from
// https://github.com/pouchdb/pouchdb/blob/master/packages/node_modules/pouchdb-find/src/adapters/local/find/query-planner.js#L20-L24
// This could change / improve in the future?
var COUCH_COLLATE_LO = null;
var COUCH_COLLATE_HI = '\uffff'; // actually used as {"\uffff": {}}

// Adapted from: https://www.w3.org/TR/IndexedDB/#compare-two-keys
// Importantly, *there is no upper bound possible* in idb. The ideal data
// structure would be an infinitely deep array:
//   var IDB_COLLATE_HI = []; IDB_COLLATE_HI.push(IDB_COLLATE_HI)
// But IDBKeyRange is not a fan of shenanigans, so I've just gone with 12 layers
// because it looks nice and surely that's enough!
var IDB_COLLATE_LO = Number.NEGATIVE_INFINITY;
var IDB_COLLATE_HI = [[[[[[[[[[[[]]]]]]]]]]]];
|  | 
 | ||
//
// TODO: this should be made official somewhere and used by AllDocs / get /
// changes etc as well.
//
 | ||
// Flatten an internal idb record into the external CouchDB-style doc:
// the winning rev's data with _id/_rev (and _deleted for tombstones)
// attached.
function externaliseRecord(idbDoc) {
  var externalDoc = idbDoc.revs[idbDoc.rev].data;
  externalDoc._id = idbDoc.id;
  externalDoc._rev = idbDoc.rev;
  if (idbDoc.deleted) {
    externalDoc._deleted = true;
  }
  return externalDoc;
}
|  | 
 | ||
/**
 * Generates an IDBKeyRange based on the opts passed to query.
 *
 * The first key component is always 0, as that's how we're filtering out
 * deleted entries (native indexes store doc.deleted as their first field).
 *
 * Note: mutates `opts` — it defaults inclusive_start/inclusive_end to
 * true and, for descending queries, swaps startkey/endkey (and their
 * inclusivity flags) so the range is expressed low -> high as IDB needs.
 */
function generateKeyRange(opts) {
  function defined(obj, k) {
    return obj[k] !== void 0;
  }

  // Converts a valid CouchDB key into a valid IndexedDB one
  function convert(key, exact) {
    // The first item in every native index is doc.deleted, and we always want
    // to only search documents that are not deleted.
    // "foo" -> [0, "foo"]
    var filterDeleted = [0].concat(key);

    return filterDeleted.map(function (k) {
      // null, true and false are not indexable by indexeddb. When we write
      // these values we convert them to these constants, and so when we
      // query for them we need to convert the query also.
      if (k === null && exact) {
        // for non-exact queries we treat null as a collate property
        // see `if (!exact)` block below
        return IDB_NULL;
      } else if (k === true) {
        return IDB_TRUE;
      } else if (k === false) {
        return IDB_FALSE;
      }

      if (!exact) {
        // We get passed CouchDB's collate low and high values, so for non-exact
        // ranged queries we're going to convert them to our IDB equivalents
        if (k === COUCH_COLLATE_LO) {
          return IDB_COLLATE_LO;
        } else if (Object.prototype.hasOwnProperty.call(k, COUCH_COLLATE_HI)) {
          return IDB_COLLATE_HI;
        }
      }

      return k;
    });
  }

  // CouchDB and so PouchdB defaults to true. We need to make this explicit as
  // we invert these later for IndexedDB.
  if (!defined(opts, 'inclusive_end')) {
    opts.inclusive_end = true;
  }
  if (!defined(opts, 'inclusive_start')) {
    opts.inclusive_start = true;
  }

  if (opts.descending) {
    // Flip before generating. We'll check descending again later when performing
    // an index request
    var realEndkey = opts.startkey,
        realInclusiveEnd = opts.inclusive_start;

    opts.startkey = opts.endkey;
    opts.endkey = realEndkey;
    opts.inclusive_start = opts.inclusive_end;
    opts.inclusive_end = realInclusiveEnd;
  }

  try {
    if (defined(opts, 'key')) {
      return IDBKeyRange.only(convert(opts.key, true));
    }

    if (defined(opts, 'startkey') && !defined(opts, 'endkey')) {
      return IDBKeyRange.lowerBound(convert(opts.startkey), !opts.inclusive_start);
    }

    if (!defined(opts, 'startkey') && defined(opts, 'endkey')) {
      return IDBKeyRange.upperBound(convert(opts.endkey), !opts.inclusive_end);
    }

    if (defined(opts, 'startkey') && defined(opts, 'endkey')) {
      return IDBKeyRange.bound(
        convert(opts.startkey),    convert(opts.endkey),
        !opts.inclusive_start, !opts.inclusive_end
      );
    }

    // Unbounded query: match any key whose deleted flag is 0
    return IDBKeyRange.only([0]);
  } catch (err) {
    console.error('Could not generate keyRange', err, opts);
    throw Error('Could not generate key range with ' + JSON.stringify(opts));
  }
}
|  | 
 | ||
// Resolve an IDBIndex handle for the natural index covering `fields`.
// If the index is not present on the store yet, force a database
// reopen (which rebuilds indexes) and retry. Errors surface through
// the supplied `reject` rather than the returned promise.
function getIndexHandle(pdb, fields, reject) {
  var indexName = naturalIndexName(fields);

  return new Promise(function (resolve) {
    pdb._openTransactionSafely([DOC_STORE], 'readonly', function (err, txn) {
      if (err) {
        idbError(reject)(err);
        return;
      }

      txn.onabort = idbError(reject);
      txn.ontimeout = idbError(reject);

      var store = txn.objectStore(DOC_STORE);
      var existingIndexNames = Array.from(store.indexNames);

      if (existingIndexNames.indexOf(indexName) !== -1) {
        resolve(store.index(indexName));
      } else {
        // The index is missing, force a db restart and try again
        pdb._freshen()
          .then(function () { return getIndexHandle(pdb, fields, reject); })
          .then(resolve);
      }
    });
  });
}
|  | 
 | ||
|  | // In theory we should return something like the doc example below, but find
 | ||
|  | // only needs rows: [{doc: {...}}], so I think we can just not bother for now
 | ||
|  | // {
 | ||
|  | //   "offset" : 0,
 | ||
|  | //   "rows": [{
 | ||
|  | //     "id": "doc3",
 | ||
|  | //     "key": "Lisa Says",
 | ||
|  | //     "value": null,
 | ||
|  | //     "doc": {
 | ||
|  | //       "_id": "doc3",
 | ||
|  | //       "_rev": "1-z",
 | ||
|  | //       "title": "Lisa Says"
 | ||
|  | //     }
 | ||
|  | //   }],
 | ||
|  | //   "total_rows" : 4
 | ||
|  | // }
 | ||
// Execute a pouchdb-find query against a native index.
//
// `signature` is "<ddocName>/<viewName>"; `opts` carries the find options
// (skip, limit, descending, plus whatever generateKeyRange reads).
// Resolves with {rows: [{doc: ...}]} — see the comment block above for why
// no offset/total_rows/key/value fields are included.
function query(idb, signature, opts) {
  // At this stage, in the current implementation, find has already gone through
  // and determined if the index already exists from PouchDB's perspective (eg
  // there is a design doc for it).
  //
  // If we find that the index doesn't exist this means we have to close and
  // re-open the DB to correct indexes before proceeding, at which point the
  // index should exist.

  // Called via $p, so `this` is the adapter (api); `idb` was unshifted by $p.
  var pdb = this;

  // Assumption, there will be only one /, between the design document name
  // and the view name.
  var parts = signature.split('/');

  return new Promise(function (resolve, reject) {
    pdb.get('_design/' + parts[0]).then(function (ddoc) {
      var fields = rawIndexFields(ddoc, parts[1]);
      if (!fields) {
        throw new Error('ddoc ' + ddoc._id +' with view ' + parts[1] +
          ' does not have map.options.def.fields defined.');
      }

      var skip = opts.skip;
      // `limit` is `false` when opts.limit is not an integer, so the
      // `limit === 0` and `if (limit)` checks below are no-ops in that case.
      var limit = Number.isInteger(opts.limit) && opts.limit;

      // `reject` is handed to getIndexHandle so transaction-level errors
      // reject this outer promise directly.
      return getIndexHandle(pdb, fields, reject)
        .then(function (indexHandle) {
          var keyRange = generateKeyRange(opts);
          var req = indexHandle.openCursor(keyRange, opts.descending ? 'prev' : 'next');

          var rows = [];
          req.onerror = idbError(reject);
          // onsuccess fires once per cursor position; `skip`, `limit` and
          // `rows` are mutated across those invocations.
          req.onsuccess = function (e) {
            var cursor = e.target.result;

            // Cursor exhausted, or the limit was reached on a previous step.
            if (!cursor || limit === 0) {
              return resolve({
                rows: rows
              });
            }

            // Apply skip exactly once via a single cursor.advance() jump,
            // then disable it for subsequent callbacks.
            if (skip) {
              cursor.advance(skip);
              skip = false;
              return;
            }

            if (limit) {
              limit = limit - 1;
            }

            rows.push({doc: externaliseRecord(cursor.value)});
            // Bracket syntax because `continue` is a reserved word.
            cursor["continue"]();
          };
        });
      })[
      "catch"](reject);
  });

}
|  | 
 | ||
// Deliberate no-op for this adapter; resolves immediately.
//
// Stale native indexes are already cleaned up whenever a DB is opened, and
// whenever a find query hits an index that should exist but doesn't (which
// forces a close/re-open). The only thing an eager implementation would buy
// is reclaiming space immediately after deleting many indexes — one option
// would be to simply close and re-open the DB here.
function viewCleanup() {
  return Promise.resolve();
}
|  | 
 | ||
// Name this adapter registers under with PouchDB.
var ADAPTER_NAME = 'indexeddb';

// Database handles shared across all IdbPouch instances (entries are
// removed by `_close` using the database name as the key).
var openDatabases = {};

// Single shared changes emitter for every database using this adapter.
// TODO: Constructor should be capitalised
var idbChanges = new Changes();
|  | 
 | ||
// Adapter constructor. Wires the PouchDB adapter API (`api._*` methods)
// to this file's indexeddb implementation. `callback(null, api)` is
// invoked asynchronously once the instance is assembled; the DB itself is
// opened lazily by the $/$p/$t wrappers on first use.
function IdbPouch(dbOpts, callback) {

  if (dbOpts.view_adapter) {
    console.log('Please note that the indexeddb adapter manages _find indexes itself, therefore it is not using your specified view_adapter');
  }

  var api = this;
  // Refreshed from setup() on every wrapped call, so it tracks the
  // currently open database.
  var metadata = {};

  // Wrapper that gives you an active DB handle. You probably want $t.
  // Prepends the idb handle to the wrapped function's arguments. On setup
  // failure, the LAST original argument is assumed to be a node-style
  // callback and receives the error; otherwise the error is just logged.
  var $ = function (fun) {
    return function () {
      var args = Array.prototype.slice.call(arguments);
      setup(openDatabases, api, dbOpts).then(function (res) {
        metadata = res.metadata;
        args.unshift(res.idb);
        fun.apply(api, args);
      })["catch"](function (err) {
        var last = args.pop();
        if (typeof last === 'function') {
          last(err);
        } else {
          console.error(err);
        }
      });
    };
  };
  // the promise version of $
  var $p = function (fun) {
    return function () {
      var args = Array.prototype.slice.call(arguments);

      return setup(openDatabases, api, dbOpts).then(function (res) {
        metadata = res.metadata;
        args.unshift(res.idb);

        return fun.apply(api, args);
      });
    };
  };
  // Wrapper that gives you a safe transaction handle. It's important to use
  // this instead of opening your own transaction from a db handle got from $,
  // because in the time between getting the db handle and opening the
  // transaction it may have been invalidated by index changes.
  // The wrapped function receives a {txn} or {error} wrapper object as its
  // first argument rather than a raw IDBTransaction.
  var $t = function (fun, stores, mode) {
    stores = stores || [DOC_STORE];
    mode = mode || 'readonly';

    return function () {
      var args = Array.prototype.slice.call(arguments);
      var txn = {};
      setup(openDatabases, api, dbOpts).then(function (res) {
        metadata = res.metadata;
        txn.txn = res.idb.transaction(stores, mode);
      })["catch"](function (err) {
        console.error('Failed to establish transaction safely');
        console.error(err);
        txn.error = err;
      }).then(function () {
        // Runs whether setup succeeded or failed; exactly one of
        // txn.txn / txn.error is set.
        args.unshift(txn);
        fun.apply(api, args);
      });
    };
  };

  // Node-style access to $t for helpers such as getIndexHandle.
  // NOTE: the inner `callback` parameter intentionally shadows the outer one.
  api._openTransactionSafely = function (stores, mode, callback) {
    $t(function (txn, callback) {
      callback(txn.error, txn.txn);
    }, stores, mode)(callback);
  };

  api._remote = false;
  api.type = function () { return ADAPTER_NAME; };

  // `_` is the idb handle prepended by $; these only need metadata.
  api._id = $(function (_, cb) {
    cb(null, metadata.db_uuid);
  });

  api._info = $(function (_, cb) {
    return info(metadata, cb);
  });

  api._get = $t(get);

  api._bulkDocs = $(function (_, req, opts, callback) {
    bulkDocs(api, req, opts, metadata, dbOpts, idbChanges, callback);
  });

  api._allDocs = $t(function (txn, opts, cb) {
    allDocs(txn, metadata, opts, cb);
  });

  api._getAttachment = $t(getAttachment);

  api._changes = $t(function (txn, opts) {
    changes(txn, idbChanges, api, dbOpts, opts);
  });

  api._getRevisionTree = $t(getRevisionTree);
  api._doCompaction = $t(doCompaction, [DOC_STORE], 'readwrite');

  // pouchdb-find hook: this adapter implements query/viewCleanup itself
  // instead of going through the generic map/reduce machinery.
  api._customFindAbstractMapper = {
    query: $p(query),
    viewCleanup: $p(viewCleanup)
  };

  // `opts` is currently unused by destroy().
  api._destroy = function (opts, callback) {
    return destroy(dbOpts, openDatabases, idbChanges, callback);
  };

  // `db` is the idb handle prepended by $.
  api._close = $(function (db, cb) {
    delete openDatabases[dbOpts.name];
    db.close();
    cb();
  });

  // Closing and re-opening the DB re-generates native indexes
  // (re-opening happens inside $ via setup()).
  api._freshen = function () {
    return new Promise(function (resolve) {
      api._close(function () {
        $(resolve)();
      });
    });
  };

  // TODO: this setTimeout seems nasty, if its needed lets
  // figure out / explain why
  // NOTE(review): presumably ensures the callback is always async so
  // callers never observe a synchronous completion — confirm.
  setTimeout(function () {
    callback(null, api);
  });
}
|  | 
 | ||
// TODO: placeholder — always claims support rather than feature-detecting
// indexeddb in the current environment.
IdbPouch.valid = function () {
  return true;
};
|  | 
 | ||
// Plugin entry point: registers this adapter with the given PouchDB
// constructor under ADAPTER_NAME.
function IndexeddbPouchPlugin (PouchDB) {
  // Third argument `true`: presumably marks the adapter as usable by
  // default — confirm against PouchDB.adapter's documentation.
  var usable = true;
  PouchDB.adapter(ADAPTER_NAME, IdbPouch, usable);
}
|  | 
 | ||
// this code only runs in the browser, as its own dist/ script
// Self-register against the global PouchDB if it's present; otherwise
// complain loudly since including pouchdb.js first is a prerequisite.
if (typeof PouchDB !== 'undefined') {
  PouchDB.plugin(IndexeddbPouchPlugin);
} else {
  guardedConsole('error', 'indexeddb adapter plugin error: ' +
    'Cannot find global "PouchDB" object! ' +
    'Did you remember to include pouchdb.js?');
}
|  | 
 | ||
|  | },{"1":1,"10":10,"11":11,"12":12,"3":3,"4":4}]},{},[27]); |