mirror of https://github.com/flynx/pWiki.git
synced 2025-10-31 02:50:08 +00:00

6799 lines · 192 KiB · JavaScript · Executable File
// pouchdb-find plugin 7.3.0
// Based on Mango: https://github.com/cloudant/mango
//
// (c) 2012-2022 Dale Harvey and the PouchDB team
// PouchDB may be freely distributed under the Apache license, version 2.0.
// For all details and documentation:
// http://pouchdb.com
| (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(_dereq_,module,exports){
 | |
| 'use strict';
 | |
| 
 | |
| module.exports = argsArray;
 | |
| 
 | |
| function argsArray(fun) {
 | |
|   return function () {
 | |
|     var len = arguments.length;
 | |
|     if (len) {
 | |
|       var args = [];
 | |
|       var i = -1;
 | |
|       while (++i < len) {
 | |
|         args[i] = arguments[i];
 | |
|       }
 | |
|       return fun.call(this, args);
 | |
|     } else {
 | |
|       return fun.call(this, []);
 | |
|     }
 | |
|   };
 | |
| }
 | |
| },{}],2:[function(_dereq_,module,exports){
 | |
| 
 | |
| },{}],3:[function(_dereq_,module,exports){
 | |
| // Copyright Joyent, Inc. and other Node contributors.
 | |
| //
 | |
| // Permission is hereby granted, free of charge, to any person obtaining a
 | |
| // copy of this software and associated documentation files (the
 | |
| // "Software"), to deal in the Software without restriction, including
 | |
| // without limitation the rights to use, copy, modify, merge, publish,
 | |
| // distribute, sublicense, and/or sell copies of the Software, and to permit
 | |
| // persons to whom the Software is furnished to do so, subject to the
 | |
| // following conditions:
 | |
| //
 | |
| // The above copyright notice and this permission notice shall be included
 | |
| // in all copies or substantial portions of the Software.
 | |
| //
 | |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 | |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 | |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
 | |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 | |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 | |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 | |
| // USE OR OTHER DEALINGS IN THE SOFTWARE.
 | |
| 
 | |
| var objectCreate = Object.create || objectCreatePolyfill
 | |
| var objectKeys = Object.keys || objectKeysPolyfill
 | |
| var bind = Function.prototype.bind || functionBindPolyfill
 | |
| 
 | |
| function EventEmitter() {
 | |
|   if (!this._events || !Object.prototype.hasOwnProperty.call(this, '_events')) {
 | |
|     this._events = objectCreate(null);
 | |
|     this._eventsCount = 0;
 | |
|   }
 | |
| 
 | |
|   this._maxListeners = this._maxListeners || undefined;
 | |
| }
 | |
| module.exports = EventEmitter;
 | |
| 
 | |
| // Backwards-compat with node 0.10.x
 | |
| EventEmitter.EventEmitter = EventEmitter;
 | |
| 
 | |
| EventEmitter.prototype._events = undefined;
 | |
| EventEmitter.prototype._maxListeners = undefined;
 | |
| 
 | |
| // By default EventEmitters will print a warning if more than 10 listeners are
 | |
| // added to it. This is a useful default which helps finding memory leaks.
 | |
| var defaultMaxListeners = 10;
 | |
| 
 | |
| var hasDefineProperty;
 | |
| try {
 | |
|   var o = {};
 | |
|   if (Object.defineProperty) Object.defineProperty(o, 'x', { value: 0 });
 | |
|   hasDefineProperty = o.x === 0;
 | |
| } catch (err) { hasDefineProperty = false }
 | |
| if (hasDefineProperty) {
 | |
|   Object.defineProperty(EventEmitter, 'defaultMaxListeners', {
 | |
|     enumerable: true,
 | |
|     get: function() {
 | |
|       return defaultMaxListeners;
 | |
|     },
 | |
|     set: function(arg) {
 | |
|       // check whether the input is a positive number (whose value is zero or
 | |
|       // greater and not a NaN).
 | |
|       if (typeof arg !== 'number' || arg < 0 || arg !== arg)
 | |
|         throw new TypeError('"defaultMaxListeners" must be a positive number');
 | |
|       defaultMaxListeners = arg;
 | |
|     }
 | |
|   });
 | |
| } else {
 | |
|   EventEmitter.defaultMaxListeners = defaultMaxListeners;
 | |
| }
 | |
| 
 | |
| // Obviously not all Emitters should be limited to 10. This function allows
 | |
| // that to be increased. Set to zero for unlimited.
 | |
| EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
 | |
|   if (typeof n !== 'number' || n < 0 || isNaN(n))
 | |
|     throw new TypeError('"n" argument must be a positive number');
 | |
|   this._maxListeners = n;
 | |
|   return this;
 | |
| };
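// Usage sketch (illustrative, not part of the bundle): per the comment above, the
// 10-listener warning threshold can be raised per emitter, or disabled with 0.
// `bus` is a hypothetical EventEmitter instance.
//
//   var bus = new EventEmitter();
//   bus.setMaxListeners(50);   // warn only if more than 50 listeners are added
//   bus.setMaxListeners(0);    // 0 = unlimited, no leak warning at all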
 | |
| 
 | |
| function $getMaxListeners(that) {
 | |
|   if (that._maxListeners === undefined)
 | |
|     return EventEmitter.defaultMaxListeners;
 | |
|   return that._maxListeners;
 | |
| }
 | |
| 
 | |
| EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
 | |
|   return $getMaxListeners(this);
 | |
| };
 | |
| 
 | |
| // These standalone emit* functions are used to optimize calling of event
 | |
| // handlers for fast cases because emit() itself often has a variable number of
 | |
| // arguments and can be deoptimized because of that. These functions always have
 | |
| // the same number of arguments and thus do not get deoptimized, so the code
 | |
| // inside them can execute faster.
 | |
| function emitNone(handler, isFn, self) {
 | |
|   if (isFn)
 | |
|     handler.call(self);
 | |
|   else {
 | |
|     var len = handler.length;
 | |
|     var listeners = arrayClone(handler, len);
 | |
|     for (var i = 0; i < len; ++i)
 | |
|       listeners[i].call(self);
 | |
|   }
 | |
| }
 | |
| function emitOne(handler, isFn, self, arg1) {
 | |
|   if (isFn)
 | |
|     handler.call(self, arg1);
 | |
|   else {
 | |
|     var len = handler.length;
 | |
|     var listeners = arrayClone(handler, len);
 | |
|     for (var i = 0; i < len; ++i)
 | |
|       listeners[i].call(self, arg1);
 | |
|   }
 | |
| }
 | |
| function emitTwo(handler, isFn, self, arg1, arg2) {
 | |
|   if (isFn)
 | |
|     handler.call(self, arg1, arg2);
 | |
|   else {
 | |
|     var len = handler.length;
 | |
|     var listeners = arrayClone(handler, len);
 | |
|     for (var i = 0; i < len; ++i)
 | |
|       listeners[i].call(self, arg1, arg2);
 | |
|   }
 | |
| }
 | |
| function emitThree(handler, isFn, self, arg1, arg2, arg3) {
 | |
|   if (isFn)
 | |
|     handler.call(self, arg1, arg2, arg3);
 | |
|   else {
 | |
|     var len = handler.length;
 | |
|     var listeners = arrayClone(handler, len);
 | |
|     for (var i = 0; i < len; ++i)
 | |
|       listeners[i].call(self, arg1, arg2, arg3);
 | |
|   }
 | |
| }
 | |
| 
 | |
| function emitMany(handler, isFn, self, args) {
 | |
|   if (isFn)
 | |
|     handler.apply(self, args);
 | |
|   else {
 | |
|     var len = handler.length;
 | |
|     var listeners = arrayClone(handler, len);
 | |
|     for (var i = 0; i < len; ++i)
 | |
|       listeners[i].apply(self, args);
 | |
|   }
 | |
| }
 | |
| 
 | |
| EventEmitter.prototype.emit = function emit(type) {
 | |
|   var er, handler, len, args, i, events;
 | |
|   var doError = (type === 'error');
 | |
| 
 | |
|   events = this._events;
 | |
|   if (events)
 | |
|     doError = (doError && events.error == null);
 | |
|   else if (!doError)
 | |
|     return false;
 | |
| 
 | |
|   // If there is no 'error' event listener then throw.
 | |
|   if (doError) {
 | |
|     if (arguments.length > 1)
 | |
|       er = arguments[1];
 | |
|     if (er instanceof Error) {
 | |
|       throw er; // Unhandled 'error' event
 | |
|     } else {
 | |
|       // At least give some kind of context to the user
 | |
|       var err = new Error('Unhandled "error" event. (' + er + ')');
 | |
|       err.context = er;
 | |
|       throw err;
 | |
|     }
 | |
|     return false;
 | |
|   }
 | |
| 
 | |
|   handler = events[type];
 | |
| 
 | |
|   if (!handler)
 | |
|     return false;
 | |
| 
 | |
|   var isFn = typeof handler === 'function';
 | |
|   len = arguments.length;
 | |
|   switch (len) {
 | |
|       // fast cases
 | |
|     case 1:
 | |
|       emitNone(handler, isFn, this);
 | |
|       break;
 | |
|     case 2:
 | |
|       emitOne(handler, isFn, this, arguments[1]);
 | |
|       break;
 | |
|     case 3:
 | |
|       emitTwo(handler, isFn, this, arguments[1], arguments[2]);
 | |
|       break;
 | |
|     case 4:
 | |
|       emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]);
 | |
|       break;
 | |
|       // slower
 | |
|     default:
 | |
|       args = new Array(len - 1);
 | |
|       for (i = 1; i < len; i++)
 | |
|         args[i - 1] = arguments[i];
 | |
|       emitMany(handler, isFn, this, args);
 | |
|   }
 | |
| 
 | |
|   return true;
 | |
| };
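// Illustration (not part of the bundle): emit() dispatches on argument count, so the
// common 0-3 argument cases go through the fixed-arity emitNone/emitOne/emitTwo/
// emitThree helpers above; only calls with 4+ arguments pay for building an args array.
// `bus`, `chunk`, `x`, `y`, `a`..`d` are hypothetical names used for the example.
//
//   bus.emit('ready');                 // emitNone
//   bus.emit('data', chunk);           // emitOne
//   bus.emit('moved', x, y);           // emitTwo
//   bus.emit('log', a, b, c, d);       // generic emitMany path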
 | |
| 
 | |
| function _addListener(target, type, listener, prepend) {
 | |
|   var m;
 | |
|   var events;
 | |
|   var existing;
 | |
| 
 | |
|   if (typeof listener !== 'function')
 | |
|     throw new TypeError('"listener" argument must be a function');
 | |
| 
 | |
|   events = target._events;
 | |
|   if (!events) {
 | |
|     events = target._events = objectCreate(null);
 | |
|     target._eventsCount = 0;
 | |
|   } else {
 | |
|     // To avoid recursion in the case that type === "newListener"! Before
 | |
|     // adding it to the listeners, first emit "newListener".
 | |
|     if (events.newListener) {
 | |
|       target.emit('newListener', type,
 | |
|           listener.listener ? listener.listener : listener);
 | |
| 
 | |
|       // Re-assign `events` because a newListener handler could have caused the
 | |
|       // this._events to be assigned to a new object
 | |
|       events = target._events;
 | |
|     }
 | |
|     existing = events[type];
 | |
|   }
 | |
| 
 | |
|   if (!existing) {
 | |
|     // Optimize the case of one listener. Don't need the extra array object.
 | |
|     existing = events[type] = listener;
 | |
|     ++target._eventsCount;
 | |
|   } else {
 | |
|     if (typeof existing === 'function') {
 | |
|       // Adding the second element, need to change to array.
 | |
|       existing = events[type] =
 | |
|           prepend ? [listener, existing] : [existing, listener];
 | |
|     } else {
 | |
|       // If we've already got an array, just append.
 | |
|       if (prepend) {
 | |
|         existing.unshift(listener);
 | |
|       } else {
 | |
|         existing.push(listener);
 | |
|       }
 | |
|     }
 | |
| 
 | |
|     // Check for listener leak
 | |
|     if (!existing.warned) {
 | |
|       m = $getMaxListeners(target);
 | |
|       if (m && m > 0 && existing.length > m) {
 | |
|         existing.warned = true;
 | |
|         var w = new Error('Possible EventEmitter memory leak detected. ' +
 | |
|             existing.length + ' "' + String(type) + '" listeners ' +
 | |
|             'added. Use emitter.setMaxListeners() to ' +
 | |
|             'increase limit.');
 | |
|         w.name = 'MaxListenersExceededWarning';
 | |
|         w.emitter = target;
 | |
|         w.type = type;
 | |
|         w.count = existing.length;
 | |
|         if (typeof console === 'object' && console.warn) {
 | |
|           console.warn('%s: %s', w.name, w.message);
 | |
|         }
 | |
|       }
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   return target;
 | |
| }
 | |
| 
 | |
| EventEmitter.prototype.addListener = function addListener(type, listener) {
 | |
|   return _addListener(this, type, listener, false);
 | |
| };
 | |
| 
 | |
| EventEmitter.prototype.on = EventEmitter.prototype.addListener;
 | |
| 
 | |
| EventEmitter.prototype.prependListener =
 | |
|     function prependListener(type, listener) {
 | |
|       return _addListener(this, type, listener, true);
 | |
|     };
 | |
| 
 | |
| function onceWrapper() {
 | |
|   if (!this.fired) {
 | |
|     this.target.removeListener(this.type, this.wrapFn);
 | |
|     this.fired = true;
 | |
|     switch (arguments.length) {
 | |
|       case 0:
 | |
|         return this.listener.call(this.target);
 | |
|       case 1:
 | |
|         return this.listener.call(this.target, arguments[0]);
 | |
|       case 2:
 | |
|         return this.listener.call(this.target, arguments[0], arguments[1]);
 | |
|       case 3:
 | |
|         return this.listener.call(this.target, arguments[0], arguments[1],
 | |
|             arguments[2]);
 | |
|       default:
 | |
|         var args = new Array(arguments.length);
 | |
|         for (var i = 0; i < args.length; ++i)
 | |
|           args[i] = arguments[i];
 | |
|         this.listener.apply(this.target, args);
 | |
|     }
 | |
|   }
 | |
| }
 | |
| 
 | |
| function _onceWrap(target, type, listener) {
 | |
|   var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener };
 | |
|   var wrapped = bind.call(onceWrapper, state);
 | |
|   wrapped.listener = listener;
 | |
|   state.wrapFn = wrapped;
 | |
|   return wrapped;
 | |
| }
 | |
| 
 | |
| EventEmitter.prototype.once = function once(type, listener) {
 | |
|   if (typeof listener !== 'function')
 | |
|     throw new TypeError('"listener" argument must be a function');
 | |
|   this.on(type, _onceWrap(this, type, listener));
 | |
|   return this;
 | |
| };
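// Usage sketch (illustrative): once() registers a wrapper (see onceWrapper above) that
// removes itself before invoking the real listener, so the handler runs at most one
// time even if the event keeps firing.
//
//   bus.once('connected', function () {
//     console.log('first connection only');
//   });
//   bus.emit('connected');   // logs
//   bus.emit('connected');   // wrapper already removed, nothing happens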
 | |
| 
 | |
| EventEmitter.prototype.prependOnceListener =
 | |
|     function prependOnceListener(type, listener) {
 | |
|       if (typeof listener !== 'function')
 | |
|         throw new TypeError('"listener" argument must be a function');
 | |
|       this.prependListener(type, _onceWrap(this, type, listener));
 | |
|       return this;
 | |
|     };
 | |
| 
 | |
| // Emits a 'removeListener' event if and only if the listener was removed.
 | |
| EventEmitter.prototype.removeListener =
 | |
|     function removeListener(type, listener) {
 | |
|       var list, events, position, i, originalListener;
 | |
| 
 | |
|       if (typeof listener !== 'function')
 | |
|         throw new TypeError('"listener" argument must be a function');
 | |
| 
 | |
|       events = this._events;
 | |
|       if (!events)
 | |
|         return this;
 | |
| 
 | |
|       list = events[type];
 | |
|       if (!list)
 | |
|         return this;
 | |
| 
 | |
|       if (list === listener || list.listener === listener) {
 | |
|         if (--this._eventsCount === 0)
 | |
|           this._events = objectCreate(null);
 | |
|         else {
 | |
|           delete events[type];
 | |
|           if (events.removeListener)
 | |
|             this.emit('removeListener', type, list.listener || listener);
 | |
|         }
 | |
|       } else if (typeof list !== 'function') {
 | |
|         position = -1;
 | |
| 
 | |
|         for (i = list.length - 1; i >= 0; i--) {
 | |
|           if (list[i] === listener || list[i].listener === listener) {
 | |
|             originalListener = list[i].listener;
 | |
|             position = i;
 | |
|             break;
 | |
|           }
 | |
|         }
 | |
| 
 | |
|         if (position < 0)
 | |
|           return this;
 | |
| 
 | |
|         if (position === 0)
 | |
|           list.shift();
 | |
|         else
 | |
|           spliceOne(list, position);
 | |
| 
 | |
|         if (list.length === 1)
 | |
|           events[type] = list[0];
 | |
| 
 | |
|         if (events.removeListener)
 | |
|           this.emit('removeListener', type, originalListener || listener);
 | |
|       }
 | |
| 
 | |
|       return this;
 | |
|     };
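// Usage sketch (illustrative): removal needs the same function reference that was
// registered, so keep a named handler around; listeners added via once() can also be
// removed through their original function thanks to the wrapFn/listener bookkeeping.
//
//   function onTick() { /* ... */ }
//   bus.on('tick', onTick);
//   bus.removeListener('tick', onTick);   // emits 'removeListener' if it was found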
 | |
| 
 | |
| EventEmitter.prototype.removeAllListeners =
 | |
|     function removeAllListeners(type) {
 | |
|       var listeners, events, i;
 | |
| 
 | |
|       events = this._events;
 | |
|       if (!events)
 | |
|         return this;
 | |
| 
 | |
|       // not listening for removeListener, no need to emit
 | |
|       if (!events.removeListener) {
 | |
|         if (arguments.length === 0) {
 | |
|           this._events = objectCreate(null);
 | |
|           this._eventsCount = 0;
 | |
|         } else if (events[type]) {
 | |
|           if (--this._eventsCount === 0)
 | |
|             this._events = objectCreate(null);
 | |
|           else
 | |
|             delete events[type];
 | |
|         }
 | |
|         return this;
 | |
|       }
 | |
| 
 | |
|       // emit removeListener for all listeners on all events
 | |
|       if (arguments.length === 0) {
 | |
|         var keys = objectKeys(events);
 | |
|         var key;
 | |
|         for (i = 0; i < keys.length; ++i) {
 | |
|           key = keys[i];
 | |
|           if (key === 'removeListener') continue;
 | |
|           this.removeAllListeners(key);
 | |
|         }
 | |
|         this.removeAllListeners('removeListener');
 | |
|         this._events = objectCreate(null);
 | |
|         this._eventsCount = 0;
 | |
|         return this;
 | |
|       }
 | |
| 
 | |
|       listeners = events[type];
 | |
| 
 | |
|       if (typeof listeners === 'function') {
 | |
|         this.removeListener(type, listeners);
 | |
|       } else if (listeners) {
 | |
|         // LIFO order
 | |
|         for (i = listeners.length - 1; i >= 0; i--) {
 | |
|           this.removeListener(type, listeners[i]);
 | |
|         }
 | |
|       }
 | |
| 
 | |
|       return this;
 | |
|     };
 | |
| 
 | |
| function _listeners(target, type, unwrap) {
 | |
|   var events = target._events;
 | |
| 
 | |
|   if (!events)
 | |
|     return [];
 | |
| 
 | |
|   var evlistener = events[type];
 | |
|   if (!evlistener)
 | |
|     return [];
 | |
| 
 | |
|   if (typeof evlistener === 'function')
 | |
|     return unwrap ? [evlistener.listener || evlistener] : [evlistener];
 | |
| 
 | |
|   return unwrap ? unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length);
 | |
| }
 | |
| 
 | |
| EventEmitter.prototype.listeners = function listeners(type) {
 | |
|   return _listeners(this, type, true);
 | |
| };
 | |
| 
 | |
| EventEmitter.prototype.rawListeners = function rawListeners(type) {
 | |
|   return _listeners(this, type, false);
 | |
| };
 | |
| 
 | |
| EventEmitter.listenerCount = function(emitter, type) {
 | |
|   if (typeof emitter.listenerCount === 'function') {
 | |
|     return emitter.listenerCount(type);
 | |
|   } else {
 | |
|     return listenerCount.call(emitter, type);
 | |
|   }
 | |
| };
 | |
| 
 | |
| EventEmitter.prototype.listenerCount = listenerCount;
 | |
| function listenerCount(type) {
 | |
|   var events = this._events;
 | |
| 
 | |
|   if (events) {
 | |
|     var evlistener = events[type];
 | |
| 
 | |
|     if (typeof evlistener === 'function') {
 | |
|       return 1;
 | |
|     } else if (evlistener) {
 | |
|       return evlistener.length;
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   return 0;
 | |
| }
 | |
| 
 | |
| EventEmitter.prototype.eventNames = function eventNames() {
 | |
|   return this._eventsCount > 0 ? Reflect.ownKeys(this._events) : [];
 | |
| };
 | |
| 
 | |
| // About 1.5x faster than the two-arg version of Array#splice().
 | |
| function spliceOne(list, index) {
 | |
|   for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1)
 | |
|     list[i] = list[k];
 | |
|   list.pop();
 | |
| }
 | |
| 
 | |
| function arrayClone(arr, n) {
 | |
|   var copy = new Array(n);
 | |
|   for (var i = 0; i < n; ++i)
 | |
|     copy[i] = arr[i];
 | |
|   return copy;
 | |
| }
 | |
| 
 | |
| function unwrapListeners(arr) {
 | |
|   var ret = new Array(arr.length);
 | |
|   for (var i = 0; i < ret.length; ++i) {
 | |
|     ret[i] = arr[i].listener || arr[i];
 | |
|   }
 | |
|   return ret;
 | |
| }
 | |
| 
 | |
| function objectCreatePolyfill(proto) {
 | |
|   var F = function() {};
 | |
|   F.prototype = proto;
 | |
|   return new F;
 | |
| }
 | |
function objectKeysPolyfill(obj) {
  var keys = [];
  for (var k in obj) if (Object.prototype.hasOwnProperty.call(obj, k)) {
    keys.push(k);
  }
  // return the collected array (returning `k` here would yield only the last key)
  return keys;
}
 | |
| function functionBindPolyfill(context) {
 | |
|   var fn = this;
 | |
|   return function () {
 | |
|     return fn.apply(context, arguments);
 | |
|   };
 | |
| }
 | |
| 
 | |
| },{}],4:[function(_dereq_,module,exports){
 | |
| 'use strict';
 | |
| var types = [
 | |
|   _dereq_(2),
 | |
|   _dereq_(7),
 | |
|   _dereq_(6),
 | |
|   _dereq_(5),
 | |
|   _dereq_(8),
 | |
|   _dereq_(9)
 | |
| ];
 | |
| var draining;
 | |
| var currentQueue;
 | |
| var queueIndex = -1;
 | |
| var queue = [];
 | |
| var scheduled = false;
 | |
| function cleanUpNextTick() {
 | |
|   if (!draining || !currentQueue) {
 | |
|     return;
 | |
|   }
 | |
|   draining = false;
 | |
|   if (currentQueue.length) {
 | |
|     queue = currentQueue.concat(queue);
 | |
|   } else {
 | |
|     queueIndex = -1;
 | |
|   }
 | |
|   if (queue.length) {
 | |
|     nextTick();
 | |
|   }
 | |
| }
 | |
| 
 | |
| //named nextTick for less confusing stack traces
 | |
| function nextTick() {
 | |
|   if (draining) {
 | |
|     return;
 | |
|   }
 | |
|   scheduled = false;
 | |
|   draining = true;
 | |
|   var len = queue.length;
 | |
|   var timeout = setTimeout(cleanUpNextTick);
 | |
|   while (len) {
 | |
|     currentQueue = queue;
 | |
|     queue = [];
 | |
|     while (currentQueue && ++queueIndex < len) {
 | |
|       currentQueue[queueIndex].run();
 | |
|     }
 | |
|     queueIndex = -1;
 | |
|     len = queue.length;
 | |
|   }
 | |
|   currentQueue = null;
 | |
|   queueIndex = -1;
 | |
|   draining = false;
 | |
|   clearTimeout(timeout);
 | |
| }
 | |
| var scheduleDrain;
 | |
| var i = -1;
 | |
| var len = types.length;
 | |
| while (++i < len) {
 | |
|   if (types[i] && types[i].test && types[i].test()) {
 | |
|     scheduleDrain = types[i].install(nextTick);
 | |
|     break;
 | |
|   }
 | |
| }
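// Note (illustrative, not part of the bundle): each entry in `types` above is a small
// capability probe with the same two-function contract, so this loop simply picks the
// first scheduling strategy that works in the current environment:
//
//   exports.test    = function () { /* return truthy if usable here */ };
//   exports.install = function (drain) { /* return a function that schedules drain() */ };
//
// The setTimeout fallback (module 9) always tests true and sits last in the list, so
// scheduleDrain is always assigned.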
 | |
// v8 likes predictable objects
| function Item(fun, array) {
 | |
|   this.fun = fun;
 | |
|   this.array = array;
 | |
| }
 | |
| Item.prototype.run = function () {
 | |
|   var fun = this.fun;
 | |
|   var array = this.array;
 | |
|   switch (array.length) {
 | |
|   case 0:
 | |
|     return fun();
 | |
|   case 1:
 | |
|     return fun(array[0]);
 | |
|   case 2:
 | |
|     return fun(array[0], array[1]);
 | |
|   case 3:
 | |
|     return fun(array[0], array[1], array[2]);
 | |
|   default:
 | |
|     return fun.apply(null, array);
 | |
|   }
 | |
| 
 | |
| };
 | |
| module.exports = immediate;
 | |
| function immediate(task) {
 | |
|   var args = new Array(arguments.length - 1);
 | |
|   if (arguments.length > 1) {
 | |
|     for (var i = 1; i < arguments.length; i++) {
 | |
|       args[i - 1] = arguments[i];
 | |
|     }
 | |
|   }
 | |
|   queue.push(new Item(task, args));
 | |
|   if (!scheduled && !draining) {
 | |
|     scheduled = true;
 | |
|     scheduleDrain();
 | |
|   }
 | |
| }
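// Usage sketch (illustrative): `immediate` queues a task to run asynchronously on the
// selected scheduler, passing through any extra arguments.
//
//   immediate(function (a, b) {
//     console.log(a + b);   // runs on the next scheduled drain, not synchronously
//   }, 1, 2);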
 | |
| 
 | |
| },{"2":2,"5":5,"6":6,"7":7,"8":8,"9":9}],5:[function(_dereq_,module,exports){
 | |
| (function (global){(function (){
 | |
| 'use strict';
 | |
| 
 | |
| exports.test = function () {
 | |
  if (global.setImmediate) {
    // we can only get here in IE10,
    // which doesn't handle postMessage well
    return false;
  }
 | |
|   return typeof global.MessageChannel !== 'undefined';
 | |
| };
 | |
| 
 | |
| exports.install = function (func) {
 | |
|   var channel = new global.MessageChannel();
 | |
|   channel.port1.onmessage = func;
 | |
|   return function () {
 | |
|     channel.port2.postMessage(0);
 | |
|   };
 | |
| };
 | |
| }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
 | |
| },{}],6:[function(_dereq_,module,exports){
 | |
| (function (global){(function (){
 | |
| 'use strict';
 | |
| //based off rsvp https://github.com/tildeio/rsvp.js
 | |
| //license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
 | |
| //https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js
 | |
| 
 | |
| var Mutation = global.MutationObserver || global.WebKitMutationObserver;
 | |
| 
 | |
| exports.test = function () {
 | |
|   return Mutation;
 | |
| };
 | |
| 
 | |
| exports.install = function (handle) {
 | |
|   var called = 0;
 | |
|   var observer = new Mutation(handle);
 | |
|   var element = global.document.createTextNode('');
 | |
|   observer.observe(element, {
 | |
|     characterData: true
 | |
|   });
 | |
|   return function () {
 | |
|     element.data = (called = ++called % 2);
 | |
|   };
 | |
| };
 | |
| }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
 | |
| },{}],7:[function(_dereq_,module,exports){
 | |
| (function (global){(function (){
 | |
| 'use strict';
 | |
| exports.test = function () {
 | |
|   return typeof global.queueMicrotask === 'function';
 | |
| };
 | |
| 
 | |
| exports.install = function (func) {
 | |
|   return function () {
 | |
|     global.queueMicrotask(func);
 | |
|   };
 | |
| };
 | |
| 
 | |
| }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
 | |
| },{}],8:[function(_dereq_,module,exports){
 | |
| (function (global){(function (){
 | |
| 'use strict';
 | |
| 
 | |
| exports.test = function () {
 | |
|   return 'document' in global && 'onreadystatechange' in global.document.createElement('script');
 | |
| };
 | |
| 
 | |
| exports.install = function (handle) {
 | |
|   return function () {
 | |
| 
 | |
|     // Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
 | |
|     // into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
 | |
|     var scriptEl = global.document.createElement('script');
 | |
|     scriptEl.onreadystatechange = function () {
 | |
|       handle();
 | |
| 
 | |
|       scriptEl.onreadystatechange = null;
 | |
|       scriptEl.parentNode.removeChild(scriptEl);
 | |
|       scriptEl = null;
 | |
|     };
 | |
|     global.document.documentElement.appendChild(scriptEl);
 | |
| 
 | |
|     return handle;
 | |
|   };
 | |
| };
 | |
| }).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
 | |
| },{}],9:[function(_dereq_,module,exports){
 | |
| 'use strict';
 | |
| exports.test = function () {
 | |
|   return true;
 | |
| };
 | |
| 
 | |
| exports.install = function (t) {
 | |
|   return function () {
 | |
|     setTimeout(t, 0);
 | |
|   };
 | |
| };
 | |
| },{}],10:[function(_dereq_,module,exports){
 | |
| if (typeof Object.create === 'function') {
 | |
|   // implementation from standard node.js 'util' module
 | |
|   module.exports = function inherits(ctor, superCtor) {
 | |
|     if (superCtor) {
 | |
|       ctor.super_ = superCtor
 | |
|       ctor.prototype = Object.create(superCtor.prototype, {
 | |
|         constructor: {
 | |
|           value: ctor,
 | |
|           enumerable: false,
 | |
|           writable: true,
 | |
|           configurable: true
 | |
|         }
 | |
|       })
 | |
|     }
 | |
|   };
 | |
| } else {
 | |
|   // old school shim for old browsers
 | |
|   module.exports = function inherits(ctor, superCtor) {
 | |
|     if (superCtor) {
 | |
|       ctor.super_ = superCtor
 | |
|       var TempCtor = function () {}
 | |
|       TempCtor.prototype = superCtor.prototype
 | |
|       ctor.prototype = new TempCtor()
 | |
|       ctor.prototype.constructor = ctor
 | |
|     }
 | |
|   }
 | |
| }
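// Usage sketch (illustrative): classic Node-style prototypal inheritance helper.
// `Reader` and `Base` are hypothetical constructors.
//
//   function Base() {}
//   function Reader() { Base.call(this); }
//   inherits(Reader, Base);   // Reader.prototype now chains to Base.prototype
//   // Reader.super_ === Base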
 | |
| 
 | |
| },{}],11:[function(_dereq_,module,exports){
 | |
| (function (factory) {
 | |
|     if (typeof exports === 'object') {
 | |
|         // Node/CommonJS
 | |
|         module.exports = factory();
 | |
|     } else if (typeof define === 'function' && define.amd) {
 | |
|         // AMD
 | |
|         define(factory);
 | |
|     } else {
 | |
|         // Browser globals (with support for web workers)
 | |
|         var glob;
 | |
| 
 | |
|         try {
 | |
|             glob = window;
 | |
|         } catch (e) {
 | |
|             glob = self;
 | |
|         }
 | |
| 
 | |
|         glob.SparkMD5 = factory();
 | |
|     }
 | |
| }(function (undefined) {
 | |
| 
 | |
|     'use strict';
 | |
| 
 | |
|     /*
 | |
|      * Fastest md5 implementation around (JKM md5).
 | |
|      * Credits: Joseph Myers
 | |
|      *
 | |
|      * @see http://www.myersdaily.org/joseph/javascript/md5-text.html
 | |
|      * @see http://jsperf.com/md5-shootout/7
 | |
|      */
 | |
| 
 | |
|     /* this function is much faster,
 | |
|       so if possible we use it. Some IEs
 | |
|       are the only ones I know of that
 | |
|       need the idiotic second function,
 | |
|       generated by an if clause.  */
 | |
|     var add32 = function (a, b) {
 | |
|         return (a + b) & 0xFFFFFFFF;
 | |
|     },
 | |
|         hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'];
 | |
| 
 | |
| 
 | |
|     function cmn(q, a, b, x, s, t) {
 | |
|         a = add32(add32(a, q), add32(x, t));
 | |
|         return add32((a << s) | (a >>> (32 - s)), b);
 | |
|     }
 | |
| 
 | |
|     function md5cycle(x, k) {
 | |
|         var a = x[0],
 | |
|             b = x[1],
 | |
|             c = x[2],
 | |
|             d = x[3];
 | |
| 
 | |
|         a += (b & c | ~b & d) + k[0] - 680876936 | 0;
 | |
|         a  = (a << 7 | a >>> 25) + b | 0;
 | |
|         d += (a & b | ~a & c) + k[1] - 389564586 | 0;
 | |
|         d  = (d << 12 | d >>> 20) + a | 0;
 | |
|         c += (d & a | ~d & b) + k[2] + 606105819 | 0;
 | |
|         c  = (c << 17 | c >>> 15) + d | 0;
 | |
|         b += (c & d | ~c & a) + k[3] - 1044525330 | 0;
 | |
|         b  = (b << 22 | b >>> 10) + c | 0;
 | |
|         a += (b & c | ~b & d) + k[4] - 176418897 | 0;
 | |
|         a  = (a << 7 | a >>> 25) + b | 0;
 | |
|         d += (a & b | ~a & c) + k[5] + 1200080426 | 0;
 | |
|         d  = (d << 12 | d >>> 20) + a | 0;
 | |
|         c += (d & a | ~d & b) + k[6] - 1473231341 | 0;
 | |
|         c  = (c << 17 | c >>> 15) + d | 0;
 | |
|         b += (c & d | ~c & a) + k[7] - 45705983 | 0;
 | |
|         b  = (b << 22 | b >>> 10) + c | 0;
 | |
|         a += (b & c | ~b & d) + k[8] + 1770035416 | 0;
 | |
|         a  = (a << 7 | a >>> 25) + b | 0;
 | |
|         d += (a & b | ~a & c) + k[9] - 1958414417 | 0;
 | |
|         d  = (d << 12 | d >>> 20) + a | 0;
 | |
|         c += (d & a | ~d & b) + k[10] - 42063 | 0;
 | |
|         c  = (c << 17 | c >>> 15) + d | 0;
 | |
|         b += (c & d | ~c & a) + k[11] - 1990404162 | 0;
 | |
|         b  = (b << 22 | b >>> 10) + c | 0;
 | |
|         a += (b & c | ~b & d) + k[12] + 1804603682 | 0;
 | |
|         a  = (a << 7 | a >>> 25) + b | 0;
 | |
|         d += (a & b | ~a & c) + k[13] - 40341101 | 0;
 | |
|         d  = (d << 12 | d >>> 20) + a | 0;
 | |
|         c += (d & a | ~d & b) + k[14] - 1502002290 | 0;
 | |
|         c  = (c << 17 | c >>> 15) + d | 0;
 | |
|         b += (c & d | ~c & a) + k[15] + 1236535329 | 0;
 | |
|         b  = (b << 22 | b >>> 10) + c | 0;
 | |
| 
 | |
|         a += (b & d | c & ~d) + k[1] - 165796510 | 0;
 | |
|         a  = (a << 5 | a >>> 27) + b | 0;
 | |
|         d += (a & c | b & ~c) + k[6] - 1069501632 | 0;
 | |
|         d  = (d << 9 | d >>> 23) + a | 0;
 | |
|         c += (d & b | a & ~b) + k[11] + 643717713 | 0;
 | |
|         c  = (c << 14 | c >>> 18) + d | 0;
 | |
|         b += (c & a | d & ~a) + k[0] - 373897302 | 0;
 | |
|         b  = (b << 20 | b >>> 12) + c | 0;
 | |
|         a += (b & d | c & ~d) + k[5] - 701558691 | 0;
 | |
|         a  = (a << 5 | a >>> 27) + b | 0;
 | |
|         d += (a & c | b & ~c) + k[10] + 38016083 | 0;
 | |
|         d  = (d << 9 | d >>> 23) + a | 0;
 | |
|         c += (d & b | a & ~b) + k[15] - 660478335 | 0;
 | |
|         c  = (c << 14 | c >>> 18) + d | 0;
 | |
|         b += (c & a | d & ~a) + k[4] - 405537848 | 0;
 | |
|         b  = (b << 20 | b >>> 12) + c | 0;
 | |
|         a += (b & d | c & ~d) + k[9] + 568446438 | 0;
 | |
|         a  = (a << 5 | a >>> 27) + b | 0;
 | |
|         d += (a & c | b & ~c) + k[14] - 1019803690 | 0;
 | |
|         d  = (d << 9 | d >>> 23) + a | 0;
 | |
|         c += (d & b | a & ~b) + k[3] - 187363961 | 0;
 | |
|         c  = (c << 14 | c >>> 18) + d | 0;
 | |
|         b += (c & a | d & ~a) + k[8] + 1163531501 | 0;
 | |
|         b  = (b << 20 | b >>> 12) + c | 0;
 | |
|         a += (b & d | c & ~d) + k[13] - 1444681467 | 0;
 | |
|         a  = (a << 5 | a >>> 27) + b | 0;
 | |
|         d += (a & c | b & ~c) + k[2] - 51403784 | 0;
 | |
|         d  = (d << 9 | d >>> 23) + a | 0;
 | |
|         c += (d & b | a & ~b) + k[7] + 1735328473 | 0;
 | |
|         c  = (c << 14 | c >>> 18) + d | 0;
 | |
|         b += (c & a | d & ~a) + k[12] - 1926607734 | 0;
 | |
|         b  = (b << 20 | b >>> 12) + c | 0;
 | |
| 
 | |
|         a += (b ^ c ^ d) + k[5] - 378558 | 0;
 | |
|         a  = (a << 4 | a >>> 28) + b | 0;
 | |
|         d += (a ^ b ^ c) + k[8] - 2022574463 | 0;
 | |
|         d  = (d << 11 | d >>> 21) + a | 0;
 | |
|         c += (d ^ a ^ b) + k[11] + 1839030562 | 0;
 | |
|         c  = (c << 16 | c >>> 16) + d | 0;
 | |
|         b += (c ^ d ^ a) + k[14] - 35309556 | 0;
 | |
|         b  = (b << 23 | b >>> 9) + c | 0;
 | |
|         a += (b ^ c ^ d) + k[1] - 1530992060 | 0;
 | |
|         a  = (a << 4 | a >>> 28) + b | 0;
 | |
|         d += (a ^ b ^ c) + k[4] + 1272893353 | 0;
 | |
|         d  = (d << 11 | d >>> 21) + a | 0;
 | |
|         c += (d ^ a ^ b) + k[7] - 155497632 | 0;
 | |
|         c  = (c << 16 | c >>> 16) + d | 0;
 | |
|         b += (c ^ d ^ a) + k[10] - 1094730640 | 0;
 | |
|         b  = (b << 23 | b >>> 9) + c | 0;
 | |
|         a += (b ^ c ^ d) + k[13] + 681279174 | 0;
 | |
|         a  = (a << 4 | a >>> 28) + b | 0;
 | |
|         d += (a ^ b ^ c) + k[0] - 358537222 | 0;
 | |
|         d  = (d << 11 | d >>> 21) + a | 0;
 | |
|         c += (d ^ a ^ b) + k[3] - 722521979 | 0;
 | |
|         c  = (c << 16 | c >>> 16) + d | 0;
 | |
|         b += (c ^ d ^ a) + k[6] + 76029189 | 0;
 | |
|         b  = (b << 23 | b >>> 9) + c | 0;
 | |
|         a += (b ^ c ^ d) + k[9] - 640364487 | 0;
 | |
|         a  = (a << 4 | a >>> 28) + b | 0;
 | |
|         d += (a ^ b ^ c) + k[12] - 421815835 | 0;
 | |
|         d  = (d << 11 | d >>> 21) + a | 0;
 | |
|         c += (d ^ a ^ b) + k[15] + 530742520 | 0;
 | |
|         c  = (c << 16 | c >>> 16) + d | 0;
 | |
|         b += (c ^ d ^ a) + k[2] - 995338651 | 0;
 | |
|         b  = (b << 23 | b >>> 9) + c | 0;
 | |
| 
 | |
|         a += (c ^ (b | ~d)) + k[0] - 198630844 | 0;
 | |
|         a  = (a << 6 | a >>> 26) + b | 0;
 | |
|         d += (b ^ (a | ~c)) + k[7] + 1126891415 | 0;
 | |
|         d  = (d << 10 | d >>> 22) + a | 0;
 | |
|         c += (a ^ (d | ~b)) + k[14] - 1416354905 | 0;
 | |
|         c  = (c << 15 | c >>> 17) + d | 0;
 | |
|         b += (d ^ (c | ~a)) + k[5] - 57434055 | 0;
 | |
|         b  = (b << 21 |b >>> 11) + c | 0;
 | |
|         a += (c ^ (b | ~d)) + k[12] + 1700485571 | 0;
 | |
|         a  = (a << 6 | a >>> 26) + b | 0;
 | |
|         d += (b ^ (a | ~c)) + k[3] - 1894986606 | 0;
 | |
|         d  = (d << 10 | d >>> 22) + a | 0;
 | |
|         c += (a ^ (d | ~b)) + k[10] - 1051523 | 0;
 | |
|         c  = (c << 15 | c >>> 17) + d | 0;
 | |
|         b += (d ^ (c | ~a)) + k[1] - 2054922799 | 0;
 | |
|         b  = (b << 21 |b >>> 11) + c | 0;
 | |
|         a += (c ^ (b | ~d)) + k[8] + 1873313359 | 0;
 | |
|         a  = (a << 6 | a >>> 26) + b | 0;
 | |
|         d += (b ^ (a | ~c)) + k[15] - 30611744 | 0;
 | |
|         d  = (d << 10 | d >>> 22) + a | 0;
 | |
|         c += (a ^ (d | ~b)) + k[6] - 1560198380 | 0;
 | |
|         c  = (c << 15 | c >>> 17) + d | 0;
 | |
|         b += (d ^ (c | ~a)) + k[13] + 1309151649 | 0;
 | |
|         b  = (b << 21 |b >>> 11) + c | 0;
 | |
|         a += (c ^ (b | ~d)) + k[4] - 145523070 | 0;
 | |
|         a  = (a << 6 | a >>> 26) + b | 0;
 | |
|         d += (b ^ (a | ~c)) + k[11] - 1120210379 | 0;
 | |
|         d  = (d << 10 | d >>> 22) + a | 0;
 | |
|         c += (a ^ (d | ~b)) + k[2] + 718787259 | 0;
 | |
|         c  = (c << 15 | c >>> 17) + d | 0;
 | |
|         b += (d ^ (c | ~a)) + k[9] - 343485551 | 0;
 | |
|         b  = (b << 21 | b >>> 11) + c | 0;
 | |
| 
 | |
|         x[0] = a + x[0] | 0;
 | |
|         x[1] = b + x[1] | 0;
 | |
|         x[2] = c + x[2] | 0;
 | |
|         x[3] = d + x[3] | 0;
 | |
|     }
 | |
| 
 | |
|     function md5blk(s) {
 | |
|         var md5blks = [],
 | |
|             i; /* Andy King said do it this way. */
 | |
| 
 | |
|         for (i = 0; i < 64; i += 4) {
 | |
|             md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24);
 | |
|         }
 | |
|         return md5blks;
 | |
|     }
 | |
| 
 | |
|     function md5blk_array(a) {
 | |
|         var md5blks = [],
 | |
|             i; /* Andy King said do it this way. */
 | |
| 
 | |
|         for (i = 0; i < 64; i += 4) {
 | |
|             md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24);
 | |
|         }
 | |
|         return md5blks;
 | |
|     }
 | |
| 
 | |
|     function md51(s) {
 | |
|         var n = s.length,
 | |
|             state = [1732584193, -271733879, -1732584194, 271733878],
 | |
|             i,
 | |
|             length,
 | |
|             tail,
 | |
|             tmp,
 | |
|             lo,
 | |
|             hi;
 | |
| 
 | |
|         for (i = 64; i <= n; i += 64) {
 | |
|             md5cycle(state, md5blk(s.substring(i - 64, i)));
 | |
|         }
 | |
|         s = s.substring(i - 64);
 | |
|         length = s.length;
 | |
|         tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
 | |
|         for (i = 0; i < length; i += 1) {
 | |
|             tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3);
 | |
|         }
 | |
|         tail[i >> 2] |= 0x80 << ((i % 4) << 3);
 | |
|         if (i > 55) {
 | |
|             md5cycle(state, tail);
 | |
|             for (i = 0; i < 16; i += 1) {
 | |
|                 tail[i] = 0;
 | |
|             }
 | |
|         }
 | |
| 
 | |
|         // Beware that the final length might not fit in 32 bits so we take care of that
 | |
|         tmp = n * 8;
 | |
|         tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
 | |
|         lo = parseInt(tmp[2], 16);
 | |
|         hi = parseInt(tmp[1], 16) || 0;
 | |
| 
 | |
|         tail[14] = lo;
 | |
|         tail[15] = hi;
 | |
| 
 | |
|         md5cycle(state, tail);
 | |
|         return state;
 | |
|     }
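    /*
     * Worked example (illustrative note, not part of the library): MD5 appends the
     * message length in bits as a 64-bit little-endian value (tail[14] = low word,
     * tail[15] = high word). For a 300 MB string, n = 314572800 bytes, so
     * n * 8 = 2516582400 bits, which still fits in 32 bits (lo = 2516582400, hi = 0);
     * only inputs of 512 MiB and up produce a non-zero high word. The toString(16) +
     * regex split above extracts the two 32-bit halves without bitwise operators,
     * which in JavaScript would truncate the value to 32 bits.
     */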
 | |
| 
 | |
|     function md51_array(a) {
 | |
|         var n = a.length,
 | |
|             state = [1732584193, -271733879, -1732584194, 271733878],
 | |
|             i,
 | |
|             length,
 | |
|             tail,
 | |
|             tmp,
 | |
|             lo,
 | |
|             hi;
 | |
| 
 | |
|         for (i = 64; i <= n; i += 64) {
 | |
|             md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
 | |
|         }
 | |
| 
 | |
|         // Not sure if it is a bug, however IE10 will always produce a sub array of length 1
 | |
|         // containing the last element of the parent array if the sub array specified starts
 | |
|         // beyond the length of the parent array - weird.
 | |
|         // https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue
 | |
|         a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0);
 | |
| 
 | |
|         length = a.length;
 | |
|         tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
 | |
|         for (i = 0; i < length; i += 1) {
 | |
|             tail[i >> 2] |= a[i] << ((i % 4) << 3);
 | |
|         }
 | |
| 
 | |
|         tail[i >> 2] |= 0x80 << ((i % 4) << 3);
 | |
|         if (i > 55) {
 | |
|             md5cycle(state, tail);
 | |
|             for (i = 0; i < 16; i += 1) {
 | |
|                 tail[i] = 0;
 | |
|             }
 | |
|         }
 | |
| 
 | |
|         // Beware that the final length might not fit in 32 bits so we take care of that
 | |
|         tmp = n * 8;
 | |
|         tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
 | |
|         lo = parseInt(tmp[2], 16);
 | |
|         hi = parseInt(tmp[1], 16) || 0;
 | |
| 
 | |
|         tail[14] = lo;
 | |
|         tail[15] = hi;
 | |
| 
 | |
|         md5cycle(state, tail);
 | |
| 
 | |
|         return state;
 | |
|     }
 | |
| 
 | |
|     function rhex(n) {
 | |
|         var s = '',
 | |
|             j;
 | |
|         for (j = 0; j < 4; j += 1) {
 | |
|             s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F];
 | |
|         }
 | |
|         return s;
 | |
|     }
 | |
| 
 | |
|     function hex(x) {
 | |
|         var i;
 | |
|         for (i = 0; i < x.length; i += 1) {
 | |
|             x[i] = rhex(x[i]);
 | |
|         }
 | |
|         return x.join('');
 | |
|     }
 | |
| 
 | |
|     // In some cases the fast add32 function cannot be used..
 | |
|     if (hex(md51('hello')) !== '5d41402abc4b2a76b9719d911017c592') {
 | |
|         add32 = function (x, y) {
 | |
|             var lsw = (x & 0xFFFF) + (y & 0xFFFF),
 | |
|                 msw = (x >> 16) + (y >> 16) + (lsw >> 16);
 | |
|             return (msw << 16) | (lsw & 0xFFFF);
 | |
|         };
 | |
|     }
 | |
| 
 | |
|     // ---------------------------------------------------
 | |
| 
 | |
|     /**
 | |
|      * ArrayBuffer slice polyfill.
 | |
|      *
 | |
|      * @see https://github.com/ttaubert/node-arraybuffer-slice
 | |
|      */
 | |
| 
 | |
|     if (typeof ArrayBuffer !== 'undefined' && !ArrayBuffer.prototype.slice) {
 | |
|         (function () {
 | |
|             function clamp(val, length) {
 | |
|                 val = (val | 0) || 0;
 | |
| 
 | |
|                 if (val < 0) {
 | |
|                     return Math.max(val + length, 0);
 | |
|                 }
 | |
| 
 | |
|                 return Math.min(val, length);
 | |
|             }
 | |
| 
 | |
|             ArrayBuffer.prototype.slice = function (from, to) {
 | |
|                 var length = this.byteLength,
 | |
|                     begin = clamp(from, length),
 | |
|                     end = length,
 | |
|                     num,
 | |
|                     target,
 | |
|                     targetArray,
 | |
|                     sourceArray;
 | |
| 
 | |
|                 if (to !== undefined) {
 | |
|                     end = clamp(to, length);
 | |
|                 }
 | |
| 
 | |
|                 if (begin > end) {
 | |
|                     return new ArrayBuffer(0);
 | |
|                 }
 | |
| 
 | |
|                 num = end - begin;
 | |
|                 target = new ArrayBuffer(num);
 | |
|                 targetArray = new Uint8Array(target);
 | |
| 
 | |
|                 sourceArray = new Uint8Array(this, begin, num);
 | |
|                 targetArray.set(sourceArray);
 | |
| 
 | |
|                 return target;
 | |
|             };
 | |
|         })();
 | |
|     }
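    /*
     * Illustration (not part of the library): with the polyfill installed,
     * ArrayBuffer.prototype.slice behaves like the standard method, including
     * clamping of negative indices:
     *
     *   var buf = new ArrayBuffer(8);
     *   buf.slice(2, 6).byteLength;   // 4
     *   buf.slice(-4).byteLength;     // 4 (negative `from` counts from the end)
     */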
 | |
| 
 | |
|     // ---------------------------------------------------
 | |
| 
 | |
|     /**
 | |
|      * Helpers.
 | |
|      */
 | |
| 
 | |
|     function toUtf8(str) {
 | |
|         if (/[\u0080-\uFFFF]/.test(str)) {
 | |
|             str = unescape(encodeURIComponent(str));
 | |
|         }
 | |
| 
 | |
|         return str;
 | |
|     }
 | |
| 
 | |
|     function utf8Str2ArrayBuffer(str, returnUInt8Array) {
 | |
|         var length = str.length,
 | |
|            buff = new ArrayBuffer(length),
 | |
|            arr = new Uint8Array(buff),
 | |
|            i;
 | |
| 
 | |
|         for (i = 0; i < length; i += 1) {
 | |
|             arr[i] = str.charCodeAt(i);
 | |
|         }
 | |
| 
 | |
|         return returnUInt8Array ? arr : buff;
 | |
|     }
 | |
| 
 | |
|     function arrayBuffer2Utf8Str(buff) {
 | |
|         return String.fromCharCode.apply(null, new Uint8Array(buff));
 | |
|     }
 | |
| 
 | |
|     function concatenateArrayBuffers(first, second, returnUInt8Array) {
 | |
|         var result = new Uint8Array(first.byteLength + second.byteLength);
 | |
| 
 | |
|         result.set(new Uint8Array(first));
 | |
|         result.set(new Uint8Array(second), first.byteLength);
 | |
| 
 | |
|         return returnUInt8Array ? result : result.buffer;
 | |
|     }
 | |
| 
 | |
|     function hexToBinaryString(hex) {
 | |
|         var bytes = [],
 | |
|             length = hex.length,
 | |
|             x;
 | |
| 
 | |
|         for (x = 0; x < length - 1; x += 2) {
 | |
|             bytes.push(parseInt(hex.substr(x, 2), 16));
 | |
|         }
 | |
| 
 | |
|         return String.fromCharCode.apply(String, bytes);
 | |
|     }
 | |
| 
 | |
|     // ---------------------------------------------------
 | |
| 
 | |
|     /**
 | |
|      * SparkMD5 OOP implementation.
 | |
|      *
 | |
|      * Use this class to perform an incremental md5, otherwise use the
 | |
|      * static methods instead.
 | |
|      */
 | |
| 
 | |
|     function SparkMD5() {
 | |
|         // call reset to init the instance
 | |
|         this.reset();
 | |
|     }
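    /*
     * Usage sketch (illustrative, not part of the library source): incremental
     * hashing with the instance API defined below, next to the one-shot static form.
     *
     *   var spark = new SparkMD5();
     *   spark.append('Hi');
     *   spark.append(' there');
     *   var hexHash = spark.end();      // md5 of 'Hi there' as a hex string
     *
     *   // one-shot equivalent:
     *   SparkMD5.hash('Hi there');
     */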
 | |
| 
 | |
|     /**
 | |
|      * Appends a string.
 | |
|      * A conversion will be applied if an utf8 string is detected.
 | |
|      *
 | |
|      * @param {String} str The string to be appended
 | |
|      *
 | |
|      * @return {SparkMD5} The instance itself
 | |
|      */
 | |
|     SparkMD5.prototype.append = function (str) {
 | |
|         // Converts the string to utf8 bytes if necessary
 | |
|         // Then append as binary
 | |
|         this.appendBinary(toUtf8(str));
 | |
| 
 | |
|         return this;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Appends a binary string.
 | |
|      *
 | |
|      * @param {String} contents The binary string to be appended
 | |
|      *
 | |
|      * @return {SparkMD5} The instance itself
 | |
|      */
 | |
|     SparkMD5.prototype.appendBinary = function (contents) {
 | |
|         this._buff += contents;
 | |
|         this._length += contents.length;
 | |
| 
 | |
|         var length = this._buff.length,
 | |
|             i;
 | |
| 
 | |
|         for (i = 64; i <= length; i += 64) {
 | |
|             md5cycle(this._hash, md5blk(this._buff.substring(i - 64, i)));
 | |
|         }
 | |
| 
 | |
|         this._buff = this._buff.substring(i - 64);
 | |
| 
 | |
|         return this;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Finishes the incremental computation, reseting the internal state and
 | |
|      * returning the result.
 | |
|      *
 | |
|      * @param {Boolean} raw True to get the raw string, false to get the hex string
 | |
|      *
 | |
|      * @return {String} The result
 | |
|      */
 | |
|     SparkMD5.prototype.end = function (raw) {
 | |
|         var buff = this._buff,
 | |
|             length = buff.length,
 | |
|             i,
 | |
|             tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
 | |
|             ret;
 | |
| 
 | |
|         for (i = 0; i < length; i += 1) {
 | |
|             tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3);
 | |
|         }
 | |
| 
 | |
|         this._finish(tail, length);
 | |
|         ret = hex(this._hash);
 | |
| 
 | |
|         if (raw) {
 | |
|             ret = hexToBinaryString(ret);
 | |
|         }
 | |
| 
 | |
|         this.reset();
 | |
| 
 | |
|         return ret;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Resets the internal state of the computation.
 | |
|      *
 | |
|      * @return {SparkMD5} The instance itself
 | |
|      */
 | |
|     SparkMD5.prototype.reset = function () {
 | |
|         this._buff = '';
 | |
|         this._length = 0;
 | |
|         this._hash = [1732584193, -271733879, -1732584194, 271733878];
 | |
| 
 | |
|         return this;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Gets the internal state of the computation.
 | |
|      *
 | |
|      * @return {Object} The state
 | |
|      */
 | |
|     SparkMD5.prototype.getState = function () {
 | |
|         return {
 | |
|             buff: this._buff,
 | |
|             length: this._length,
 | |
|             hash: this._hash.slice()
 | |
|         };
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Gets the internal state of the computation.
 | |
|      *
 | |
|      * @param {Object} state The state
 | |
|      *
 | |
|      * @return {SparkMD5} The instance itself
 | |
|      */
 | |
|     SparkMD5.prototype.setState = function (state) {
 | |
|         this._buff = state.buff;
 | |
|         this._length = state.length;
 | |
|         this._hash = state.hash;
 | |
| 
 | |
|         return this;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Releases memory used by the incremental buffer and other additional
 | |
|      * resources. If you plan to use the instance again, use reset instead.
 | |
|      */
 | |
|     SparkMD5.prototype.destroy = function () {
 | |
|         delete this._hash;
 | |
|         delete this._buff;
 | |
|         delete this._length;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Finish the final calculation based on the tail.
 | |
|      *
 | |
|      * @param {Array}  tail   The tail (will be modified)
 | |
|      * @param {Number} length The length of the remaining buffer
 | |
|      */
 | |
|     SparkMD5.prototype._finish = function (tail, length) {
 | |
|         var i = length,
 | |
|             tmp,
 | |
|             lo,
 | |
|             hi;
 | |
| 
 | |
|         tail[i >> 2] |= 0x80 << ((i % 4) << 3);
 | |
|         if (i > 55) {
 | |
|             md5cycle(this._hash, tail);
 | |
|             for (i = 0; i < 16; i += 1) {
 | |
|                 tail[i] = 0;
 | |
|             }
 | |
|         }
 | |
| 
 | |
|         // Do the final computation based on the tail and length
 | |
|         // Beware that the final length may not fit in 32 bits so we take care of that
 | |
|         tmp = this._length * 8;
 | |
|         tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
 | |
|         lo = parseInt(tmp[2], 16);
 | |
|         hi = parseInt(tmp[1], 16) || 0;
 | |
| 
 | |
|         tail[14] = lo;
 | |
|         tail[15] = hi;
 | |
|         md5cycle(this._hash, tail);
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Performs the md5 hash on a string.
 | |
|      * A conversion will be applied if utf8 string is detected.
 | |
|      *
 | |
|      * @param {String}  str The string
 | |
|      * @param {Boolean} [raw] True to get the raw string, false to get the hex string
 | |
|      *
 | |
|      * @return {String} The result
 | |
|      */
 | |
|     SparkMD5.hash = function (str, raw) {
 | |
|         // Converts the string to utf8 bytes if necessary
 | |
|         // Then compute it using the binary function
 | |
|         return SparkMD5.hashBinary(toUtf8(str), raw);
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Performs the md5 hash on a binary string.
 | |
|      *
 | |
|      * @param {String}  content The binary string
 | |
|      * @param {Boolean} [raw]     True to get the raw string, false to get the hex string
 | |
|      *
 | |
|      * @return {String} The result
 | |
|      */
 | |
|     SparkMD5.hashBinary = function (content, raw) {
 | |
|         var hash = md51(content),
 | |
|             ret = hex(hash);
 | |
| 
 | |
|         return raw ? hexToBinaryString(ret) : ret;
 | |
|     };
 | |
| 
 | |
|     // ---------------------------------------------------
 | |
| 
 | |
|     /**
 | |
|      * SparkMD5 OOP implementation for array buffers.
 | |
|      *
 | |
|      * Use this class to perform an incremental md5 ONLY for array buffers.
 | |
|      */
 | |
|     SparkMD5.ArrayBuffer = function () {
 | |
|         // call reset to init the instance
 | |
|         this.reset();
 | |
|     };
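    /*
     * Usage sketch (illustrative): the ArrayBuffer variant accepts binary chunks
     * (e.g. slices of a File read as ArrayBuffers) through the same append/end
     * cycle. `chunk1` and `chunk2` are hypothetical ArrayBuffers.
     *
     *   var spark = new SparkMD5.ArrayBuffer();
     *   spark.append(chunk1);
     *   spark.append(chunk2);
     *   var hexHash = spark.end();
     *
     *   // one-shot form:
     *   SparkMD5.ArrayBuffer.hash(chunk1);
     */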
 | |
| 
 | |
|     /**
 | |
|      * Appends an array buffer.
 | |
|      *
 | |
|      * @param {ArrayBuffer} arr The array to be appended
 | |
|      *
 | |
|      * @return {SparkMD5.ArrayBuffer} The instance itself
 | |
|      */
 | |
|     SparkMD5.ArrayBuffer.prototype.append = function (arr) {
 | |
|         var buff = concatenateArrayBuffers(this._buff.buffer, arr, true),
 | |
|             length = buff.length,
 | |
|             i;
 | |
| 
 | |
|         this._length += arr.byteLength;
 | |
| 
 | |
|         for (i = 64; i <= length; i += 64) {
 | |
|             md5cycle(this._hash, md5blk_array(buff.subarray(i - 64, i)));
 | |
|         }
 | |
| 
 | |
|         this._buff = (i - 64) < length ? new Uint8Array(buff.buffer.slice(i - 64)) : new Uint8Array(0);
 | |
| 
 | |
|         return this;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Finishes the incremental computation, reseting the internal state and
 | |
|      * returning the result.
 | |
|      *
 | |
|      * @param {Boolean} raw True to get the raw string, false to get the hex string
 | |
|      *
 | |
|      * @return {String} The result
 | |
|      */
 | |
|     SparkMD5.ArrayBuffer.prototype.end = function (raw) {
 | |
|         var buff = this._buff,
 | |
|             length = buff.length,
 | |
|             tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
 | |
|             i,
 | |
|             ret;
 | |
| 
 | |
|         for (i = 0; i < length; i += 1) {
 | |
|             tail[i >> 2] |= buff[i] << ((i % 4) << 3);
 | |
|         }
 | |
| 
 | |
|         this._finish(tail, length);
 | |
|         ret = hex(this._hash);
 | |
| 
 | |
|         if (raw) {
 | |
|             ret = hexToBinaryString(ret);
 | |
|         }
 | |
| 
 | |
|         this.reset();
 | |
| 
 | |
|         return ret;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Resets the internal state of the computation.
 | |
|      *
 | |
|      * @return {SparkMD5.ArrayBuffer} The instance itself
 | |
|      */
 | |
|     SparkMD5.ArrayBuffer.prototype.reset = function () {
 | |
|         this._buff = new Uint8Array(0);
 | |
|         this._length = 0;
 | |
|         this._hash = [1732584193, -271733879, -1732584194, 271733878];
 | |
| 
 | |
|         return this;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Gets the internal state of the computation.
 | |
|      *
 | |
|      * @return {Object} The state
 | |
|      */
 | |
|     SparkMD5.ArrayBuffer.prototype.getState = function () {
 | |
|         var state = SparkMD5.prototype.getState.call(this);
 | |
| 
 | |
|         // Convert buffer to a string
 | |
|         state.buff = arrayBuffer2Utf8Str(state.buff);
 | |
| 
 | |
|         return state;
 | |
|     };
 | |
| 
 | |
|     /**
 | |
|      * Gets the internal state of the computation.
 | |
|      *
 | |
|      * @param {Object} state The state
 | |
|      *
 | |
|      * @return {SparkMD5.ArrayBuffer} The instance itself
 | |
|      */
 | |
|     SparkMD5.ArrayBuffer.prototype.setState = function (state) {
 | |
|         // Convert string to buffer
 | |
|         state.buff = utf8Str2ArrayBuffer(state.buff, true);
 | |
| 
 | |
|         return SparkMD5.prototype.setState.call(this, state);
 | |
|     };
 | |
| 
 | |
|     SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy;
 | |
| 
 | |
|     SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish;
 | |
| 
 | |
|     /**
 | |
|      * Performs the md5 hash on an array buffer.
 | |
|      *
 | |
|      * @param {ArrayBuffer} arr The array buffer
 | |
|      * @param {Boolean}     [raw] True to get the raw string, false to get the hex one
 | |
|      *
 | |
|      * @return {String} The result
 | |
|      */
 | |
|     SparkMD5.ArrayBuffer.hash = function (arr, raw) {
 | |
|         var hash = md51_array(new Uint8Array(arr)),
 | |
|             ret = hex(hash);
 | |
| 
 | |
|         return raw ? hexToBinaryString(ret) : ret;
 | |
|     };
 | |
| 
 | |
|     return SparkMD5;
 | |
| }));
 | |
| 
 | |
| },{}],12:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| Object.defineProperty(exports, "v1", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _v.default;
 | |
|   }
 | |
| });
 | |
| Object.defineProperty(exports, "v3", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _v2.default;
 | |
|   }
 | |
| });
 | |
| Object.defineProperty(exports, "v4", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _v3.default;
 | |
|   }
 | |
| });
 | |
| Object.defineProperty(exports, "v5", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _v4.default;
 | |
|   }
 | |
| });
 | |
| Object.defineProperty(exports, "NIL", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _nil.default;
 | |
|   }
 | |
| });
 | |
| Object.defineProperty(exports, "version", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _version.default;
 | |
|   }
 | |
| });
 | |
| Object.defineProperty(exports, "validate", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _validate.default;
 | |
|   }
 | |
| });
 | |
| Object.defineProperty(exports, "stringify", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _stringify.default;
 | |
|   }
 | |
| });
 | |
| Object.defineProperty(exports, "parse", {
 | |
|   enumerable: true,
 | |
|   get: function () {
 | |
|     return _parse.default;
 | |
|   }
 | |
| });
 | |
| 
 | |
| var _v = _interopRequireDefault(_dereq_(20));
 | |
| 
 | |
| var _v2 = _interopRequireDefault(_dereq_(21));
 | |
| 
 | |
| var _v3 = _interopRequireDefault(_dereq_(23));
 | |
| 
 | |
| var _v4 = _interopRequireDefault(_dereq_(24));
 | |
| 
 | |
| var _nil = _interopRequireDefault(_dereq_(14));
 | |
| 
 | |
| var _version = _interopRequireDefault(_dereq_(26));
 | |
| 
 | |
| var _validate = _interopRequireDefault(_dereq_(25));
 | |
| 
 | |
| var _stringify = _interopRequireDefault(_dereq_(19));
 | |
| 
 | |
| var _parse = _interopRequireDefault(_dereq_(15));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| },{"14":14,"15":15,"19":19,"20":20,"21":21,"23":23,"24":24,"25":25,"26":26}],13:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| /*
 | |
|  * Browser-compatible JavaScript MD5
 | |
|  *
 | |
|  * Modification of JavaScript MD5
 | |
|  * https://github.com/blueimp/JavaScript-MD5
 | |
|  *
 | |
|  * Copyright 2011, Sebastian Tschan
 | |
|  * https://blueimp.net
 | |
|  *
 | |
|  * Licensed under the MIT license:
 | |
|  * https://opensource.org/licenses/MIT
 | |
|  *
 | |
|  * Based on
 | |
|  * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
 | |
|  * Digest Algorithm, as defined in RFC 1321.
 | |
|  * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
 | |
|  * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
 | |
|  * Distributed under the BSD License
 | |
|  * See http://pajhome.org.uk/crypt/md5 for more info.
 | |
|  */
 | |
| function md5(bytes) {
 | |
|   if (typeof bytes === 'string') {
 | |
|     const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
 | |
| 
 | |
|     bytes = new Uint8Array(msg.length);
 | |
| 
 | |
|     for (let i = 0; i < msg.length; ++i) {
 | |
|       bytes[i] = msg.charCodeAt(i);
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8));
 | |
| }
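// Note (comment only): despite md5ToHexEncodedArray's name just below, md5()
// here resolves to an array of 16 byte values rather than a hex string; the
// name-based UUID generators hash namespace + name bytes with it and format
// the result themselves. For example, md5('abc') yields
// [0x90, 0x01, 0x50, 0x98, ...] (the RFC 1321 test vector 900150...).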
 | |
| /*
 | |
|  * Convert an array of little-endian words to an array of bytes
 | |
|  */
 | |
| 
 | |
| 
 | |
| function md5ToHexEncodedArray(input) {
 | |
|   const output = [];
 | |
|   const length32 = input.length * 32;
 | |
|   const hexTab = '0123456789abcdef';
 | |
| 
 | |
|   for (let i = 0; i < length32; i += 8) {
 | |
|     const x = input[i >> 5] >>> i % 32 & 0xff;
 | |
|     const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16);
 | |
|     output.push(hex);
 | |
|   }
 | |
| 
 | |
|   return output;
 | |
| }
 | |
| /**
 | |
|  * Calculate output length with padding and bit length
 | |
|  */
 | |
| 
 | |
| 
 | |
| function getOutputLength(inputLength8) {
 | |
|   return (inputLength8 + 64 >>> 9 << 4) + 14 + 1;
 | |
| }
 | |
| /*
 | |
|  * Calculate the MD5 of an array of little-endian words, and a bit length.
 | |
|  */
 | |
| 
 | |
| 
 | |
| function wordsToMd5(x, len) {
 | |
|   /* append padding */
 | |
|   x[len >> 5] |= 0x80 << len % 32;
 | |
|   x[getOutputLength(len) - 1] = len;
 | |
|   let a = 1732584193;
 | |
|   let b = -271733879;
 | |
|   let c = -1732584194;
 | |
|   let d = 271733878;
 | |
| 
 | |
|   for (let i = 0; i < x.length; i += 16) {
 | |
|     const olda = a;
 | |
|     const oldb = b;
 | |
|     const oldc = c;
 | |
|     const oldd = d;
 | |
|     a = md5ff(a, b, c, d, x[i], 7, -680876936);
 | |
|     d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
 | |
|     c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
 | |
|     b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
 | |
|     a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
 | |
|     d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
 | |
|     c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
 | |
|     b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
 | |
|     a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
 | |
|     d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
 | |
|     c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
 | |
|     b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
 | |
|     a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
 | |
|     d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
 | |
|     c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
 | |
|     b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
 | |
|     a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
 | |
|     d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
 | |
|     c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
 | |
|     b = md5gg(b, c, d, a, x[i], 20, -373897302);
 | |
|     a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
 | |
|     d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
 | |
|     c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
 | |
|     b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
 | |
|     a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
 | |
|     d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
 | |
|     c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
 | |
|     b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
 | |
|     a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
 | |
|     d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
 | |
|     c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
 | |
|     b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
 | |
|     a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
 | |
|     d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
 | |
|     c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
 | |
|     b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
 | |
|     a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
 | |
|     d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
 | |
|     c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
 | |
|     b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
 | |
|     a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
 | |
|     d = md5hh(d, a, b, c, x[i], 11, -358537222);
 | |
|     c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
 | |
|     b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
 | |
|     a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
 | |
|     d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
 | |
|     c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
 | |
|     b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
 | |
|     a = md5ii(a, b, c, d, x[i], 6, -198630844);
 | |
|     d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
 | |
|     c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
 | |
|     b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
 | |
|     a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
 | |
|     d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
 | |
|     c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
 | |
|     b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
 | |
|     a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
 | |
|     d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
 | |
|     c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
 | |
|     b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
 | |
|     a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
 | |
|     d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
 | |
|     c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
 | |
|     b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
 | |
|     a = safeAdd(a, olda);
 | |
|     b = safeAdd(b, oldb);
 | |
|     c = safeAdd(c, oldc);
 | |
|     d = safeAdd(d, oldd);
 | |
|   }
 | |
| 
 | |
|   return [a, b, c, d];
 | |
| }
 | |
| /*
 | |
 * Convert an array of bytes to an array of little-endian words
 | |
|  * Characters >255 have their high-byte silently ignored.
 | |
|  */
 | |
| 
 | |
| 
 | |
| function bytesToWords(input) {
 | |
|   if (input.length === 0) {
 | |
|     return [];
 | |
|   }
 | |
| 
 | |
|   const length8 = input.length * 8;
 | |
|   const output = new Uint32Array(getOutputLength(length8));
 | |
| 
 | |
|   for (let i = 0; i < length8; i += 8) {
 | |
|     output[i >> 5] |= (input[i / 8] & 0xff) << i % 32;
 | |
|   }
 | |
| 
 | |
|   return output;
 | |
| }
 | |
| /*
 | |
|  * Add integers, wrapping at 2^32. This uses 16-bit operations internally
 | |
|  * to work around bugs in some JS interpreters.
 | |
|  */
 | |
| 
 | |
| 
 | |
| function safeAdd(x, y) {
 | |
|   const lsw = (x & 0xffff) + (y & 0xffff);
 | |
|   const msw = (x >> 16) + (y >> 16) + (lsw >> 16);
 | |
|   return msw << 16 | lsw & 0xffff;
 | |
| }
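// Worked example (comment only): safeAdd() emulates 32-bit wrap-around
// addition, so safeAdd(0xffffffff, 1) === 0, and results can come back as
// signed 32-bit values (e.g. safeAdd(0x7fffffff, 1) === -2147483648).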
 | |
| /*
 | |
|  * Bitwise rotate a 32-bit number to the left.
 | |
|  */
 | |
| 
 | |
| 
 | |
| function bitRotateLeft(num, cnt) {
 | |
|   return num << cnt | num >>> 32 - cnt;
 | |
| }
 | |
| /*
 | |
|  * These functions implement the four basic operations the algorithm uses.
 | |
|  */
 | |
| 
 | |
| 
 | |
| function md5cmn(q, a, b, x, s, t) {
 | |
|   return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b);
 | |
| }
 | |
| 
 | |
| function md5ff(a, b, c, d, x, s, t) {
 | |
|   return md5cmn(b & c | ~b & d, a, b, x, s, t);
 | |
| }
 | |
| 
 | |
| function md5gg(a, b, c, d, x, s, t) {
 | |
|   return md5cmn(b & d | c & ~d, a, b, x, s, t);
 | |
| }
 | |
| 
 | |
| function md5hh(a, b, c, d, x, s, t) {
 | |
|   return md5cmn(b ^ c ^ d, a, b, x, s, t);
 | |
| }
 | |
| 
 | |
| function md5ii(a, b, c, d, x, s, t) {
 | |
|   return md5cmn(c ^ (b | ~d), a, b, x, s, t);
 | |
| }
 | |
| 
 | |
| var _default = md5;
 | |
| exports.default = _default;
 | |
| },{}],14:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| var _default = '00000000-0000-0000-0000-000000000000';
 | |
| exports.default = _default;
 | |
| },{}],15:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| var _validate = _interopRequireDefault(_dereq_(25));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| function parse(uuid) {
 | |
|   if (!(0, _validate.default)(uuid)) {
 | |
|     throw TypeError('Invalid UUID');
 | |
|   }
 | |
| 
 | |
|   let v;
 | |
|   const arr = new Uint8Array(16); // Parse ########-....-....-....-............
 | |
| 
 | |
|   arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
 | |
|   arr[1] = v >>> 16 & 0xff;
 | |
|   arr[2] = v >>> 8 & 0xff;
 | |
|   arr[3] = v & 0xff; // Parse ........-####-....-....-............
 | |
| 
 | |
|   arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
 | |
|   arr[5] = v & 0xff; // Parse ........-....-####-....-............
 | |
| 
 | |
|   arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
 | |
|   arr[7] = v & 0xff; // Parse ........-....-....-####-............
 | |
| 
 | |
|   arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
 | |
|   arr[9] = v & 0xff; // Parse ........-....-....-....-############
 | |
|   // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
 | |
| 
 | |
|   arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
 | |
|   arr[11] = v / 0x100000000 & 0xff;
 | |
|   arr[12] = v >>> 24 & 0xff;
 | |
|   arr[13] = v >>> 16 & 0xff;
 | |
|   arr[14] = v >>> 8 & 0xff;
 | |
|   arr[15] = v & 0xff;
 | |
|   return arr;
 | |
| }
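// Illustrative (comment only): parsing the DNS namespace UUID used further
// down in this bundle returns its 16 raw bytes,
//
//   parse('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
//   // -> Uint8Array [0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1,
//   //                0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8]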
 | |
| 
 | |
| var _default = parse;
 | |
| exports.default = _default;
 | |
| },{"25":25}],16:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
 | |
| exports.default = _default;
 | |
| },{}],17:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = rng;
 | |
| // Unique ID creation requires a high quality random # generator. In the browser we therefore
 | |
| // require the crypto API and do not support built-in fallback to lower quality random number
 | |
| // generators (like Math.random()).
 | |
| let getRandomValues;
 | |
| const rnds8 = new Uint8Array(16);
 | |
| 
 | |
| function rng() {
 | |
|   // lazy load so that environments that need to polyfill have a chance to do so
 | |
|   if (!getRandomValues) {
 | |
|     // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also,
 | |
|     // find the complete implementation of crypto (msCrypto) on IE11.
 | |
|     getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);
 | |
| 
 | |
|     if (!getRandomValues) {
 | |
|       throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   return getRandomValues(rnds8);
 | |
| }
 | |
| },{}],18:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| // Adapted from Chris Veness' SHA1 code at
 | |
| // http://www.movable-type.co.uk/scripts/sha1.html
 | |
| function f(s, x, y, z) {
 | |
|   switch (s) {
 | |
|     case 0:
 | |
|       return x & y ^ ~x & z;
 | |
| 
 | |
|     case 1:
 | |
|       return x ^ y ^ z;
 | |
| 
 | |
|     case 2:
 | |
|       return x & y ^ x & z ^ y & z;
 | |
| 
 | |
|     case 3:
 | |
|       return x ^ y ^ z;
 | |
|   }
 | |
| }
 | |
| 
 | |
| function ROTL(x, n) {
 | |
|   return x << n | x >>> 32 - n;
 | |
| }
 | |
| 
 | |
| function sha1(bytes) {
 | |
|   const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6];
 | |
|   const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];
 | |
| 
 | |
|   if (typeof bytes === 'string') {
 | |
|     const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
 | |
| 
 | |
|     bytes = [];
 | |
| 
 | |
|     for (let i = 0; i < msg.length; ++i) {
 | |
|       bytes.push(msg.charCodeAt(i));
 | |
|     }
 | |
|   } else if (!Array.isArray(bytes)) {
 | |
|     // Convert Array-like to Array
 | |
|     bytes = Array.prototype.slice.call(bytes);
 | |
|   }
 | |
| 
 | |
|   bytes.push(0x80);
 | |
|   const l = bytes.length / 4 + 2;
 | |
|   const N = Math.ceil(l / 16);
 | |
|   const M = new Array(N);
 | |
| 
 | |
|   for (let i = 0; i < N; ++i) {
 | |
|     const arr = new Uint32Array(16);
 | |
| 
 | |
|     for (let j = 0; j < 16; ++j) {
 | |
|       arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3];
 | |
|     }
 | |
| 
 | |
|     M[i] = arr;
 | |
|   }
 | |
| 
 | |
|   M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32);
 | |
|   M[N - 1][14] = Math.floor(M[N - 1][14]);
 | |
|   M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff;
 | |
| 
 | |
|   for (let i = 0; i < N; ++i) {
 | |
|     const W = new Uint32Array(80);
 | |
| 
 | |
|     for (let t = 0; t < 16; ++t) {
 | |
|       W[t] = M[i][t];
 | |
|     }
 | |
| 
 | |
|     for (let t = 16; t < 80; ++t) {
 | |
|       W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1);
 | |
|     }
 | |
| 
 | |
|     let a = H[0];
 | |
|     let b = H[1];
 | |
|     let c = H[2];
 | |
|     let d = H[3];
 | |
|     let e = H[4];
 | |
| 
 | |
|     for (let t = 0; t < 80; ++t) {
 | |
|       const s = Math.floor(t / 20);
 | |
|       const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0;
 | |
|       e = d;
 | |
|       d = c;
 | |
|       c = ROTL(b, 30) >>> 0;
 | |
|       b = a;
 | |
|       a = T;
 | |
|     }
 | |
| 
 | |
|     H[0] = H[0] + a >>> 0;
 | |
|     H[1] = H[1] + b >>> 0;
 | |
|     H[2] = H[2] + c >>> 0;
 | |
|     H[3] = H[3] + d >>> 0;
 | |
|     H[4] = H[4] + e >>> 0;
 | |
|   }
 | |
| 
 | |
|   return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff];
 | |
| }
 | |
| 
 | |
| var _default = sha1;
 | |
| exports.default = _default;
 | |
| },{}],19:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| var _validate = _interopRequireDefault(_dereq_(25));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| /**
 | |
|  * Convert array of 16 byte values to UUID string format of the form:
 | |
|  * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 | |
|  */
 | |
| const byteToHex = [];
 | |
| 
 | |
| for (let i = 0; i < 256; ++i) {
 | |
|   byteToHex.push((i + 0x100).toString(16).substr(1));
 | |
| }
 | |
| 
 | |
| function stringify(arr, offset = 0) {
 | |
|   // Note: Be careful editing this code!  It's been tuned for performance
 | |
|   // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
 | |
|   const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID.  If this throws, it's likely due to one
 | |
|   // of the following:
 | |
|   // - One or more input array values don't map to a hex octet (leading to
 | |
|   // "undefined" in the uuid)
 | |
|   // - Invalid input values for the RFC `version` or `variant` fields
 | |
| 
 | |
|   if (!(0, _validate.default)(uuid)) {
 | |
|     throw TypeError('Stringified UUID is invalid');
 | |
|   }
 | |
| 
 | |
|   return uuid;
 | |
| }
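// stringify() is the inverse of parse() above: it rebuilds the canonical
// lower-case text form from 16 bytes (comment-only example):
//
//   stringify(parse('6BA7B810-9DAD-11D1-80B4-00C04FD430C8'))
//   // -> '6ba7b810-9dad-11d1-80b4-00c04fd430c8'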
 | |
| 
 | |
| var _default = stringify;
 | |
| exports.default = _default;
 | |
| },{"25":25}],20:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| var _rng = _interopRequireDefault(_dereq_(17));
 | |
| 
 | |
| var _stringify = _interopRequireDefault(_dereq_(19));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| // **`v1()` - Generate time-based UUID**
 | |
| //
 | |
| // Inspired by https://github.com/LiosK/UUID.js
 | |
| // and http://docs.python.org/library/uuid.html
 | |
| let _nodeId;
 | |
| 
 | |
| let _clockseq; // Previous uuid creation time
 | |
| 
 | |
| 
 | |
| let _lastMSecs = 0;
 | |
| let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
 | |
| 
 | |
| function v1(options, buf, offset) {
 | |
|   let i = buf && offset || 0;
 | |
|   const b = buf || new Array(16);
 | |
|   options = options || {};
 | |
|   let node = options.node || _nodeId;
 | |
|   let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
 | |
|   // specified.  We do this lazily to minimize issues related to insufficient
 | |
|   // system entropy.  See #189
 | |
| 
 | |
|   if (node == null || clockseq == null) {
 | |
|     const seedBytes = options.random || (options.rng || _rng.default)();
 | |
| 
 | |
|     if (node == null) {
 | |
      // Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
 | |
|       node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
 | |
|     }
 | |
| 
 | |
|     if (clockseq == null) {
 | |
|       // Per 4.2.2, randomize (14 bit) clockseq
 | |
|       clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
 | |
|     }
 | |
|   } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
 | |
|   // (1582-10-15 00:00).  JSNumbers aren't precise enough for this, so
 | |
|   // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
 | |
|   // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
 | |
| 
 | |
| 
 | |
|   let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
 | |
|   // cycle to simulate higher resolution clock
 | |
| 
 | |
|   let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
 | |
| 
 | |
|   const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
 | |
| 
 | |
|   if (dt < 0 && options.clockseq === undefined) {
 | |
|     clockseq = clockseq + 1 & 0x3fff;
 | |
|   } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
 | |
|   // time interval
 | |
| 
 | |
| 
 | |
|   if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
 | |
|     nsecs = 0;
 | |
|   } // Per 4.2.1.2 Throw error if too many uuids are requested
 | |
| 
 | |
| 
 | |
|   if (nsecs >= 10000) {
 | |
|     throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
 | |
|   }
 | |
| 
 | |
|   _lastMSecs = msecs;
 | |
|   _lastNSecs = nsecs;
 | |
|   _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
 | |
| 
 | |
|   msecs += 12219292800000; // `time_low`
 | |
| 
 | |
|   const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
 | |
|   b[i++] = tl >>> 24 & 0xff;
 | |
|   b[i++] = tl >>> 16 & 0xff;
 | |
|   b[i++] = tl >>> 8 & 0xff;
 | |
|   b[i++] = tl & 0xff; // `time_mid`
 | |
| 
 | |
|   const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
 | |
|   b[i++] = tmh >>> 8 & 0xff;
 | |
|   b[i++] = tmh & 0xff; // `time_high_and_version`
 | |
| 
 | |
|   b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
 | |
| 
 | |
|   b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
 | |
| 
 | |
|   b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
 | |
| 
 | |
|   b[i++] = clockseq & 0xff; // `node`
 | |
| 
 | |
|   for (let n = 0; n < 6; ++n) {
 | |
|     b[i + n] = node[n];
 | |
|   }
 | |
| 
 | |
|   return buf || (0, _stringify.default)(b);
 | |
| }
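// Illustrative (comment only): v1() returns a time-based UUID string by
// default; options can pin the timestamp, clock sequence and node, and passing
// a 16-element buffer fills it in place instead of returning a string.
//
//   v1()   // e.g. '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' (version digit is 1)
//   v1({msecs: 0, nsecs: 0, clockseq: 0x1234, node: [1, 2, 3, 4, 5, 6]})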
 | |
| 
 | |
| var _default = v1;
 | |
| exports.default = _default;
 | |
| },{"17":17,"19":19}],21:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| var _v = _interopRequireDefault(_dereq_(22));
 | |
| 
 | |
| var _md = _interopRequireDefault(_dereq_(13));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| const v3 = (0, _v.default)('v3', 0x30, _md.default);
 | |
| var _default = v3;
 | |
| exports.default = _default;
 | |
| },{"13":13,"22":22}],22:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = _default;
 | |
| exports.URL = exports.DNS = void 0;
 | |
| 
 | |
| var _stringify = _interopRequireDefault(_dereq_(19));
 | |
| 
 | |
| var _parse = _interopRequireDefault(_dereq_(15));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| function stringToBytes(str) {
 | |
|   str = unescape(encodeURIComponent(str)); // UTF8 escape
 | |
| 
 | |
|   const bytes = [];
 | |
| 
 | |
|   for (let i = 0; i < str.length; ++i) {
 | |
|     bytes.push(str.charCodeAt(i));
 | |
|   }
 | |
| 
 | |
|   return bytes;
 | |
| }
 | |
| 
 | |
| const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
 | |
| exports.DNS = DNS;
 | |
| const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
 | |
| exports.URL = URL;
 | |
| 
 | |
| function _default(name, version, hashfunc) {
 | |
|   function generateUUID(value, namespace, buf, offset) {
 | |
|     if (typeof value === 'string') {
 | |
|       value = stringToBytes(value);
 | |
|     }
 | |
| 
 | |
|     if (typeof namespace === 'string') {
 | |
|       namespace = (0, _parse.default)(namespace);
 | |
|     }
 | |
| 
 | |
|     if (namespace.length !== 16) {
 | |
|       throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
 | |
|     } // Compute hash of namespace and value, Per 4.3
 | |
|     // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
 | |
|     // hashfunc([...namespace, ... value])`
 | |
| 
 | |
| 
 | |
|     let bytes = new Uint8Array(16 + value.length);
 | |
|     bytes.set(namespace);
 | |
|     bytes.set(value, namespace.length);
 | |
|     bytes = hashfunc(bytes);
 | |
|     bytes[6] = bytes[6] & 0x0f | version;
 | |
|     bytes[8] = bytes[8] & 0x3f | 0x80;
 | |
| 
 | |
|     if (buf) {
 | |
|       offset = offset || 0;
 | |
| 
 | |
|       for (let i = 0; i < 16; ++i) {
 | |
|         buf[offset + i] = bytes[i];
 | |
|       }
 | |
| 
 | |
|       return buf;
 | |
|     }
 | |
| 
 | |
|     return (0, _stringify.default)(bytes);
 | |
|   } // Function#name is not settable on some platforms (#270)
 | |
| 
 | |
| 
 | |
|   try {
 | |
|     generateUUID.name = name; // eslint-disable-next-line no-empty
 | |
|   } catch (err) {} // For CommonJS default export support
 | |
| 
 | |
| 
 | |
|   generateUUID.DNS = DNS;
 | |
|   generateUUID.URL = URL;
 | |
|   return generateUUID;
 | |
| }
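// The factory above backs both v3() (MD5) and v5() (SHA-1): it hashes the
// namespace bytes followed by the name bytes, then stamps the version and
// variant bits. Illustrative use through the generated functions (comment
// only):
//
//   v5('example.com', v5.DNS)   // deterministic: same inputs, same UUID
//   v3('example.com', v3.DNS)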
 | |
| },{"15":15,"19":19}],23:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| var _rng = _interopRequireDefault(_dereq_(17));
 | |
| 
 | |
| var _stringify = _interopRequireDefault(_dereq_(19));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| function v4(options, buf, offset) {
 | |
|   options = options || {};
 | |
| 
 | |
|   const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
 | |
| 
 | |
| 
 | |
|   rnds[6] = rnds[6] & 0x0f | 0x40;
 | |
|   rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
 | |
| 
 | |
|   if (buf) {
 | |
|     offset = offset || 0;
 | |
| 
 | |
|     for (let i = 0; i < 16; ++i) {
 | |
|       buf[offset + i] = rnds[i];
 | |
|     }
 | |
| 
 | |
|     return buf;
 | |
|   }
 | |
| 
 | |
|   return (0, _stringify.default)(rnds);
 | |
| }
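// Illustrative (comment only): v4() draws 16 random bytes via rng()
// (crypto.getRandomValues) and only fixes the version/variant bits, so each
// call yields a fresh UUID of the form 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'
// with y in [8, 9, a, b].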
 | |
| 
 | |
| var _default = v4;
 | |
| exports.default = _default;
 | |
| },{"17":17,"19":19}],24:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| var _v = _interopRequireDefault(_dereq_(22));
 | |
| 
 | |
| var _sha = _interopRequireDefault(_dereq_(18));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| const v5 = (0, _v.default)('v5', 0x50, _sha.default);
 | |
| var _default = v5;
 | |
| exports.default = _default;
 | |
| },{"18":18,"22":22}],25:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| var _regex = _interopRequireDefault(_dereq_(16));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| function validate(uuid) {
 | |
|   return typeof uuid === 'string' && _regex.default.test(uuid);
 | |
| }
 | |
| 
 | |
| var _default = validate;
 | |
| exports.default = _default;
 | |
| },{"16":16}],26:[function(_dereq_,module,exports){
 | |
| "use strict";
 | |
| 
 | |
| Object.defineProperty(exports, "__esModule", {
 | |
|   value: true
 | |
| });
 | |
| exports.default = void 0;
 | |
| 
 | |
| var _validate = _interopRequireDefault(_dereq_(25));
 | |
| 
 | |
| function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 | |
| 
 | |
| function version(uuid) {
 | |
|   if (!(0, _validate.default)(uuid)) {
 | |
|     throw TypeError('Invalid UUID');
 | |
|   }
 | |
| 
 | |
|   return parseInt(uuid.substr(14, 1), 16);
 | |
| }
 | |
| 
 | |
| var _default = version;
 | |
| exports.default = _default;
 | |
| },{"25":25}],27:[function(_dereq_,module,exports){
 | |
| 'use strict';
 | |
| 
 | |
| function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
 | |
| 
 | |
| var immediate = _interopDefault(_dereq_(4));
 | |
| var EventEmitter = _interopDefault(_dereq_(3));
 | |
| _dereq_(12);
 | |
| var Md5 = _interopDefault(_dereq_(11));
 | |
| var getArguments = _interopDefault(_dereq_(1));
 | |
| var inherits = _interopDefault(_dereq_(10));
 | |
| 
 | |
| function isBinaryObject(object) {
 | |
|   return (typeof ArrayBuffer !== 'undefined' && object instanceof ArrayBuffer) ||
 | |
|     (typeof Blob !== 'undefined' && object instanceof Blob);
 | |
| }
 | |
| 
 | |
| function cloneArrayBuffer(buff) {
 | |
|   if (typeof buff.slice === 'function') {
 | |
|     return buff.slice(0);
 | |
|   }
 | |
|   // IE10-11 slice() polyfill
 | |
|   var target = new ArrayBuffer(buff.byteLength);
 | |
|   var targetArray = new Uint8Array(target);
 | |
|   var sourceArray = new Uint8Array(buff);
 | |
|   targetArray.set(sourceArray);
 | |
|   return target;
 | |
| }
 | |
| 
 | |
| function cloneBinaryObject(object) {
 | |
|   if (object instanceof ArrayBuffer) {
 | |
|     return cloneArrayBuffer(object);
 | |
|   }
 | |
|   var size = object.size;
 | |
|   var type = object.type;
 | |
|   // Blob
 | |
|   if (typeof object.slice === 'function') {
 | |
|     return object.slice(0, size, type);
 | |
|   }
 | |
|   // PhantomJS slice() replacement
 | |
|   return object.webkitSlice(0, size, type);
 | |
| }
 | |
| 
 | |
| // most of this is borrowed from lodash.isPlainObject:
 | |
| // https://github.com/fis-components/lodash.isplainobject/
 | |
| // blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js
 | |
| 
 | |
| var funcToString = Function.prototype.toString;
 | |
| var objectCtorString = funcToString.call(Object);
 | |
| 
 | |
| function isPlainObject(value) {
 | |
|   var proto = Object.getPrototypeOf(value);
 | |
|   /* istanbul ignore if */
 | |
|   if (proto === null) { // not sure when this happens, but I guess it can
 | |
|     return true;
 | |
|   }
 | |
|   var Ctor = proto.constructor;
 | |
|   return (typeof Ctor == 'function' &&
 | |
|     Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString);
 | |
| }
 | |
| 
 | |
| function clone(object) {
 | |
|   var newObject;
 | |
|   var i;
 | |
|   var len;
 | |
| 
 | |
|   if (!object || typeof object !== 'object') {
 | |
|     return object;
 | |
|   }
 | |
| 
 | |
|   if (Array.isArray(object)) {
 | |
|     newObject = [];
 | |
|     for (i = 0, len = object.length; i < len; i++) {
 | |
|       newObject[i] = clone(object[i]);
 | |
|     }
 | |
|     return newObject;
 | |
|   }
 | |
| 
 | |
|   // special case: to avoid inconsistencies between IndexedDB
 | |
|   // and other backends, we automatically stringify Dates
 | |
|   if (object instanceof Date && isFinite(object)) {
 | |
|     return object.toISOString();
 | |
|   }
 | |
| 
 | |
|   if (isBinaryObject(object)) {
 | |
|     return cloneBinaryObject(object);
 | |
|   }
 | |
| 
 | |
|   if (!isPlainObject(object)) {
 | |
|     return object; // don't clone objects like Workers
 | |
|   }
 | |
| 
 | |
|   newObject = {};
 | |
|   for (i in object) {
 | |
|     /* istanbul ignore else */
 | |
|     if (Object.prototype.hasOwnProperty.call(object, i)) {
 | |
|       var value = clone(object[i]);
 | |
|       if (typeof value !== 'undefined') {
 | |
|         newObject[i] = value;
 | |
|       }
 | |
|     }
 | |
|   }
 | |
|   return newObject;
 | |
| }
 | |
| 
 | |
| function once(fun) {
 | |
|   var called = false;
 | |
|   return getArguments(function (args) {
 | |
|     /* istanbul ignore if */
 | |
|     if (called) {
 | |
|       // this is a smoke test and should never actually happen
 | |
|       throw new Error('once called more than once');
 | |
|     } else {
 | |
|       called = true;
 | |
|       fun.apply(this, args);
 | |
|     }
 | |
|   });
 | |
| }
 | |
| 
 | |
| function toPromise(func) {
 | |
|   //create the function we will be returning
 | |
|   return getArguments(function (args) {
 | |
|     // Clone arguments
 | |
|     args = clone(args);
 | |
|     var self = this;
 | |
|     // if the last argument is a function, assume its a callback
 | |
|     var usedCB = (typeof args[args.length - 1] === 'function') ? args.pop() : false;
 | |
|     var promise = new Promise(function (fulfill, reject) {
 | |
|       var resp;
 | |
|       try {
 | |
|         var callback = once(function (err, mesg) {
 | |
|           if (err) {
 | |
|             reject(err);
 | |
|           } else {
 | |
|             fulfill(mesg);
 | |
|           }
 | |
|         });
 | |
|         // create a callback for this invocation
 | |
|         // apply the function in the orig context
 | |
|         args.push(callback);
 | |
|         resp = func.apply(self, args);
 | |
|         if (resp && typeof resp.then === 'function') {
 | |
|           fulfill(resp);
 | |
|         }
 | |
|       } catch (e) {
 | |
|         reject(e);
 | |
|       }
 | |
|     });
 | |
|     // if there is a callback, call it back
 | |
|     if (usedCB) {
 | |
|       promise.then(function (result) {
 | |
|         usedCB(null, result);
 | |
|       }, usedCB);
 | |
|     }
 | |
|     return promise;
 | |
|   });
 | |
| }
 | |
| 
 | |
| function mangle(key) {
 | |
|   return '$' + key;
 | |
| }
 | |
| function unmangle(key) {
 | |
|   return key.substring(1);
 | |
| }
 | |
| function Map$1() {
 | |
|   this._store = {};
 | |
| }
 | |
| Map$1.prototype.get = function (key) {
 | |
|   var mangled = mangle(key);
 | |
|   return this._store[mangled];
 | |
| };
 | |
| Map$1.prototype.set = function (key, value) {
 | |
|   var mangled = mangle(key);
 | |
|   this._store[mangled] = value;
 | |
|   return true;
 | |
| };
 | |
| Map$1.prototype.has = function (key) {
 | |
|   var mangled = mangle(key);
 | |
|   return mangled in this._store;
 | |
| };
 | |
| Map$1.prototype.keys = function () {
 | |
|   return Object.keys(this._store).map(k => unmangle(k));
 | |
| };
 | |
| Map$1.prototype["delete"] = function (key) {
 | |
|   var mangled = mangle(key);
 | |
|   var res = mangled in this._store;
 | |
|   delete this._store[mangled];
 | |
|   return res;
 | |
| };
 | |
| Map$1.prototype.forEach = function (cb) {
 | |
|   var keys = Object.keys(this._store);
 | |
|   for (var i = 0, len = keys.length; i < len; i++) {
 | |
|     var key = keys[i];
 | |
|     var value = this._store[key];
 | |
|     key = unmangle(key);
 | |
|     cb(value, key);
 | |
|   }
 | |
| };
 | |
| Object.defineProperty(Map$1.prototype, 'size', {
 | |
|   get: function () {
 | |
|     return Object.keys(this._store).length;
 | |
|   }
 | |
| });
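// Keys are stored with a '$' prefix (mangle/unmangle above) so lookups against
// the plain-object backing store cannot collide with inherited properties such
// as 'constructor' or '__proto__'. Illustrative (comment only):
//
//   var m = new Map$1();
//   m.set('__proto__', 1);
//   m.get('__proto__');   // 1 (actually stored under '$__proto__')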
 | |
| 
 | |
| function Set$1(array) {
 | |
|   this._store = new Map$1();
 | |
| 
 | |
|   // init with an array
 | |
|   if (array && Array.isArray(array)) {
 | |
|     for (var i = 0, len = array.length; i < len; i++) {
 | |
|       this.add(array[i]);
 | |
|     }
 | |
|   }
 | |
| }
 | |
| Set$1.prototype.add = function (key) {
 | |
|   return this._store.set(key, true);
 | |
| };
 | |
| Set$1.prototype.has = function (key) {
 | |
|   return this._store.has(key);
 | |
| };
 | |
| Set$1.prototype.forEach = function (cb) {
 | |
|   this._store.forEach(function (value, key) {
 | |
|     cb(key);
 | |
|   });
 | |
| };
 | |
| Object.defineProperty(Set$1.prototype, 'size', {
 | |
|   get: function () {
 | |
|     return this._store.size;
 | |
|   }
 | |
| });
 | |
| 
 | |
| // Based on https://kangax.github.io/compat-table/es6/ we can sniff out
 | |
| // incomplete Map/Set implementations which would otherwise cause our tests to fail.
 | |
| // Notably they fail in IE11 and iOS 8.4, which this prevents.
 | |
| function supportsMapAndSet() {
 | |
|   if (typeof Symbol === 'undefined' || typeof Map === 'undefined' || typeof Set === 'undefined') {
 | |
|     return false;
 | |
|   }
 | |
|   var prop = Object.getOwnPropertyDescriptor(Map, Symbol.species);
 | |
|   return prop && 'get' in prop && Map[Symbol.species] === Map;
 | |
| }
 | |
| 
 | |
| // based on https://github.com/montagejs/collections
 | |
| 
 | |
| var ExportedSet;
 | |
| var ExportedMap;
 | |
| 
 | |
| {
 | |
|   if (supportsMapAndSet()) { // prefer built-in Map/Set
 | |
|     ExportedSet = Set;
 | |
|     ExportedMap = Map;
 | |
|   } else { // fall back to our polyfill
 | |
|     ExportedSet = Set$1;
 | |
|     ExportedMap = Map$1;
 | |
|   }
 | |
| }
 | |
| 
 | |
| // like underscore/lodash _.pick()
 | |
| function pick(obj, arr) {
 | |
|   var res = {};
 | |
|   for (var i = 0, len = arr.length; i < len; i++) {
 | |
|     var prop = arr[i];
 | |
|     if (prop in obj) {
 | |
|       res[prop] = obj[prop];
 | |
|     }
 | |
|   }
 | |
|   return res;
 | |
| }
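// Illustrative (comment only): pick({a: 1, b: 2, c: 3}, ['a', 'c']) returns
// {a: 1, c: 3}; names listed but absent from the source object are skipped.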
 | |
| 
 | |
| var hasLocal;
 | |
| 
 | |
| try {
 | |
|   localStorage.setItem('_pouch_check_localstorage', 1);
 | |
|   hasLocal = !!localStorage.getItem('_pouch_check_localstorage');
 | |
| } catch (e) {
 | |
|   hasLocal = false;
 | |
| }
 | |
| 
 | |
| function hasLocalStorage() {
 | |
|   return hasLocal;
 | |
| }
 | |
| 
 | |
// Custom nextTick() shim for browsers. In node, this will just be process.nextTick().
 | |
| 
 | |
| inherits(Changes, EventEmitter);
 | |
| 
 | |
| /* istanbul ignore next */
 | |
| function attachBrowserEvents(self) {
 | |
|   if (hasLocalStorage()) {
 | |
|     addEventListener("storage", function (e) {
 | |
|       self.emit(e.key);
 | |
|     });
 | |
|   }
 | |
| }
 | |
| 
 | |
| function Changes() {
 | |
|   EventEmitter.call(this);
 | |
|   this._listeners = {};
 | |
| 
 | |
|   attachBrowserEvents(this);
 | |
| }
 | |
| Changes.prototype.addListener = function (dbName, id, db, opts) {
 | |
|   /* istanbul ignore if */
 | |
|   if (this._listeners[id]) {
 | |
|     return;
 | |
|   }
 | |
|   var self = this;
 | |
|   var inprogress = false;
 | |
|   function eventFunction() {
 | |
|     /* istanbul ignore if */
 | |
|     if (!self._listeners[id]) {
 | |
|       return;
 | |
|     }
 | |
|     if (inprogress) {
 | |
|       inprogress = 'waiting';
 | |
|       return;
 | |
|     }
 | |
|     inprogress = true;
 | |
|     var changesOpts = pick(opts, [
 | |
|       'style', 'include_docs', 'attachments', 'conflicts', 'filter',
 | |
|       'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
 | |
|     ]);
 | |
| 
 | |
|     /* istanbul ignore next */
 | |
|     function onError() {
 | |
|       inprogress = false;
 | |
|     }
 | |
| 
 | |
|     db.changes(changesOpts).on('change', function (c) {
 | |
|       if (c.seq > opts.since && !opts.cancelled) {
 | |
|         opts.since = c.seq;
 | |
|         opts.onChange(c);
 | |
|       }
 | |
|     }).on('complete', function () {
 | |
|       if (inprogress === 'waiting') {
 | |
|         immediate(eventFunction);
 | |
|       }
 | |
|       inprogress = false;
 | |
|     }).on('error', onError);
 | |
|   }
 | |
|   this._listeners[id] = eventFunction;
 | |
|   this.on(dbName, eventFunction);
 | |
| };
 | |
| 
 | |
| Changes.prototype.removeListener = function (dbName, id) {
 | |
|   /* istanbul ignore if */
 | |
|   if (!(id in this._listeners)) {
 | |
|     return;
 | |
|   }
 | |
|   EventEmitter.prototype.removeListener.call(this, dbName,
 | |
|     this._listeners[id]);
 | |
|   delete this._listeners[id];
 | |
| };
 | |
| 
 | |
| 
 | |
| /* istanbul ignore next */
 | |
| Changes.prototype.notifyLocalWindows = function (dbName) {
 | |
|   //do a useless change on a storage thing
 | |
//in order to get other windows' listeners to activate
 | |
|   if (hasLocalStorage()) {
 | |
|     localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
 | |
|   }
 | |
| };
 | |
| 
 | |
| Changes.prototype.notify = function (dbName) {
 | |
|   this.emit(dbName);
 | |
|   this.notifyLocalWindows(dbName);
 | |
| };
 | |
| 
 | |
| function guardedConsole(method) {
 | |
|   /* istanbul ignore else */
 | |
|   if (typeof console !== 'undefined' && typeof console[method] === 'function') {
 | |
|     var args = Array.prototype.slice.call(arguments, 1);
 | |
|     console[method].apply(console, args);
 | |
|   }
 | |
| }
 | |
| 
 | |
| var assign;
 | |
| {
 | |
|   if (typeof Object.assign === 'function') {
 | |
|     assign = Object.assign;
 | |
|   } else {
 | |
|     // lite Object.assign polyfill based on
 | |
|     // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign
 | |
|     assign = function (target) {
 | |
|       var to = Object(target);
 | |
| 
 | |
|       for (var index = 1; index < arguments.length; index++) {
 | |
|         var nextSource = arguments[index];
 | |
| 
 | |
|         if (nextSource != null) { // Skip over if undefined or null
 | |
|           for (var nextKey in nextSource) {
 | |
|             // Avoid bugs when hasOwnProperty is shadowed
 | |
|             if (Object.prototype.hasOwnProperty.call(nextSource, nextKey)) {
 | |
|               to[nextKey] = nextSource[nextKey];
 | |
|             }
 | |
|           }
 | |
|         }
 | |
|       }
 | |
|       return to;
 | |
|     };
 | |
|   }
 | |
| }
 | |
| 
 | |
| var $inject_Object_assign = assign;
 | |
| 
 | |
| inherits(PouchError, Error);
 | |
| 
 | |
| function PouchError(status, error, reason) {
 | |
|   Error.call(this, reason);
 | |
|   this.status = status;
 | |
|   this.name = error;
 | |
|   this.message = reason;
 | |
|   this.error = true;
 | |
| }
 | |
| 
 | |
| PouchError.prototype.toString = function () {
 | |
|   return JSON.stringify({
 | |
|     status: this.status,
 | |
|     name: this.name,
 | |
|     message: this.message,
 | |
|     reason: this.reason
 | |
|   });
 | |
| };
 | |
| 
 | |
| var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
 | |
| var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
 | |
| var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
 | |
| var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
 | |
| var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
 | |
| var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
 | |
| var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
 | |
| var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
 | |
| var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
 | |
| var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
 | |
| var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
 | |
| var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
 | |
| var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
 | |
| var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
 | |
| var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
 | |
| var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
 | |
| var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
 | |
| var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
 | |
| var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
 | |
| var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
 | |
| var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
 | |
| var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
 | |
| var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
 | |
| var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
 | |
| 
 | |
| function generateErrorFromResponse(err) {
 | |
| 
 | |
|   if (typeof err !== 'object') {
 | |
|     var data = err;
 | |
|     err = UNKNOWN_ERROR;
 | |
|     err.data = data;
 | |
|   }
 | |
| 
 | |
|   if ('error' in err && err.error === 'conflict') {
 | |
|     err.name = 'conflict';
 | |
|     err.status = 409;
 | |
|   }
 | |
| 
 | |
|   if (!('name' in err)) {
 | |
|     err.name = err.error || 'unknown';
 | |
|   }
 | |
| 
 | |
|   if (!('status' in err)) {
 | |
|     err.status = 500;
 | |
|   }
 | |
| 
 | |
|   if (!('message' in err)) {
 | |
|     err.message = err.message || err.reason;
 | |
|   }
 | |
| 
 | |
|   if (!('stack' in err)) {
 | |
|     err.stack = (new Error()).stack;
 | |
|   }
 | |
| 
 | |
|   return err;
 | |
| }
 | |
| 
 | |
| function flatten(arrs) {
 | |
|   var res = [];
 | |
|   for (var i = 0, len = arrs.length; i < len; i++) {
 | |
|     res = res.concat(arrs[i]);
 | |
|   }
 | |
|   return res;
 | |
| }
 | |
| 
 | |
// shim for Function.prototype.name

// Checks if a PouchDB object is "remote" or not.
 | |
| 
 | |
| function isRemote(db) {
 | |
|   if (typeof db._remote === 'boolean') {
 | |
|     return db._remote;
 | |
|   }
 | |
|   /* istanbul ignore next */
 | |
|   if (typeof db.type === 'function') {
 | |
|     guardedConsole('warn',
 | |
|       'db.type() is deprecated and will be removed in ' +
 | |
|       'a future version of PouchDB');
 | |
|     return db.type() === 'http';
 | |
|   }
 | |
|   /* istanbul ignore next */
 | |
|   return false;
 | |
| }
 | |
| 
 | |
| // originally parseUri 1.2.2, now patched by us
 | |
| 
 | |
| // Based on https://github.com/alexdavid/scope-eval v0.0.3
 | |
| 
 | |
| // this is essentially the "update sugar" function from daleharvey/pouchdb#1388
 | |
| // the diffFun tells us what delta to apply to the doc.  it either returns
 | |
| // the doc, or false if it doesn't need to do an update after all
 | |
| function upsert(db, docId, diffFun) {
 | |
|   return db.get(docId)[
 | |
|     "catch"](function (err) {
 | |
|       /* istanbul ignore next */
 | |
|       if (err.status !== 404) {
 | |
|         throw err;
 | |
|       }
 | |
|       return {};
 | |
|     })
 | |
|     .then(function (doc) {
 | |
|       // the user might change the _rev, so save it for posterity
 | |
|       var docRev = doc._rev;
 | |
|       var newDoc = diffFun(doc);
 | |
| 
 | |
|       if (!newDoc) {
 | |
|         // if the diffFun returns falsy, we short-circuit as
 | |
|         // an optimization
 | |
|         return {updated: false, rev: docRev};
 | |
|       }
 | |
| 
 | |
|       // users aren't allowed to modify these values,
 | |
|       // so reset them here
 | |
|       newDoc._id = docId;
 | |
|       newDoc._rev = docRev;
 | |
|       return tryAndPut(db, newDoc, diffFun);
 | |
|     });
 | |
| }
 | |
| 
 | |
| function tryAndPut(db, doc, diffFun) {
 | |
|   return db.put(doc).then(function (res) {
 | |
|     return {
 | |
|       updated: true,
 | |
|       rev: res.rev
 | |
|     };
 | |
|   }, function (err) {
 | |
|     /* istanbul ignore next */
 | |
|     if (err.status !== 409) {
 | |
|       throw err;
 | |
|     }
 | |
|     return upsert(db, doc._id, diffFun);
 | |
|   });
 | |
| }
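// Illustrative upsert() usage (comment only; 'db' stands for any PouchDB
// instance and 'newSeq' for a caller-supplied value): the diff function
// receives the current doc ({} if missing) and returns the new doc, or a falsy
// value to skip the write; 409 conflicts retry the whole get/diff/put cycle.
//
//   upsert(db, '_local/last_seq', function (doc) {
//     doc.seq = newSeq;
//     return doc;
//   }).then(function (res) {
//     // res -> {updated: true, rev: '...'}
//   });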
 | |
| 
 | |
| var thisAtob = function (str) {
 | |
|   return atob(str);
 | |
| };
 | |
| 
 | |
| // Abstracts constructing a Blob object, so it also works in older
 | |
| // browsers that don't support the native Blob constructor (e.g.
 | |
| // old QtWebKit versions, Android < 4.4).
 | |
| function createBlob(parts, properties) {
 | |
|   /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */
 | |
|   parts = parts || [];
 | |
|   properties = properties || {};
 | |
|   try {
 | |
|     return new Blob(parts, properties);
 | |
|   } catch (e) {
 | |
|     if (e.name !== "TypeError") {
 | |
|       throw e;
 | |
|     }
 | |
|     var Builder = typeof BlobBuilder !== 'undefined' ? BlobBuilder :
 | |
|                   typeof MSBlobBuilder !== 'undefined' ? MSBlobBuilder :
 | |
|                   typeof MozBlobBuilder !== 'undefined' ? MozBlobBuilder :
 | |
|                   WebKitBlobBuilder;
 | |
|     var builder = new Builder();
 | |
|     for (var i = 0; i < parts.length; i += 1) {
 | |
|       builder.append(parts[i]);
 | |
|     }
 | |
|     return builder.getBlob(properties.type);
 | |
|   }
 | |
| }
 | |
| 
 | |
| // From http://stackoverflow.com/questions/14967647/ (continues on next line)
 | |
| // encode-decode-image-with-base64-breaks-image (2013-04-21)
 | |
| function binaryStringToArrayBuffer(bin) {
 | |
|   var length = bin.length;
 | |
|   var buf = new ArrayBuffer(length);
 | |
|   var arr = new Uint8Array(buf);
 | |
|   for (var i = 0; i < length; i++) {
 | |
|     arr[i] = bin.charCodeAt(i);
 | |
|   }
 | |
|   return buf;
 | |
| }
 | |
| 
 | |
| function binStringToBluffer(binString, type) {
 | |
|   return createBlob([binaryStringToArrayBuffer(binString)], {type: type});
 | |
| }
 | |
| 
 | |
| function b64ToBluffer(b64, type) {
 | |
|   return binStringToBluffer(thisAtob(b64), type);
 | |
| }
 | |
| 
 | |
| //Can't find original post, but this is close
 | |
| 
 | |
| // simplified API. universal browser support is assumed
 | |
| 
 | |
| // this is not used in the browser
 | |
| 
 | |
| var setImmediateShim = self.setImmediate || self.setTimeout;
 | |
| 
 | |
| function stringMd5(string) {
 | |
|   return Md5.hash(string);
 | |
| }
 | |
| 
 | |
| var h = Headers;
 | |
| 
 | |
// we restructure the supplied JSON considerably, because the official
 | |
| // Mango API is very particular about a lot of this stuff, but we like
 | |
| // to be liberal with what we accept in order to prevent mental
 | |
| // breakdowns in our users
 | |
| function massageCreateIndexRequest(requestDef) {
 | |
|   requestDef = clone(requestDef);
 | |
| 
 | |
|   if (!requestDef.index) {
 | |
|     requestDef.index = {};
 | |
|   }
 | |
| 
 | |
|   ['type', 'name', 'ddoc'].forEach(function (key) {
 | |
|     if (requestDef.index[key]) {
 | |
|       requestDef[key] = requestDef.index[key];
 | |
|       delete requestDef.index[key];
 | |
|     }
 | |
|   });
 | |
| 
 | |
|   if (requestDef.fields) {
 | |
|     requestDef.index.fields = requestDef.fields;
 | |
|     delete requestDef.fields;
 | |
|   }
 | |
| 
 | |
|   if (!requestDef.type) {
 | |
|     requestDef.type = 'json';
 | |
|   }
 | |
|   return requestDef;
 | |
| }
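// Illustrative (comment only): the massaging above turns a liberal request
// such as
//
//   {index: {fields: ['name'], name: 'name-idx', ddoc: 'my-ddoc'}}
//
// into the shape the Mango endpoint expects:
//
//   {index: {fields: ['name']}, name: 'name-idx', ddoc: 'my-ddoc', type: 'json'}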
 | |
| 
 | |
| // throws if the user is using the wrong query field value type
 | |
| function checkFieldValueType(name, value, isHttp) {
 | |
| 	var message = '';
 | |
| 	var received = value;
 | |
| 	var addReceived = true;
 | |
| 	if ([ '$in', '$nin', '$or', '$and', '$mod', '$nor', '$all' ].indexOf(name) !== -1) {
 | |
| 		if (!Array.isArray(value)) {
 | |
| 			message = 'Query operator ' + name + ' must be an array.';
 | |
| 
 | |
| 		}
 | |
| 	}
 | |
| 
 | |
| 	if ([ '$not', '$elemMatch', '$allMatch' ].indexOf(name) !== -1) {
 | |
| 		if (!(!Array.isArray(value) && typeof value === 'object' && value !== null)) {
 | |
| 			message = 'Query operator ' + name + ' must be an object.';
 | |
| 		}
 | |
| 	}
 | |
| 
 | |
| 	if (name === '$mod' && Array.isArray(value)) {
 | |
| 		if (value.length !== 2) {
 | |
| 			message = 'Query operator $mod must be in the format [divisor, remainder], ' +
 | |
| 				'where divisor and remainder are both integers.';
 | |
| 		} else {
 | |
| 			var divisor = value[0];
 | |
| 			var mod = value[1];
 | |
| 			if (divisor === 0) {
 | |
| 				message = 'Query operator $mod\'s divisor cannot be 0, cannot divide by zero.';
 | |
| 				addReceived = false;
 | |
| 			}
 | |
| 			if (typeof divisor !== 'number' || parseInt(divisor, 10) !== divisor) {
 | |
| 				message = 'Query operator $mod\'s divisor is not an integer.';
 | |
| 				received = divisor;
 | |
| 			}
 | |
| 			if (parseInt(mod, 10) !== mod) {
 | |
| 				message = 'Query operator $mod\'s remainder is not an integer.';
 | |
| 				received = mod;
 | |
| 			}
 | |
| 		}
 | |
| 	}
 | |
| 	if (name === '$exists') {
 | |
| 		if (typeof value !== 'boolean') {
 | |
| 			message = 'Query operator $exists must be a boolean.';
 | |
| 		}
 | |
| 	}
 | |
| 
 | |
| 	if (name === '$type') {
 | |
| 		var allowed = [ 'null', 'boolean', 'number', 'string', 'array', 'object' ];
 | |
| 		var allowedStr = '"' + allowed.slice(0, allowed.length - 1).join('", "') + '", or "' + allowed[allowed.length - 1] + '"';
 | |
| 		if (typeof value !== 'string') {
 | |
| 			message = 'Query operator $type must be a string. Supported values: ' + allowedStr + '.';
 | |
| 		} else if (allowed.indexOf(value) == -1) {
 | |
| 			message = 'Query operator $type must be a string. Supported values: ' + allowedStr + '.';
 | |
| 		}
 | |
| 	}
 | |
| 
 | |
| 	if (name === '$size') {
 | |
| 		if (parseInt(value, 10) !== value) {
 | |
			message = 'Query operator $size must be an integer.';
 | |
| 		}
 | |
| 	}
 | |
| 
 | |
	if (name === '$regex') {
		if (typeof value !== 'string') {
			if (isHttp) {
				message = 'Query operator $regex must be a string.';
			} else if (!(value instanceof RegExp)) {
				message = 'Query operator $regex must be a string or an instance ' +
					'of a javascript regular expression.';
			}
		}
	}
 | |
| 
 | |
| 	if (message) {
 | |
| 		if (addReceived) {
 | |
| 
 | |
| 			var type = received === null
 | |
| 			? ' '
 | |
| 			: Array.isArray(received)
 | |
| 			? ' array'
 | |
| 			: ' ' + typeof received;
 | |
| 			var receivedStr = typeof received === 'object' && received !== null
 | |
| 			?  JSON.stringify(received, null, '\t')
 | |
| 			: received;
 | |
| 
 | |
| 			message += ' Received' + type + ': ' + receivedStr;
 | |
| 		}
 | |
| 		throw new Error(message);
 | |
| 	}
 | |
| }
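// Illustrative (comment only): checkFieldValueType('$exists', 1, false) throws
// "Query operator $exists must be a boolean. Received number: 1", while
// checkFieldValueType('$exists', true, false) returns without error.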
 | |
| 
 | |
| 
 | |
| var requireValidation = [ '$all', '$allMatch', '$and', '$elemMatch', '$exists', '$in', '$mod', '$nin', '$nor', '$not', '$or', '$regex', '$size', '$type' ];
 | |
| 
 | |
| var arrayTypeComparisonOperators = [ '$in', '$nin', '$mod', '$all'];
 | |
| 
 | |
| var equalityOperators = [ '$eq', '$gt', '$gte', '$lt', '$lte' ];
 | |
| 
 | |
// recursively walks down a query selector validating any operators
 | |
| function validateSelector(input, isHttp) {
 | |
| 	if (Array.isArray(input)) {
 | |
| 		for (var entry of input) {
 | |
			if (typeof entry === 'object' && entry !== null) {
 | |
| 				validateSelector(entry, isHttp);
 | |
| 			}
 | |
| 		}
 | |
| 	} else {
 | |
| 		var fields = Object.keys(input);
 | |
| 
 | |
| 		for (var i = 0; i < fields.length; i++) {
 | |
| 			var key = fields[i];
 | |
| 			var value = input[key];
 | |
| 
 | |
| 			if (requireValidation.indexOf(key) !== -1) {
 | |
| 				checkFieldValueType(key, value, isHttp);
 | |
| 			}
 | |
| 			if (equalityOperators.indexOf(key) !== -1) {
 | |
| 				// skip, explicit comparison operators can be anything
 | |
| 				continue;
 | |
| 			}
 | |
| 			if (arrayTypeComparisonOperators.indexOf(key) !== -1) {
 | |
| 				// skip, their values are already valid
 | |
| 				continue;
 | |
| 			}
 | |
| 			if (typeof value === 'object' && value !== null) {
 | |
| 				validateSelector(value, isHttp);
 | |
| 			}
 | |
| 		}
 | |
| 	}
 | |
| }
 | |
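| // Illustrative usage (a sketch, not part of the library itself):
 | |
| //   validateSelector({age: {$exists: true}}, false)   // passes, returns undefined
 | |
| //   validateSelector({age: {$exists: 1}}, false)      // throws: "Query operator $exists must be a boolean. ..."
 | |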
| 
 | |
| function dbFetch(db, path, opts, callback) {
 | |
|   var status, ok;
 | |
|   opts.headers = new h({'Content-type': 'application/json'});
 | |
|   db.fetch(path, opts).then(function (response) {
 | |
|     status = response.status;
 | |
|     ok = response.ok;
 | |
|     return response.json();
 | |
|   }).then(function (json) {
 | |
|     if (!ok) {
 | |
|       json.status = status;
 | |
|       var err = generateErrorFromResponse(json);
 | |
|       callback(err);
 | |
|     } else {
 | |
|       callback(null, json);
 | |
|     }
 | |
|   })["catch"](callback);
 | |
| }
 | |
| 
 | |
| function createIndex(db, requestDef, callback) {
 | |
|   requestDef = massageCreateIndexRequest(requestDef);
 | |
|   dbFetch(db, '_index', {
 | |
|     method: 'POST',
 | |
|     body: JSON.stringify(requestDef)
 | |
|   }, callback);
 | |
| }
 | |
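| // The body posted here follows CouchDB's Mango /db/_index API, e.g. (illustrative only):
 | |
| //   createIndex(db, {index: {fields: ['name']}, name: 'name-index', type: 'json'}, callback)
 | |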
| 
 | |
| function find(db, requestDef, callback) {
 | |
|   validateSelector(requestDef.selector, true);
 | |
|   dbFetch(db, '_find', {
 | |
|     method: 'POST',
 | |
|     body: JSON.stringify(requestDef)
 | |
|   }, callback);
 | |
| }
 | |
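| // The body posted here follows CouchDB's Mango /db/_find API, e.g. (illustrative only):
 | |
| //   find(db, {selector: {series: 'Mario'}, fields: ['_id', 'name'], limit: 10}, callback)
 | |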
| 
 | |
| function explain(db, requestDef, callback) {
 | |
|   dbFetch(db, '_explain', {
 | |
|     method: 'POST',
 | |
|     body: JSON.stringify(requestDef)
 | |
|   }, callback);
 | |
| }
 | |
| 
 | |
| function getIndexes(db, callback) {
 | |
|   dbFetch(db, '_index', {
 | |
|     method: 'GET'
 | |
|   }, callback);
 | |
| }
 | |
| 
 | |
| function deleteIndex(db, indexDef, callback) {
 | |
| 
 | |
| 
 | |
|   var ddoc = indexDef.ddoc;
 | |
|   var type = indexDef.type || 'json';
 | |
|   var name = indexDef.name;
 | |
| 
 | |
|   if (!ddoc) {
 | |
|     return callback(new Error('you must provide an index\'s ddoc'));
 | |
|   }
 | |
| 
 | |
|   if (!name) {
 | |
|     return callback(new Error('you must provide an index\'s name'));
 | |
|   }
 | |
| 
 | |
|   var url = '_index/' + [ddoc, type, name].map(encodeURIComponent).join('/');
 | |
| 
 | |
|   dbFetch(db, url, {method: 'DELETE'}, callback);
 | |
| }
 | |
| 
 | |
| // this would just be "return doc[field]", but fields
 | |
| // can be "deep" due to dot notation
 | |
| function getFieldFromDoc(doc, parsedField) {
 | |
|   var value = doc;
 | |
|   for (var i = 0, len = parsedField.length; i < len; i++) {
 | |
|     var key = parsedField[i];
 | |
|     value = value[key];
 | |
|     if (!value) {
 | |
|       break;
 | |
|     }
 | |
|   }
 | |
|   return value;
 | |
| }
 | |
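| // For example (illustrative only):
 | |
| //   getFieldFromDoc({address: {city: 'Berlin'}}, ['address', 'city'])   // -> 'Berlin'
 | |
| //   getFieldFromDoc({address: {}}, ['address', 'city'])                 // -> undefined
 | |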
| 
 | |
| function setFieldInDoc(doc, parsedField, value) {
 | |
|   for (var i = 0, len = parsedField.length; i < len-1; i++) {
 | |
|     var elem = parsedField[i];
 | |
|     doc = doc[elem] = doc[elem] || {};
 | |
|   }
 | |
|   doc[parsedField[len-1]] = value;
 | |
| }
 | |
| 
 | |
| function compare(left, right) {
 | |
|   return left < right ? -1 : left > right ? 1 : 0;
 | |
| }
 | |
| 
 | |
| // Converts a string in dot notation to an array of its components, with backslash escaping
 | |
| function parseField(fieldName) {
 | |
|   // fields may be deep (e.g. "foo.bar.baz"), so parse
 | |
|   var fields = [];
 | |
|   var current = '';
 | |
|   for (var i = 0, len = fieldName.length; i < len; i++) {
 | |
|     var ch = fieldName[i];
 | |
|     if (i > 0 && fieldName[i - 1] === '\\' && (ch === '$' || ch === '.')) {
 | |
|       // escaped delimiter
 | |
|       current = current.substring(0, current.length - 1) + ch;
 | |
|     } else if (ch === '.') {
 | |
|       // When `.` is not escaped (above), it is a field delimiter
 | |
|       fields.push(current);
 | |
|       current = '';
 | |
|     } else { // normal character
 | |
|       current += ch;
 | |
|     }
 | |
|   }
 | |
|   fields.push(current);
 | |
|   return fields;
 | |
| }
 | |
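| // For example (illustrative only):
 | |
| //   parseField('foo.bar')      // -> ['foo', 'bar']
 | |
| //   parseField('foo\\.bar')    // -> ['foo.bar']   (an escaped dot stays literal)
 | |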
| 
 | |
| var combinationFields = ['$or', '$nor', '$not'];
 | |
| function isCombinationalField(field) {
 | |
|   return combinationFields.indexOf(field) > -1;
 | |
| }
 | |
| 
 | |
| function getKey(obj) {
 | |
|   return Object.keys(obj)[0];
 | |
| }
 | |
| 
 | |
| function getValue(obj) {
 | |
|   return obj[getKey(obj)];
 | |
| }
 | |
| 
 | |
| 
 | |
| // flatten an array of selectors joined by an $and operator
 | |
| function mergeAndedSelectors(selectors) {
 | |
| 
 | |
|   // merge to ensure that e.g. if the user specified
 | |
|   // $and: [{$gt: 'a'}, {$gt: 'b'}], then it's collapsed into
 | |
|   // just {$gt: 'b'}
 | |
|   var res = {};
 | |
|   var first = {$or: true, $nor: true};
 | |
| 
 | |
|   selectors.forEach(function (selector) {
 | |
|     Object.keys(selector).forEach(function (field) {
 | |
|       var matcher = selector[field];
 | |
|       if (typeof matcher !== 'object') {
 | |
|         matcher = {$eq: matcher};
 | |
|       }
 | |
| 
 | |
|       if (isCombinationalField(field)) {
 | |
|         // or, nor
 | |
|         if (matcher instanceof Array) {
 | |
|           if (first[field]) {
 | |
|             first[field] = false;
 | |
|             res[field] = matcher;
 | |
|             return;
 | |
|           }
 | |
| 
 | |
|           var entries = [];
 | |
|           res[field].forEach(function (existing) {
 | |
|             Object.keys(matcher).forEach(function (key) {
 | |
|               var m = matcher[key];
 | |
|               var longest = Math.max(Object.keys(existing).length, Object.keys(m).length);
 | |
|               var merged = mergeAndedSelectors([existing, m]);
 | |
|               if (Object.keys(merged).length <= longest) {
 | |
|                 // we have a situation like: (a :{$eq :1} || ...) && (a {$eq: 2} || ...)
 | |
|                 // merging would produce a $eq 2 when actually we shouldn't ever match against these merged conditions
 | |
|                 // merged should always contain more values to be valid
 | |
|                 return;
 | |
|               }
 | |
|               entries.push(merged);
 | |
|             });
 | |
|           });
 | |
|           res[field] = entries;
 | |
|         } else {
 | |
|           // not
 | |
|           res[field] = mergeAndedSelectors([matcher]);
 | |
|         }
 | |
|       } else {
 | |
|         var fieldMatchers = res[field] = res[field] || {};
 | |
|         Object.keys(matcher).forEach(function (operator) {
 | |
|           var value = matcher[operator];
 | |
| 
 | |
|           if (operator === '$gt' || operator === '$gte') {
 | |
|             return mergeGtGte(operator, value, fieldMatchers);
 | |
|           } else if (operator === '$lt' || operator === '$lte') {
 | |
|             return mergeLtLte(operator, value, fieldMatchers);
 | |
|           } else if (operator === '$ne') {
 | |
|             return mergeNe(value, fieldMatchers);
 | |
|           } else if (operator === '$eq') {
 | |
|             return mergeEq(value, fieldMatchers);
 | |
|           } else if (operator === "$regex") {
 | |
|             return mergeRegex(value, fieldMatchers);
 | |
|           }
 | |
|           fieldMatchers[operator] = value;
 | |
|         });
 | |
|       }
 | |
|     });
 | |
|   });
 | |
| 
 | |
|   return res;
 | |
| }
 | |
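| // For example (illustrative only):
 | |
| //   mergeAndedSelectors([{age: {$gt: 10}}, {age: {$gt: 20}}])   // -> {age: {$gt: 20}}
 | |
| //   mergeAndedSelectors([{name: 'Mario'}, {age: {$gt: 21}}])    // -> {name: {$eq: 'Mario'}, age: {$gt: 21}}
 | |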
| 
 | |
| 
 | |
| 
 | |
| // collapse logically equivalent gt/gte values
 | |
| function mergeGtGte(operator, value, fieldMatchers) {
 | |
|   if (typeof fieldMatchers.$eq !== 'undefined') {
 | |
|     return; // do nothing
 | |
|   }
 | |
|   if (typeof fieldMatchers.$gte !== 'undefined') {
 | |
|     if (operator === '$gte') {
 | |
|       if (value > fieldMatchers.$gte) { // more specificity
 | |
|         fieldMatchers.$gte = value;
 | |
|       }
 | |
|     } else { // operator === '$gt'
 | |
|       if (value >= fieldMatchers.$gte) { // more specificity
 | |
|         delete fieldMatchers.$gte;
 | |
|         fieldMatchers.$gt = value;
 | |
|       }
 | |
|     }
 | |
|   } else if (typeof fieldMatchers.$gt !== 'undefined') {
 | |
|     if (operator === '$gte') {
 | |
|       if (value > fieldMatchers.$gt) { // more specificity
 | |
|         delete fieldMatchers.$gt;
 | |
|         fieldMatchers.$gte = value;
 | |
|       }
 | |
|     } else { // operator === '$gt'
 | |
|       if (value > fieldMatchers.$gt) { // more specificity
 | |
|         fieldMatchers.$gt = value;
 | |
|       }
 | |
|     }
 | |
|   } else {
 | |
|     fieldMatchers[operator] = value;
 | |
|   }
 | |
| }
 | |
| 
 | |
| // collapse logically equivalent lt/lte values
 | |
| function mergeLtLte(operator, value, fieldMatchers) {
 | |
|   if (typeof fieldMatchers.$eq !== 'undefined') {
 | |
|     return; // do nothing
 | |
|   }
 | |
|   if (typeof fieldMatchers.$lte !== 'undefined') {
 | |
|     if (operator === '$lte') {
 | |
|       if (value < fieldMatchers.$lte) { // more specificity
 | |
|         fieldMatchers.$lte = value;
 | |
|       }
 | |
|     } else { // operator === '$lt'
 | |
|       if (value <= fieldMatchers.$lte) { // more specificity
 | |
|         delete fieldMatchers.$lte;
 | |
|         fieldMatchers.$lt = value;
 | |
|       }
 | |
|     }
 | |
|   } else if (typeof fieldMatchers.$lt !== 'undefined') {
 | |
|     if (operator === '$lte') {
 | |
|       if (value < fieldMatchers.$lt) { // more specificity
 | |
|         delete fieldMatchers.$lt;
 | |
|         fieldMatchers.$lte = value;
 | |
|       }
 | |
|     } else { // operator === '$lt'
 | |
|       if (value < fieldMatchers.$lt) { // more specificity
 | |
|         fieldMatchers.$lt = value;
 | |
|       }
 | |
|     }
 | |
|   } else {
 | |
|     fieldMatchers[operator] = value;
 | |
|   }
 | |
| }
 | |
| 
 | |
| // combine $ne values into one array
 | |
| function mergeNe(value, fieldMatchers) {
 | |
|   if ('$ne' in fieldMatchers) {
 | |
|     // there are many things this could "not" be
 | |
|     fieldMatchers.$ne.push(value);
 | |
|   } else { // doesn't exist yet
 | |
|     fieldMatchers.$ne = [value];
 | |
|   }
 | |
| }
 | |
| 
 | |
| // add $eq into the mix
 | |
| function mergeEq(value, fieldMatchers) {
 | |
|   // these all have less specificity than the $eq
 | |
|   // TODO: check for user errors here
 | |
|   delete fieldMatchers.$gt;
 | |
|   delete fieldMatchers.$gte;
 | |
|   delete fieldMatchers.$lt;
 | |
|   delete fieldMatchers.$lte;
 | |
|   delete fieldMatchers.$ne;
 | |
|   fieldMatchers.$eq = value;
 | |
| }
 | |
| 
 | |
| // combine $regex values into one array
 | |
| function mergeRegex(value, fieldMatchers) {
 | |
|   if ('$regex' in fieldMatchers) {
 | |
|     // a value could match multiple regexes
 | |
|     fieldMatchers.$regex.push(value);
 | |
|   } else { // doesn't exist yet
 | |
|     fieldMatchers.$regex = [value];
 | |
|   }
 | |
| }
 | |
| 
 | |
| //#7458: execute function mergeAndedSelectors on nested $and
 | |
| function mergeAndedSelectorsNested(obj) {
 | |
|     for (var prop in obj) {
 | |
|         if (Array.isArray(obj)) {
 | |
|             for (var i in obj) {
 | |
|                 if (obj[i]['$and']) {
 | |
|                     obj[i] = mergeAndedSelectors(obj[i]['$and']);
 | |
|                 }
 | |
|             }
 | |
|         }
 | |
|         var value = obj[prop];
 | |
|         if (typeof value === 'object') {
 | |
|             mergeAndedSelectorsNested(value); // <- recursive call
 | |
|         }
 | |
|     }
 | |
|     return obj;
 | |
| }
 | |
| 
 | |
| //#7458: determine if $and is present in selector (at any level)
 | |
| function isAndInSelector(obj, isAnd) {
 | |
|     for (var prop in obj) {
 | |
|         if (prop === '$and') {
 | |
|             isAnd = true;
 | |
|         }
 | |
|         var value = obj[prop];
 | |
|         if (typeof value === 'object') {
 | |
|             isAnd = isAndInSelector(value, isAnd); // <- recursive call
 | |
|         }
 | |
|     }
 | |
|     return isAnd;
 | |
| }
 | |
| 
 | |
| //
 | |
| // normalize the selector
 | |
| //
 | |
| function massageSelector(input) {
 | |
|   var result = clone(input);
 | |
|   var wasAnded = false;
 | |
|     //#7458: if $and is present in selector (at any level) merge nested $and
 | |
|     if (isAndInSelector(result, false)) {
 | |
|         result = mergeAndedSelectorsNested(result);
 | |
|         if ('$and' in result) {
 | |
|             result = mergeAndedSelectors(result['$and']);
 | |
|         }
 | |
|         wasAnded = true;
 | |
|     }
 | |
| 
 | |
|   ['$or', '$nor'].forEach(function (orOrNor) {
 | |
|     if (orOrNor in result) {
 | |
|       // massage each individual selector
 | |
|       // e.g. {foo: 'bar'} becomes {foo: {$eq: 'bar'}}
 | |
|       result[orOrNor].forEach(function (subSelector) {
 | |
|         var fields = Object.keys(subSelector);
 | |
|         for (var i = 0; i < fields.length; i++) {
 | |
|           var field = fields[i];
 | |
|           var matcher = subSelector[field];
 | |
|           if (typeof matcher !== 'object' || matcher === null) {
 | |
|             subSelector[field] = {$eq: matcher};
 | |
|           }
 | |
|         }
 | |
|       });
 | |
|     }
 | |
|   });
 | |
| 
 | |
|   if ('$not' in result) {
 | |
|     //This feels a little like forcing, but it will work for now,
 | |
|     //I would like to come back to this and make the merging of selectors a little more generic
 | |
|     result['$not'] = mergeAndedSelectors([result['$not']]);
 | |
|   }
 | |
| 
 | |
|   var fields = Object.keys(result);
 | |
| 
 | |
|   for (var i = 0; i < fields.length; i++) {
 | |
|     var field = fields[i];
 | |
|     var matcher = result[field];
 | |
| 
 | |
|     if (typeof matcher !== 'object' || matcher === null) {
 | |
|       matcher = {$eq: matcher};
 | |
|     } else if (!wasAnded) {
 | |
|       // These values must be placed in an array because these operators can be used multiple times on the same field
 | |
|       // when $and is used, mergeAndedSelectors takes care of putting them into arrays, otherwise it's done here:
 | |
|       if ('$ne' in matcher) {
 | |
|         matcher.$ne = [matcher.$ne];
 | |
|       }
 | |
|       if ('$regex' in matcher) {
 | |
|         matcher.$regex = [matcher.$regex];
 | |
|       }
 | |
|     }
 | |
|     result[field] = matcher;
 | |
|   }
 | |
| 
 | |
|   return result;
 | |
| }
 | |
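| // For example (illustrative only):
 | |
| //   massageSelector({name: 'Mario'})           // -> {name: {$eq: 'Mario'}}
 | |
| //   massageSelector({name: {$regex: '^M'}})    // -> {name: {$regex: ['^M']}}
 | |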
| 
 | |
| function pad(str, padWith, upToLength) {
 | |
|   var padding = '';
 | |
|   var targetLength = upToLength - str.length;
 | |
|   /* istanbul ignore next */
 | |
|   while (padding.length < targetLength) {
 | |
|     padding += padWith;
 | |
|   }
 | |
|   return padding;
 | |
| }
 | |
| 
 | |
| function padLeft(str, padWith, upToLength) {
 | |
|   var padding = pad(str, padWith, upToLength);
 | |
|   return padding + str;
 | |
| }
 | |
| 
 | |
| var MIN_MAGNITUDE = -324; // verified by -Number.MIN_VALUE
 | |
| var MAGNITUDE_DIGITS = 3; // ditto
 | |
| var SEP = ''; // set to '_' for easier debugging
 | |
| 
 | |
| function collate(a, b) {
 | |
| 
 | |
|   if (a === b) {
 | |
|     return 0;
 | |
|   }
 | |
| 
 | |
|   a = normalizeKey(a);
 | |
|   b = normalizeKey(b);
 | |
| 
 | |
|   var ai = collationIndex(a);
 | |
|   var bi = collationIndex(b);
 | |
|   if ((ai - bi) !== 0) {
 | |
|     return ai - bi;
 | |
|   }
 | |
|   switch (typeof a) {
 | |
|     case 'number':
 | |
|       return a - b;
 | |
|     case 'boolean':
 | |
|       return a < b ? -1 : 1;
 | |
|     case 'string':
 | |
|       return stringCollate(a, b);
 | |
|   }
 | |
|   return Array.isArray(a) ? arrayCollate(a, b) : objectCollate(a, b);
 | |
| }
 | |
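| // Sample comparisons (illustrative only); cross-type order is null < booleans <
 | |
| // numbers < strings < arrays < objects:
 | |
| //   collate('a', 'b')         // -> -1
 | |
| //   collate(42, 'a') < 0      // true (numbers sort before strings)
 | |
| //   collate([1], [1, 2]) < 0  // true (the shorter array sorts first on a tie)
 | |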
| 
 | |
| // couch considers null/NaN/Infinity/-Infinity === undefined,
 | |
| // for the purposes of mapreduce indexes. also, dates get stringified.
 | |
| function normalizeKey(key) {
 | |
|   switch (typeof key) {
 | |
|     case 'undefined':
 | |
|       return null;
 | |
|     case 'number':
 | |
|       if (key === Infinity || key === -Infinity || isNaN(key)) {
 | |
|         return null;
 | |
|       }
 | |
|       return key;
 | |
|     case 'object':
 | |
|       var origKey = key;
 | |
|       if (Array.isArray(key)) {
 | |
|         var len = key.length;
 | |
|         key = new Array(len);
 | |
|         for (var i = 0; i < len; i++) {
 | |
|           key[i] = normalizeKey(origKey[i]);
 | |
|         }
 | |
|       /* istanbul ignore next */
 | |
|       } else if (key instanceof Date) {
 | |
|         return key.toJSON();
 | |
|       } else if (key !== null) { // generic object
 | |
|         key = {};
 | |
|         for (var k in origKey) {
 | |
|           if (Object.prototype.hasOwnProperty.call(origKey, k)) {
 | |
|             var val = origKey[k];
 | |
|             if (typeof val !== 'undefined') {
 | |
|               key[k] = normalizeKey(val);
 | |
|             }
 | |
|           }
 | |
|         }
 | |
|       }
 | |
|   }
 | |
|   return key;
 | |
| }
 | |
| 
 | |
| function indexify(key) {
 | |
|   if (key !== null) {
 | |
|     switch (typeof key) {
 | |
|       case 'boolean':
 | |
|         return key ? 1 : 0;
 | |
|       case 'number':
 | |
|         return numToIndexableString(key);
 | |
|       case 'string':
 | |
|         // We have to be sure that key does not contain \u0000
 | |
|         // Do order-preserving replacements:
 | |
|         // 0 -> 1, 1
 | |
|         // 1 -> 1, 2
 | |
|         // 2 -> 2, 2
 | |
|         /* eslint-disable no-control-regex */
 | |
|         return key
 | |
|           .replace(/\u0002/g, '\u0002\u0002')
 | |
|           .replace(/\u0001/g, '\u0001\u0002')
 | |
|           .replace(/\u0000/g, '\u0001\u0001');
 | |
|         /* eslint-enable no-control-regex */
 | |
|       case 'object':
 | |
|         var isArray = Array.isArray(key);
 | |
|         var arr = isArray ? key : Object.keys(key);
 | |
|         var i = -1;
 | |
|         var len = arr.length;
 | |
|         var result = '';
 | |
|         if (isArray) {
 | |
|           while (++i < len) {
 | |
|             result += toIndexableString(arr[i]);
 | |
|           }
 | |
|         } else {
 | |
|           while (++i < len) {
 | |
|             var objKey = arr[i];
 | |
|             result += toIndexableString(objKey) +
 | |
|                 toIndexableString(key[objKey]);
 | |
|           }
 | |
|         }
 | |
|         return result;
 | |
|     }
 | |
|   }
 | |
|   return '';
 | |
| }
 | |
| 
 | |
| // convert the given key to a string that would be appropriate
 | |
| // for lexical sorting, e.g. within a database, where the
 | |
| // sorting is the same given by the collate() function.
 | |
| function toIndexableString(key) {
 | |
|   var zero = '\u0000';
 | |
|   key = normalizeKey(key);
 | |
|   return collationIndex(key) + SEP + indexify(key) + zero;
 | |
| }
 | |
| 
 | |
| function parseNumber(str, i) {
 | |
|   var originalIdx = i;
 | |
|   var num;
 | |
|   var zero = str[i] === '1';
 | |
|   if (zero) {
 | |
|     num = 0;
 | |
|     i++;
 | |
|   } else {
 | |
|     var neg = str[i] === '0';
 | |
|     i++;
 | |
|     var numAsString = '';
 | |
|     var magAsString = str.substring(i, i + MAGNITUDE_DIGITS);
 | |
|     var magnitude = parseInt(magAsString, 10) + MIN_MAGNITUDE;
 | |
|     /* istanbul ignore next */
 | |
|     if (neg) {
 | |
|       magnitude = -magnitude;
 | |
|     }
 | |
|     i += MAGNITUDE_DIGITS;
 | |
|     while (true) {
 | |
|       var ch = str[i];
 | |
|       if (ch === '\u0000') {
 | |
|         break;
 | |
|       } else {
 | |
|         numAsString += ch;
 | |
|       }
 | |
|       i++;
 | |
|     }
 | |
|     numAsString = numAsString.split('.');
 | |
|     if (numAsString.length === 1) {
 | |
|       num = parseInt(numAsString, 10);
 | |
|     } else {
 | |
|       /* istanbul ignore next */
 | |
|       num = parseFloat(numAsString[0] + '.' + numAsString[1]);
 | |
|     }
 | |
|     /* istanbul ignore next */
 | |
|     if (neg) {
 | |
|       num = num - 10;
 | |
|     }
 | |
|     /* istanbul ignore next */
 | |
|     if (magnitude !== 0) {
 | |
|       // parseFloat is more reliable than pow due to rounding errors
 | |
|       // e.g. Number.MAX_VALUE would return Infinity if we did
 | |
|       // num * Math.pow(10, magnitude);
 | |
|       num = parseFloat(num + 'e' + magnitude);
 | |
|     }
 | |
|   }
 | |
|   return {num: num, length : i - originalIdx};
 | |
| }
 | |
| 
 | |
| // move up the stack while parsing
 | |
| // this function moved outside of parseIndexableString for performance
 | |
| function pop(stack, metaStack) {
 | |
|   var obj = stack.pop();
 | |
| 
 | |
|   if (metaStack.length) {
 | |
|     var lastMetaElement = metaStack[metaStack.length - 1];
 | |
|     if (obj === lastMetaElement.element) {
 | |
|       // popping a meta-element, e.g. an object whose value is another object
 | |
|       metaStack.pop();
 | |
|       lastMetaElement = metaStack[metaStack.length - 1];
 | |
|     }
 | |
|     var element = lastMetaElement.element;
 | |
|     var lastElementIndex = lastMetaElement.index;
 | |
|     if (Array.isArray(element)) {
 | |
|       element.push(obj);
 | |
|     } else if (lastElementIndex === stack.length - 2) { // obj with key+value
 | |
|       var key = stack.pop();
 | |
|       element[key] = obj;
 | |
|     } else {
 | |
|       stack.push(obj); // obj with key only
 | |
|     }
 | |
|   }
 | |
| }
 | |
| 
 | |
| function parseIndexableString(str) {
 | |
|   var stack = [];
 | |
|   var metaStack = []; // stack for arrays and objects
 | |
|   var i = 0;
 | |
| 
 | |
|   /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
 | |
|   while (true) {
 | |
|     var collationIndex = str[i++];
 | |
|     if (collationIndex === '\u0000') {
 | |
|       if (stack.length === 1) {
 | |
|         return stack.pop();
 | |
|       } else {
 | |
|         pop(stack, metaStack);
 | |
|         continue;
 | |
|       }
 | |
|     }
 | |
|     switch (collationIndex) {
 | |
|       case '1':
 | |
|         stack.push(null);
 | |
|         break;
 | |
|       case '2':
 | |
|         stack.push(str[i] === '1');
 | |
|         i++;
 | |
|         break;
 | |
|       case '3':
 | |
|         var parsedNum = parseNumber(str, i);
 | |
|         stack.push(parsedNum.num);
 | |
|         i += parsedNum.length;
 | |
|         break;
 | |
|       case '4':
 | |
|         var parsedStr = '';
 | |
|         /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
 | |
|         while (true) {
 | |
|           var ch = str[i];
 | |
|           if (ch === '\u0000') {
 | |
|             break;
 | |
|           }
 | |
|           parsedStr += ch;
 | |
|           i++;
 | |
|         }
 | |
|         // perform the reverse of the order-preserving replacement
 | |
|         // algorithm (see above)
 | |
|         /* eslint-disable no-control-regex */
 | |
|         parsedStr = parsedStr.replace(/\u0001\u0001/g, '\u0000')
 | |
|           .replace(/\u0001\u0002/g, '\u0001')
 | |
|           .replace(/\u0002\u0002/g, '\u0002');
 | |
|         /* eslint-enable no-control-regex */
 | |
|         stack.push(parsedStr);
 | |
|         break;
 | |
|       case '5':
 | |
|         var arrayElement = { element: [], index: stack.length };
 | |
|         stack.push(arrayElement.element);
 | |
|         metaStack.push(arrayElement);
 | |
|         break;
 | |
|       case '6':
 | |
|         var objElement = { element: {}, index: stack.length };
 | |
|         stack.push(objElement.element);
 | |
|         metaStack.push(objElement);
 | |
|         break;
 | |
|       /* istanbul ignore next */
 | |
|       default:
 | |
|         throw new Error(
 | |
|           'bad collationIndex or unexpectedly reached end of input: ' +
 | |
|             collationIndex);
 | |
|     }
 | |
|   }
 | |
| }
 | |
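| // parseIndexableString reverses toIndexableString, e.g. (illustrative only):
 | |
| //   parseIndexableString(toIndexableString(['a', 1]))    // -> ['a', 1]
 | |
| //   parseIndexableString(toIndexableString({x: true}))   // -> {x: true}
 | |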
| 
 | |
| function arrayCollate(a, b) {
 | |
|   var len = Math.min(a.length, b.length);
 | |
|   for (var i = 0; i < len; i++) {
 | |
|     var sort = collate(a[i], b[i]);
 | |
|     if (sort !== 0) {
 | |
|       return sort;
 | |
|     }
 | |
|   }
 | |
|   return (a.length === b.length) ? 0 :
 | |
|     (a.length > b.length) ? 1 : -1;
 | |
| }
 | |
| function stringCollate(a, b) {
 | |
|   // See: https://github.com/daleharvey/pouchdb/issues/40
 | |
|   // This is incompatible with the CouchDB implementation, but it's the
 | |
|   // best we can do for now
 | |
|   return (a === b) ? 0 : ((a > b) ? 1 : -1);
 | |
| }
 | |
| function objectCollate(a, b) {
 | |
|   var ak = Object.keys(a), bk = Object.keys(b);
 | |
|   var len = Math.min(ak.length, bk.length);
 | |
|   for (var i = 0; i < len; i++) {
 | |
|     // First sort the keys
 | |
|     var sort = collate(ak[i], bk[i]);
 | |
|     if (sort !== 0) {
 | |
|       return sort;
 | |
|     }
 | |
|     // if the keys are equal sort the values
 | |
|     sort = collate(a[ak[i]], b[bk[i]]);
 | |
|     if (sort !== 0) {
 | |
|       return sort;
 | |
|     }
 | |
| 
 | |
|   }
 | |
|   return (ak.length === bk.length) ? 0 :
 | |
|     (ak.length > bk.length) ? 1 : -1;
 | |
| }
 | |
| // The collation is defined by erlangs ordered terms
 | |
| // the atoms null, true, false come first, then numbers, strings,
 | |
| // arrays, then objects
 | |
| // null/undefined/NaN/Infinity/-Infinity are all considered null
 | |
| function collationIndex(x) {
 | |
|   var id = ['boolean', 'number', 'string', 'object'];
 | |
|   var idx = id.indexOf(typeof x);
 | |
|   // false if idx is -1, otherwise true; the bitwise NOT makes this a fast check
 | |
|   if (~idx) {
 | |
|     if (x === null) {
 | |
|       return 1;
 | |
|     }
 | |
|     if (Array.isArray(x)) {
 | |
|       return 5;
 | |
|     }
 | |
|     return idx < 3 ? (idx + 2) : (idx + 3);
 | |
|   }
 | |
|   /* istanbul ignore next */
 | |
|   if (Array.isArray(x)) {
 | |
|     return 5;
 | |
|   }
 | |
| }
 | |
| 
 | |
| // conversion:
 | |
| // x yyy zz...zz
 | |
| // x = 0 for negative, 1 for 0, 2 for positive
 | |
| // y = exponent (for negative numbers negated) moved so that it's >= 0
 | |
| // z = mantissa
 | |
| function numToIndexableString(num) {
 | |
| 
 | |
|   if (num === 0) {
 | |
|     return '1';
 | |
|   }
 | |
| 
 | |
|   // convert number to exponential format for easier and
 | |
|   // more succinct string sorting
 | |
|   var expFormat = num.toExponential().split(/e\+?/);
 | |
|   var magnitude = parseInt(expFormat[1], 10);
 | |
| 
 | |
|   var neg = num < 0;
 | |
| 
 | |
|   var result = neg ? '0' : '2';
 | |
| 
 | |
|   // first sort by magnitude
 | |
|   // it's easier if all magnitudes are positive
 | |
|   var magForComparison = ((neg ? -magnitude : magnitude) - MIN_MAGNITUDE);
 | |
|   var magString = padLeft((magForComparison).toString(), '0', MAGNITUDE_DIGITS);
 | |
| 
 | |
|   result += SEP + magString;
 | |
| 
 | |
|   // then sort by the factor
 | |
|   var factor = Math.abs(parseFloat(expFormat[0])); // [1..10)
 | |
|   /* istanbul ignore next */
 | |
|   if (neg) { // for negative reverse ordering
 | |
|     factor = 10 - factor;
 | |
|   }
 | |
| 
 | |
|   var factorStr = factor.toFixed(20);
 | |
| 
 | |
|   // strip zeros from the end
 | |
|   factorStr = factorStr.replace(/\.?0+$/, '');
 | |
| 
 | |
|   result += SEP + factorStr;
 | |
| 
 | |
|   return result;
 | |
| }
 | |
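| // Worked example (illustrative only): 3 is '3e+0', so the sign prefix is '2'
 | |
| // (positive), the shifted exponent is 0 - (-324) = 324, and the factor is '3':
 | |
| //   numToIndexableString(3)    // -> '23243'
 | |
| //   numToIndexableString(10)   // -> '23251'   (sorts after '23243' as a plain string)
 | |
| //   numToIndexableString(0)    // -> '1'
 | |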
| 
 | |
| // create a comparator based on the sort object
 | |
| function createFieldSorter(sort) {
 | |
| 
 | |
|   function getFieldValuesAsArray(doc) {
 | |
|     return sort.map(function (sorting) {
 | |
|       var fieldName = getKey(sorting);
 | |
|       var parsedField = parseField(fieldName);
 | |
|       var docFieldValue = getFieldFromDoc(doc, parsedField);
 | |
|       return docFieldValue;
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   return function (aRow, bRow) {
 | |
|     var aFieldValues = getFieldValuesAsArray(aRow.doc);
 | |
|     var bFieldValues = getFieldValuesAsArray(bRow.doc);
 | |
|     var collation = collate(aFieldValues, bFieldValues);
 | |
|     if (collation !== 0) {
 | |
|       return collation;
 | |
|     }
 | |
|     // this is what mango seems to do
 | |
|     return compare(aRow.doc._id, bRow.doc._id);
 | |
|   };
 | |
| }
 | |
| 
 | |
| function filterInMemoryFields(rows, requestDef, inMemoryFields) {
 | |
|   rows = rows.filter(function (row) {
 | |
|     return rowFilter(row.doc, requestDef.selector, inMemoryFields);
 | |
|   });
 | |
| 
 | |
|   if (requestDef.sort) {
 | |
|     // in-memory sort
 | |
|     var fieldSorter = createFieldSorter(requestDef.sort);
 | |
|     rows = rows.sort(fieldSorter);
 | |
|     if (typeof requestDef.sort[0] !== 'string' &&
 | |
|         getValue(requestDef.sort[0]) === 'desc') {
 | |
|       rows = rows.reverse();
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   if ('limit' in requestDef || 'skip' in requestDef) {
 | |
|     // have to do the limit in-memory
 | |
|     var skip = requestDef.skip || 0;
 | |
|     var limit = ('limit' in requestDef ? requestDef.limit : rows.length) + skip;
 | |
|     rows = rows.slice(skip, limit);
 | |
|   }
 | |
|   return rows;
 | |
| }
 | |
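| // For example (illustrative only), with rows = [{doc: {_id: 'a', age: 30}}, {doc: {_id: 'b', age: 20}}]:
 | |
| //   filterInMemoryFields(rows, {selector: {age: {$gte: 25}}, sort: [{age: 'asc'}]}, ['age'])
 | |
| //   // -> [{doc: {_id: 'a', age: 30}}]
 | |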
| 
 | |
| function rowFilter(doc, selector, inMemoryFields) {
 | |
|   return inMemoryFields.every(function (field) {
 | |
|     var matcher = selector[field];
 | |
|     var parsedField = parseField(field);
 | |
|     var docFieldValue = getFieldFromDoc(doc, parsedField);
 | |
|     if (isCombinationalField(field)) {
 | |
|       return matchCominationalSelector(field, matcher, doc);
 | |
|     }
 | |
| 
 | |
|     return matchSelector(matcher, doc, parsedField, docFieldValue);
 | |
|   });
 | |
| }
 | |
| 
 | |
| function matchSelector(matcher, doc, parsedField, docFieldValue) {
 | |
|   if (!matcher) {
 | |
|     // no filtering necessary; this field is just needed for sorting
 | |
|     return true;
 | |
|   }
 | |
| 
 | |
|   // if matcher is an object, continue the recursion
 | |
|   if (typeof matcher === 'object') {
 | |
|     return Object.keys(matcher).every(function (maybeUserOperator) {
 | |
|       var userValue = matcher[ maybeUserOperator ];
 | |
|       // explicit operator
 | |
|       if (maybeUserOperator.indexOf("$") === 0) {
 | |
|         return match(maybeUserOperator, doc, userValue, parsedField, docFieldValue);
 | |
|       } else {
 | |
|         var subParsedField = parseField(maybeUserOperator);
 | |
| 
 | |
|         if (
 | |
|           docFieldValue === undefined &&
 | |
|           typeof userValue !== "object" &&
 | |
|           subParsedField.length > 0
 | |
|         ) {
 | |
|           // the field does not exist, return or getFieldFromDoc will throw
 | |
|           return false;
 | |
|         }
 | |
| 
 | |
|         var subDocFieldValue = getFieldFromDoc(docFieldValue, subParsedField);
 | |
| 
 | |
|         if (typeof userValue === "object") {
 | |
|           // field value is an object that might contain more operators
 | |
|           return matchSelector(userValue, doc, parsedField, subDocFieldValue);
 | |
|         }
 | |
| 
 | |
|         // implicit operator
 | |
|         return match("$eq", doc, userValue, subParsedField, subDocFieldValue);
 | |
|       }
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   // no more depth, no need to recurse further
 | |
|   return matcher === docFieldValue;
 | |
| }
 | |
| 
 | |
| function matchCominationalSelector(field, matcher, doc) {
 | |
| 
 | |
|   if (field === '$or') {
 | |
|     return matcher.some(function (orMatchers) {
 | |
|       return rowFilter(doc, orMatchers, Object.keys(orMatchers));
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   if (field === '$not') {
 | |
|     return !rowFilter(doc, matcher, Object.keys(matcher));
 | |
|   }
 | |
| 
 | |
|   //`$nor`
 | |
|   return !matcher.find(function (orMatchers) {
 | |
|     return rowFilter(doc, orMatchers, Object.keys(orMatchers));
 | |
|   });
 | |
| 
 | |
| }
 | |
| 
 | |
| function match(userOperator, doc, userValue, parsedField, docFieldValue) {
 | |
|   if (!matchers[userOperator]) {
 | |
|     /* istanbul ignore next */
 | |
|     throw new Error('unknown operator "' + userOperator +
 | |
|       '" - should be one of $eq, $lte, $lt, $gt, $gte, $exists, $ne, $in, ' +
 | |
|       '$nin, $size, $mod, $regex, $elemMatch, $type, $allMatch or $all');
 | |
|   }
 | |
|   return matchers[userOperator](doc, userValue, parsedField, docFieldValue);
 | |
| }
 | |
| 
 | |
| function fieldExists(docFieldValue) {
 | |
|   return typeof docFieldValue !== 'undefined' && docFieldValue !== null;
 | |
| }
 | |
| 
 | |
| function fieldIsNotUndefined(docFieldValue) {
 | |
|   return typeof docFieldValue !== 'undefined';
 | |
| }
 | |
| 
 | |
| function modField(docFieldValue, userValue) {
 | |
|   if (typeof docFieldValue !== "number" ||
 | |
|     parseInt(docFieldValue, 10) !== docFieldValue) {
 | |
|     return false;
 | |
|   }
 | |
| 
 | |
|   var divisor = userValue[0];
 | |
|   var mod = userValue[1];
 | |
| 
 | |
|   return docFieldValue % divisor === mod;
 | |
| }
 | |
| 
 | |
| function arrayContainsValue(docFieldValue, userValue) {
 | |
|   return userValue.some(function (val) {
 | |
|     if (docFieldValue instanceof Array) {
 | |
|       return docFieldValue.some(function (docFieldValueItem) {
 | |
|         return collate(val, docFieldValueItem) === 0;
 | |
|       });
 | |
|     }
 | |
| 
 | |
|     return collate(val, docFieldValue) === 0;
 | |
|   });
 | |
| }
 | |
| 
 | |
| function arrayContainsAllValues(docFieldValue, userValue) {
 | |
|   return userValue.every(function (val) {
 | |
|     return docFieldValue.some(function (docFieldValueItem) {
 | |
|       return collate(val, docFieldValueItem) === 0;
 | |
|     });
 | |
|   });
 | |
| }
 | |
| 
 | |
| function arraySize(docFieldValue, userValue) {
 | |
|   return docFieldValue.length === userValue;
 | |
| }
 | |
| 
 | |
| function regexMatch(docFieldValue, userValue) {
 | |
|   var re = new RegExp(userValue);
 | |
| 
 | |
|   return re.test(docFieldValue);
 | |
| }
 | |
| 
 | |
| function typeMatch(docFieldValue, userValue) {
 | |
| 
 | |
|   switch (userValue) {
 | |
|     case 'null':
 | |
|       return docFieldValue === null;
 | |
|     case 'boolean':
 | |
|       return typeof (docFieldValue) === 'boolean';
 | |
|     case 'number':
 | |
|       return typeof (docFieldValue) === 'number';
 | |
|     case 'string':
 | |
|       return typeof (docFieldValue) === 'string';
 | |
|     case 'array':
 | |
|       return docFieldValue instanceof Array;
 | |
|     case 'object':
 | |
|       return ({}).toString.call(docFieldValue) === '[object Object]';
 | |
|   }
 | |
| }
 | |
| 
 | |
| var matchers = {
 | |
| 
 | |
|   '$elemMatch': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     if (!Array.isArray(docFieldValue)) {
 | |
|       return false;
 | |
|     }
 | |
| 
 | |
|     if (docFieldValue.length === 0) {
 | |
|       return false;
 | |
|     }
 | |
| 
 | |
|     if (typeof docFieldValue[0] === 'object') {
 | |
|       return docFieldValue.some(function (val) {
 | |
|         return rowFilter(val, userValue, Object.keys(userValue));
 | |
|       });
 | |
|     }
 | |
| 
 | |
|     return docFieldValue.some(function (val) {
 | |
|       return matchSelector(userValue, doc, parsedField, val);
 | |
|     });
 | |
|   },
 | |
| 
 | |
|   '$allMatch': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     if (!Array.isArray(docFieldValue)) {
 | |
|       return false;
 | |
|     }
 | |
| 
 | |
|     /* istanbul ignore next */
 | |
|     if (docFieldValue.length === 0) {
 | |
|       return false;
 | |
|     }
 | |
| 
 | |
|     if (typeof docFieldValue[0] === 'object') {
 | |
|       return docFieldValue.every(function (val) {
 | |
|         return rowFilter(val, userValue, Object.keys(userValue));
 | |
|       });
 | |
|     }
 | |
| 
 | |
|     return docFieldValue.every(function (val) {
 | |
|       return matchSelector(userValue, doc, parsedField, val);
 | |
|     });
 | |
|   },
 | |
| 
 | |
|   '$eq': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) === 0;
 | |
|   },
 | |
| 
 | |
|   '$gte': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) >= 0;
 | |
|   },
 | |
| 
 | |
|   '$gt': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) > 0;
 | |
|   },
 | |
| 
 | |
|   '$lte': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) <= 0;
 | |
|   },
 | |
| 
 | |
|   '$lt': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) < 0;
 | |
|   },
 | |
| 
 | |
|   '$exists': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     //a field that is null is still considered to exist
 | |
|     if (userValue) {
 | |
|       return fieldIsNotUndefined(docFieldValue);
 | |
|     }
 | |
| 
 | |
|     return !fieldIsNotUndefined(docFieldValue);
 | |
|   },
 | |
| 
 | |
|   '$mod': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldExists(docFieldValue) && modField(docFieldValue, userValue);
 | |
|   },
 | |
| 
 | |
|   '$ne': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return userValue.every(function (neValue) {
 | |
|       return collate(docFieldValue, neValue) !== 0;
 | |
|     });
 | |
|   },
 | |
|   '$in': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldExists(docFieldValue) && arrayContainsValue(docFieldValue, userValue);
 | |
|   },
 | |
| 
 | |
|   '$nin': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldExists(docFieldValue) && !arrayContainsValue(docFieldValue, userValue);
 | |
|   },
 | |
| 
 | |
|   '$size': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldExists(docFieldValue) &&
 | |
|       Array.isArray(docFieldValue) &&
 | |
|       arraySize(docFieldValue, userValue);
 | |
|   },
 | |
| 
 | |
|   '$all': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return Array.isArray(docFieldValue) && arrayContainsAllValues(docFieldValue, userValue);
 | |
|   },
 | |
| 
 | |
|   '$regex': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return fieldExists(docFieldValue) &&
 | |
|       typeof docFieldValue == "string" &&
 | |
|       userValue.every(function (regexValue) {
 | |
|         return regexMatch(docFieldValue, regexValue);
 | |
|       });
 | |
|   },
 | |
| 
 | |
|   '$type': function (doc, userValue, parsedField, docFieldValue) {
 | |
|     return typeMatch(docFieldValue, userValue);
 | |
|   }
 | |
| };
 | |
| 
 | |
| // return true if the given doc matches the supplied selector
 | |
| function matchesSelector(doc, selector) {
 | |
|   /* istanbul ignore if */
 | |
|   if (typeof selector !== 'object') {
 | |
|     // match the CouchDB error message
 | |
|     throw new Error('Selector error: expected a JSON object');
 | |
|   }
 | |
| 
 | |
|   selector = massageSelector(selector);
 | |
|   var row = {
 | |
|     'doc': doc
 | |
|   };
 | |
| 
 | |
|   var rowsMatched = filterInMemoryFields([row], { 'selector': selector }, Object.keys(selector));
 | |
|   return rowsMatched && rowsMatched.length === 1;
 | |
| }
 | |
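| // For example (illustrative only):
 | |
| //   matchesSelector({name: 'Mario', age: 40}, {age: {$gt: 21}})   // -> true
 | |
| //   matchesSelector({name: 'Mario', age: 40}, {name: 'Luigi'})    // -> false
 | |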
| 
 | |
| function getArguments$1(fun) {
 | |
|   return function () {
 | |
|     var len = arguments.length;
 | |
|     var args = new Array(len);
 | |
|     var i = -1;
 | |
|     while (++i < len) {
 | |
|       args[i] = arguments[i];
 | |
|     }
 | |
|     return fun.call(this, args);
 | |
|   };
 | |
| }
 | |
| 
 | |
| function callbackify(fun) {
 | |
|   return getArguments$1(function (args) {
 | |
|     var cb = args.pop();
 | |
|     var promise = fun.apply(this, args);
 | |
|     promisedCallback(promise, cb);
 | |
|     return promise;
 | |
|   });
 | |
| }
 | |
| 
 | |
| function promisedCallback(promise, callback) {
 | |
|   promise.then(function (res) {
 | |
|     immediate(function () {
 | |
|       callback(null, res);
 | |
|     });
 | |
|   }, function (reason) {
 | |
|     immediate(function () {
 | |
|       callback(reason);
 | |
|     });
 | |
|   });
 | |
|   return promise;
 | |
| }
 | |
| 
 | |
| var flatten$1 = getArguments$1(function (args) {
 | |
|   var res = [];
 | |
|   for (var i = 0, len = args.length; i < len; i++) {
 | |
|     var subArr = args[i];
 | |
|     if (Array.isArray(subArr)) {
 | |
|       res = res.concat(flatten$1.apply(null, subArr));
 | |
|     } else {
 | |
|       res.push(subArr);
 | |
|     }
 | |
|   }
 | |
|   return res;
 | |
| });
 | |
| 
 | |
| function mergeObjects(arr) {
 | |
|   var res = {};
 | |
|   for (var i = 0, len = arr.length; i < len; i++) {
 | |
|     res = $inject_Object_assign(res, arr[i]);
 | |
|   }
 | |
|   return res;
 | |
| }
 | |
| 
 | |
| // Selects a list of fields defined in dot notation from one doc
 | |
| // and copies them to a new doc. Like underscore _.pick but supports nesting.
 | |
| function pick$1(obj, arr) {
 | |
|   var res = {};
 | |
|   for (var i = 0, len = arr.length; i < len; i++) {
 | |
|     var parsedField = parseField(arr[i]);
 | |
|     var value = getFieldFromDoc(obj, parsedField);
 | |
|     if (typeof value !== 'undefined') {
 | |
|       setFieldInDoc(res, parsedField, value);
 | |
|     }
 | |
|   }
 | |
|   return res;
 | |
| }
 | |
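| // For example (illustrative only):
 | |
| //   pick$1({name: 'Mario', address: {city: 'Rome', zip: 1}}, ['name', 'address.city'])
 | |
| //   // -> {name: 'Mario', address: {city: 'Rome'}}
 | |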
| 
 | |
| // e.g. ['a'], ['a', 'b'] is true, but ['b'], ['a', 'b'] is false
 | |
| function oneArrayIsSubArrayOfOther(left, right) {
 | |
| 
 | |
|   for (var i = 0, len = Math.min(left.length, right.length); i < len; i++) {
 | |
|     if (left[i] !== right[i]) {
 | |
|       return false;
 | |
|     }
 | |
|   }
 | |
|   return true;
 | |
| }
 | |
| 
 | |
| // e.g.['a', 'b', 'c'], ['a', 'b'] is false
 | |
| function oneArrayIsStrictSubArrayOfOther(left, right) {
 | |
| 
 | |
|   if (left.length > right.length) {
 | |
|     return false;
 | |
|   }
 | |
| 
 | |
|   return oneArrayIsSubArrayOfOther(left, right);
 | |
| }
 | |
| 
 | |
| // same as above, but treat the left array as an unordered set
 | |
| // e.g. ['b', 'a'], ['a', 'b', 'c'] is true, but ['c'], ['a', 'b', 'c'] is false
 | |
| function oneSetIsSubArrayOfOther(left, right) {
 | |
|   left = left.slice();
 | |
|   for (var i = 0, len = right.length; i < len; i++) {
 | |
|     var field = right[i];
 | |
|     if (!left.length) {
 | |
|       break;
 | |
|     }
 | |
|     var leftIdx = left.indexOf(field);
 | |
|     if (leftIdx === -1) {
 | |
|       return false;
 | |
|     } else {
 | |
|       left.splice(leftIdx, 1);
 | |
|     }
 | |
|   }
 | |
|   return true;
 | |
| }
 | |
| 
 | |
| function arrayToObject(arr) {
 | |
|   var res = {};
 | |
|   for (var i = 0, len = arr.length; i < len; i++) {
 | |
|     res[arr[i]] = true;
 | |
|   }
 | |
|   return res;
 | |
| }
 | |
| 
 | |
| function max(arr, fun) {
 | |
|   var max = null;
 | |
|   var maxScore = -1;
 | |
|   for (var i = 0, len = arr.length; i < len; i++) {
 | |
|     var element = arr[i];
 | |
|     var score = fun(element);
 | |
|     if (score > maxScore) {
 | |
|       maxScore = score;
 | |
|       max = element;
 | |
|     }
 | |
|   }
 | |
|   return max;
 | |
| }
 | |
| 
 | |
| function arrayEquals(arr1, arr2) {
 | |
|   if (arr1.length !== arr2.length) {
 | |
|     return false;
 | |
|   }
 | |
|   for (var i = 0, len = arr1.length; i < len; i++) {
 | |
|     if (arr1[i] !== arr2[i]) {
 | |
|       return false;
 | |
|     }
 | |
|   }
 | |
|   return true;
 | |
| }
 | |
| 
 | |
| function uniq(arr) {
 | |
|   var obj = {};
 | |
|   for (var i = 0; i < arr.length; i++) {
 | |
|     obj['$' + arr[i]] = true;
 | |
|   }
 | |
|   return Object.keys(obj).map(function (key) {
 | |
|     return key.substring(1);
 | |
|   });
 | |
| }
 | |
| 
 | |
| /*
 | |
|  * Simple task queue to sequentialize actions. Assumes
 | |
|  * callbacks will eventually fire (once).
 | |
|  */
 | |
| 
 | |
| 
 | |
| function TaskQueue() {
 | |
|   this.promise = new Promise(function (fulfill) {fulfill(); });
 | |
| }
 | |
| TaskQueue.prototype.add = function (promiseFactory) {
 | |
|   this.promise = this.promise["catch"](function () {
 | |
|     // just recover
 | |
|   }).then(function () {
 | |
|     return promiseFactory();
 | |
|   });
 | |
|   return this.promise;
 | |
| };
 | |
| TaskQueue.prototype.finish = function () {
 | |
|   return this.promise;
 | |
| };
 | |
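| // Usage sketch (illustrative only; `db` is a hypothetical PouchDB instance):
 | |
| //   var queue = new TaskQueue();
 | |
| //   queue.add(function () { return db.get('a'); });
 | |
| //   queue.add(function () { return db.get('b'); });   // runs only after the first settles
 | |
| //   queue.finish().then(function (lastResult) { /* ... */ });
 | |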
| 
 | |
| function stringify(input) {
 | |
|   if (!input) {
 | |
|     return 'undefined'; // backwards compat for empty reduce
 | |
|   }
 | |
|   // for backwards compat with mapreduce, functions/strings are stringified
 | |
|   // as-is. everything else is JSON-stringified.
 | |
|   switch (typeof input) {
 | |
|     case 'function':
 | |
|       // e.g. a mapreduce map
 | |
|       return input.toString();
 | |
|     case 'string':
 | |
|       // e.g. a mapreduce built-in _reduce function
 | |
|       return input.toString();
 | |
|     default:
 | |
|       // e.g. a JSON object in the case of mango queries
 | |
|       return JSON.stringify(input);
 | |
|   }
 | |
| }
 | |
| 
 | |
| /* create a string signature for a view so we can cache it and uniq it */
 | |
| function createViewSignature(mapFun, reduceFun) {
 | |
|   // the "undefined" part is for backwards compatibility
 | |
|   return stringify(mapFun) + stringify(reduceFun) + 'undefined';
 | |
| }
 | |
| 
 | |
| function createView(sourceDB, viewName, mapFun, reduceFun, temporary, localDocName) {
 | |
|   var viewSignature = createViewSignature(mapFun, reduceFun);
 | |
| 
 | |
|   var cachedViews;
 | |
|   if (!temporary) {
 | |
|     // cache this to ensure we don't try to update the same view twice
 | |
|     cachedViews = sourceDB._cachedViews = sourceDB._cachedViews || {};
 | |
|     if (cachedViews[viewSignature]) {
 | |
|       return cachedViews[viewSignature];
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   var promiseForView = sourceDB.info().then(function (info) {
 | |
| 
 | |
|     var depDbName = info.db_name + '-mrview-' +
 | |
|       (temporary ? 'temp' : stringMd5(viewSignature));
 | |
| 
 | |
|     // save the view name in the source db so it can be cleaned up if necessary
 | |
|     // (e.g. when the _design doc is deleted, remove all associated view data)
 | |
|     function diffFunction(doc) {
 | |
|       doc.views = doc.views || {};
 | |
|       var fullViewName = viewName;
 | |
|       if (fullViewName.indexOf('/') === -1) {
 | |
|         fullViewName = viewName + '/' + viewName;
 | |
|       }
 | |
|       var depDbs = doc.views[fullViewName] = doc.views[fullViewName] || {};
 | |
|       /* istanbul ignore if */
 | |
|       if (depDbs[depDbName]) {
 | |
|         return; // no update necessary
 | |
|       }
 | |
|       depDbs[depDbName] = true;
 | |
|       return doc;
 | |
|     }
 | |
|     return upsert(sourceDB, '_local/' + localDocName, diffFunction).then(function () {
 | |
|       return sourceDB.registerDependentDatabase(depDbName).then(function (res) {
 | |
|         var db = res.db;
 | |
|         db.auto_compaction = true;
 | |
|         var view = {
 | |
|           name: depDbName,
 | |
|           db: db,
 | |
|           sourceDB: sourceDB,
 | |
|           adapter: sourceDB.adapter,
 | |
|           mapFun: mapFun,
 | |
|           reduceFun: reduceFun
 | |
|         };
 | |
|         return view.db.get('_local/lastSeq')["catch"](function (err) {
 | |
|           /* istanbul ignore if */
 | |
|           if (err.status !== 404) {
 | |
|             throw err;
 | |
|           }
 | |
|         }).then(function (lastSeqDoc) {
 | |
|           view.seq = lastSeqDoc ? lastSeqDoc.seq : 0;
 | |
|           if (cachedViews) {
 | |
|             view.db.once('destroyed', function () {
 | |
|               delete cachedViews[viewSignature];
 | |
|             });
 | |
|           }
 | |
|           return view;
 | |
|         });
 | |
|       });
 | |
|     });
 | |
|   });
 | |
| 
 | |
|   if (cachedViews) {
 | |
|     cachedViews[viewSignature] = promiseForView;
 | |
|   }
 | |
|   return promiseForView;
 | |
| }
 | |
| 
 | |
| function QueryParseError(message) {
 | |
|   this.status = 400;
 | |
|   this.name = 'query_parse_error';
 | |
|   this.message = message;
 | |
|   this.error = true;
 | |
|   try {
 | |
|     Error.captureStackTrace(this, QueryParseError);
 | |
|   } catch (e) {}
 | |
| }
 | |
| 
 | |
| inherits(QueryParseError, Error);
 | |
| 
 | |
| function NotFoundError(message) {
 | |
|   this.status = 404;
 | |
|   this.name = 'not_found';
 | |
|   this.message = message;
 | |
|   this.error = true;
 | |
|   try {
 | |
|     Error.captureStackTrace(this, NotFoundError);
 | |
|   } catch (e) {}
 | |
| }
 | |
| 
 | |
| inherits(NotFoundError, Error);
 | |
| 
 | |
| function BuiltInError(message) {
 | |
|   this.status = 500;
 | |
|   this.name = 'invalid_value';
 | |
|   this.message = message;
 | |
|   this.error = true;
 | |
|   try {
 | |
|     Error.captureStackTrace(this, BuiltInError);
 | |
|   } catch (e) {}
 | |
| }
 | |
| 
 | |
| inherits(BuiltInError, Error);
 | |
| 
 | |
| function promisedCallback$1(promise, callback) {
 | |
|   if (callback) {
 | |
|     promise.then(function (res) {
 | |
|       immediate(function () {
 | |
|         callback(null, res);
 | |
|       });
 | |
|     }, function (reason) {
 | |
|       immediate(function () {
 | |
|         callback(reason);
 | |
|       });
 | |
|     });
 | |
|   }
 | |
|   return promise;
 | |
| }
 | |
| 
 | |
| function callbackify$1(fun) {
 | |
|   return getArguments(function (args) {
 | |
|     var cb = args.pop();
 | |
|     var promise = fun.apply(this, args);
 | |
|     if (typeof cb === 'function') {
 | |
|       promisedCallback$1(promise, cb);
 | |
|     }
 | |
|     return promise;
 | |
|   });
 | |
| }
 | |
| 
 | |
| // Promise finally util similar to Q.finally
 | |
| function fin(promise, finalPromiseFactory) {
 | |
|   return promise.then(function (res) {
 | |
|     return finalPromiseFactory().then(function () {
 | |
|       return res;
 | |
|     });
 | |
|   }, function (reason) {
 | |
|     return finalPromiseFactory().then(function () {
 | |
|       throw reason;
 | |
|     });
 | |
|   });
 | |
| }
 | |
| 
 | |
| function sequentialize(queue, promiseFactory) {
 | |
|   return function () {
 | |
|     var args = arguments;
 | |
|     var that = this;
 | |
|     return queue.add(function () {
 | |
|       return promiseFactory.apply(that, args);
 | |
|     });
 | |
|   };
 | |
| }
 | |
| 
 | |
| // uniq an array of strings, order not guaranteed
 | |
| // similar to underscore/lodash _.uniq
 | |
| function uniq$1(arr) {
 | |
|   var theSet = new ExportedSet(arr);
 | |
|   var result = new Array(theSet.size);
 | |
|   var index = -1;
 | |
|   theSet.forEach(function (value) {
 | |
|     result[++index] = value;
 | |
|   });
 | |
|   return result;
 | |
| }
 | |
| 
 | |
| function mapToKeysArray(map) {
 | |
|   var result = new Array(map.size);
 | |
|   var index = -1;
 | |
|   map.forEach(function (value, key) {
 | |
|     result[++index] = key;
 | |
|   });
 | |
|   return result;
 | |
| }
 | |
| 
 | |
| var persistentQueues = {};
 | |
| var tempViewQueue = new TaskQueue();
 | |
| var CHANGES_BATCH_SIZE = 50;
 | |
| 
 | |
| function parseViewName(name) {
 | |
|   // can be either 'ddocname/viewname' or just 'viewname'
 | |
|   // (where the ddoc name is the same)
 | |
|   return name.indexOf('/') === -1 ? [name, name] : name.split('/');
 | |
| }
 | |
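| // For example (illustrative only):
 | |
| //   parseViewName('mydesign/byAge')   // -> ['mydesign', 'byAge']
 | |
| //   parseViewName('byAge')            // -> ['byAge', 'byAge']
 | |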
| 
 | |
| function isGenOne(changes) {
 | |
|   // only return true if the current change is 1-
 | |
|   // and there are no other leaves
 | |
|   return changes.length === 1 && /^1-/.test(changes[0].rev);
 | |
| }
 | |
| 
 | |
| function emitError(db, e) {
 | |
|   try {
 | |
|     db.emit('error', e);
 | |
|   } catch (err) {
 | |
|     guardedConsole('error',
 | |
|       'The user\'s map/reduce function threw an uncaught error.\n' +
 | |
|       'You can debug this error by doing:\n' +
 | |
|       'myDatabase.on(\'error\', function (err) { debugger; });\n' +
 | |
|       'Please double-check your map/reduce function.');
 | |
|     guardedConsole('error', e);
 | |
|   }
 | |
| }
 | |
| 
 | |
| /**
 | |
|  * Returns an "abstract" mapreduce object of the form:
 | |
|  *
 | |
|  *   {
 | |
|  *     query: queryFun,
 | |
|  *     viewCleanup: viewCleanupFun
 | |
|  *   }
 | |
|  *
 | |
|  * Arguments are:
 | |
|  *
 | |
|  * localDoc: string
 | |
|  *   This is for the local doc that gets saved in order to track the
 | |
|  *   "dependent" DBs and clean them up for viewCleanup. It should be
 | |
|  *   unique, so that indexer plugins don't collide with each other.
 | |
|  * mapper: function (mapFunDef, emit)
 | |
|  *   Returns a map function based on the mapFunDef, which in the case of
 | |
|  *   normal map/reduce is just the de-stringified function, but may be
 | |
|  *   something else, such as an object in the case of pouchdb-find.
 | |
|  * reducer: function (reduceFunDef)
 | |
|  *   Ditto, but for reducing. Modules don't have to support reducing
 | |
|  *   (e.g. pouchdb-find).
 | |
|  * ddocValidator: function (ddoc, viewName)
 | |
|  *   Throws an error if the ddoc or viewName is not valid.
 | |
|  *   This could be a way to communicate to the user that the configuration for the
 | |
|  *   indexer is invalid.
 | |
|  */
 | |
| function createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator) {
 | |
| 
 | |
|   function tryMap(db, fun, doc) {
 | |
|     // emit an event if there was an error thrown by a map function.
 | |
|     // putting try/catches in a single function also avoids deoptimizations.
 | |
|     try {
 | |
|       fun(doc);
 | |
|     } catch (e) {
 | |
|       emitError(db, e);
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   function tryReduce(db, fun, keys, values, rereduce) {
 | |
|     // same as above, but returning the result or an error. there are two separate
 | |
|     // functions to avoid extra memory allocations since the tryCode() case is used
 | |
|     // for custom map functions (common) vs this function, which is only used for
 | |
|     // custom reduce functions (rare)
 | |
|     try {
 | |
|       return {output : fun(keys, values, rereduce)};
 | |
|     } catch (e) {
 | |
|       emitError(db, e);
 | |
|       return {error: e};
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   function sortByKeyThenValue(x, y) {
 | |
|     var keyCompare = collate(x.key, y.key);
 | |
|     return keyCompare !== 0 ? keyCompare : collate(x.value, y.value);
 | |
|   }
 | |
| 
 | |
|   function sliceResults(results, limit, skip) {
 | |
|     skip = skip || 0;
 | |
|     if (typeof limit === 'number') {
 | |
|       return results.slice(skip, limit + skip);
 | |
|     } else if (skip > 0) {
 | |
|       return results.slice(skip);
 | |
|     }
 | |
|     return results;
 | |
|   }
 | |
| 
 | |
|   function rowToDocId(row) {
 | |
|     var val = row.value;
 | |
|     // Users can explicitly specify a joined doc _id, or it
 | |
|     // defaults to the doc _id that emitted the key/value.
 | |
|     var docId = (val && typeof val === 'object' && val._id) || row.id;
 | |
|     return docId;
 | |
|   }
 | |
| 
 | |
|   function readAttachmentsAsBlobOrBuffer(res) {
 | |
|     res.rows.forEach(function (row) {
 | |
|       var atts = row.doc && row.doc._attachments;
 | |
|       if (!atts) {
 | |
|         return;
 | |
|       }
 | |
|       Object.keys(atts).forEach(function (filename) {
 | |
|         var att = atts[filename];
 | |
|         atts[filename].data = b64ToBluffer(att.data, att.content_type);
 | |
|       });
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   function postprocessAttachments(opts) {
 | |
|     return function (res) {
 | |
|       if (opts.include_docs && opts.attachments && opts.binary) {
 | |
|         readAttachmentsAsBlobOrBuffer(res);
 | |
|       }
 | |
|       return res;
 | |
|     };
 | |
|   }
 | |
| 
 | |
|   function addHttpParam(paramName, opts, params, asJson) {
 | |
|     // add an http param from opts to params, optionally json-encoded
 | |
|     var val = opts[paramName];
 | |
|     if (typeof val !== 'undefined') {
 | |
|       if (asJson) {
 | |
|         val = encodeURIComponent(JSON.stringify(val));
 | |
|       }
 | |
|       params.push(paramName + '=' + val);
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   function coerceInteger(integerCandidate) {
 | |
|     if (typeof integerCandidate !== 'undefined') {
 | |
|       var asNumber = Number(integerCandidate);
 | |
|       // prevents e.g. '1foo' or '1.1' being coerced to 1
 | |
|       if (!isNaN(asNumber) && asNumber === parseInt(integerCandidate, 10)) {
 | |
|         return asNumber;
 | |
|       } else {
 | |
|         return integerCandidate;
 | |
|       }
 | |
|     }
 | |
|   }
 | |
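  // Illustrative sketch (not part of the original bundle): only strings that
  // are exact base-10 integers get converted; everything else is returned
  // unchanged so checkPositiveInteger() below can reject it.
  function exampleCoerceInteger() {
    return [
      coerceInteger('25'),     // => 25 (number)
      coerceInteger('1.1'),    // => '1.1' (unchanged)
      coerceInteger('1foo'),   // => '1foo' (unchanged)
      coerceInteger(undefined) // => undefined
    ];
  }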
| 
 | |
|   function coerceOptions(opts) {
 | |
|     opts.group_level = coerceInteger(opts.group_level);
 | |
|     opts.limit = coerceInteger(opts.limit);
 | |
|     opts.skip = coerceInteger(opts.skip);
 | |
|     return opts;
 | |
|   }
 | |
| 
 | |
|   function checkPositiveInteger(number) {
 | |
|     if (number) {
 | |
|       if (typeof number !== 'number') {
 | |
|         return  new QueryParseError('Invalid value for integer: "' +
 | |
|           number + '"');
 | |
|       }
 | |
|       if (number < 0) {
 | |
|         return new QueryParseError('Invalid value for positive integer: ' +
 | |
|           '"' + number + '"');
 | |
|       }
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   function checkQueryParseError(options, fun) {
 | |
|     var startkeyName = options.descending ? 'endkey' : 'startkey';
 | |
|     var endkeyName = options.descending ? 'startkey' : 'endkey';
 | |
| 
 | |
|     if (typeof options[startkeyName] !== 'undefined' &&
 | |
|       typeof options[endkeyName] !== 'undefined' &&
 | |
|       collate(options[startkeyName], options[endkeyName]) > 0) {
 | |
|       throw new QueryParseError('No rows can match your key range, ' +
 | |
|         'reverse your start_key and end_key or set {descending : true}');
 | |
|     } else if (fun.reduce && options.reduce !== false) {
 | |
|       if (options.include_docs) {
 | |
|         throw new QueryParseError('{include_docs:true} is invalid for reduce');
 | |
|       } else if (options.keys && options.keys.length > 1 &&
 | |
|         !options.group && !options.group_level) {
 | |
|         throw new QueryParseError('Multi-key fetches for reduce views must use ' +
 | |
|           '{group: true}');
 | |
|       }
 | |
|     }
 | |
|     ['group_level', 'limit', 'skip'].forEach(function (optionName) {
 | |
|       var error = checkPositiveInteger(options[optionName]);
 | |
|       if (error) {
 | |
|         throw error;
 | |
|       }
 | |
|     });
 | |
|   }
 | |
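  // Illustrative sketch (not part of the original bundle): two requests that
  // checkQueryParseError() rejects up front instead of returning empty or
  // misleading results.
  function exampleInvalidQueries() {
    // throws: no rows can match a start key that collates after the end key
    checkQueryParseError({startkey: 'z', endkey: 'a'}, {map: function () {}});
    // throws: include_docs is not allowed when a reduce will run
    checkQueryParseError({include_docs: true}, {map: function () {}, reduce: '_count'});
  }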
| 
 | |
|   function httpQuery(db, fun, opts) {
 | |
|     // List of query-string parameters to add to the request
 | |
|     var params = [];
 | |
|     var body;
 | |
|     var method = 'GET';
 | |
|     var ok, status;
 | |
| 
 | |
|     // If opts.reduce exists and is defined, then add it to the list
 | |
|     // of parameters.
 | |
|     // If reduce=false then the results are that of only the map function
 | |
|     // not the final result of map and reduce.
 | |
|     addHttpParam('reduce', opts, params);
 | |
|     addHttpParam('include_docs', opts, params);
 | |
|     addHttpParam('attachments', opts, params);
 | |
|     addHttpParam('limit', opts, params);
 | |
|     addHttpParam('descending', opts, params);
 | |
|     addHttpParam('group', opts, params);
 | |
|     addHttpParam('group_level', opts, params);
 | |
|     addHttpParam('skip', opts, params);
 | |
|     addHttpParam('stale', opts, params);
 | |
|     addHttpParam('conflicts', opts, params);
 | |
|     addHttpParam('startkey', opts, params, true);
 | |
|     addHttpParam('start_key', opts, params, true);
 | |
|     addHttpParam('endkey', opts, params, true);
 | |
|     addHttpParam('end_key', opts, params, true);
 | |
|     addHttpParam('inclusive_end', opts, params);
 | |
|     addHttpParam('key', opts, params, true);
 | |
|     addHttpParam('update_seq', opts, params);
 | |
| 
 | |
|     // Format the list of parameters into a valid URI query string
 | |
|     params = params.join('&');
 | |
|     params = params === '' ? '' : '?' + params;
 | |
| 
 | |
|     // If keys are supplied, issue a POST to circumvent GET query string limits
 | |
|     // see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
 | |
|     if (typeof opts.keys !== 'undefined') {
 | |
|       var MAX_URL_LENGTH = 2000;
 | |
|       // according to http://stackoverflow.com/a/417184/680742,
 | |
|       // the de facto URL length limit is 2000 characters
 | |
| 
 | |
|       var keysAsString =
 | |
|         'keys=' + encodeURIComponent(JSON.stringify(opts.keys));
 | |
|       if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) {
 | |
|         // If the keys are short enough, do a GET. we do this to work around
 | |
|         // Safari not understanding 304s on POSTs (see pouchdb/pouchdb#1239)
 | |
|         params += (params[0] === '?' ? '&' : '?') + keysAsString;
 | |
|       } else {
 | |
|         method = 'POST';
 | |
|         if (typeof fun === 'string') {
 | |
|           body = {keys: opts.keys};
 | |
|         } else { // fun is {map : mapfun}, so append to this
 | |
|           fun.keys = opts.keys;
 | |
|         }
 | |
|       }
 | |
|     }
 | |
| 
 | |
|     // We are referencing a query defined in the design doc
 | |
|     if (typeof fun === 'string') {
 | |
|       var parts = parseViewName(fun);
 | |
|       return db.fetch('_design/' + parts[0] + '/_view/' + parts[1] + params, {
 | |
|         headers: new h({'Content-Type': 'application/json'}),
 | |
|         method: method,
 | |
|         body: JSON.stringify(body)
 | |
|       }).then(function (response) {
 | |
|         ok = response.ok;
 | |
|         status = response.status;
 | |
|         return response.json();
 | |
|       }).then(function (result) {
 | |
|         if (!ok) {
 | |
|           result.status = status;
 | |
|           throw generateErrorFromResponse(result);
 | |
|         }
 | |
|         // fail the entire request if the result contains an error
 | |
|         result.rows.forEach(function (row) {
 | |
|           /* istanbul ignore if */
 | |
|           if (row.value && row.value.error && row.value.error === "builtin_reduce_error") {
 | |
|             throw new Error(row.reason);
 | |
|           }
 | |
|         });
 | |
|         return result;
 | |
|       }).then(postprocessAttachments(opts));
 | |
|     }
 | |
| 
 | |
|     // We are using a temporary view, terrible for performance, good for testing
 | |
|     body = body || {};
 | |
|     Object.keys(fun).forEach(function (key) {
 | |
|       if (Array.isArray(fun[key])) {
 | |
|         body[key] = fun[key];
 | |
|       } else {
 | |
|         body[key] = fun[key].toString();
 | |
|       }
 | |
|     });
 | |
| 
 | |
|     return db.fetch('_temp_view' + params, {
 | |
|       headers: new h({'Content-Type': 'application/json'}),
 | |
|       method: 'POST',
 | |
|       body: JSON.stringify(body)
 | |
|     }).then(function (response) {
 | |
|         ok = response.ok;
 | |
|         status = response.status;
 | |
|       return response.json();
 | |
|     }).then(function (result) {
 | |
|       if (!ok) {
 | |
|         result.status = status;
 | |
|         throw generateErrorFromResponse(result);
 | |
|       }
 | |
|       return result;
 | |
|     }).then(postprocessAttachments(opts));
 | |
|   }
 | |
| 
 | |
|   // custom adapters can define their own api._query
 | |
|   // and override the default behavior
 | |
|   /* istanbul ignore next */
 | |
|   function customQuery(db, fun, opts) {
 | |
|     return new Promise(function (resolve, reject) {
 | |
|       db._query(fun, opts, function (err, res) {
 | |
|         if (err) {
 | |
|           return reject(err);
 | |
|         }
 | |
|         resolve(res);
 | |
|       });
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   // custom adapters can define their own api._viewCleanup
 | |
|   // and override the default behavior
 | |
|   /* istanbul ignore next */
 | |
|   function customViewCleanup(db) {
 | |
|     return new Promise(function (resolve, reject) {
 | |
|       db._viewCleanup(function (err, res) {
 | |
|         if (err) {
 | |
|           return reject(err);
 | |
|         }
 | |
|         resolve(res);
 | |
|       });
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   function defaultsTo(value) {
 | |
|     return function (reason) {
 | |
|       /* istanbul ignore else */
 | |
|       if (reason.status === 404) {
 | |
|         return value;
 | |
|       } else {
 | |
|         throw reason;
 | |
|       }
 | |
|     };
 | |
|   }
 | |
| 
 | |
|   // returns a promise for a list of docs to update, based on the input docId.
 | |
|   // the order doesn't matter, because post-3.2.0, bulkDocs
 | |
|   // is an atomic operation in all three adapters.
 | |
|   function getDocsToPersist(docId, view, docIdsToChangesAndEmits) {
 | |
|     var metaDocId = '_local/doc_' + docId;
 | |
|     var defaultMetaDoc = {_id: metaDocId, keys: []};
 | |
|     var docData = docIdsToChangesAndEmits.get(docId);
 | |
|     var indexableKeysToKeyValues = docData[0];
 | |
|     var changes = docData[1];
 | |
| 
 | |
|     function getMetaDoc() {
 | |
|       if (isGenOne(changes)) {
 | |
|         // generation 1, so we can safely assume initial state
 | |
|         // for performance reasons (avoids unnecessary GETs)
 | |
|         return Promise.resolve(defaultMetaDoc);
 | |
|       }
 | |
|       return view.db.get(metaDocId)["catch"](defaultsTo(defaultMetaDoc));
 | |
|     }
 | |
| 
 | |
|     function getKeyValueDocs(metaDoc) {
 | |
|       if (!metaDoc.keys.length) {
 | |
|         // no keys, no need for a lookup
 | |
|         return Promise.resolve({rows: []});
 | |
|       }
 | |
|       return view.db.allDocs({
 | |
|         keys: metaDoc.keys,
 | |
|         include_docs: true
 | |
|       });
 | |
|     }
 | |
| 
 | |
|     function processKeyValueDocs(metaDoc, kvDocsRes) {
 | |
|       var kvDocs = [];
 | |
|       var oldKeys = new ExportedSet();
 | |
| 
 | |
|       for (var i = 0, len = kvDocsRes.rows.length; i < len; i++) {
 | |
|         var row = kvDocsRes.rows[i];
 | |
|         var doc = row.doc;
 | |
|         if (!doc) { // deleted
 | |
|           continue;
 | |
|         }
 | |
|         kvDocs.push(doc);
 | |
|         oldKeys.add(doc._id);
 | |
|         doc._deleted = !indexableKeysToKeyValues.has(doc._id);
 | |
|         if (!doc._deleted) {
 | |
|           var keyValue = indexableKeysToKeyValues.get(doc._id);
 | |
|           if ('value' in keyValue) {
 | |
|             doc.value = keyValue.value;
 | |
|           }
 | |
|         }
 | |
|       }
 | |
|       var newKeys = mapToKeysArray(indexableKeysToKeyValues);
 | |
|       newKeys.forEach(function (key) {
 | |
|         if (!oldKeys.has(key)) {
 | |
|           // new doc
 | |
|           var kvDoc = {
 | |
|             _id: key
 | |
|           };
 | |
|           var keyValue = indexableKeysToKeyValues.get(key);
 | |
|           if ('value' in keyValue) {
 | |
|             kvDoc.value = keyValue.value;
 | |
|           }
 | |
|           kvDocs.push(kvDoc);
 | |
|         }
 | |
|       });
 | |
|       metaDoc.keys = uniq$1(newKeys.concat(metaDoc.keys));
 | |
|       kvDocs.push(metaDoc);
 | |
| 
 | |
|       return kvDocs;
 | |
|     }
 | |
| 
 | |
|     return getMetaDoc().then(function (metaDoc) {
 | |
|       return getKeyValueDocs(metaDoc).then(function (kvDocsRes) {
 | |
|         return processKeyValueDocs(metaDoc, kvDocsRes);
 | |
|       });
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   // updates all emitted key/value docs and metaDocs in the mrview database
 | |
|   // for the given batch of documents from the source database
 | |
|   function saveKeyValues(view, docIdsToChangesAndEmits, seq) {
 | |
|     var seqDocId = '_local/lastSeq';
 | |
|     return view.db.get(seqDocId)[
 | |
|       "catch"](defaultsTo({_id: seqDocId, seq: 0}))
 | |
|       .then(function (lastSeqDoc) {
 | |
|         var docIds = mapToKeysArray(docIdsToChangesAndEmits);
 | |
|         return Promise.all(docIds.map(function (docId) {
 | |
|           return getDocsToPersist(docId, view, docIdsToChangesAndEmits);
 | |
|         })).then(function (listOfDocsToPersist) {
 | |
|           var docsToPersist = flatten(listOfDocsToPersist);
 | |
|           lastSeqDoc.seq = seq;
 | |
|           docsToPersist.push(lastSeqDoc);
 | |
|           // write all docs in a single operation, update the seq once
 | |
|           return view.db.bulkDocs({docs : docsToPersist});
 | |
|         });
 | |
|       });
 | |
|   }
 | |
| 
 | |
|   function getQueue(view) {
 | |
|     var viewName = typeof view === 'string' ? view : view.name;
 | |
|     var queue = persistentQueues[viewName];
 | |
|     if (!queue) {
 | |
|       queue = persistentQueues[viewName] = new TaskQueue();
 | |
|     }
 | |
|     return queue;
 | |
|   }
 | |
| 
 | |
|   function updateView(view, opts) {
 | |
|     return sequentialize(getQueue(view), function () {
 | |
|       return updateViewInQueue(view, opts);
 | |
|     })();
 | |
|   }
 | |
| 
 | |
|   function updateViewInQueue(view, opts) {
 | |
|     // bind the emit function once
 | |
|     var mapResults;
 | |
|     var doc;
 | |
| 
 | |
|     function emit(key, value) {
 | |
|       var output = {id: doc._id, key: normalizeKey(key)};
 | |
|       // Don't explicitly store the value unless it's defined and non-null.
 | |
|       // This saves on storage space, because often people don't use it.
 | |
|       if (typeof value !== 'undefined' && value !== null) {
 | |
|         output.value = normalizeKey(value);
 | |
|       }
 | |
|       mapResults.push(output);
 | |
|     }
 | |
| 
 | |
|     var mapFun = mapper(view.mapFun, emit);
 | |
| 
 | |
|     var currentSeq = view.seq || 0;
 | |
| 
 | |
|     function processChange(docIdsToChangesAndEmits, seq) {
 | |
|       return function () {
 | |
|         return saveKeyValues(view, docIdsToChangesAndEmits, seq);
 | |
|       };
 | |
|     }
 | |
| 
 | |
|     let indexed_docs = 0;
 | |
|     let progress = {
 | |
|       view: view.name,
 | |
|       indexed_docs: indexed_docs
 | |
|     };
 | |
|     view.sourceDB.emit('indexing', progress);
 | |
| 
 | |
|     var queue = new TaskQueue();
 | |
| 
 | |
|     function processNextBatch() {
 | |
|       return view.sourceDB.changes({
 | |
|         return_docs: true,
 | |
|         conflicts: true,
 | |
|         include_docs: true,
 | |
|         style: 'all_docs',
 | |
|         since: currentSeq,
 | |
|         limit: opts.changes_batch_size
 | |
|       }).then(processBatch);
 | |
|     }
 | |
| 
 | |
|     function processBatch(response) {
 | |
|       var results = response.results;
 | |
|       if (!results.length) {
 | |
|         return;
 | |
|       }
 | |
|       var docIdsToChangesAndEmits = createDocIdsToChangesAndEmits(results);
 | |
|       queue.add(processChange(docIdsToChangesAndEmits, currentSeq));
 | |
| 
 | |
|       indexed_docs = indexed_docs + results.length;
 | |
|       let progress = {
 | |
|         view: view.name,
 | |
|         last_seq: response.last_seq,
 | |
|         results_count: results.length,
 | |
|         indexed_docs: indexed_docs
 | |
|       };
 | |
|       view.sourceDB.emit('indexing', progress);
 | |
|       
 | |
|       if (results.length < opts.changes_batch_size) {
 | |
|         return;
 | |
|       }
 | |
|       return processNextBatch();
 | |
|     }
 | |
| 
 | |
|     function createDocIdsToChangesAndEmits(results) {
 | |
|       var docIdsToChangesAndEmits = new ExportedMap();
 | |
|       for (var i = 0, len = results.length; i < len; i++) {
 | |
|         var change = results[i];
 | |
|         if (change.doc._id[0] !== '_') {
 | |
|           mapResults = [];
 | |
|           doc = change.doc;
 | |
| 
 | |
|           if (!doc._deleted) {
 | |
|             tryMap(view.sourceDB, mapFun, doc);
 | |
|           }
 | |
|           mapResults.sort(sortByKeyThenValue);
 | |
| 
 | |
|           var indexableKeysToKeyValues = createIndexableKeysToKeyValues(mapResults);
 | |
|           docIdsToChangesAndEmits.set(change.doc._id, [
 | |
|             indexableKeysToKeyValues,
 | |
|             change.changes
 | |
|           ]);
 | |
|         }
 | |
|         currentSeq = change.seq;
 | |
|       }
 | |
|       return docIdsToChangesAndEmits;
 | |
|     }
 | |
| 
 | |
|     function createIndexableKeysToKeyValues(mapResults) {
 | |
|       var indexableKeysToKeyValues = new ExportedMap();
 | |
|       var lastKey;
 | |
|       for (var i = 0, len = mapResults.length; i < len; i++) {
 | |
|         var emittedKeyValue = mapResults[i];
 | |
|         var complexKey = [emittedKeyValue.key, emittedKeyValue.id];
 | |
|         if (i > 0 && collate(emittedKeyValue.key, lastKey) === 0) {
 | |
|           complexKey.push(i); // dup key+id, so make it unique
 | |
|         }
 | |
|         indexableKeysToKeyValues.set(toIndexableString(complexKey), emittedKeyValue);
 | |
|         lastKey = emittedKeyValue.key;
 | |
|       }
 | |
|       return indexableKeysToKeyValues;
 | |
|     }
 | |
| 
 | |
|     return processNextBatch().then(function () {
 | |
|       return queue.finish();
 | |
|     }).then(function () {
 | |
|       view.seq = currentSeq;
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   function reduceView(view, results, options) {
 | |
|     if (options.group_level === 0) {
 | |
|       delete options.group_level;
 | |
|     }
 | |
| 
 | |
|     var shouldGroup = options.group || options.group_level;
 | |
| 
 | |
|     var reduceFun = reducer(view.reduceFun);
 | |
| 
 | |
|     var groups = [];
 | |
|     var lvl = isNaN(options.group_level) ? Number.POSITIVE_INFINITY :
 | |
|       options.group_level;
 | |
|     results.forEach(function (e) {
 | |
|       var last = groups[groups.length - 1];
 | |
|       var groupKey = shouldGroup ? e.key : null;
 | |
| 
 | |
|       // only set group_level for array keys
 | |
|       if (shouldGroup && Array.isArray(groupKey)) {
 | |
|         groupKey = groupKey.slice(0, lvl);
 | |
|       }
 | |
| 
 | |
|       if (last && collate(last.groupKey, groupKey) === 0) {
 | |
|         last.keys.push([e.key, e.id]);
 | |
|         last.values.push(e.value);
 | |
|         return;
 | |
|       }
 | |
|       groups.push({
 | |
|         keys: [[e.key, e.id]],
 | |
|         values: [e.value],
 | |
|         groupKey: groupKey
 | |
|       });
 | |
|     });
 | |
|     results = [];
 | |
|     for (var i = 0, len = groups.length; i < len; i++) {
 | |
|       var e = groups[i];
 | |
|       var reduceTry = tryReduce(view.sourceDB, reduceFun, e.keys, e.values, false);
 | |
|       if (reduceTry.error && reduceTry.error instanceof BuiltInError) {
 | |
|         // CouchDB returns an error if a built-in errors out
 | |
|         throw reduceTry.error;
 | |
|       }
 | |
|       results.push({
 | |
|         // CouchDB just sets the value to null if a non-built-in errors out
 | |
|         value: reduceTry.error ? null : reduceTry.output,
 | |
|         key: e.groupKey
 | |
|       });
 | |
|     }
 | |
|     // no total_rows/offset when reducing
 | |
|     return {rows: sliceResults(results, options.limit, options.skip)};
 | |
|   }
 | |
| 
 | |
|   function queryView(view, opts) {
 | |
|     return sequentialize(getQueue(view), function () {
 | |
|       return queryViewInQueue(view, opts);
 | |
|     })();
 | |
|   }
 | |
| 
 | |
|   function queryViewInQueue(view, opts) {
 | |
|     var totalRows;
 | |
|     var shouldReduce = view.reduceFun && opts.reduce !== false;
 | |
|     var skip = opts.skip || 0;
 | |
|     if (typeof opts.keys !== 'undefined' && !opts.keys.length) {
 | |
|       // an empty 'keys' array is equivalent to a query with limit = 0
 | |
|       opts.limit = 0;
 | |
|       delete opts.keys;
 | |
|     }
 | |
| 
 | |
|     function fetchFromView(viewOpts) {
 | |
|       viewOpts.include_docs = true;
 | |
|       return view.db.allDocs(viewOpts).then(function (res) {
 | |
|         totalRows = res.total_rows;
 | |
|         return res.rows.map(function (result) {
 | |
| 
 | |
|           // implicit migration - in older versions of PouchDB,
 | |
|           // we explicitly stored the doc as {id: ..., key: ..., value: ...}
 | |
|           // this is tested in a migration test
 | |
|           /* istanbul ignore next */
 | |
|           if ('value' in result.doc && typeof result.doc.value === 'object' &&
 | |
|             result.doc.value !== null) {
 | |
|             var keys = Object.keys(result.doc.value).sort();
 | |
|             // this detection method is not perfect, but it's unlikely the user
 | |
|             // emitted a value which was an object with these 3 exact keys
 | |
|             var expectedKeys = ['id', 'key', 'value'];
 | |
|             if (!(keys < expectedKeys || keys > expectedKeys)) {
 | |
|               return result.doc.value;
 | |
|             }
 | |
|           }
 | |
| 
 | |
|           var parsedKeyAndDocId = parseIndexableString(result.doc._id);
 | |
|           return {
 | |
|             key: parsedKeyAndDocId[0],
 | |
|             id: parsedKeyAndDocId[1],
 | |
|             value: ('value' in result.doc ? result.doc.value : null)
 | |
|           };
 | |
|         });
 | |
|       });
 | |
|     }
 | |
| 
 | |
|     function onMapResultsReady(rows) {
 | |
|       var finalResults;
 | |
|       if (shouldReduce) {
 | |
|         finalResults = reduceView(view, rows, opts);
 | |
|       } else if (typeof opts.keys === 'undefined') {
 | |
|         finalResults = {
 | |
|           total_rows: totalRows,
 | |
|           offset: skip,
 | |
|           rows: rows
 | |
|         };
 | |
|       } else {
 | |
|         // support limit, skip for keys query
 | |
|         finalResults = {
 | |
|           total_rows: totalRows,
 | |
|           offset: skip,
 | |
|           rows: sliceResults(rows,opts.limit,opts.skip)
 | |
|         };
 | |
|       }
 | |
|       /* istanbul ignore if */
 | |
|       if (opts.update_seq) {
 | |
|         finalResults.update_seq = view.seq;
 | |
|       }
 | |
|       if (opts.include_docs) {
 | |
|         var docIds = uniq$1(rows.map(rowToDocId));
 | |
| 
 | |
|         return view.sourceDB.allDocs({
 | |
|           keys: docIds,
 | |
|           include_docs: true,
 | |
|           conflicts: opts.conflicts,
 | |
|           attachments: opts.attachments,
 | |
|           binary: opts.binary
 | |
|         }).then(function (allDocsRes) {
 | |
|           var docIdsToDocs = new ExportedMap();
 | |
|           allDocsRes.rows.forEach(function (row) {
 | |
|             docIdsToDocs.set(row.id, row.doc);
 | |
|           });
 | |
|           rows.forEach(function (row) {
 | |
|             var docId = rowToDocId(row);
 | |
|             var doc = docIdsToDocs.get(docId);
 | |
|             if (doc) {
 | |
|               row.doc = doc;
 | |
|             }
 | |
|           });
 | |
|           return finalResults;
 | |
|         });
 | |
|       } else {
 | |
|         return finalResults;
 | |
|       }
 | |
|     }
 | |
| 
 | |
|     if (typeof opts.keys !== 'undefined') {
 | |
|       var keys = opts.keys;
 | |
|       var fetchPromises = keys.map(function (key) {
 | |
|         var viewOpts = {
 | |
|           startkey : toIndexableString([key]),
 | |
|           endkey   : toIndexableString([key, {}])
 | |
|         };
 | |
|         /* istanbul ignore if */
 | |
|         if (opts.update_seq) {
 | |
|           viewOpts.update_seq = true;
 | |
|         }
 | |
|         return fetchFromView(viewOpts);
 | |
|       });
 | |
|       return Promise.all(fetchPromises).then(flatten).then(onMapResultsReady);
 | |
|     } else { // normal query, no 'keys'
 | |
|       var viewOpts = {
 | |
|         descending : opts.descending
 | |
|       };
 | |
|       /* istanbul ignore if */
 | |
|       if (opts.update_seq) {
 | |
|         viewOpts.update_seq = true;
 | |
|       }
 | |
|       var startkey;
 | |
|       var endkey;
 | |
|       if ('start_key' in opts) {
 | |
|         startkey = opts.start_key;
 | |
|       }
 | |
|       if ('startkey' in opts) {
 | |
|         startkey = opts.startkey;
 | |
|       }
 | |
|       if ('end_key' in opts) {
 | |
|         endkey = opts.end_key;
 | |
|       }
 | |
|       if ('endkey' in opts) {
 | |
|         endkey = opts.endkey;
 | |
|       }
 | |
|       if (typeof startkey !== 'undefined') {
 | |
|         viewOpts.startkey = opts.descending ?
 | |
|           toIndexableString([startkey, {}]) :
 | |
|           toIndexableString([startkey]);
 | |
|       }
 | |
|       if (typeof endkey !== 'undefined') {
 | |
|         var inclusiveEnd = opts.inclusive_end !== false;
 | |
|         if (opts.descending) {
 | |
|           inclusiveEnd = !inclusiveEnd;
 | |
|         }
 | |
| 
 | |
|         viewOpts.endkey = toIndexableString(
 | |
|           inclusiveEnd ? [endkey, {}] : [endkey]);
 | |
|       }
 | |
|       if (typeof opts.key !== 'undefined') {
 | |
|         var keyStart = toIndexableString([opts.key]);
 | |
|         var keyEnd = toIndexableString([opts.key, {}]);
 | |
|         if (viewOpts.descending) {
 | |
|           viewOpts.endkey = keyStart;
 | |
|           viewOpts.startkey = keyEnd;
 | |
|         } else {
 | |
|           viewOpts.startkey = keyStart;
 | |
|           viewOpts.endkey = keyEnd;
 | |
|         }
 | |
|       }
 | |
|       if (!shouldReduce) {
 | |
|         if (typeof opts.limit === 'number') {
 | |
|           viewOpts.limit = opts.limit;
 | |
|         }
 | |
|         viewOpts.skip = skip;
 | |
|       }
 | |
|       return fetchFromView(viewOpts).then(onMapResultsReady);
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   function httpViewCleanup(db) {
 | |
|     return db.fetch('_view_cleanup', {
 | |
|       headers: new h({'Content-Type': 'application/json'}),
 | |
|       method: 'POST'
 | |
|     }).then(function (response) {
 | |
|       return response.json();
 | |
|     });
 | |
|   }
 | |
| 
 | |
|   function localViewCleanup(db) {
 | |
|     return db.get('_local/' + localDocName).then(function (metaDoc) {
 | |
|       var docsToViews = new ExportedMap();
 | |
|       Object.keys(metaDoc.views).forEach(function (fullViewName) {
 | |
|         var parts = parseViewName(fullViewName);
 | |
|         var designDocName = '_design/' + parts[0];
 | |
|         var viewName = parts[1];
 | |
|         var views = docsToViews.get(designDocName);
 | |
|         if (!views) {
 | |
|           views = new ExportedSet();
 | |
|           docsToViews.set(designDocName, views);
 | |
|         }
 | |
|         views.add(viewName);
 | |
|       });
 | |
|       var opts = {
 | |
|         keys : mapToKeysArray(docsToViews),
 | |
|         include_docs : true
 | |
|       };
 | |
|       return db.allDocs(opts).then(function (res) {
 | |
|         var viewsToStatus = {};
 | |
|         res.rows.forEach(function (row) {
 | |
|           var ddocName = row.key.substring(8); // cuts off '_design/'
 | |
|           docsToViews.get(row.key).forEach(function (viewName) {
 | |
|             var fullViewName = ddocName + '/' + viewName;
 | |
|             /* istanbul ignore if */
 | |
|             if (!metaDoc.views[fullViewName]) {
 | |
|               // new format, without slashes, to support PouchDB 2.2.0
 | |
|               // migration test in pouchdb's browser.migration.js verifies this
 | |
|               fullViewName = viewName;
 | |
|             }
 | |
|             var viewDBNames = Object.keys(metaDoc.views[fullViewName]);
 | |
|             // design doc deleted, or view function nonexistent
 | |
|             var statusIsGood = row.doc && row.doc.views &&
 | |
|               row.doc.views[viewName];
 | |
|             viewDBNames.forEach(function (viewDBName) {
 | |
|               viewsToStatus[viewDBName] =
 | |
|                 viewsToStatus[viewDBName] || statusIsGood;
 | |
|             });
 | |
|           });
 | |
|         });
 | |
|         var dbsToDelete = Object.keys(viewsToStatus).filter(
 | |
|           function (viewDBName) { return !viewsToStatus[viewDBName]; });
 | |
|         var destroyPromises = dbsToDelete.map(function (viewDBName) {
 | |
|           return sequentialize(getQueue(viewDBName), function () {
 | |
|             return new db.constructor(viewDBName, db.__opts).destroy();
 | |
|           })();
 | |
|         });
 | |
|         return Promise.all(destroyPromises).then(function () {
 | |
|           return {ok: true};
 | |
|         });
 | |
|       });
 | |
|     }, defaultsTo({ok: true}));
 | |
|   }
 | |
| 
 | |
|   function queryPromised(db, fun, opts) {
 | |
|     /* istanbul ignore next */
 | |
|     if (typeof db._query === 'function') {
 | |
|       return customQuery(db, fun, opts);
 | |
|     }
 | |
|     if (isRemote(db)) {
 | |
|       return httpQuery(db, fun, opts);
 | |
|     }
 | |
| 
 | |
|     var updateViewOpts = {
 | |
|       changes_batch_size: db.__opts.view_update_changes_batch_size || CHANGES_BATCH_SIZE
 | |
|     };
 | |
| 
 | |
|     if (typeof fun !== 'string') {
 | |
|       // temp_view
 | |
|       checkQueryParseError(opts, fun);
 | |
| 
 | |
|       tempViewQueue.add(function () {
 | |
|         var createViewPromise = createView(
 | |
|           /* sourceDB */ db,
 | |
|           /* viewName */ 'temp_view/temp_view',
 | |
|           /* mapFun */ fun.map,
 | |
|           /* reduceFun */ fun.reduce,
 | |
|           /* temporary */ true,
 | |
|           /* localDocName */ localDocName);
 | |
|         return createViewPromise.then(function (view) {
 | |
|           return fin(updateView(view, updateViewOpts).then(function () {
 | |
|             return queryView(view, opts);
 | |
|           }), function () {
 | |
|             return view.db.destroy();
 | |
|           });
 | |
|         });
 | |
|       });
 | |
|       return tempViewQueue.finish();
 | |
|     } else {
 | |
|       // persistent view
 | |
|       var fullViewName = fun;
 | |
|       var parts = parseViewName(fullViewName);
 | |
|       var designDocName = parts[0];
 | |
|       var viewName = parts[1];
 | |
|       return db.get('_design/' + designDocName).then(function (doc) {
 | |
|         var fun = doc.views && doc.views[viewName];
 | |
| 
 | |
|         if (!fun) {
 | |
|           // basic validator; it's assumed that every subclass would want this
 | |
|           throw new NotFoundError('ddoc ' + doc._id + ' has no view named ' +
 | |
|             viewName);
 | |
|         }
 | |
| 
 | |
|         ddocValidator(doc, viewName);
 | |
|         checkQueryParseError(opts, fun);
 | |
| 
 | |
|         var createViewPromise = createView(
 | |
|           /* sourceDB */ db,
 | |
|           /* viewName */ fullViewName,
 | |
|           /* mapFun */ fun.map,
 | |
|           /* reduceFun */ fun.reduce,
 | |
|           /* temporary */ false,
 | |
|           /* localDocName */ localDocName);
 | |
|         return createViewPromise.then(function (view) {
 | |
|           if (opts.stale === 'ok' || opts.stale === 'update_after') {
 | |
|             if (opts.stale === 'update_after') {
 | |
|               immediate(function () {
 | |
|                 updateView(view, updateViewOpts);
 | |
|               });
 | |
|             }
 | |
|             return queryView(view, opts);
 | |
|           } else { // stale not ok
 | |
|             return updateView(view, updateViewOpts).then(function () {
 | |
|               return queryView(view, opts);
 | |
|             });
 | |
|           }
 | |
|         });
 | |
|       });
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   function abstractQuery(fun, opts, callback) {
 | |
|     var db = this;
 | |
|     if (typeof opts === 'function') {
 | |
|       callback = opts;
 | |
|       opts = {};
 | |
|     }
 | |
|     opts = opts ? coerceOptions(opts) : {};
 | |
| 
 | |
|     if (typeof fun === 'function') {
 | |
|       fun = {map : fun};
 | |
|     }
 | |
| 
 | |
|     var promise = Promise.resolve().then(function () {
 | |
|       return queryPromised(db, fun, opts);
 | |
|     });
 | |
|     promisedCallback$1(promise, callback);
 | |
|     return promise;
 | |
|   }
 | |
| 
 | |
|   var abstractViewCleanup = callbackify$1(function () {
 | |
|     var db = this;
 | |
|     /* istanbul ignore next */
 | |
|     if (typeof db._viewCleanup === 'function') {
 | |
|       return customViewCleanup(db);
 | |
|     }
 | |
|     if (isRemote(db)) {
 | |
|       return httpViewCleanup(db);
 | |
|     }
 | |
|     return localViewCleanup(db);
 | |
|   });
 | |
| 
 | |
|   return {
 | |
|     query: abstractQuery,
 | |
|     viewCleanup: abstractViewCleanup
 | |
|   };
 | |
| }
 | |
| 
 | |
| //
 | |
| // One thing about these mappers:
 | |
| //
 | |
| // Per the advice of John-David Dalton (http://youtu.be/NthmeLEhDDM),
 | |
| // what you want to do in this case is optimize for the smallest possible
 | |
| // function, since that's the thing that gets run over and over again.
 | |
| //
 | |
| // This code would be a lot simpler if all the if/elses were inside
 | |
| // the function, but it would also be a lot less performant.
 | |
| //
 | |
| 
 | |
| 
 | |
| function createDeepMultiMapper(fields, emit, selector) {
 | |
|   return function (doc) {
 | |
|     if (selector && !matchesSelector(doc, selector)) { return; }
 | |
|     var toEmit = [];
 | |
|     for (var i = 0, iLen = fields.length; i < iLen; i++) {
 | |
|       var parsedField = parseField(fields[i]);
 | |
|       var value = doc;
 | |
|       for (var j = 0, jLen = parsedField.length; j < jLen; j++) {
 | |
|         var key = parsedField[j];
 | |
|         value = value[key];
 | |
|         if (typeof value === 'undefined') {
 | |
|           return; // don't emit
 | |
|         }
 | |
|       }
 | |
|       toEmit.push(value);
 | |
|     }
 | |
|     emit(toEmit);
 | |
|   };
 | |
| }
 | |
| 
 | |
| function createDeepSingleMapper(field, emit, selector) {
 | |
|   var parsedField = parseField(field);
 | |
|   return function (doc) {
 | |
|     if (selector && !matchesSelector(doc, selector)) { return; }
 | |
|     var value = doc;
 | |
|     for (var i = 0, len = parsedField.length; i < len; i++) {
 | |
|       var key = parsedField[i];
 | |
|       value = value[key];
 | |
|       if (typeof value === 'undefined') {
 | |
|         return; // do nothing
 | |
|       }
 | |
|     }
 | |
|     emit(value);
 | |
|   };
 | |
| }
 | |
| 
 | |
| function createShallowSingleMapper(field, emit, selector) {
 | |
|   return function (doc) {
 | |
|     if (selector && !matchesSelector(doc, selector)) { return; }
 | |
|     emit(doc[field]);
 | |
|   };
 | |
| }
 | |
| 
 | |
| function createShallowMultiMapper(fields, emit, selector) {
 | |
|   return function (doc) {
 | |
|     if (selector && !matchesSelector(doc, selector)) { return; }
 | |
|     var toEmit = [];
 | |
|     for (var i = 0, len = fields.length; i < len; i++) {
 | |
|       toEmit.push(doc[fields[i]]);
 | |
|     }
 | |
|     emit(toEmit);
 | |
|   };
 | |
| }
 | |
| 
 | |
| function checkShallow(fields) {
 | |
|   for (var i = 0, len = fields.length; i < len; i++) {
 | |
|     var field = fields[i];
 | |
|     if (field.indexOf('.') !== -1) {
 | |
|       return false;
 | |
|     }
 | |
|   }
 | |
|   return true;
 | |
| }
 | |
| 
 | |
| function createMapper(fields, emit, selector) {
 | |
|   var isShallow = checkShallow(fields);
 | |
|   var isSingle = fields.length === 1;
 | |
| 
 | |
|   // notice we try to optimize for the most common case,
 | |
|   // i.e. single shallow indexes
 | |
|   if (isShallow) {
 | |
|     if (isSingle) {
 | |
|       return createShallowSingleMapper(fields[0], emit, selector);
 | |
|     } else { // multi
 | |
|       return createShallowMultiMapper(fields, emit, selector);
 | |
|     }
 | |
|   } else { // deep
 | |
|     if (isSingle) {
 | |
|       return createDeepSingleMapper(fields[0], emit, selector);
 | |
|     } else { // multi
 | |
|       return createDeepMultiMapper(fields, emit, selector);
 | |
|     }
 | |
|   }
 | |
| }
 | |
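// Illustrative sketch (not part of the original bundle): which specialized
// mapper is returned depends only on how many fields there are and whether
// any of them are dotted paths.
function exampleCreateMapper(emit) {
  return [
    createMapper(['name'], emit),                // shallow, single field
    createMapper(['name', 'age'], emit),         // shallow, multi field
    createMapper(['address.city'], emit),        // deep, single field
    createMapper(['address.city', 'age'], emit)  // deep, multi field
  ];
}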
| 
 | |
| function mapper(mapFunDef, emit) {
 | |
|   // mapFunDef is a list of fields
 | |
| 
 | |
|   const fields = Object.keys(mapFunDef.fields);
 | |
|   const partialSelector = mapFunDef.partial_filter_selector;
 | |
| 
 | |
|   return createMapper(fields, emit, partialSelector);
 | |
| }
 | |
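// Illustrative sketch (not part of the original bundle): `mapFunDef` is the
// `map` object that createIndex stores in the design doc, e.g.
// {fields: {name: 'asc', age: 'asc'}}, optionally carrying a
// partial_filter_selector; only the field names matter here.
function exampleMapper(emit) {
  return mapper({fields: {name: 'asc', age: 'asc'}}, emit);
}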
| 
 | |
| /* istanbul ignore next */
 | |
| function reducer(/*reduceFunDef*/) {
 | |
|   throw new Error('reduce not supported');
 | |
| }
 | |
| 
 | |
| function ddocValidator(ddoc, viewName) {
 | |
|   var view = ddoc.views[viewName];
 | |
|   // This doesn't actually need to be here apparently, but
 | |
|   // I feel safer keeping it.
 | |
|   /* istanbul ignore if */
 | |
|   if (!view.map || !view.map.fields) {
 | |
|     throw new Error('ddoc ' + ddoc._id +' with view ' + viewName +
 | |
|       ' doesn\'t have map.fields defined. ' +
 | |
|       'maybe it wasn\'t created by this plugin?');
 | |
|   }
 | |
| }
 | |
| 
 | |
| var abstractMapper = createAbstractMapReduce(
 | |
|   /* localDocName */ 'indexes',
 | |
|   mapper,
 | |
|   reducer,
 | |
|   ddocValidator
 | |
| );
 | |
| 
 | |
| function abstractMapper$1 (db) {
 | |
|   return db._customFindAbstractMapper || abstractMapper;
 | |
| }
 | |
| 
 | |
| // normalize the "sort" value
 | |
| function massageSort(sort) {
 | |
|   if (!Array.isArray(sort)) {
 | |
|     throw new Error('invalid sort json - should be an array');
 | |
|   }
 | |
|   return sort.map(function (sorting) {
 | |
|     if (typeof sorting === 'string') {
 | |
|       var obj = {};
 | |
|       obj[sorting] = 'asc';
 | |
|       return obj;
 | |
|     } else {
 | |
|       return sorting;
 | |
|     }
 | |
|   });
 | |
| }
 | |
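// Illustrative sketch (not part of the original bundle): bare field names in
// a sort array are expanded into single-key {field: 'asc'} objects.
function exampleMassageSort() {
  // => [{name: 'asc'}, {age: 'desc'}]
  return massageSort(['name', {age: 'desc'}]);
}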
| 
 | |
| function massageUseIndex(useIndex) {
 | |
|   var cleanedUseIndex = [];
 | |
|   if (typeof useIndex === 'string') {
 | |
|     cleanedUseIndex.push(useIndex);
 | |
|   } else {
 | |
|     cleanedUseIndex = useIndex;
 | |
|   }
 | |
| 
 | |
|   return cleanedUseIndex.map(function (name) {
 | |
|     return name.replace('_design/', '');
 | |
|   });
 | |
| }
 | |
| 
 | |
| function massageIndexDef(indexDef) {
 | |
|   indexDef.fields = indexDef.fields.map(function (field) {
 | |
|     if (typeof field === 'string') {
 | |
|       var obj = {};
 | |
|       obj[field] = 'asc';
 | |
|       return obj;
 | |
|     }
 | |
|     return field;
 | |
|   });
 | |
|   return indexDef;
 | |
| }
 | |
| 
 | |
| function getKeyFromDoc(doc, index) {
 | |
|   var res = [];
 | |
|   for (var i = 0; i < index.def.fields.length; i++) {
 | |
|     var field = getKey(index.def.fields[i]);
 | |
|     res.push(doc[field]);
 | |
|   }
 | |
|   return res;
 | |
| }
 | |
| 
 | |
| // have to do this manually because CouchDB never implemented inclusive_start,
 | |
| // so rows at the start of the range that should be excluded are filtered out here
 | |
| function filterInclusiveStart(rows, targetValue, index) {
 | |
|   var indexFields = index.def.fields;
 | |
|   for (var i = 0, len = rows.length; i < len; i++) {
 | |
|     var row = rows[i];
 | |
| 
 | |
|     // shave off any docs at the beginning that are <= the
 | |
|     // target value
 | |
| 
 | |
|     var docKey = getKeyFromDoc(row.doc, index);
 | |
|     if (indexFields.length === 1) {
 | |
|       docKey = docKey[0]; // only one field, not multi-field
 | |
|     } else { // more than one field in index
 | |
|       // in the case where e.g. the user is searching {$gt: {a: 1}}
 | |
|       // but the index is [a, b], then we need to shorten the doc key
 | |
|       while (docKey.length > targetValue.length) {
 | |
|         docKey.pop();
 | |
|       }
 | |
|     }
 | |
|     // use Math.abs() since we're just looking for values that don't match
 | |
|     if (Math.abs(collate(docKey, targetValue)) > 0) {
 | |
|       // no need to filter any further; we're past the key
 | |
|       break;
 | |
|     }
 | |
|   }
 | |
|   return i > 0 ? rows.slice(i) : rows;
 | |
| }
 | |
| 
 | |
| function reverseOptions(opts) {
 | |
|   var newOpts = clone(opts);
 | |
|   delete newOpts.startkey;
 | |
|   delete newOpts.endkey;
 | |
|   delete newOpts.inclusive_start;
 | |
|   delete newOpts.inclusive_end;
 | |
| 
 | |
|   if ('endkey' in opts) {
 | |
|     newOpts.startkey = opts.endkey;
 | |
|   }
 | |
|   if ('startkey' in opts) {
 | |
|     newOpts.endkey = opts.startkey;
 | |
|   }
 | |
|   if ('inclusive_start' in opts) {
 | |
|     newOpts.inclusive_end = opts.inclusive_start;
 | |
|   }
 | |
|   if ('inclusive_end' in opts) {
 | |
|     newOpts.inclusive_start = opts.inclusive_end;
 | |
|   }
 | |
|   return newOpts;
 | |
| }
 | |
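// Illustrative sketch (not part of the original bundle): when the index has
// to be walked in the opposite direction, the key range and the
// inclusiveness flags swap ends.
function exampleReverseOptions() {
  // => {startkey: 'z', endkey: 'a', inclusive_end: false}
  return reverseOptions({startkey: 'a', endkey: 'z', inclusive_start: false});
}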
| 
 | |
| function validateIndex(index) {
 | |
|   var ascFields = index.fields.filter(function (field) {
 | |
|     return getValue(field) === 'asc';
 | |
|   });
 | |
|   if (ascFields.length !== 0 && ascFields.length !== index.fields.length) {
 | |
|     throw new Error('unsupported mixed sorting');
 | |
|   }
 | |
| }
 | |
| 
 | |
| function validateSort(requestDef, index) {
 | |
|   if (index.defaultUsed && requestDef.sort) {
 | |
|     var noneIdSorts = requestDef.sort.filter(function (sortItem) {
 | |
|       return Object.keys(sortItem)[0] !== '_id';
 | |
|     }).map(function (sortItem) {
 | |
|       return Object.keys(sortItem)[0];
 | |
|     });
 | |
| 
 | |
|     if (noneIdSorts.length > 0) {
 | |
|       throw new Error('Cannot sort on field(s) "' + noneIdSorts.join(',') +
 | |
|       '" when using the default index');
 | |
|     }
 | |
|   }
 | |
| 
 | |
|   if (index.defaultUsed) {
 | |
|     return;
 | |
|   }
 | |
| }
 | |
| 
 | |
| function validateFindRequest(requestDef) {
 | |
|   if (typeof requestDef.selector !== 'object') {
 | |
|     throw new Error('you must provide a selector when you find()');
 | |
|   }
 | |
| 
 | |
|   /*var selectors = requestDef.selector['$and'] || [requestDef.selector];
 | |
|   for (var i = 0; i < selectors.length; i++) {
 | |
|     var selector = selectors[i];
 | |
|     var keys = Object.keys(selector);
 | |
|     if (keys.length === 0) {
 | |
|       throw new Error('invalid empty selector');
 | |
|     }
 | |
|     //var selection = selector[keys[0]];
 | |
|     /*if (Object.keys(selection).length !== 1) {
 | |
|       throw new Error('invalid selector: ' + JSON.stringify(selection) +
 | |
|         ' - it must have exactly one key/value');
 | |
|     }
 | |
|   }*/
 | |
| }
 | |
| 
 | |
| // determine the maximum number of fields
 | |
| // we're going to need to query, e.g. if the user
 | |
| // has selection ['a'] and sorting ['a', 'b'], then we
 | |
| // need to use the longer of the two: ['a', 'b']
 | |
| function getUserFields(selector, sort) {
 | |
|   var selectorFields = Object.keys(selector);
 | |
|   var sortFields = sort? sort.map(getKey) : [];
 | |
|   var userFields;
 | |
|   if (selectorFields.length >= sortFields.length) {
 | |
|     userFields = selectorFields;
 | |
|   } else {
 | |
|     userFields = sortFields;
 | |
|   }
 | |
| 
 | |
|   if (sortFields.length === 0) {
 | |
|     return {
 | |
|       fields: userFields
 | |
|     };
 | |
|   }
 | |
| 
 | |
|   // sort according to the user's preferred sorting
 | |
|   userFields = userFields.sort(function (left, right) {
 | |
|     var leftIdx = sortFields.indexOf(left);
 | |
|     if (leftIdx === -1) {
 | |
|       leftIdx = Number.MAX_VALUE;
 | |
|     }
 | |
|     var rightIdx = sortFields.indexOf(right);
 | |
|     if (rightIdx === -1) {
 | |
|       rightIdx = Number.MAX_VALUE;
 | |
|     }
 | |
|     return leftIdx < rightIdx ? -1 : leftIdx > rightIdx ? 1 : 0;
 | |
|   });
 | |
| 
 | |
|   return {
 | |
|     fields: userFields,
 | |
|     sortOrder: sort.map(getKey)
 | |
|   };
 | |
| }
 | |
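// Illustrative sketch (not part of the original bundle): matches the comment
// above — selecting on one field while sorting on two means both sort fields
// have to be queried, in the user's sort order.
function exampleGetUserFields() {
  // => {fields: ['name', 'age'], sortOrder: ['name', 'age']}
  return getUserFields({name: 'mario'}, [{name: 'asc'}, {age: 'asc'}]);
}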
| 
 | |
| function createIndex$1(db, requestDef) {
 | |
|   requestDef = massageCreateIndexRequest(requestDef);
 | |
|   var originalIndexDef = clone(requestDef.index);
 | |
|   requestDef.index = massageIndexDef(requestDef.index);
 | |
| 
 | |
|   validateIndex(requestDef.index);
 | |
| 
 | |
|   // calculating md5 is expensive - memoize and only
 | |
|   // run if required
 | |
|   var md5;
 | |
|   function getMd5() {
 | |
|     return md5 || (md5 = stringMd5(JSON.stringify(requestDef)));
 | |
|   }
 | |
| 
 | |
|   var viewName = requestDef.name || ('idx-' + getMd5());
 | |
| 
 | |
|   var ddocName = requestDef.ddoc || ('idx-' + getMd5());
 | |
|   var ddocId = '_design/' + ddocName;
 | |
| 
 | |
|   var hasInvalidLanguage = false;
 | |
|   var viewExists = false;
 | |
| 
 | |
|   function updateDdoc(doc) {
 | |
|     if (doc._rev && doc.language !== 'query') {
 | |
|       hasInvalidLanguage = true;
 | |
|     }
 | |
|     doc.language = 'query';
 | |
|     doc.views = doc.views || {};
 | |
| 
 | |
|     viewExists = !!doc.views[viewName];
 | |
| 
 | |
|     if (viewExists) {
 | |
|       return false;
 | |
|     }
 | |
| 
 | |
|     doc.views[viewName] = {
 | |
|       map: {
 | |
|         fields: mergeObjects(requestDef.index.fields)
 | |
|       },
 | |
|       reduce: '_count',
 | |
|       options: {
 | |
|         def: originalIndexDef
 | |
|       }
 | |
|     };
 | |
| 
 | |
|     return doc;
 | |
|   }
 | |
| 
 | |
|   db.constructor.emit('debug', ['find', 'creating index', ddocId]);
 | |
| 
 | |
|   return upsert(db, ddocId, updateDdoc).then(function () {
 | |
|     if (hasInvalidLanguage) {
 | |
|       throw new Error('invalid language for ddoc with id "' +
 | |
|       ddocId +
 | |
|       '" (should be "query")');
 | |
|     }
 | |
|   }).then(function () {
 | |
|     // kick off a build
 | |
|     // TODO: abstract-pouchdb-mapreduce should support auto-updating
 | |
|     // TODO: should also use update_after, but pouchdb/pouchdb#3415 blocks me
 | |
|     var signature = ddocName + '/' + viewName;
 | |
|     return abstractMapper$1(db).query.call(db, signature, {
 | |
|       limit: 0,
 | |
|       reduce: false
 | |
|     }).then(function () {
 | |
|       return {
 | |
|         id: ddocId,
 | |
|         name: viewName,
 | |
|         result: viewExists ? 'exists' : 'created'
 | |
|       };
 | |
|     });
 | |
|   });
 | |
| }
 | |
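// Illustrative sketch (not part of the original bundle): this is what backs
// the public db.createIndex() call; the build is kicked off right away via
// the limit:0, reduce:false query above.
function exampleCreateIndex(db) {
  return createIndex$1(db, {index: {fields: ['name', 'age']}}).then(function (res) {
    // res looks like {id: '_design/idx-…', name: 'idx-…', result: 'created' or 'exists'}
    return res;
  });
}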
| 
 | |
| function getIndexes$1(db) {
 | |
|   // just search through all the design docs and filter in-memory.
 | |
|   // hopefully there aren't that many ddocs.
 | |
|   return db.allDocs({
 | |
|     startkey: '_design/',
 | |
|     endkey: '_design/\uffff',
 | |
|     include_docs: true
 | |
|   }).then(function (allDocsRes) {
 | |
|     var res = {
 | |
|       indexes: [{
 | |
|         ddoc: null,
 | |
|         name: '_all_docs',
 | |
|         type: 'special',
 | |
|         def: {
 | |
|           fields: [{_id: 'asc'}]
 | |
|         }
 | |
|       }]
 | |
|     };
 | |
| 
 | |
|     res.indexes = flatten$1(res.indexes, allDocsRes.rows.filter(function (row) {
 | |
|       return row.doc.language === 'query';
 | |
|     }).map(function (row) {
 | |
|       var viewNames = row.doc.views !== undefined ? Object.keys(row.doc.views) : [];
 | |
| 
 | |
|       return viewNames.map(function (viewName) {
 | |
|         var view = row.doc.views[viewName];
 | |
|         return {
 | |
|           ddoc: row.id,
 | |
|           name: viewName,
 | |
|           type: 'json',
 | |
|           def: massageIndexDef(view.options.def)
 | |
|         };
 | |
|       });
 | |
|     }));
 | |
| 
 | |
|     // sort by index name so the listing order is deterministic
 | |
|     res.indexes.sort(function (left, right) {
 | |
|       return compare(left.name, right.name);
 | |
|     });
 | |
|     res.total_rows = res.indexes.length;
 | |
|     return res;
 | |
|   });
 | |
| }
 | |
| 
 | |
| // couchdb lowest collation value
 | |
| var COLLATE_LO = null;
 | |
| 
 | |
| // couchdb highest collation value (not a true maximum, but high enough in practice)
 | |
| var COLLATE_HI = {"\uffff": {}};
 | |
| 
 | |
| const SHORT_CIRCUIT_QUERY = {
 | |
|   queryOpts: { limit: 0, startkey: COLLATE_HI, endkey: COLLATE_LO },
 | |
|   inMemoryFields: []
 | |
| };
 | |
| 
 | |
| // couchdb second-lowest collation value
 | |
| 
 | |
| function checkFieldInIndex(index, field) {
 | |
|   var indexFields = index.def.fields.map(getKey);
 | |
|   for (var i = 0, len = indexFields.length; i < len; i++) {
 | |
|     var indexField = indexFields[i];
 | |
|     if (field === indexField) {
 | |
|       return true;
 | |
|     }
 | |
|   }
 | |
|   return false;
 | |
| }
 | |
| 
 | |
| // so when you do e.g. $eq/$eq, we can do it entirely in the database.
 | |
| // but when you do e.g. $gt/$eq, the first part can be done
 | |
| // in the database, but the second part has to be done in-memory,
 | |
| // because $gt has forced us to lose precision.
 | |
| // so that's what this determines
 | |
| function userOperatorLosesPrecision(selector, field) {
 | |
|   var matcher = selector[field];
 | |
|   var userOperator = getKey(matcher);
 | |
| 
 | |
|   return userOperator !== '$eq';
 | |
| }
 | |
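// Illustrative sketch (not part of the original bundle): only $eq preserves
// enough precision for later index fields to stay in the key range; any
// other operator pushes the remaining fields into in-memory filtering.
function exampleLosesPrecision() {
  return [
    userOperatorLosesPrecision({age: {$eq: 21}}, 'age'), // => false
    userOperatorLosesPrecision({age: {$gt: 21}}, 'age')  // => true
  ];
}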
| 
 | |
| // sort the user fields by their position in the index,
 | |
| // if they're in the index
 | |
| function sortFieldsByIndex(userFields, index) {
 | |
|   var indexFields = index.def.fields.map(getKey);
 | |
| 
 | |
|   return userFields.slice().sort(function (a, b) {
 | |
|     var aIdx = indexFields.indexOf(a);
 | |
|     var bIdx = indexFields.indexOf(b);
 | |
|     if (aIdx === -1) {
 | |
|       aIdx = Number.MAX_VALUE;
 | |
|     }
 | |
|     if (bIdx === -1) {
 | |
|       bIdx = Number.MAX_VALUE;
 | |
|     }
 | |
|     return compare(aIdx, bIdx);
 | |
|   });
 | |
| }
 | |
| 
 | |
| // first pass to try to find fields that will need to be sorted in-memory
 | |
| function getBasicInMemoryFields(index, selector, userFields) {
 | |
| 
 | |
|   userFields = sortFieldsByIndex(userFields, index);
 | |
| 
 | |
|   // check if any of the user selectors lose precision
 | |
|   var needToFilterInMemory = false;
 | |
|   for (var i = 0, len = userFields.length; i < len; i++) {
 | |
|     var field = userFields[i];
 | |
|     if (needToFilterInMemory || !checkFieldInIndex(index, field)) {
 | |
|       return userFields.slice(i);
 | |
|     }
 | |
|     if (i < len - 1 && userOperatorLosesPrecision(selector, field)) {
 | |
|       needToFilterInMemory = true;
 | |
|     }
 | |
|   }
 | |
|   return [];
 | |
| }
 | |
| 
 | |
| function getInMemoryFieldsFromNe(selector) {
 | |
|   var fields = [];
 | |
|   Object.keys(selector).forEach(function (field) {
 | |
|     var matcher = selector[field];
 | |
|     Object.keys(matcher).forEach(function (operator) {
 | |
|       if (operator === '$ne') {
 | |
|         fields.push(field);
 | |
|       }
 | |
|     });
 | |
|   });
 | |
|   return fields;
 | |
| }
 | |
| 
 | |
| function getInMemoryFields(coreInMemoryFields, index, selector, userFields) {
 | |
|   var result = flatten$1(
 | |
|     // in-memory fields reported as necessary by the query planner
 | |
|     coreInMemoryFields,
 | |
|     // combine with another pass that checks for any we may have missed
 | |
|     getBasicInMemoryFields(index, selector, userFields),
 | |
|     // combine with another pass that checks for $ne's
 | |
|     getInMemoryFieldsFromNe(selector)
 | |
|   );
 | |
| 
 | |
|   return sortFieldsByIndex(uniq(result), index);
 | |
| }
 | |
| 
 | |
| // check that at least one field in the user's query is represented
 | |
| // in the index. order matters in the case of sorts
 | |
| function checkIndexFieldsMatch(indexFields, sortOrder, fields) {
 | |
|   if (sortOrder) {
 | |
|     // array has to be a strict subarray of index array. furthermore,
 | |
|     // the sortOrder fields need to all be represented in the index
 | |
|     var sortMatches = oneArrayIsStrictSubArrayOfOther(sortOrder, indexFields);
 | |
|     var selectorMatches = oneArrayIsSubArrayOfOther(fields, indexFields);
 | |
| 
 | |
|     return sortMatches && selectorMatches;
 | |
|   }
 | |
| 
 | |
|   // all of the user's specified fields still need to be
 | |
|   // on the left side of the index array, although the order
 | |
|   // doesn't matter
 | |
|   return oneSetIsSubArrayOfOther(fields, indexFields);
 | |
| }
 | |
| 
 | |
| var logicalMatchers = ['$eq', '$gt', '$gte', '$lt', '$lte'];
 | |
| function isNonLogicalMatcher(matcher) {
 | |
|   return logicalMatchers.indexOf(matcher) === -1;
 | |
| }
 | |
| 
 | |
| // check all the index fields for usages of '$ne'
 | |
| // e.g. if the user queries {foo: {$ne: 'foo'}, bar: {$eq: 'bar'}},
 | |
| // then we can neither use an index on ['foo'] nor an index on
 | |
| // ['foo', 'bar'], but we can use an index on ['bar'] or ['bar', 'foo']
 | |
| function checkFieldsLogicallySound(indexFields, selector) {
 | |
|   var firstField = indexFields[0];
 | |
|   var matcher = selector[firstField];
 | |
| 
 | |
|   if (typeof matcher === 'undefined') {
 | |
|     /* istanbul ignore next */
 | |
|     return true;
 | |
|   }
 | |
| 
 | |
|   var isInvalidNe = Object.keys(matcher).length === 1 &&
 | |
|     getKey(matcher) === '$ne';
 | |
| 
 | |
|   return !isInvalidNe;
 | |
| }
 | |
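// Illustrative sketch (not part of the original bundle): mirrors the comment
// above — a bare $ne on the index's first field rules that index out, while
// the same selector is fine for an index that leads with 'bar'.
function exampleFieldsLogicallySound() {
  var selector = {foo: {$ne: 'foo'}, bar: {$eq: 'bar'}};
  return [
    checkFieldsLogicallySound(['foo', 'bar'], selector), // => false
    checkFieldsLogicallySound(['bar', 'foo'], selector)  // => true
  ];
}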
| 
 | |
| function checkIndexMatches(index, sortOrder, fields, selector) {
 | |
| 
 | |
|   var indexFields = index.def.fields.map(getKey);
 | |
| 
 | |
|   var fieldsMatch = checkIndexFieldsMatch(indexFields, sortOrder, fields);
 | |
| 
 | |
|   if (!fieldsMatch) {
 | |
|     return false;
 | |
|   }
 | |
| 
 | |
|   return checkFieldsLogicallySound(indexFields, selector);
 | |
| }
 | |
| 
 | |
| //
 | |
| // the algorithm is very simple:
 | |
| // take all the fields the user supplies, and if those fields
 | |
| // are a strict subset of the fields in some index,
 | |
| // then use that index
 | |
| //
 | |
| //
 | |
| function findMatchingIndexes(selector, userFields, sortOrder, indexes) {
 | |
|   return indexes.filter(function (index) {
 | |
|     return checkIndexMatches(index, sortOrder, userFields, selector);
 | |
|   });
 | |
| }
 | |
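// Illustrative sketch (not part of the original bundle), assuming an index
// shaped like the ones getIndexes$1() returns: a selector on the index's
// first field matches, a selector that skips that field does not.
function exampleFindMatchingIndexes() {
  var nameAgeIndex = {def: {fields: [{name: 'asc'}, {age: 'asc'}]}};
  return [
    // => [nameAgeIndex] ('name' is a left prefix of the index fields)
    findMatchingIndexes({name: {$eq: 'mario'}}, ['name'], undefined, [nameAgeIndex]),
    // => [] ('age' alone is not usable without 'name')
    findMatchingIndexes({age: {$gt: 21}}, ['age'], undefined, [nameAgeIndex])
  ];
}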
| 
 | |
| // find the best index, i.e. the one that matches the most fields
 | |
| // in the user's query
 | |
| function findBestMatchingIndex(selector, userFields, sortOrder, indexes, useIndex) {
 | |
| 
 | |
|   var matchingIndexes = findMatchingIndexes(selector, userFields, sortOrder, indexes);
 | |
| 
 | |
|   if (matchingIndexes.length === 0) {
 | |
|     if (useIndex) {
 | |
|       throw {
 | |
|         error: "no_usable_index",
 | |
|         message: "There is no index available for this selector."
 | |
|       };
 | |
|     }
 | |
|     //return `all_docs` as a default index;
 | |
|     //I'm assuming that _all_docs is always first
 | |
|     var defaultIndex = indexes[0];
 | |
|     defaultIndex.defaultUsed = true;
 | |
|     return defaultIndex;
 | |
|   }
 | |
|   if (matchingIndexes.length === 1 && !useIndex) {
 | |
|     return matchingIndexes[0];
 | |
|   }
 | |
| 
 | |
|   var userFieldsMap = arrayToObject(userFields);
 | |
| 
 | |
|   function scoreIndex(index) {
 | |
|     var indexFields = index.def.fields.map(getKey);
 | |
|     var score = 0;
 | |
|     for (var i = 0, len = indexFields.length; i < len; i++) {
 | |
|       var indexField = indexFields[i];
 | |
|       if (userFieldsMap[indexField]) {
 | |
|         score++;
 | |
|       }
 | |
|     }
 | |
|     return score;
 | |
|   }
 | |
| 
 | |
|   if (useIndex) {
 | |
|     var useIndexDdoc = '_design/' + useIndex[0];
 | |
|     var useIndexName = useIndex.length === 2 ? useIndex[1] : false;
 | |
|     var index = matchingIndexes.find(function (index) {
 | |
|       if (useIndexName && index.ddoc === useIndexDdoc && useIndexName === index.name) {
 | |
|         return true;
 | |
|       }
 | |
| 
 | |
|       if (index.ddoc === useIndexDdoc) {
 | |
|         /* istanbul ignore next */
 | |
|         return true;
 | |
|       }
 | |
| 
 | |
|       return false;
 | |
|     });
 | |
| 
 | |
|     if (!index) {
 | |
|       throw {
 | |
|         error: "unknown_error",
 | |
|         message: "Could not find that index or could not use that index for the query"
 | |
|       };
 | |
|     }
 | |
|     return index;
 | |
|   }
 | |
| 
 | |
|   return max(matchingIndexes, scoreIndex);
 | |
| }
 | |
| 
 | |
| function getSingleFieldQueryOptsFor(userOperator, userValue) {
 | |
|   switch (userOperator) {
 | |
|     case '$eq':
 | |
|       return {key: userValue};
 | |
|     case '$lte':
 | |
|       return {endkey: userValue};
 | |
|     case '$gte':
 | |
|       return {startkey: userValue};
 | |
|     case '$lt':
 | |
|       return {
 | |
|         endkey: userValue,
 | |
|         inclusive_end: false
 | |
|       };
 | |
|     case '$gt':
 | |
|       return {
 | |
|         startkey: userValue,
 | |
|         inclusive_start: false
 | |
|       };
 | |
|   }
 | |
| 
 | |
|   return {
 | |
|     startkey: COLLATE_LO
 | |
|   };
 | |
| }
 | |
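// Illustrative sketch (not part of the original bundle): each Mango
// comparison operator maps onto a plain key-range option object; anything
// unrecognized falls through to an unbounded range starting at COLLATE_LO.
function exampleSingleFieldQueryOpts() {
  return [
    getSingleFieldQueryOptsFor('$eq', 21),      // => {key: 21}
    getSingleFieldQueryOptsFor('$gt', 21),      // => {startkey: 21, inclusive_start: false}
    getSingleFieldQueryOptsFor('$exists', true) // => {startkey: COLLATE_LO}
  ];
}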
| 
 | |
| function getSingleFieldCoreQueryPlan(selector, index) {
 | |
|   var field = getKey(index.def.fields[0]);
 | |
|   //ignoring this because the test to exercise the branch is skipped at the moment
 | |
|   /* istanbul ignore next */
 | |
|   var matcher = selector[field] || {};
 | |
|   var inMemoryFields = [];
 | |
| 
 | |
|   var userOperators = Object.keys(matcher);
 | |
| 
 | |
|   var combinedOpts;
 | |
| 
 | |
|   userOperators.forEach(function (userOperator) {
 | |
| 
 | |
|     if (isNonLogicalMatcher(userOperator)) {
 | |
|       inMemoryFields.push(field);
 | |
|     }
 | |
| 
 | |
|     var userValue = matcher[userOperator];
 | |
| 
 | |
|     var newQueryOpts = getSingleFieldQueryOptsFor(userOperator, userValue);
 | |
| 
 | |
|     if (combinedOpts) {
 | |
|       combinedOpts = mergeObjects([combinedOpts, newQueryOpts]);
 | |
|     } else {
 | |
|       combinedOpts = newQueryOpts;
 | |
|     }
 | |
|   });
 | |
| 
 | |
|   return {
 | |
|     queryOpts: combinedOpts,
 | |
|     inMemoryFields: inMemoryFields
 | |
|   };
 | |
| }
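
// Illustrative example (editorial note, assumed values): with an index on
// ['age'] and the selector {age: {$gte: 21, $lt: 65}}, the two operator
// results are merged into roughly
//   {queryOpts: {startkey: 21, endkey: 65, inclusive_end: false}, inMemoryFields: []}
// Operators classified by isNonLogicalMatcher() keep the field in
// inMemoryFields so those conditions are re-checked in memory after the
// index scan.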

function getMultiFieldCoreQueryPlan(userOperator, userValue) {
  switch (userOperator) {
    case '$eq':
      return {
        startkey: userValue,
        endkey: userValue
      };
    case '$lte':
      return {
        endkey: userValue
      };
    case '$gte':
      return {
        startkey: userValue
      };
    case '$lt':
      return {
        endkey: userValue,
        inclusive_end: false
      };
    case '$gt':
      return {
        startkey: userValue,
        inclusive_start: false
      };
  }
}
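
// Editorial note: this is the multi-field counterpart of
// getSingleFieldQueryOptsFor(); $eq is expressed as startkey === endkey rather
// than a single `key`, because each field contributes one element to the
// composite [startkey, endkey] arrays built in getMultiFieldQueryOpts() below.
// Example (assumed value): getMultiFieldCoreQueryPlan('$eq', 'mario')
// // => {startkey: 'mario', endkey: 'mario'}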

function getMultiFieldQueryOpts(selector, index) {

  var indexFields = index.def.fields.map(getKey);

  var inMemoryFields = [];
  var startkey = [];
  var endkey = [];
  var inclusiveStart;
  var inclusiveEnd;


  function finish(i) {

    if (inclusiveStart !== false) {
      startkey.push(COLLATE_LO);
    }
    if (inclusiveEnd !== false) {
      endkey.push(COLLATE_HI);
    }
    // keep track of the fields where we lost specificity,
    // and therefore need to filter in-memory
    inMemoryFields = indexFields.slice(i);
  }

  for (var i = 0, len = indexFields.length; i < len; i++) {
    var indexField = indexFields[i];

    var matcher = selector[indexField];

    if (!matcher || !Object.keys(matcher).length) { // fewer fields in user query than in index
      finish(i);
      break;
    } else if (Object.keys(matcher).some(isNonLogicalMatcher)) { // non-logical are ignored
      finish(i);
      break;
    } else if (i > 0) {
      var usingGtlt = (
        '$gt' in matcher || '$gte' in matcher ||
        '$lt' in matcher || '$lte' in matcher);
      var previousKeys = Object.keys(selector[indexFields[i - 1]]);
      var previousWasEq = arrayEquals(previousKeys, ['$eq']);
      var previousWasSame = arrayEquals(previousKeys, Object.keys(matcher));
      var gtltLostSpecificity = usingGtlt && !previousWasEq && !previousWasSame;
      if (gtltLostSpecificity) {
        finish(i);
        break;
      }
    }

    var userOperators = Object.keys(matcher);

    var combinedOpts = null;

    for (var j = 0; j < userOperators.length; j++) {
      var userOperator = userOperators[j];
      var userValue = matcher[userOperator];

      var newOpts = getMultiFieldCoreQueryPlan(userOperator, userValue);

      if (combinedOpts) {
        combinedOpts = mergeObjects([combinedOpts, newOpts]);
      } else {
        combinedOpts = newOpts;
      }
    }

    startkey.push('startkey' in combinedOpts ? combinedOpts.startkey : COLLATE_LO);
    endkey.push('endkey' in combinedOpts ? combinedOpts.endkey : COLLATE_HI);
    if ('inclusive_start' in combinedOpts) {
      inclusiveStart = combinedOpts.inclusive_start;
    }
    if ('inclusive_end' in combinedOpts) {
      inclusiveEnd = combinedOpts.inclusive_end;
    }
  }

  var res = {
    startkey: startkey,
    endkey: endkey
  };

  if (typeof inclusiveStart !== 'undefined') {
    res.inclusive_start = inclusiveStart;
  }
  if (typeof inclusiveEnd !== 'undefined') {
    res.inclusive_end = inclusiveEnd;
  }

  return {
    queryOpts: res,
    inMemoryFields: inMemoryFields
  };
}
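
// Illustrative example (editorial note, assumed values): with an index on
// ['series', 'debut'] and the selector {series: {$eq: 'mario'}, debut: {$gt: 1990}},
// the composite range becomes roughly
//   {startkey: ['mario', 1990], endkey: ['mario', COLLATE_HI], inclusive_start: false}
// with no in-memory fields. If the first field used a range operator that does
// not match the second field's operators (e.g. series: {$gte: 'l'} with
// debut: {$gt: 1990}), specificity is lost at the second field: finish() pads
// the keys with the COLLATE_LO/COLLATE_HI sentinels and 'debut' has to be
// filtered in memory.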

function shouldShortCircuit(selector) {
  // We have a field to select on, but not a valid value;
  // this should result in a short-circuited query,
  // just like the http adapter (couchdb) and mongodb.
  // See tests for issue #7810.

  // @todo Use 'Object.values' when Node.js v6 support is dropped.
  const values = Object.keys(selector).map(function (key) {
    return selector[key];
  });
  return values.some(function (val) {
    return typeof val === 'object' && Object.keys(val).length === 0;
  });
}
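
// Example (editorial note, assumed values): a selector such as {foo: {}} names
// a field but supplies no usable condition, so shouldShortCircuit() returns
// true and planQuery() answers with the SHORT_CIRCUIT_QUERY plan defined
// earlier in this bundle, which is intended to yield no documents instead of
// scanning an index, mirroring CouchDB's behaviour for issue #7810.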

function getDefaultQueryPlan(selector) {
  //using default index, so all fields need to be done in memory
  return {
    queryOpts: {startkey: null},
    inMemoryFields: [Object.keys(selector)]
  };
}

function getCoreQueryPlan(selector, index) {
  if (index.defaultUsed) {
    return getDefaultQueryPlan(selector, index);
  }

  if (index.def.fields.length === 1) {
    // one field in index, so the value was indexed as a singleton
    return getSingleFieldCoreQueryPlan(selector, index);
  }
  // else index has multiple fields, so the value was indexed as an array
  return getMultiFieldQueryOpts(selector, index);
}

function planQuery(request, indexes) {

  var selector = request.selector;
  var sort = request.sort;

  if (shouldShortCircuit(selector)) {
    return $inject_Object_assign({}, SHORT_CIRCUIT_QUERY, { index: indexes[0] });
  }

  var userFieldsRes = getUserFields(selector, sort);

  var userFields = userFieldsRes.fields;
  var sortOrder = userFieldsRes.sortOrder;
  var index = findBestMatchingIndex(selector, userFields, sortOrder, indexes, request.use_index);

  var coreQueryPlan = getCoreQueryPlan(selector, index);
  var queryOpts = coreQueryPlan.queryOpts;
  var coreInMemoryFields = coreQueryPlan.inMemoryFields;

  var inMemoryFields = getInMemoryFields(coreInMemoryFields, index, selector, userFields);

  var res = {
    queryOpts: queryOpts,
    index: index,
    inMemoryFields: inMemoryFields
  };
  return res;
}
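
// Overview (editorial note): planQuery() ties the pieces above together. For
// an assumed request like
//   {selector: {name: {$eq: 'mario'}}, sort: [{name: 'asc'}]}
// it extracts the user fields, picks the best matching index, builds the
// startkey/endkey options for that index, and reports which fields (if any)
// still have to be filtered in memory.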

function indexToSignature(index) {
  // remove '_design/'
  return index.ddoc.substring(8) + '/' + index.name;
}
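
// Example (editorial note, assumed values): for index.ddoc === '_design/idx-abc123'
// and index.name === 'idx-abc123', indexToSignature(index) returns
// 'idx-abc123/idx-abc123', the "<ddoc>/<view>" form expected by the
// map/reduce query() call further down.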

function doAllDocs(db, originalOpts) {
  var opts = clone(originalOpts);

  // CouchDB responds in weird ways when you provide a non-string to _id;
  // we mimic the behavior for consistency. See issue66 tests for details.
  if (opts.descending) {
    if ('endkey' in opts && typeof opts.endkey !== 'string') {
      opts.endkey = '';
    }
    if ('startkey' in opts && typeof opts.startkey !== 'string') {
      opts.limit = 0;
    }
  } else {
    if ('startkey' in opts && typeof opts.startkey !== 'string') {
      opts.startkey = '';
    }
    if ('endkey' in opts && typeof opts.endkey !== 'string') {
      opts.limit = 0;
    }
  }
  if ('key' in opts && typeof opts.key !== 'string') {
    opts.limit = 0;
  }

  if (opts.limit > 0 && opts.indexes_count) {
    // Brute-force and quite naive: bump the limit by the number of (index)
    // design docs so that filtering them out below does not shrink the
    // result set under the requested limit. (skip is not compensated for)
    opts.original_limit = opts.limit;
    opts.limit += opts.indexes_count;
  }

  return db.allDocs(opts)
    .then(function (res) {
      // filter out any design docs that _all_docs might return
      res.rows = res.rows.filter(function (row) {
        return !/^_design\//.test(row.id);
      });
      // put back original limit
      if (opts.original_limit) {
        opts.limit = opts.original_limit;
      }
      // trim the rows so they respect the given limit
      res.rows = res.rows.slice(0, opts.limit);
      return res;
    });
}
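
// Example (editorial note, assumed numbers): with limit: 5 and indexes_count: 2
// the _all_docs scan is issued with limit: 7; after the _design/ rows are
// filtered out, the result is sliced back down to the original 5 rows.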

function find$1(db, requestDef, explain) {
  if (requestDef.selector) {
    // must be validated before massaging
    validateSelector(requestDef.selector, false);
    requestDef.selector = massageSelector(requestDef.selector);
  }

  if (requestDef.sort) {
    requestDef.sort = massageSort(requestDef.sort);
  }

  if (requestDef.use_index) {
    requestDef.use_index = massageUseIndex(requestDef.use_index);
  }

  validateFindRequest(requestDef);

  return getIndexes$1(db).then(function (getIndexesRes) {

    db.constructor.emit('debug', ['find', 'planning query', requestDef]);
    var queryPlan = planQuery(requestDef, getIndexesRes.indexes);
    db.constructor.emit('debug', ['find', 'query plan', queryPlan]);

    var indexToUse = queryPlan.index;

    validateSort(requestDef, indexToUse);

    var opts = $inject_Object_assign({
      include_docs: true,
      reduce: false,
      // pass the number of indexes so doAllDocs can compensate for design docs (issue #7810)
      indexes_count: getIndexesRes.total_rows
    }, queryPlan.queryOpts);

    if ('startkey' in opts && 'endkey' in opts &&
        collate(opts.startkey, opts.endkey) > 0) {
      // can't possibly return any results, startkey > endkey
      /* istanbul ignore next */
      return {docs: []};
    }

    var isDescending = requestDef.sort &&
      typeof requestDef.sort[0] !== 'string' &&
      getValue(requestDef.sort[0]) === 'desc';

    if (isDescending) {
      // either all descending or all ascending
      opts.descending = true;
      opts = reverseOptions(opts);
    }

    if (!queryPlan.inMemoryFields.length) {
      // no in-memory filtering necessary, so we can let the
      // database do the limit/skip for us
      if ('limit' in requestDef) {
        opts.limit = requestDef.limit;
      }
      if ('skip' in requestDef) {
        opts.skip = requestDef.skip;
      }
    }

    if (explain) {
      return Promise.resolve(queryPlan, opts);
    }

    return Promise.resolve().then(function () {
      if (indexToUse.name === '_all_docs') {
        return doAllDocs(db, opts);
      } else {
        var signature = indexToSignature(indexToUse);
        return abstractMapper$1(db).query.call(db, signature, opts);
      }
    }).then(function (res) {
      if (opts.inclusive_start === false) {
        // may have to manually filter the first one,
        // since couchdb has no true inclusive_start option
        res.rows = filterInclusiveStart(res.rows, opts.startkey, indexToUse);
      }

      if (queryPlan.inMemoryFields.length) {
        // need to filter some fields in-memory
        res.rows = filterInMemoryFields(res.rows, requestDef, queryPlan.inMemoryFields);
      }

      var resp = {
        docs: res.rows.map(function (row) {
          var doc = row.doc;
          if (requestDef.fields) {
            return pick$1(doc, requestDef.fields);
          }
          return doc;
        })
      };

      if (indexToUse.defaultUsed) {
        resp.warning = 'No matching index found, create an index to optimize query time.';
      }

      return resp;
    });
  });
}
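
// Usage sketch (editorial note; database and field names are assumed):
//   db.createIndex({index: {fields: ['name']}})
//     .then(function () {
//       return db.find({selector: {name: {$eq: 'mario'}}, fields: ['_id', 'name']});
//     })
//     .then(function (result) {
//       // result.docs holds the matching documents; result.warning is set
//       // when the default _all_docs index had to be used instead of a real one.
//     });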

function explain$1(db, requestDef) {
  return find$1(db, requestDef, true)
  .then(function (queryPlan) {
    return {
      dbname: db.name,
      index: queryPlan.index,
      selector: requestDef.selector,
      range: {
        start_key: queryPlan.queryOpts.startkey,
        end_key: queryPlan.queryOpts.endkey
      },
      opts: {
        use_index: requestDef.use_index || [],
        bookmark: "nil", // hardcoded to match CouchDB since it's not supported
        limit: requestDef.limit,
        skip: requestDef.skip,
        sort: requestDef.sort || {},
        fields: requestDef.fields,
        conflicts: false, // hardcoded to match CouchDB since it's not supported
        r: [49] // hardcoded to match CouchDB since it's not supported
      },
      limit: requestDef.limit,
      skip: requestDef.skip || 0,
      fields: requestDef.fields
    };
  });
}

function deleteIndex$1(db, index) {

  if (!index.ddoc) {
    throw new Error('you must supply an index.ddoc when deleting');
  }

  if (!index.name) {
    throw new Error('you must supply an index.name when deleting');
  }

  var docId = index.ddoc;
  var viewName = index.name;

  function deltaFun(doc) {
    if (Object.keys(doc.views).length === 1 && doc.views[viewName]) {
      // only one view in this ddoc, delete the whole ddoc
      return {_id: docId, _deleted: true};
    }
    // more than one view here, just remove the view
    delete doc.views[viewName];
    return doc;
  }

  return upsert(db, docId, deltaFun).then(function () {
    return abstractMapper$1(db).viewCleanup.apply(db);
  }).then(function () {
    return {ok: true};
  });
}
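
// Usage sketch (editorial note; shape of the getIndexes() result is assumed):
// an index object as returned by db.getIndexes() can be passed straight back
// to delete it, e.g.
//   db.getIndexes().then(function (res) {
//     var idx = res.indexes.find(function (i) { return i.name !== '_all_docs'; });
//     return idx && db.deleteIndex(idx);
//   });
// Both ddoc and name must be present; the built-in _all_docs index has no
// ddoc and is rejected by the checks above.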

var createIndexAsCallback = callbackify(createIndex$1);
var findAsCallback = callbackify(find$1);
var explainAsCallback = callbackify(explain$1);
var getIndexesAsCallback = callbackify(getIndexes$1);
var deleteIndexAsCallback = callbackify(deleteIndex$1);

var plugin = {};
plugin.createIndex = toPromise(function (requestDef, callback) {

  if (typeof requestDef !== 'object') {
    return callback(new Error('you must provide an index to create'));
  }

  var createIndex$$1 = isRemote(this) ?
    createIndex : createIndexAsCallback;
  createIndex$$1(this, requestDef, callback);
});

plugin.find = toPromise(function (requestDef, callback) {

  if (typeof callback === 'undefined') {
    callback = requestDef;
    requestDef = undefined;
  }

  if (typeof requestDef !== 'object') {
    return callback(new Error('you must provide search parameters to find()'));
  }

  var find$$1 = isRemote(this) ? find : findAsCallback;
  find$$1(this, requestDef, callback);
});

plugin.explain = toPromise(function (requestDef, callback) {

  if (typeof callback === 'undefined') {
    callback = requestDef;
    requestDef = undefined;
  }

  if (typeof requestDef !== 'object') {
    return callback(new Error('you must provide search parameters to explain()'));
  }

  var find$$1 = isRemote(this) ? explain : explainAsCallback;
  find$$1(this, requestDef, callback);
});

plugin.getIndexes = toPromise(function (callback) {

  var getIndexes$$1 = isRemote(this) ? getIndexes : getIndexesAsCallback;
  getIndexes$$1(this, callback);
});

plugin.deleteIndex = toPromise(function (indexDef, callback) {

  if (typeof indexDef !== 'object') {
    return callback(new Error('you must provide an index to delete'));
  }

  var deleteIndex$$1 = isRemote(this) ?
    deleteIndex : deleteIndexAsCallback;
  deleteIndex$$1(this, indexDef, callback);
});
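
// Summary (editorial note): the plugin object exposes createIndex, find,
// explain, getIndexes and deleteIndex. Each wrapper dispatches to the
// remote/HTTP implementation imported earlier in this bundle for remote
// (CouchDB) databases, and to the local, callbackified implementation
// otherwise, e.g. (assumed database name)
//   var db = new PouchDB('mydb');
//   db.createIndex({index: {fields: ['name']}});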

// this code only runs in the browser, as its own dist/ script

if (typeof PouchDB === 'undefined') {
  guardedConsole('error', 'pouchdb-find plugin error: ' +
    'Cannot find global "PouchDB" object! ' +
    'Did you remember to include pouchdb.js?');
} else {
  PouchDB.plugin(plugin);
}

},{"1":1,"10":10,"11":11,"12":12,"3":3,"4":4}]},{},[27]);