added merging of multiple indexes on load (not final) + some work on logging...

Signed-off-by: Alex A. Naanou <alex.nanou@gmail.com>
This commit is contained in:
Alex A. Naanou 2015-11-28 04:55:51 +03:00
parent 078aaf0888
commit de083ab8e2
3 changed files with 106 additions and 15 deletions

View File

@ -169,10 +169,17 @@ function loadJSON(path){
var loadIndex =
module.loadIndex =
function(path, logger){
// XXX should this be interactive (a-la EventEmitter) or as it is now
// return the whole thing as a block (Promise)...
// NOTE: one way to do this is use the logger, it will get
// each index data on an index event
return new Promise(function(resolve, reject){
// we've got an index...
// XXX do we need to check if it's a dir???
if(pathlib.basename(path) == INDEX_DIR){
logger && logger.emit('path', path)
listJSON(path)
// XXX handle errors...
.on('error', function(err){
@ -182,6 +189,9 @@ function(path, logger){
var res = {}
var index = {}
var root = {}
var queued = 0
logger && logger.emit('files-found', files.length, files)
// group by keyword...
//
@ -231,12 +241,14 @@ function(path, logger){
if(index[k] == null){
index[k] = [[d, n]]
logger && logger.emit('queued', n)
queued += 1
// do not add anything past the latest non-diff
// for each keyword...
} else if(index[k].slice(-1)[0][0] == true){
index[k].push([d, n])
logger && logger.emit('queued', n)
queued += 1
}
})
@ -249,14 +261,17 @@ function(path, logger){
if(index[k] == null){
index[k] = [[false, n]]
logger && logger.emit('queued', n)
queued += 1
// add root file if no base is found...
} else if(index[k].slice(-1)[0][0] == true){
index[k].push([false, n])
logger && logger.emit('queued', n)
queued += 1
}
})
logger && logger.emit('files-queued', queued)
// load...
Promise
@ -276,7 +291,7 @@ function(path, logger){
// load latest...
return loadJSON(latest)
.then(function(data){
logger && logger.emit('loaded', latest)
logger && logger.emit('loaded', keyword, latest)
var loading = {}
@ -311,7 +326,7 @@ function(path, logger){
data[n] = json[n]
}
logger && logger.emit('loaded', p)
logger && logger.emit('loaded', keyword+'-diff', p)
})
res[keyword] = data

View File

@ -624,6 +624,21 @@ module.ImagesPrototype = {
},
clone: function(){
return (new Images()).join(this)
},
// NOTE: this will join the other data into the current object in-place,
// use .clone() to preserve current data...
join: function(other){
var that = this
other.forEach(function(gid, img){
that[gid] = img
})
return this
},
// serialization...
loadJSON: function(data){
data = typeof(data) == typeof('str')

View File

@ -1467,7 +1467,7 @@ module.Journal = ImageGridFeatures.Feature({
var PartialRibbonsActions = actions.Actions({
// NOTE: this will not work from chrome when loading from a local fs...
// XXX experimental...
startCacheWorker: [
startCacheWorker: ['Interface/',
function(){
// a worker is started already...
if(this.cacheWorker != null){
@ -1501,7 +1501,7 @@ var PartialRibbonsActions = actions.Actions({
this.cacheWorker = new Worker(url)
this.cacheWorker.url = url
}],
stopCacheWorker: [
stopCacheWorker: ['Interface/',
function(){
if(this.cacheWorker){
this.cacheWorker.terminate()
@ -3235,23 +3235,84 @@ var FileSystemLoaderActions = actions.Actions({
var that = this
// XXX get a logger...
logger = logger || this.logger
// XXX this will not work for explicit path (path to a dir
// that contains the index)
file.loadIndex(path, logger)
.then(function(res){
// XXX if res is empty load raw...
// XXX res may contain multiple indexes, need to
// combine them...
var k = Object.keys(res)[0]
var index = res[k]
// XXX use the logger...
console.log('LOADING:', k, res)
//console.log('FOUND INDEXES:', Object.keys(res).length)
that.load(file.buildIndex(index, k))
// skip nested paths...
// XXX make this optional...
// XXX this is best done BEFORE we load all the
// indexes, e.g. in .loadIndex(..)
var paths = Object.keys(res)
var skipped = []
paths.forEach(function(p){
// already removed...
if(skipped.indexOf(p) >= 0){
return
}
paths
// get all paths that fully contain p...
.filter(function(o){
return o != p && o.indexOf(p) == 0
})
// drop all longer paths...
.forEach(function(e){
skipped.push(e)
delete res[e]
})
})
//console.log('SKIPPING NESTED:', skipped.length)
var index
// NOTE: res may contain multiple indexes...
for(var k in res){
// skip empty indexes...
// XXX should we rebuild or list here???
if(res[k].data == null || res[k].images == null){
continue
}
var part = file.buildIndex(res[k], k)
// load the first index...
if(index == null){
// XXX use the logger...
//console.log('LOADING:', k, res)
logger && logger.emit('base index', k, res)
index = part
// merge indexes...
// XXX need to skip sub-indexes in the same sub-tree...
// ...skip any path that fully contains an
// already loaded path...
// XXX load data in chunks rather than merge...
} else {
//console.log('MERGING:', k, part)
logger && logger.emit('merge index', k, res)
// merge...
// XXX this appears to lose bookmarks and other tags...
index.data.join(part.data)
index.images.join(part.images)
}
// XXX do a better merge and remove this...
break
}
logger && logger.emit('load index', index)
that.load(index)
})
}],
})