updated and migrated file.js

Signed-off-by: Alex A. Naanou <alex.nanou@gmail.com>
Alex A. Naanou 2023-05-08 20:43:05 +03:00
parent 69bbc872c6
commit 250869530f

file.js

@@ -44,7 +44,6 @@ module.skipNested =
 function(paths, index_dir, logger){
 index_dir = index_dir || INDEX_DIR
 logger = logger && logger.push('Skipping nested')
-
 paths = paths
 .map(function(p){
 return p.split(index_dir).shift() })
@@ -82,11 +81,13 @@ function(paths, index_dir, logger){
 // matches manually...
 var guaranteeGlobEvents =
 module.guaranteeGlobEvents =
-function(glob){ return guaranteeEvents('data end', glob) }
+function(glob){
+return guaranteeEvents('data end', glob) }

 var gGlob =
 module.gGlob = function(){
-return guaranteeGlobEvents(glob.globStream.apply(null, arguments)) }
+return guaranteeGlobEvents(
+glob.globStream.apply(null, arguments)) }
@@ -109,20 +110,21 @@ function(base, index_dir){
 var getIndexes =
 module.getIndexes =
 function(base, index_dir, logger){
-logger = logger && logger.push('Searching')
+logger = logger
+&& logger.push('Searching')
 return new Promise(function(resolve, reject){
+var paths = []
 listIndexes(base, index_dir)
 .on('error', function(err){
-reject(err)
-})
+reject(err) })
 .on('data', function(path){
-logger && logger.emit('found', path)
-})
-.on('end', function(paths){
+logger
+&& logger.emit('found', path)
+paths.push(path) })
+.on('end', function(){
 // skip nested indexes...
-resolve(skipNested(paths, index_dir, logger))
-}) }) }
+resolve(
+skipNested(paths, index_dir, logger)) }) }) }

 var listPreviews =
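The functional change in the hunk above: matched paths are now collected into a local array on the stream's 'data' events instead of being expected as an argument to 'end' (which carries no payload). A minimal sketch of that pattern; collect() and its stream argument are illustrative stand-ins, not part of file.js:

// Sketch only: wrap any EventEmitter-style stream that emits
// 'data' items followed by 'end' into a Promise of all items.
function collect(stream){
	return new Promise(function(resolve, reject){
		var items = []
		stream
			.on('error', function(err){
				reject(err) })
			// accumulate results as they arrive...
			.on('data', function(item){
				items.push(item) })
			// 'end' carries no data, so resolve with what was collected...
			.on('end', function(){
				resolve(items) }) }) }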
@@ -136,7 +138,8 @@ function(base, img_pattern){
 var listJSON =
 module.listJSON =
 function(path, pattern){
-pattern = pattern || '*'
+pattern = pattern
+|| '*'
 path = util.normalizePath(path)
 return gGlob(path +'/'+ pattern +'.json', {strict: false}) }
@@ -153,8 +156,10 @@ function(func){
 return function(){
 var args = [...arguments]
 return new Promise(function(resolve, reject){
-func.apply(that, args.concat([function(err, res){
-return err ? reject(err) : resolve(res) }])) }) } }
+func.call(that, ...args, function(err, res){
+return err ?
+reject(err)
+: resolve(res) }) }) } }

 var loadFile = denodeify(fse.readFile)
 var writeFile = denodeify(fse.writeFile)
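denodeify, touched in the hunk above, wraps a Node-style API (err-first callback as the last argument) into a Promise-returning function; the commit switches it from func.apply(that, args.concat([cb])) to the spread form func.call(that, ...args, cb). A self-contained sketch of the same idea, using fs.readFile purely as an illustrative callee:

// Sketch of the promisification pattern used by module.denodeify...
var fs = require('fs')

function denodeify(func){
	var that = this
	return function(){
		var args = [...arguments]
		return new Promise(function(resolve, reject){
			// append an err-first callback and settle the promise from it...
			func.call(that, ...args, function(err, res){
				return err ?
					reject(err)
					: resolve(res) }) }) } }

// usage: read a file as a promise...
var readFile = denodeify(fs.readFile)
readFile('./package.json', 'utf8')
	.then(function(text){ console.log(text.length) })
	.catch(console.error)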
@@ -163,9 +168,8 @@ var ensureDir = denodeify(fse.ensureDir)
 // XXX handle errors...
 function loadJSON(path){
-path = util.normalizePath(path)
-return loadFile(path).then(JSON.parse)
-}
+return loadFile(util.normalizePath(path))
+.then(JSON.parse) }

 // Format:
@@ -189,25 +193,20 @@ var groupByDate =
 module.groupByDate =
 function(list){
 var res = {}
 list
 .forEach(function(n){
 var b = pathlib.basename(n)
 var s = b.split(/[-.]/g).slice(0, -1)
 // no date set...
 if(s.length == 1){
-res.root = res.root || []
+res.root = res.root
+|| []
 res.root.push(n)
 } else {
-res[s[0]] = res[s[0]] || []
-res[s[0]].push(n)
-}
-})
-return res
-}
+res[s[0]] = res[s[0]]
+|| []
+res[s[0]].push(n) } })
+return res }
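For context, groupByDate buckets index file paths by the date prefix of their basename, collecting undated files under a root key; the reformatting in the hunk above does not change that behavior. A rough illustration with hypothetical file names following the '${DATE}-${KEYWORD}.${EXT}' convention used further down:

// Hypothetical input...
var files = [
	'/idx/20230101000000-tags.json',
	'/idx/20230101000000-data-diff.json',
	'/idx/20230315120000-tags-diff.json',
	'/idx/tags.json',
]

// groupByDate(files) would then yield roughly:
// {
//     '20230101000000': [
//         '/idx/20230101000000-tags.json',
//         '/idx/20230101000000-data-diff.json'],
//     '20230315120000': [
//         '/idx/20230315120000-tags-diff.json'],
//     root: [
//         '/idx/tags.json'],
// }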
@@ -249,7 +248,11 @@ function(list, from_date, logger){
 .reverse()
 // skip dates before from_date...
 // NOTE: from_date is included...
-.filter(function(d){ return from_date ? d <= from_date || d == 'root' : true })
+.filter(function(d){
+return from_date ?
+d <= from_date
+|| d == 'root'
+: true })
 .forEach(function(d){
 dates[d]
 .sort()
@@ -265,53 +268,47 @@ function(list, from_date, logger){
 var k = s[1]
 var d = s[2] == 'diff'
-k = d ? k : s.slice(1).join('-')
+k = d ?
+k
+: s.slice(1).join('-')
 // new keyword...
 if(index[k] == null){
 index[k] = [[d, n]]
 logger && logger.emit('queued', n)
 queued += 1
 // do not add anything past the latest non-diff
 // for each keyword...
 } else if(index[k].slice(-1)[0][0] == true){
 index[k].push([d, n])
 logger && logger.emit('queued', n)
-queued += 1
-}
-}) });
+queued += 1 } }) })
 // add base files back where needed...
 // <keyword>.json / non-diff
-(dates.root || [])
+;(dates.root || [])
 .forEach(function(n){
 var b = pathlib.basename(n)
 var k = b.split(/\./g)[0]
 // no diffs...
 if(index[k] == null){
 index[k] = [[false, n]]
-logger && logger.emit('queued', n)
+logger
+&& logger.emit('queued', n)
 queued += 1
 // add root file if no base is found...
 } else if(index[k].slice(-1)[0][0] == true){
 index[k].push([false, n])
-logger && logger.emit('queued', n)
-queued += 1
-}
-})
+logger
+&& logger.emit('queued', n)
+queued += 1 } })
 // remove the flags...
 for(var k in index){
 index[k] = index[k]
-.map(function(e){ return e[1] }) }
+.map(function(e){
+return e[1] }) }
-logger && logger.emit('files-queued', queued, index)
+logger
+&& logger.emit('files-queued', queued, index)
-return index
-}
+return index }
@@ -326,40 +323,41 @@ function(list, from_date, logger){
 var loadSaveHistoryList =
 module.loadSaveHistoryList =
 function(path, index_dir, logger){
-logger = logger && logger.push('Save history')
+logger = logger
+&& logger.push('Save history')
 path = util.normalizePath(path)
-index_dir = index_dir || INDEX_DIR
+index_dir = index_dir
+|| INDEX_DIR
 return new Promise(function(resolve, reject){
 // direct index...
 if(pathlib.basename(path) == index_dir){
+var files = []
 listJSON(path)
 // XXX handle errors...
 .on('error', function(err){
-logger && logger.emit('error', err)
-console.error(err)
-})
-.on('end', function(files){
+logger
+&& logger.emit('error', err)
+console.error(err) })
+.on('data', function(path){
+paths.push(path) })
+.on('end', function(){
 var data = groupByDate(files)
 // XXX should we mark the root timestamp in any way???
-if('root' in data && data.root.length > 0){
+if('root' in data
+&& data.root.length > 0){
 // XXX handle stat error...
 data[fse.statSync(data.root[0]).birthtime.getTimeStamp()] = data.root
-delete data.root
-}
-resolve(data)
-})
+delete data.root }
+resolve(data) })
 // need to locate indexes...
 } else {
 var res = {}
 getIndexes(path, index_dir, logger)
 .catch(function(err){
-logger && logger.emit('error', err)
+logger
+&& logger.emit('error', err)
 console.error(err) })
 .then(function(paths){
 // start loading...
@@ -376,7 +374,8 @@ function(path, index_dir, logger){
 // we do not need to include the index
 // itself in the base path...
 res[p] = obj }) })) })
-.then(function(){ resolve(res) }) } }) }
+.then(function(){
+resolve(res) }) } }) }
@@ -470,22 +469,25 @@ function(path, index_dir, logger){
 var loadIndex =
 module.loadIndex =
 function(path, index_dir, from_date, logger){
-logger = logger && logger.push('Index')
+logger = logger
+&& logger.push('Index')
 path = util.normalizePath(path)
-if(index_dir && index_dir.emit != null){
+if(index_dir
+&& index_dir.emit != null){
 logger = index_dir
 index_dir = from_date = null
-} else if(from_date && from_date.emit != null){
+} else if(from_date
+&& from_date.emit != null){
 logger = from_date
-from_date = null
-}
+from_date = null }
 //index_dir = index_dir || INDEX_DIR
 index_dir = index_dir === false ?
 index_dir
-: (index_dir || INDEX_DIR)
+: (index_dir
+|| INDEX_DIR)
 // XXX should this be interactive (a-la EventEmitter) or as it is now
 // return the whole thing as a block (Promise)...
@@ -499,25 +501,29 @@ function(path, index_dir, from_date, logger){
 var p = util.normalizePath(path).split(/[\\\/]/g).slice(-i.length)
 var explicit_index_dir = !index_dir
-|| (i.filter(function(e, j){ return e == p[j] }).length == i.length)
+|| (i
+.filter(function(e, j){
+return e == p[j] })
+.length == i.length)
 // we've got an index...
 // XXX do we need to check if it's a dir???
 if(explicit_index_dir){
-logger && logger.emit('path', path)
+logger
+&& logger.emit('path', path)
+var files = []
 listJSON(path)
 // XXX handle errors...
 .on('error', function(err){
-logger && logger.emit('error', err)
-console.error(err)
-})
-.on('end', function(files){
+logger
+&& logger.emit('error', err)
+console.error(err) })
+.on('data', function(path){
+files.push(path)})
+.on('end', function(){
 var res = {}
 var index = groupByKeyword(files, from_date, logger)
 // load...
 Promise
 .all(Object.keys(index).map(function(keyword){
@@ -528,12 +534,10 @@ function(path, index_dir, from_date, logger){
 // XXX not sure about this...
 if(keyword == '__dates'){
 res.__dates = index.__dates
-return true
-}
+return true }
 if(keyword == '__date'){
 res.__date = index.__date
-return true
-}
+return true }
 // NOTE: so far I really do not like how nested and
 // unreadable the Promise/Deferred code becomes
@@ -549,7 +553,6 @@ function(path, index_dir, from_date, logger){
 logger && logger.emit('loaded', keyword, latest)
-
 var loading = {}
 // handle diffs...
 return Promise
 // load diffs...
@@ -559,47 +562,32 @@ function(path, index_dir, from_date, logger){
 // XXX we should abort loading this index...
 .catch(function(err){
 logger && logger.emit('error', err)
-console.error(err)
-})
+console.error(err) })
 .then(function(json){
 // NOTE: we can't merge here
 // as the files can be
 // read in arbitrary order...
-loading[p] = json
-})
-}))
+loading[p] = json }) }))
 // merge diffs...
 .then(function(){
 diffs
 .reverse()
 .forEach(function(p){
 var json = loading[p]
 for(var n in json){
-data[n] = json[n]
-}
-logger && logger.emit('loaded', keyword+'-diff', p)
-})
-res[keyword] = data
-})
-})
-}))
+data[n] = json[n] }
+logger
+&& logger.emit('loaded', keyword+'-diff', p) })
+res[keyword] = data }) }) }))
 .then(function(){
-logger && logger.emit('index', path, res)
+logger
+&& logger.emit('index', path, res)
 var d = {}
 d[path] = res
-resolve(d)
-})
-})
+resolve(d) }) })
 // no explicit index given -- find all in sub tree...
 } else {
 var res = {}
 // special case: root index...
 if(fse.existsSync(path +'/'+ index_dir)){
 var n = path +'/'+ index_dir
@@ -612,19 +600,16 @@ function(path, index_dir, from_date, logger){
 // we do not need to include the index
 // itself in the base path...
 res[path] = obj[n]
-resolve(res)
-})
-}
+resolve(res) }) }
 // full search...
 getIndexes(path, index_dir, logger)
 .catch(function(err){
 logger && logger.emit('error', err)
-console.error(err)
-})
+console.error(err) })
 .then(function(paths){
 // start loading...
-Promise.all(paths.map(function(p){
+Promise
+.all(paths.map(function(p){
 p = util.normalizePath(p)
 //var path = pathlib.normalize(p +'/'+ index_dir)
 var path = util.normalizePath(p +'/'+ index_dir)
@@ -635,13 +620,9 @@ function(path, index_dir, from_date, logger){
 // dir (the parent dir to the index root)
 // we do not need to include the index
 // itself in the base path...
-res[p] = obj[path]
-})
-})).then(function(){ resolve(res) })
-})
-}
-})
-}
+res[p] = obj[path] }) }))
+.then(function(){
+resolve(res) }) }) } }) }

 // get/populate the previews...
@@ -663,20 +644,20 @@ function(path, index_dir, from_date, logger){
 var loadPreviews =
 module.loadPreviews =
 function(base, pattern, previews, index_dir, absolute_path){
-previews = previews || {}
-index_dir = index_dir || INDEX_DIR
+previews = previews
+|| {}
+index_dir = index_dir
+|| INDEX_DIR
 base = util.normalizePath(base)
-pattern = pattern || '*'
+pattern = pattern
+|| '*'
 // we got an explicit index....
 if(pathlib.basename(base) == index_dir){
 return new Promise(function(resolve, reject){
 if(!(base in previews)){
-previews[base] = {}
-}
+previews[base] = {} }
 var images = previews[base]
 listPreviews(base, pattern)
 // XXX handle errors....
 //.on('error', function(err){
@@ -687,26 +668,18 @@ function(base, pattern, previews, index_dir, absolute_path){
 // get the data we need...
 var gid = pathlib.basename(path).split(' - ')[0]
 var res = pathlib.basename(pathlib.dirname(path))
 // build the structure if it does not exist...
 if(!(gid in images)){
-images[gid] = {}
-}
+images[gid] = {} }
 if(images[gid].preview == null){
-images[gid].preview = {}
-}
+images[gid].preview = {} }
 // add a preview...
 // NOTE: this will overwrite a previews if they are found in
 // several locations...
 images[gid].preview[res] =
-util.normalizePath(index_dir +'/'+ path.split(index_dir)[1])
-})
+util.normalizePath(index_dir +'/'+ path.split(index_dir)[1]) })
 .on('end', function(){
-resolve(previews)
-})
-})
+resolve(previews) }) })
 // find all sub indexes...
 } else {
 return new Promise(function(resolve, reject){
@@ -716,17 +689,12 @@ function(base, pattern, previews, index_dir, absolute_path){
 //.on('error', function(err){
 //})
 .on('data', function(base){
-queue.push(loadPreviews(base, pattern, previews, index_dir, absolute_path))
-})
+queue.push(
+loadPreviews(base, pattern, previews, index_dir, absolute_path)) })
 .on('end', function(){
 Promise.all(queue)
 .then(function(){
-resolve(previews)
-})
-})
-})
-}
-}
+resolve(previews) }) }) }) } }

 // XXX
@@ -765,33 +733,38 @@ var FILENAME = '${DATE}-${KEYWORD}.${EXT}'
 var writeIndex =
 module.writeIndex =
 function(json, path, date, filename_tpl, logger){
-logger = logger && logger.push('Index')
+logger = logger
+&& logger.push('Index')
 // XXX get this from args/config...
 var spaces = null
 path = util.normalizePath(path)
-filename_tpl = filename_tpl || FILENAME
+filename_tpl = filename_tpl
+|| FILENAME
 // XXX for some reason this gets the unpatched node.js Date, so we
 // get the patched date explicitly...
-date = date || window.Date.timeStamp()
+date = date
+|| window.Date.timeStamp()
 var files = []
 // build the path if it does not exist...
 return ensureDir(path)
 .catch(function(err){
-logger && logger.emit('error', err)
-console.error(err)
-})
+logger
+&& logger.emit('error', err)
+console.error(err) })
 .then(function(){
-logger && logger.emit('path', path)
+logger
+&& logger.emit('path', path)
 // write files...
 // NOTE: we are not doing this sequencilly as there will not
 // be too many files...
 return Promise
-.all(Object.keys(json).map(function(keyword){
+.all(Object.keys(json)
+.map(function(keyword){
 //var data = JSON.stringify(json[keyword])
 var data = JSON.stringify(json[keyword], null, spaces)
@@ -809,23 +782,20 @@ function(json, path, date, filename_tpl, logger){
 .then(function(){
 files.push(file)
-logger && logger.emit('queued', file)
+logger
+&& logger.emit('queued', file)
 return writeFile(file, data, 'utf8')
 .catch(function(err){
-logger && logger.emit('error', err)
-console.error(err)
-})
+logger
+&& logger.emit('error', err)
+console.error(err) })
 .then(function(){
-logger && logger.emit('written', file)
-})
-})
-}))
+logger
+&& logger.emit('written', file) }) }) }))
 .then(function(){
-logger && logger.emit('done', files)
-})
-})
-}
+logger
+&& logger.emit('done', files) }) }) }