split things out into modules...

Signed-off-by: Alex A. Naanou <alex.nanou@gmail.com>
This commit is contained in:
Alex A. Naanou 2022-08-03 01:35:19 +03:00
parent f39943affb
commit 0da4026b27
12 changed files with 1561 additions and 1370 deletions

0
data/fs/.gitkeep Executable file
View File

0
data/pouch/.gitkeep Executable file
View File

BIN
img/pWiki.kra Executable file

Binary file not shown.

BIN
img/pWiki.png Executable file

Binary file not shown.

After

Width:  |  Height:  |  Size: 468 KiB

177
lib/path.js Executable file
View File

@ -0,0 +1,177 @@
/**********************************************************************
*
*
*
**********************************************************************/
((typeof define)[0]=='u'?function(f){module.exports=f(require)}:define)
(function(require){ var module={} // make module AMD/node compatible...
/*********************************************************************/

var types = require('ig-types')



//---------------------------------------------------------------------
// Path...
//
// Path utilities shared by the stores: normalization, splitting,
// joining and generation of candidate paths for page acquisition.

module = {
	// Page returned when listing a path ending with '/'...
	//
	// If set to false treat dirs the same as pages (default)
	//INDEX_PAGE: 'index',
	INDEX_PAGE: false,

	// The page returned when getting the '/' path...
	//
	// NOTE: this is the same as .INDEX_PAGE but only for '/'
	ROOT_PAGE: 'WikiHome',

	// Fallback pages tried (in order) by .paths(..) when nothing else
	// matches...
	ALTERNATIVE_PAGES: [
		'EmptyPage',
		'NotFound',
	],

	// Default alternate search locations...
	//
	// NOTE: if a path here is relative it is also searched relative to
	//		the target path.
	SEARCH_PATHS: [
		//'./Theme/CLI',
		'./Templates',
		'/System',
	],


	// Path utils...
	//
	// Path can be in one of two formats:
	// 	string
	// 	array
	//
	// NOTE: trailing/leading '/' are represented by '' at end/start of
	//		path list...

	// Normalize a path...
	//
	// 	.normalize(<path>[, <format>])
	// 		-> <path>
	//
	// Collapses repeated '/', resolves '.' and '..' elements; a '>>'
	// element pops the previous element (the last '>>' is retained).
	// format is 'string', 'array' or 'auto' (match the input type).
	normalize: function(path='.', format='auto'){
		format = format == 'auto' ?
			(path instanceof Array ?
				'array'
				: 'string')
			: format
		// remember whether the input was absolute (leading '' / '/')...
		var root = path[0] == ''
			|| path[0] == '/'
		path = (path instanceof Array ?
				path
				// NOTE: this will also trim the path elements...
				: path.split(/\s*[\\\/]+\s*/))
			.reduce(function(res, e, i, L){
				// special case: leading '..' / '.'
				if(res.length == 0
						&& e == '..'){
					return [e] }
				;(e == '.'
						// keep explicit '/' only at start/end of path...
						|| (e == ''
							&& i != 0
							&& i != L.length-1)) ?
					undefined
					: e == '..'
							|| res[res.length-1] == '>>' ?
						((res.length > 1
								|| res[0] != '')
							&& res.pop())
						// NOTE: the last '>>' will be retained...
						: res.push(e)
				return res }, [])
		return format == 'string' ?
			// special case: root -> keep '/'
			((root
					&& path.length == 1
					&& path[0] == '') ?
				('/'+ path.join('/'))
				: path.join('/'))
			: path },
	// Normalize to array format...
	split: function(path){
		return this.normalize(path, 'array') },
	// Join parts (or a single array of parts) into a normalized
	// string path...
	join: function(...parts){
		return this.normalize(
			(parts[0] instanceof Array ?
				parts[0]
				: parts)
			.join('/'),
			'string') },
	// Last path element...
	basename: function(path){
		return this.split(path).pop() },
	// Path with the last element removed...
	dirname: function(path){
		return this.relative(path, '..', 'string') },
	// Resolve path relative to parent...
	//
	// NOTE: an absolute path ignores parent...
	relative: function(parent, path, format='auto'){
		format = format == 'auto' ?
			(path instanceof Array ?
				'array'
				: 'string')
			: format
		// root path...
		if(path[0] == '' || path[0] == '/'){
			return this.normalize(path, format) }
		// unify parent/path types...
		parent = parent instanceof Array ?
			parent
			: parent.split(/\s*[\\\/]+\s*/)
		path = path instanceof Array ?
			path
			: path.split(/\s*[\\\/]+\s*/)
		return this.normalize([...parent, ...path], format) },

	// Build alternative paths for page acquisition...
	//
	// Yields, deduplicated via seen:
	// 	- the candidate page up the parent chain and through each
	// 	  of .SEARCH_PATHS,
	// 	- .ROOT_PAGE variants for the root path,
	// 	- .ALTERNATIVE_PAGES as a last resort (top-level call only).
	//
	// NOTE: if seen is given (when called recursively) this will not
	//		search for .ALTERNATIVE_PAGES...
	// XXX should we search for each path element or just the last one (current)???
	// XXX should we keep the trailing '/'???
	paths: function*(path='/', seen){
		var alt_pages = !seen
		seen = seen
			?? new Set()
		path = this.normalize(path, 'string')
		// special case: root...
		if(path == '/' || path == ''){
			// normalize...
			path = '/'
			// as-is...
			seen.add(path)
			yield path
			// special case: root page...
			if(this.ROOT_PAGE){
				yield* this.paths(this.normalize('/'+ this.ROOT_PAGE, 'string'), seen) }}
		// NOTE: since path is already normalized we can trust the delimiter...
		path = path.split(/\//g)
		// normalize relative paths to root...
		path[0] != ''
			&& path.unshift('')
		// paths ending in '/'...
		if(path[path.length-1] == ''){
			path.pop()
			this.INDEX_PAGE
				&& path.push(this.INDEX_PAGE) }
		// search for page...
		var page = path.pop()
		for(var tpl of ['.', ...this.SEARCH_PATHS]){
			// search for page up the path...
			var p = path.slice()
			while(p.length > 0){
				var cur = this.relative(p, tpl +'/'+ page, 'string')
				if(!seen.has(cur)){
					seen.add(cur)
					yield cur }
				// special case: non-relative template/page path...
				if(tpl[0] == '/'){
					break }
				p.pop() } }
		// alternative pages...
		if(alt_pages){
			for(var page of [...this.ALTERNATIVE_PAGES]){
				yield* this.paths(path.concat(page), seen) }} },
}



/**********************************************************************
* vim:set ts=4 sw=4 : */ return module })

1398
pwiki2.js

File diff suppressed because it is too large Load Diff

535
store/base.js Executable file
View File

@ -0,0 +1,535 @@
/**********************************************************************
*
*
*
**********************************************************************/
((typeof define)[0]=='u'?function(f){module.exports=f(require)}:define)
(function(require){ var module={} // make module AMD/node compatible...
/*********************************************************************/
var object = require('ig-object')
var types = require('ig-types')
var pwpath = require('../lib/path')
//---------------------------------------------------------------------
// Store...
//
// To create a store adapter:
// - inherit from BaseStore
// - overload:
// .__paths__()
// -> <keys>
// .__exists__(..)
// -> <path>
// -> false
// .__get__(..)
// -> <data>
// - optionally (for writable stores)
// .__update__(..)
// .__delete__(..)
// .load(..)
//
//
// NOTE: store keys must be normalized to avoid conditions where two
// forms of the same path exist at the same time...
//
//
// XXX potential architectural problems:
// - .paths()
// external index -- is this good???
// bottleneck??
// cache/index???
// ...can we avoid this??
//
// XXX LEADING_SLASH should this be strict about leading '/' in paths???
// ...this may lead to duplicate paths created -- '/a/b' and 'a/b'
// XXX should we support page symlinking???
// XXX async: not sure if we need to return this from async methods...
var BaseStore =
module.BaseStore = {
	// next store in the chain (see .nest(..))...
	// XXX NEXT revise naming...
	next: undefined,

	// NOTE: .data is not part of the spec and can be implementation-specific,
	//		only .__<name>__(..) use it internally... (XXX check this)
	__data: undefined,
	// lazy-create the backing object on first access...
	get data(){
		return this.__data
			?? (this.__data = {}) },
	set data(value){
		this.__data = value },

	// list keys of this store (local only)...
	// XXX might be a good idea to cache this...
	__paths__: async function(){
		return Object.keys(this.data) },
	// list paths, including the chained .next store unless local is set...
	paths: async function(local=false){
		return this.__paths__()
			.iter()
			// XXX NEXT
			.concat((!local && (this.next || {}).paths) ?
				this.next.paths()
				: []) },

	//
	// 	.exists(<path>)
	// 		-> <normalized-path>
	// 		-> false
	//
	// XXX might be a good idea to cache this...
	__exists__: async function(path){
		return path in this.data
			&& path },
	exists: async function(path){
		path = pwpath.normalize(path, 'string')
		return (await this.__exists__(path))
			// NOTE: all paths at this point and in store are
			//		absolute, so we check both with the leading
			//		'/' and without it to make things a bit more
			//		relaxed and return the actual matching path...
			|| (await this.__exists__(
				path[0] == '/' ?
					path.slice(1)
					: ('/'+ path)))
			// XXX NEXT
			// delegate to .next...
			|| ((this.next || {}).__exists__
				&& (await this.next.__exists__(path)
					|| await this.next.__exists__(
						path[0] == '/' ?
							path.slice(1)
							: ('/'+ path))))
			// normalize the output...
			|| false },
	// find the closest existing alternative path (see pwpath.paths(..))...
	find: async function(path){
		for(var p of await pwpath.paths(path)){
			p = await this.exists(p)
			if(p){
				return p } } },
	//
	// Resolve page for path
	// 	.match(<path>)
	// 		-> <path>
	//
	// Match paths (non-strict mode)
	// 	.match(<pattern>)
	// 	.match(<pattern>, false)
	// 		-> [<path>, ...]
	// 		-> []
	//
	// Match pages (paths in strict mode)
	// 	.match(<pattern>, true)
	// 		-> [<path>, ...]
	// 		-> []
	//
	// In strict mode the trailing star in the pattern will only match
	// actual existing pages, while in non-strict mode the pattern will
	// match all sub-paths.
	//
	match: async function(path, strict=false){
		// pattern match * / **
		if(path.includes('*')
				|| path.includes('**')){
			// NOTE: .metadata(..) is async so the result must be awaited
			//		before .order can be read (was read off the Promise,
			//		always yielding [])...
			var order = ((await this.metadata(path)) ?? {}).order || []
			// NOTE: we are matching full paths only here so leading and
			//		trailing '/' are optional...
			// NOTE: we ensure that we match full names and always split
			//		at '/' only...
			var pattern = new RegExp(`^\\/?${
				pwpath.normalize(path, 'string')
					.replace(/^\/|\/$/g, '')
					.replace(/\//g, '\\/')
					.replace(/\*\*/g, '.+')
					.replace(/\*/g, '[^\\/]+')
				}(?=[\\\\\/]|$)`)
			return [...(await this.paths())
					// NOTE: we are not using .filter(..) here as we
					//		need to keep parts of the path only and not
					//		return the whole thing...
					.reduce(function(res, p){
						// skip metadata paths...
						if(p.includes('*')){
							return res }
						var m = p.match(pattern)
						m
							&& (!strict
								|| m[0] == p)
							&& res.add(m[0])
						return res }, new Set())]
				.sortAs(order) }
		// direct search...
		return this.find(path) },
	//
	// 	.resolve(<path>)
	// 		-> <path>
	//
	// 	.resolve(<pattern>)
	// 		-> [<path>, ...]
	// 		-> []
	//
	// This is like .match(..) for non-pattern paths and paths ending
	// with '/'; When patterns end with a non-pattern then match the
	// basedir and add the basename to each resulting path...
	//
	// XXX should this be used by .get(..) instead of .match(..)???
	// XXX EXPERIMENTAL
	resolve: async function(path, strict){
		// pattern match * / **
		if(path.includes('*')
				|| path.includes('**')){
			path = pwpath.split(path)
			// match basedir and add the basename to the result...
			var name = path[path.length-1]
			if(name
					&& name != ''
					&& !name.includes('*')){
				path.pop()
				path.push('')
				return (await this.match(path.join('/'), strict))
					.map(function(p){
						return pwpath.join(p, name) }) } }
		// direct...
		return this.match(path, strict) },
	//
	// Resolve page
	// 	.get(<path>)
	// 		-> <value>
	//
	// Resolve pages (non-strict mode)
	// 	.get(<pattern>)
	// 	.get(<pattern>, false)
	// 		-> [<value>, .. ]
	//
	// Get pages (strict mode)
	// 	.get(<pattern>, true)
	// 		-> [<value>, .. ]
	//
	// In strict mode this will not try to resolve pages and will not
	// return pages at paths that do not explicitly exist.
	//
	// XXX should this call actions???
	// XXX should this return a map for pattern matches???
	__get__: async function(key){
		return this.data[key] },
	get: async function(path, strict=false){
		var that = this
		//path = this.match(path, strict)
		path = await this.resolve(path, strict)
		return path instanceof Array ?
			// XXX should we return matched paths???
			Promise.iter(path)
				.map(function(p){
					// NOTE: p can match a non existing page at this point,
					//		this can be the result of matching a/* in a a/b/c
					//		and returning a a/b which can be undefined...
					return that.get(p) })
			: (await this.__get__(path)
				// XXX NEXT
				?? ((this.next || {}).__get__
					&& this.next.__get__(path))) },
	//
	// Get metadata...
	// 	.metadata(<path>)
	// 		-> <metadata>
	// 		-> undefined
	//
	// Set metadata...
	// 	.metadata(<path>, <data>[, <mode>])
	// 	.update(<path>, <data>[, <mode>])
	//
	// Delete metadata...
	// 	.delete(<path>)
	//
	// NOTE: .metadata(..) is the same as .data but supports pattern paths
	//		and does not try to acquire a target page.
	// NOTE: setting/removing metadata is done via .update(..) / .delete(..)
	// NOTE: this uses .__get__(..) internally...
	metadata: async function(path, ...args){
		// set...
		if(args.length > 0){
			return this.update(path, ...args) }
		// get...
		path = await this.exists(path)
		return path
			&& await this.__get__(path)
			|| undefined },
	// NOTE: deleting and updating only applies to explicit matching
	//		paths -- no page acquisition is performed...
	// NOTE: edit methods are local-only...
	// NOTE: if .__update__ and .__delete__ are set to null/false this
	//		will quietly go into read-only mode...
	// XXX do we copy the data here or modify it????
	__update__: async function(key, data, mode='update'){
		this.data[key] = data },
	update: async function(path, data, mode='update'){
		// read-only...
		if(this.__update__ == null){
			return this }
		var exists = await this.exists(path)
		path = exists
			|| pwpath.normalize(path, 'string')
		data = data instanceof Promise ?
			await data
			: data
		// merge over the existing page (in 'update' mode), stamping
		// .ctime on create and .mtime on every write...
		data =
			typeof(data) == 'function' ?
				data
				: Object.assign(
					{
						__proto__: data.__proto__,
						ctime: Date.now(),
					},
					(mode == 'update' && exists) ?
						await this.get(path)
						: {},
					data,
					{mtime: Date.now()})
		await this.__update__(path, data, mode)
		return this },
	__delete__: async function(path){
		delete this.data[path] },
	delete: async function(path){
		// read-only...
		if(this.__delete__ == null){
			return this }
		path = await this.exists(path)
		path
			&& await this.__delete__(path)
		return this },

	// XXX NEXT might be a good idea to have an API to move pages from
	//		current store up the chain...

	// load/json protocol...
	//
	// The .load(..) / .json(..) methods have two levels of implementation:
	// 	- generic
	// 		uses .update(..) and .paths()/.get(..) and is usable as-is
	// 		in any store adapter implementing the base protocol.
	// 	- batch
	// 		implemented via .__batch_load__(..) and .__batch_json__(..)
	// 		methods and can be adapter specific.
	//
	// NOTE: the generic level does not care about the nested stores
	//		and other details, as it uses the base API and will produce
	//		full and generic result regardless of actual store topology.
	// NOTE: implementations of the batch level need to handle nested
	//		stores correctly.
	//		XXX not sure if we can avoid this at this stage...
	// NOTE: care must be taken with inheriting the batch protocol methods
	//		as they take precedence over the generic protocol. It is
	//		recommended to either overload them or simply assign null or
	//		undefined to them when inheriting from a non-base-store.
	//__batch_load__: function(data){
	//	// ...
	//	return this },
	load: async function(...data){
		// merge all the inputs, later keys winning...
		var input = {}
		for(var e of data){
			input = {...input, ...e} }
		// batch loader (optional)...
		if(this.__batch_load__){
			this.__batch_load__(input)
		// one-by-one loader...
		} else {
			for(var [path, value] of Object.entries(input)){
				this.update(path, value) } }
		return this },
	//__batch_json__: function(){
	//	// ...
	//	return json},
	json: async function(asstring=false){
		// batch...
		if(this.__batch_json__){
			var res = this.__batch_json__(asstring)
		// generic...
		} else {
			var res = {}
			for(var path of await this.paths()){
				res[path] = await this.get(path) } }
		return (asstring
				&& typeof(res) != 'string') ?
			JSON.stringify(res)
			: res },

	// create a new empty store chained on top of this one...
	// XXX NEXT EXPERIMENTAL...
	nest: function(base){
		return {
			__proto__: base
				?? BaseStore,
			next: this,
			data: {}
		} },
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
//
// XXX stores to experiment with:
// - cache
// - fs
// - PouchDB
//
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Meta-Store
//
// Extends BaseStore to handle other stores as pages. i.e. sub-paths can
// be handled by nested stores.
//
// XXX might be a good idea to normalize args...
// Build a MetaStore method that proxies the call to the sub-store
// responsible for the path...
//
// 	metaProxy(<method>[, <drop_cache>][, <post>])
// 		-> <function>
//
// <method> is a '__<name>__' method name; the sub-store is called via
// its public '<name>' method (underscores stripped). When no sub-store
// handles the path the parent (BaseStore) implementation is used.
// If drop_cache is set the .__substores cache is invalidated after the
// call; post(<result>, <store>, <path>, ..) can transform the result.
var metaProxy =
function(meth, drop_cache=false, post){
	var target = meth.replace(/__/g, '')
	// arg shuffle: metaProxy(<method>, <post>)
	if(typeof(drop_cache) == 'function'){
		post = drop_cache
		drop_cache = false }
	var func = async function(path, ...args){
		var store = this.substore(path)
		var res =
			store == null ?
				object.parentCall(MetaStore[meth], this, path, ...args)
				: this.data[store][target](
					// NOTE: we are normalizing for root/non-root paths...
					path.slice(path.indexOf(store)+store.length),
					...args)
		if(drop_cache){
			delete this.__substores }
		post
			&& (res = post.call(this, await res, store, path, ...args))
		return res}
	// give the generated function a meaningful name for debugging...
	Object.defineProperty(func, 'name', {value: meth})
	return func }
// XXX this gets stuff from .data, can we avoid this???
// ...this can restrict this to being in-memory...
// XXX not sure about the name...
// XXX should this be a mixin???
var MetaStore =
module.MetaStore = {
	__proto__: BaseStore,

	//data: undefined,

	// cached list of sub-store paths (invalidated by edit proxies)...
	__substores: undefined,
	get substores(){
		return this.__substores
			?? (this.__substores = Object.entries(this.data)
				.filter(function([path, value]){
					return object.childOf(value, BaseStore) })
				.map(function([path, _]){
					return path })) },
	// get the path of the sub-store responsible for path...
	//
	// Returns undefined when no sub-store handles the path (or the
	// path is the store itself).
	substore: function(path){
		path = pwpath.normalize(path, 'string')
		if(this.substores.includes(path)){
			return path }
		var root = path[0] == '/'
		// longest sub-store path prefixing path...
		var store = this.substores
			.filter(function(p){
				return path.startsWith(
					root ?
						'/'+p
						: p) })
			.sort(function(a, b){
				return a.length - b.length })
			.pop()
		return store == path ?
			// the actual store is not stored within itself...
			undefined
			: store },
	// get the sub-store object responsible for path...
	getstore: function(path){
		return this.data[this.substore(path)] },

	// list own keys plus the (prefixed) keys of each nested sub-store...
	// XXX this depends on .data having keys...
	__paths__: async function(){
		var that = this
		var data = this.data
		//return Object.keys(data)
		return Promise.iter(Object.keys(data)
				.map(function(path){
					return object.childOf(data[path], BaseStore) ?
						data[path].paths()
							.iter()
							.map(function(s){
								return pwpath.join(path, s) })
						: path }))
			.flat() },

	// proxied low-level API (see metaProxy(..))...
	// XXX revise...
	__exists__: metaProxy('__exists__',
		// normalize path...
		function(res, store, path){
			return (store && res) ?
				path
				: res }),
	__get__: metaProxy('__get__'),
	__delete__: metaProxy('__delete__', true),
	// XXX BUG: this does not create stuff in sub-store...
	__update__: metaProxy('__update__', true),
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// XXX might be a fun idea to actually use this as a backend for BaseStore...
// XXX make this a mixin...
// XXX add cache invalidation strategies...
// - timeout
// - count
// XXX TEST...
// Caching mixin: memoizes .__paths__() and .__get__(..) results and
// keeps the caches in sync on update/delete...
var CachedStore =
module.CachedStore = {
	//__proto__: FileStoreRO,

	// format:
	// 	{
	// 		<path>: <value>,
	// 	}
	__cache: undefined,
	// cached .__paths__() result (a promise of a path list)...
	__paths: undefined,

	// lazy-create the cache object (mirrors BaseStore's .data getter)...
	// NOTE: the cache was previously read via an undefined .cache
	//		attribute...
	get cache(){
		return this.__cache
			?? (this.__cache = {}) },

	resetCache: function(){
		delete this.__paths
		delete this.__cache
		return this },

	__paths__: function(){
		return this.__paths
			?? (this.__paths =
				object.parentCall(CachedStore.__paths__, this)) },
	__exists__: async function(path){
		return path in this.cache
			|| object.parentCall(CachedStore.__exists__, this, path) },
	__get__: async function(path){
		return this.cache[path]
			// NOTE: this used to pass an undefined ...args through,
			//		throwing a ReferenceError on every cache miss...
			?? (this.cache[path] =
				object.parentCall(CachedStore.__get__, this, path)) },
	__update__: async function(path, data){
		// keep the paths cache in sync, but only if already populated...
		if(this.__paths){
			var paths = await this.__paths
			paths.includes(path)
				|| paths.push(path) }
		this.cache[path] = data
		return object.parentCall(CachedStore.__update__, this, path, data) },
	__delete__: async function(path){
		if(this.__paths){
			var paths = await this.__paths
			var i = paths.indexOf(path)
			// NOTE: this used to test i > 0 and thus never evicted the
			//		first cached path...
			i >= 0
				&& paths.splice(i, 1) }
		delete this.cache[path]
		return object.parentCall(CachedStore.__delete__, this, path) },
}
/**********************************************************************
* vim:set ts=4 sw=4 : */ return module })

655
store/file.js Executable file
View File

@ -0,0 +1,655 @@
/**********************************************************************
*
*
*
**********************************************************************/
((typeof define)[0]=='u'?function(f){module.exports=f(require)}:define)
(function(require){ var module={} // make module AMD/node compatible...
/*********************************************************************/
var fs = require('fs')
var glob = require('glob')
var object = require('ig-object')
var types = require('ig-types')
var pwpath = require('../lib/path')
var base = require('../store/base')
//---------------------------------------------------------------------
//
// XXX structure is not final...
// - need to split each adapter into modules...
// - should the media handler api be merged with store???
// - how do we handle config???
// Default file-store options (see getOpts(..) for merging)...
var FILESTORE_OPTIONS = {
	index: '.index',
	backup: '/.backup',
	clearEmptyDir: true,
	dirToFile: true,
	cleanBackup: true,
	verbose: true,
}
// Merge user options over the defaults...
//
// 	getOpts()
// 	getOpts(<options>)
// 		-> <options>
//
var getOpts =
function(opts){
	var res = Object.assign({}, FILESTORE_OPTIONS)
	return Object.assign(res, opts ?? {}) }
// func(base[, options])
// -> true/false
//
// func(base, path[, options])
// -> true/false
//
// XXX not yet sure how we should handle dot-files....
// XXX should these be store methods???
// XXX do we need error checking in these???
// Check if a page exists on disk...
//
// 	exists(<path>[, <options>])
// 	exists(<base>, <sub>[, <options>])
// 		-> true/false
//
// NOTE: a directory only counts as existing if it contains the
//		index file (see options.index)...
var exists =
module.exists =
async function(base, sub, options){
	// arg shuffle: exists(<path>[, <options>])
	if(typeof(sub) != 'string'){
		options = sub ?? options
		sub = base
		base = null }
	var {index} = getOpts(options)
	var target = base ?
		pwpath.join(base, sub)
		: sub
	if(!fs.existsSync(target)){
		return false }
	var stat = await fs.promises.stat(target)
	if(stat.isDirectory()){
		return fs.existsSync(pwpath.join(target, index)) }
	return true }
// Read page text from disk...
//
// 	read(<path>[, <options>])
// 	read(<base>, <sub>[, <options>])
// 		-> <text>
// 		-> undefined
//
// NOTE: for directories the index file is read (see options.index);
//		a directory without an index yields undefined...
var read =
module.read =
async function(base, sub, options){
	// arg shuffle: read(<path>[, <options>])
	if(typeof(sub) != 'string'){
		options = sub ?? options
		sub = base
		base = null }
	var {index} = getOpts(options)
	var target = base ?
		pwpath.join(base, sub)
		: sub
	if(!fs.existsSync(target)){
		return undefined }
	// handle dir text...
	var stat = await fs.promises.stat(target)
	if(stat.isDirectory()){
		var target = pwpath.join(target, index)
		fs.existsSync(target)
			|| (target = false) }
	return target ?
		fs.promises.readFile(target, {encoding: 'utf-8'})
		: undefined }
// Create a directory path, converting files to directories on the way...
//
// 	mkdir(<path>[, <options>])
// 	mkdir(<base>, <sub>[, <options>])
// 		-> <path>
//
// <sub> can be a path string or a split path list (see pwpath.split(..)).
// If a path element exists as a plain file it is converted into a
// directory and its content moved to <dir>/<index>.
var mkdir =
module.mkdir =
async function(base, sub, options){
	// arg shuffle: mkdir(<path>[, <options>])
	// NOTE: a list <sub> must not be mistaken for <options> here, this
	//		used to shuffle whenever <sub> was not a string, dropping
	//		the path list when <base> was given...
	if(typeof(sub) != 'string'
			&& !(sub instanceof Array)){
		options = sub ?? options
		sub = base
		base = null }
	var {index, verbose} = getOpts(options)
	var levels = pwpath.split(sub)
	for(var level of levels){
		base = base == null ?
			level
			: pwpath.join(base, level)
		// nothing exists -- create dir and continue...
		if(!fs.existsSync(base)){
			verbose
				&& console.log('mkdir(..): mkdir:', base)
			await fs.promises.mkdir(base, {recursive: true})
			continue }
		// directory -- continue...
		var stat = await fs.promises.stat(base)
		if(stat.isDirectory()){
			continue }
		// file -- convert to dir...
		verbose
			&& console.log('mkdir(..): converting file to dir:', base)
		await fs.promises.rename(base, base+'.pwiki-bak')
		await fs.promises.mkdir(base, {recursive: true})
		await fs.promises.rename(base +'.pwiki-bak', base +'/'+ index) }
	return base }
// XXX metadata???
// Write page text to disk, creating parent directories as needed...
//
// 	update(<path>, <data>[, <options>])
// 	update(<base>, <sub>, <data>[, <options>])
// 		-> <path>
//
// NOTE: if the target is an existing directory the data is written
//		to its index file (see options.index)...
// XXX metadata???
var update =
module.update =
async function(base, sub, data, options){
	// arg shuffle: update(<path>, <data>[, <options>])
	if(typeof(data) != 'string'){
		options = data ?? options
		data = sub
		sub = base
		base = null }
	var {index} = getOpts(options)
	var target = base ?
		pwpath.join(base, sub)
		: sub
	// path already exists...
	if(fs.existsSync(target)){
		var stat = await fs.promises.stat(target)
		if(stat.isDirectory()){
			target = pwpath.join(target, index) }
	// create path / parts of path...
	} else {
		var levels = pwpath.split(target)
		levels.pop()
		// ensure the parent dir exists...
		await module.mkdir(
			...(base ?
				// NOTE: we are keeping this separate here to avoid creating
				//		anything above it...
				[base]
				: []),
			levels,
			options) }
	// write the data...
	// NOTE: close the handle even if the write fails and await the
	//		close so the handle is released (and data flushed) before
	//		we return (was a fire-and-forget .close())...
	var f = await fs.promises.open(target, 'w')
	try{
		await f.writeFile(data)
	} finally {
		await f.close() }
	return target }
// Remove a page from disk and clean up emptied parent directories...
//
// 	clear(<path>[, <options>])
// 	clear(<base>, <sub>[, <options>])
// 		-> <path>
//
// NOTE: for a directory only the index file is removed and the dir
//		itself is dropped only if it ends up empty...
var clear =
module.clear =
async function(base, sub, options){
	// arg shuffle: clear(<path>[, <options>])
	if(typeof(sub) != 'string'){
		options = sub ?? options
		sub = base
		base = '' }
	var {index} = getOpts(options)
	// remove leaf...
	var target = base == '' ?
		sub
		: pwpath.join(base, sub)
	// dir...
	if(fs.existsSync(target)){
		var stat = await fs.promises.stat(target)
		if(stat.isDirectory()){
			var files = await fs.promises.readdir(target)
			// remove index...
			if(files.includes(index)){
				await fs.promises.rm(pwpath.join(target, index))
				// NOTE: we do not care what we pop as long as the .length
				//		is correct as we'll not be using the content after
				//		this point...
				files.pop() }
			// remove dir if empty...
			if(files.length == 0){
				await fs.promises.rmdir(target) }
		// simple file...
		} else {
			await fs.promises.rm(target) } }
	// cleanup path -- remove empty dirs... (XXX ???)
	var levels = pwpath.split(sub)
		.slice(0, -1)
	while(levels.length > 0){
		var cur = pwpath.join(base, ...levels)
		if(fs.existsSync(cur)){
			// NOTE: this used to stat base instead of cur, checking the
			//		wrong directory on every iteration...
			var stat = await fs.promises.stat(cur)
			if(stat.isDirectory()){
				// stop cleanup if non-empty dir...
				if((await fs.promises.readdir(cur)).length != 0){
					break }
				await fs.promises.rmdir(cur) } }
		levels.pop() }
	return target }
// Normalize the on-disk layout under base...
//
// 	- remove empty directories (options.clearEmptyDir)
// 	- collapse directories containing only the index file back into
// 	  plain files (options.dirToFile)
//
// NOTE: paths are processed longest-first, i.e. bottom-up...
// NOTE: this works asynchronously off glob's 'end' event and returns
//		before the cleanup is guaranteed to have finished...
var cleanup =
module.cleanup =
async function(base, options){
	var {index, clearEmptyDir, dirToFile, verbose} = getOpts(options)
	glob(pwpath.join(base, '**/*'))
		.on('end', async function(paths){
			paths
				.sort(function(a, b){
					return b.length - a.length })
			for(var path of paths){
				var stat = await fs.promises.stat(path)
				if(stat.isDirectory()){
					var children = await fs.promises.readdir(path)
					// empty -> remove...
					if(clearEmptyDir
							&& children.length == 0){
						verbose
							&& console.log('cleanup(..): removing dir:', path)
						fs.promises.rmdir(path)
						continue }
					// dir -> file...
					if(dirToFile
							&& children.length == 1
							&& children[0] == index){
						verbose
							&& console.log('cleanup(..): converting dir to file:', path)
						await fs.promises.rename(path +'/'+ index, path+'.pwiki-bak')
						await fs.promises.rmdir(path)
						await fs.promises.rename(path +'.pwiki-bak', path)
						continue }
					} } }) }
// XXX backup metadata...
// - date
// - reason
// - refs...
// XXX set hidden attribute on backup dir...
// XXX add backup packing...
var backup =
module.backup = {
	// XXX backup config???
	//index: '.index',
	//base: '/.backup',
	//cleanBackup: true,
	//verbose: true,

	//
	// 	.create(<base>[, <options>])
	// 	.create(<base>, '**'[, <options>])
	// 	.create(<base>, '**', Date.timeStamp()[, <options>])
	// 		-> <list>
	//
	// 	.create(<base>, <path>[, <version>][, <options>])
	// 		-> <list>
	//
	// 	.create(<base>, <path>, false[, <options>])
	// 		-> <list>
	//
	// .create(..) and .restore(..) are completely symmetrical.
	//
	// NOTE: backing up ** will include nested backups but will ignore
	//		the root backup dir...
	//
	// XXX since these are *almost* identical in structure, can we reuse one
	//		to implement the other???
	//		..or can we implement these in a manner similar to "cp A B" vs. "cp B A"???
	create: async function(base, sub='**', version=Date.timeStamp(), options){
		var that = this
		// arg shuffle: .create(<base>[, <options>])
		if(typeof(sub) == 'object'){
			options = sub
			sub = '**' }
		// arg shuffle: .create(<base>, <path>[, <options>])
		if(typeof(version) == 'object'){
			options = version
			version = Date.timeStamp() }
		// options...
		var {index, backup, verbose, recursive, cleanBackup, __batch} = options = getOpts(options)
		recursive = recursive ?? false
		// _backup keeps the bare (possibly versioned) backup root while
		// backup is resolved relative to base / the target's dir...
		var _backup = backup =
			version ?
				pwpath.join(backup, version)
				: backup
		backup =
			pwpath.join(
				base,
				pwpath.relative(pwpath.dirname(sub), backup))
		// ** or * -- backup each file in path...
		if(/[\\\/]*\*\*?$/.test(sub)){
			if(sub.endsWith('**')){
				options.recursive = true }
			options.__batch = true
			if(cleanBackup
					&& fs.existsSync(backup)){
				verbose
					&& console.log('.create(..): cleaning:', backup)
				await fs.promises.rm(backup, {recursive: true}) }
			sub = sub.replace(/[\\\/]*\*\*?$/, '')
			// first non-empty element of the backup root, used to skip
			// the backup dir itself while walking...
			var b = pwpath.split(_backup)
				.filter(function(p){
					return p != '' })
				.shift()
			return fs.promises.readdir(base +'/'+ sub)
				.iter()
				// skip backups...
				.filter(function(file){
					return !file.includes(b) })
				.map(async function(file){
					return await that.create(base, sub +'/'+ file, version, options) })
				// keep only the paths we backed up...
				.filter(function(e){
					return !!e })
		// backup single page...
		} else {
			var target = pwpath.join(base, sub)
			// absolute backup root -> keep the full sub path in the backup...
			var full = _backup[0] == '/'
			// nothing to backup...
			if(!fs.existsSync(target)){
				verbose
					&& console.log('.create(..): target does not exist:', target)
				return }
			var to = full ?
				backup +'/'+ sub
				: backup +'/'+ pwpath.basename(sub)
			var todir = pwpath.dirname(to)
			if(!recursive){
				// non-recursive dir backup: only the index file is copied...
				var stat = await fs.promises.stat(target)
				if(stat.isDirectory()){
					target += '/'+index
					to += '/'+index
					// nothing to backup...
					if(!fs.existsSync(target)){
						verbose
							&& !__batch
							&& console.log('.create(..): nothing to backup:', target)
						return } } }
			verbose
				&& console.log('.create(..):', sub, '->', to)
			await fs.promises.mkdir(todir, {recursive: true})
			await fs.promises.cp(target, to, {force: true, recursive})
			return to } },
	// Restore <sub> (or everything for '*' / '**') from a backup version...
	// See .create(..) for the signature -- the two are symmetrical.
	restore: async function(base, sub, version, options){
		var that = this
		// XXX
		var {index, backup, verbose, recursive, preBackup, __batch} = options = getOpts(options)
		recursive = recursive ?? false
		var _backup = backup =
			version ?
				pwpath.join(backup, version)
				: backup
		backup =
			pwpath.join(
				base,
				pwpath.relative(
					pwpath.dirname(sub),
					backup))
		// check if we can restore...
		if(!fs.existsSync(backup)){
			verbose
				&& console.log('restore(..): no backup version:', version)
			return }
		// XXX should we use the same options...
		preBackup
			&& await this.create(base, sub, options ?? {})
		// ** or * -- backup each file in path...
		// NOTE: when restoring there is no difference between ** and *...
		if(/[\\\/]*\*\*?$/.test(sub)){
			if(sub.endsWith('**')){
				options.recursive = true }
			// restore...
			// NOTE: we have already made a full backup so no need to
			//		redo it down the line...
			options.preBackup = false
			options.__batch = true
			sub = sub.replace(/[\\\/]*\*\*?$/, '')
			var to = pwpath.join(base, sub)
			var b = pwpath.split(_backup)
				.filter(function(p){
					return p != '' })
				.shift()
			// cleanup...
			// NOTE: we need this stage as the file list we are backing up
			//		and the one in the target dir can differ, and a single-page
			//		.restore(..) will only remove collisions...
			await fs.promises.readdir(base +'/'+ sub)
				.iter()
				// skip backups...
				.filter(function(file){
					return !file.includes(b) })
				.map(async function(file){
					var p = pwpath.join(base, sub, file)
					verbose
						&& console.log('restore(..): removing:', p)
					await fs.promises.rm(p, {recursive: true})
					return p })
			return fs.promises.readdir(backup)
				.iter()
				.map(async function(file){
					return await that.restore(base, sub+'/'+file, version, options) })
				// keep only the paths we backed up...
				.filter(function(e){
					return !!e })
		// single page...
		} else {
			var index_file = ''
			var full = _backup[0] == '/'
			var source = full ?
				pwpath.join(backup, sub)
				: pwpath.join(backup, pwpath.basename(sub))
			if(!fs.existsSync(source)){
				verbose
					&& console.log('restore(..): source not present in backup:', source)
				return }
			var to = pwpath.join(base, sub)
			// remove the current target (or its index file) before copy...
			if(fs.existsSync(to)){
				var stat = await fs.promises.stat(to)
				if(stat.isDirectory()){
					var f = pwpath.join(to, index)
					if(fs.existsSync(f)){
						verbose
							&& console.log('restore(..): removing:', f)
						await fs.promises.rm(f) }
				} else {
					verbose
						&& console.log('restore(..): removing:', to)
					await fs.promises.rm(to) } }
			if(!recursive){
				// handle dir text...
				var stat = await fs.promises.stat(source)
				if(stat.isDirectory()){
					source += '/'+index
					to += '/'+index
					if(!fs.existsSync(source)){
						verbose
							&& !__batch
							&& console.log('restore(..): source not present in backup:', source)
						return } } }
			verbose
				&& console.log('restore(..): restoring:', to)
			await fs.promises.cp(source, to, {recursive: true})
			return source } },
	//
	// Get backup versions...
	// 	listbackups(<base>[, <options>])
	// 	listbackups(<base>, '*'[, <options>])
	// 		-> <list>
	//
	// Get backup versions containing <path>...
	// 	listbackups(<base>, <path>[, <options>])
	// 		-> <list>
	//
	list: async function(base, sub, options){
		var that = this
		// arg shuffle: list(<base>[, <options>])
		if(typeof(sub) == 'object'){
			options = sub
			sub = '*' }
		var {backup} = getOpts(options)
		// handle local/global backups...
		var full = backup[0] == '/'
		base = full ?
			pwpath.join(base, backup)
			: pwpath.join(base, pwpath.dirname(sub), backup)
		sub = full ?
			sub
			: pwpath.basename(sub)
		return fs.existsSync(base) ?
			fs.promises.readdir(base)
				.iter()
				.filter(function(version){
					return (sub == '*' || sub == '**')
						|| fs.existsSync(
							pwpath.join(base, version, sub)) })
			: [] },
	// Remove a backup version ('*' / '**' removes all versions)...
	remove: async function(base, version, options){
		var {backup, verbose} = getOpts(options)
		var target =
			(version == '*' || version == '**') ?
				pwpath.join(base, backup)
				: pwpath.join(base, backup, version)
		if(fs.existsSync(target)){
			verbose
				&& console.log(`.remove(..): removing:`, target)
			await fs.promises.rm(target, {recursive: true})
			return target } },
	// Remove all backup versions...
	clear: async function(base, options){
		return await this.remove(base, '*', options) }
}
// - - - - - - - - - - - - - - - - - - - - - - -
// XXX might be a good idea to support ro mode on top level explicitly...
// XXX add monitor API + cache + live mode (auto on when lock detected)...
var FileStoreRO =
module.FileStoreRO = {
	__proto__: base.BaseStore,

	// root directory of the store on disk...
	// XXX
	__path__: 'data/fs',
	// name of the file holding a directory page's text...
	// XXX should this be "index" or ".index"???
	__directory_text__: '.index',

	// list store-relative paths of all existing pages under .__path__...
	// XXX do we remove the extension???
	// XXX cache???
	__paths__: async function(){
		var that = this
		return new Promise(function(resolve, reject){
			glob(pwpath.join(that.__path__, '**/*'))
				.on('end', function(paths){
					Promise.all(paths
							.map(async function(path){
								// non-pages map to [] and vanish in .flat()...
								return await module.exists(path) ?
									path
										.slice(that.__path__.length)
									: [] }))
						.then(function(paths){
							resolve(paths.flat()) }) }) }) },
	__exists__: async function(path){
		return await module.exists(this.__path__, path, {index: this.__directory_text__})
			&& path },
	// read page text plus fs timestamps (in ms)...
	__get__: async function(path){
		var p = pwpath.join(this.__path__, path)
		var {atimeMs, mtimeMs, ctimeMs, birthtimeMs} = await fs.promises.stat(p)
		return {
			atime: atimeMs,
			mtime: mtimeMs,
			ctime: ctimeMs,
			text: await module.read(p, {index: this.__directory_text__})
		} },
	// read-only adapter: writes/deletes are silent no-ops...
	// NOTE: these are no-op functions, not null, so BaseStore's
	//		read-only short-circuit does not trigger -- the edit calls
	//		run but persist nothing...
	__update__: function(){},
	__delete__: function(){},
}
// XXX add a lock file and prevent multiple adapters from controlling
// one path...
// XXX backup files on write/delete...
// Read/write file-system store...
//
// Adds locking (one writing process per store path) and backup/restore
// on top of FileStoreRO.
var FileStore =
module.FileStore = {
	__proto__: FileStoreRO,
	// XXX
	__path__: 'data/fs',
	// backup and lock file locations, relative to .__path__...
	__backup_path__: '/.pwiki/backup',
	__lock_path__: '/.pwiki/lock',
	// XXX should this be "index" or ".index"???
	__directory_text__: '.index',
	// process events on which the lock file gets removed...
	__clear_lock__: [
		`SIGINT`,
		`SIGUSR1`,
		`SIGUSR2`,
		`SIGTERM`,
		`exit`,
		// XXX should we handle this??
		// 		...this can be an indicator of inconsistent state...
		//`uncaughtException`,
	],
	__exit_lock_handler: undefined,
	// prevent more than one handler to write to a store...
	//
	// Writes the current pid to .__lock_path__ and registers handlers
	// to remove it on exit; throws if the lock is held by a different
	// process.
	ensureLock: async function(){
		var that = this
		var lock = this.__path__ + this.__lock_path__
		// check lock...
		if(fs.existsSync(lock)){
			// NOTE: loose != is intentional -- the lock file contains a
			// 		string while process.pid is a number...
			if(await module.read(lock) != process.pid){
				// NOTE: Error(..) takes a single message argument -- the
				// 		path must be part of the message itself (a second
				// 		argument is silently dropped)...
				throw new Error(
					`attempting to write to a locked store: ${this.__path__}`) }
		// set lock...
		} else {
			// NOTE: await the lock write so a concurrent writer can't
			// 		race past a not-yet-written lock file...
			await module.update(lock, `${process.pid}`)
			this.__exit_lock_handler =
				this.__exit_lock_handler
					// NOTE: this must be sync as deferred calls might 
					//		not get a chance to execute...
					?? function(){
						fs.rmSync(lock) }
			// off(..) before on(..) to avoid stacking duplicate handlers...
			this.__clear_lock__.forEach(function(evt){
				process.off(evt, that.__exit_lock_handler)
				process.on(evt, that.__exit_lock_handler) }) }
		return this },
	// XXX do we write all the data or only the .text???
	__update__: async function(path, data, mode='update'){
		// NOTE: ensureLock(..) is async and must be awaited -- otherwise
		// 		a lock violation becomes an unhandled rejection and the
		// 		write goes through anyway...
		await this.ensureLock()
		return module.update(
			this.__path__, path, 
			data.text, 
			{index: this.__directory_text__}) },
	__delete__: async function(path){
		await this.ensureLock()
		return module.clear(
			this.__path__, path, 
			{index: this.__directory_text__}) },
	// specific API...
	cleanup: async function(options={}){
		return module.cleanup(this.__path__, {
			index: this.__directory_text__, 
			...options,
		}) },
	// XXX add explicit versioning???
	backup: async function(path='**', options={}){
		await this.ensureLock()
		return backup.create(
			this.__path__, path, 
			{
				index: this.__directory_text__,
				backup: this.__backup_path__,
				...options,
			}) },
	restore: async function(path='**', options={}){
		await this.ensureLock()
		return backup.restore(
			this.__path__, path, 
			{
				index: this.__directory_text__,
				backup: this.__backup_path__,
				...options,
			}) },
}
/**********************************************************************
* vim:set ts=4 sw=4 : */ return module })

80
store/localstorage.js Executable file
View File

@ -0,0 +1,80 @@
/**********************************************************************
*
*
*
**********************************************************************/
((typeof define)[0]=='u'?function(f){module.exports=f(require)}:define)
(function(require){ var module={} // make module AMD/node compatible...
/*********************************************************************/
var object = require('ig-object')
var types = require('ig-types')
var pwpath = require('../lib/path')
var base = require('../store/base')
//---------------------------------------------------------------------
// XXX EXPERIMENTAL, needs testing in browser...
// localStorage-backed store...
//
// Each page is a separate JSON-serialized localStorage entry keyed
// with .__prefix__ + <path>
//
// XXX EXPERIMENTAL, needs testing in browser...
var localStorageStore =
module.localStorageStore = {
	__proto__: base.BaseStore,
	__prefix__: '--pwiki:',
	// XXX add caching of unserialized data???
	// NOTE: undefined when run outside a browser...
	data:
		typeof(localStorage) != 'undefined' ?
			localStorage
			: undefined,
	// List page paths (keys with .__prefix__ stripped)...
	__paths__: function(){
		var that = this
		// NOTE: apply (?? '') here too, consistent with the other
		// 		methods -- .startsWith(undefined) would search for the
		// 		literal string "undefined" and match nothing...
		return Object.keys(this.data)
			.map(function(k){
				return k.startsWith(that.__prefix__ ?? '') ?
					k.slice((that.__prefix__ ?? '').length)
					: [] })
			.flat() },
	// Return path if the page exists, false-y otherwise...
	__exists__: function(path){
		return ((this.__prefix__ ?? '')+ path) in this.data
			&& path },
	__get__: function(path){
		path = (this.__prefix__ ?? '')+ path
		return path in this.data ?
			JSON.parse(this.data[path])
			: undefined },
	__update__: function(path, data={}){
		this.data[(this.__prefix__ ?? '')+ path] = 
			JSON.stringify(data) },
	__delete__: function(path){
		delete this.data[(this.__prefix__ ?? '')+ path] },
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Nested localStorage store (stub)...
//
// Keeps all saved pages in one JSON blob under .__data__ with pending
// (unsaved) entries overlayed from the .__cache__ blob.
var localStorageNestedStore =
module.localStorageNestedStore = {
	__proto__: base.BaseStore,
	__data__: '__pwiki_data__',
	__cache__: '__pwiki_cache__',
	__data: undefined,
	get data(){
		// lazily build the data object: saved data as prototype, cache
		// entries shadowing it on top...
		if(this.__data == null){
			var saved = JSON.parse(localStorage[this.__data__] || '{}')
			var cached = JSON.parse(localStorage[this.__cache__] || '{}')
			this.__data = Object.assign({ __proto__: saved }, cached) }
		return this.__data },
	// XXX do partials saves -> cache + write cache...
	// XXX on full save merge cache and save...
}
/**********************************************************************
* vim:set ts=4 sw=4 : */ return module })

86
store/pouchdb.js Executable file
View File

@ -0,0 +1,86 @@
/**********************************************************************
*
*
*
**********************************************************************/
((typeof define)[0]=='u'?function(f){module.exports=f(require)}:define)
(function(require){ var module={} // make module AMD/node compatible...
/*********************************************************************/
var object = require('ig-object')
var types = require('ig-types')
var pwpath = require('../lib/path')
var base = require('../store/base')
//---------------------------------------------------------------------
// XXX
module.PouchDB = undefined
// PouchDB-backed store...
//
// Documents are keyed with .__key_prefix__ + <path>
var PouchDBStore =
module.PouchDBStore = {
	__proto__: base.BaseStore,
	// XXX should this be __path__???
	// 		...this sets the path where the store is created...
	__name__: 'data/pouch',
	__key_prefix__: 'pwiki:',
	__data: undefined,
	// lazily require PouchDB and open the database...
	get data(){
		if(!this.__data){
			var PouchDB =
				module.PouchDB =
					require('PouchDB')
			this.__data = new PouchDB(this.__name__) }
		return this.__data },
	set data(value){
		this.__data = value },
	// XXX cache???
	__paths__: async function(){
		var that = this
		// XXX not sure if this is a good idea...
		var docs = await this.data.allDocs()
		return docs.rows
			.map(function(row){
				return row.id.slice(that.__key_prefix__.length) }) },
	// XXX use an index...
	__exists__: async function(key){
		var doc = await this.__get__(key)
		return !!doc },
	__get__: async function(key){
		try{
			return await this.data.get(this.__key_prefix__ + key)
		}catch(err){
			// missing document -> undefined...
			return undefined } },
	__update__: async function(key, data, mode='update'){
		var prev = (await this.__get__(key)) ?? {}
		var {_id, _rev, ...rest} = prev
		// in 'update' mode keep existing fields not present in data,
		// otherwise replace the document wholesale...
		var doc = mode == 'update' ?
			{...rest, ...data}
			: {...data}
		// system fields always win over user data...
		doc._id = _id
			?? (this.__key_prefix__ + key)
		if(_rev){
			doc._rev = _rev }
		await this.data.put(doc)
		return this },
	__delete__: async function(key){
		var doc = await this.__get__(key)
		if(doc){
			await this.data.remove(doc) }
		return this },
}
/**********************************************************************
* vim:set ts=4 sw=4 : */ return module })