2014-11-12 05:48:26 +03:00
|
|
|
/**********************************************************************
|
|
|
|
|
*
|
|
|
|
|
*
|
|
|
|
|
*
|
|
|
|
|
**********************************************************************/
|
|
|
|
|
|
2014-12-28 03:47:44 +03:00
|
|
|
var pathlib = require('path')
|
2014-12-16 04:50:34 +03:00
|
|
|
var events = require('events')
|
|
|
|
|
|
|
|
|
|
var fse = require('fs.extra')
|
|
|
|
|
var glob = require('glob')
|
2014-12-31 05:42:28 +03:00
|
|
|
var Promise = require('promise')
|
|
|
|
|
|
|
|
|
|
var guaranteeEvents = require('guarantee-events')
|
2014-11-14 21:57:55 +03:00
|
|
|
|
|
|
|
|
|
2014-11-12 05:48:26 +03:00
|
|
|
define(function(require){ var module = {}
|
|
|
|
|
// NOTE(review): module-load debug trace -- consider removing or gating
//		behind a DEBUG flag (see the commented-out DEBUG var below)...
console.log('>>> file')
|
|
|
|
|
|
|
|
|
|
//var DEBUG = DEBUG != null ? DEBUG : true
|
|
|
|
|
|
2015-11-15 01:25:04 +03:00
|
|
|
var data = require('data')
|
|
|
|
|
var images = require('images')
|
|
|
|
|
|
2014-11-14 21:57:55 +03:00
|
|
|
var tasks = require('lib/tasks')
|
2014-11-12 05:48:26 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2014-11-14 21:57:55 +03:00
|
|
|
/*********************************************************************/
|
2014-11-12 05:48:26 +03:00
|
|
|
|
2014-11-14 21:57:55 +03:00
|
|
|
// Name of the index sub-directory searched for by listIndexes(..) and
// used to split index paths from their base paths throughout this module...
var INDEX_DIR = '.ImageGrid'
|
2014-11-12 05:48:26 +03:00
|
|
|
|
|
|
|
|
|
2014-12-28 23:49:42 +03:00
|
|
|
|
2014-12-16 04:50:34 +03:00
|
|
|
/*********************************************************************/
|
|
|
|
|
// Queue
|
|
|
|
|
//
|
|
|
|
|
// Task
|
|
|
|
|
//
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2014-11-12 05:48:26 +03:00
|
|
|
/*********************************************************************/
|
2014-12-28 23:49:42 +03:00
|
|
|
// helpers...
|
|
|
|
|
|
|
|
|
|
// Guarantee that the 'end' and 'match' handlers will always get called
|
|
|
|
|
// with all results at least once...
|
2014-12-16 04:50:34 +03:00
|
|
|
//
|
2014-12-28 23:49:42 +03:00
|
|
|
// This does two things:
|
|
|
|
|
// - every 'end' event handler will get the full result set, regardless
|
|
|
|
|
// of when it was set...
|
|
|
|
|
// - every 'match' handler will be called for every match found, again
|
|
|
|
|
// regardless of whether it was set before or after the time of
|
|
|
|
|
// match.
|
2014-12-16 04:50:34 +03:00
|
|
|
//
|
2014-12-28 23:49:42 +03:00
|
|
|
// This prevents handlers from missing the event they are waiting for,
|
|
|
|
|
// essentially making it similar to how Promise/Deferred handle their
|
|
|
|
|
// callbacks.
|
2014-12-12 03:56:58 +03:00
|
|
|
//
|
2014-12-28 20:46:06 +03:00
|
|
|
// Wrap a glob emitter so its 'match' and 'end' events are guaranteed:
// handlers attached after an event fired still receive the results
// (Promise-like callback semantics, see comment block above)...
var guaranteeGlobEvents =
module.guaranteeGlobEvents =
	function(g){
		return guaranteeEvents('match end', g) }
|
2014-12-28 20:46:06 +03:00
|
|
|
|
2014-12-28 23:49:42 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
/*********************************************************************/
|
|
|
|
|
// Reader...
|
|
|
|
|
|
|
|
|
|
|
2014-12-28 20:46:06 +03:00
|
|
|
// XXX return a promise rather than an event emitter (???)
|
|
|
|
|
// XXX glob has a problem: if a match happens fast enough and we are slow
|
|
|
|
|
// enough to register a 'match' handler, then that match(s) will get
|
|
|
|
|
// missed...
|
2015-05-21 04:00:28 +03:00
|
|
|
// List all INDEX_DIR directories in the sub-tree rooted at base...
//
// Returns the glob emitter wrapped with guaranteeGlobEvents(..), so
// 'match' / 'end' handlers can be attached at any time.
var listIndexes =
module.listIndexes =
function(base){
	var pattern = base +'/**/'+ INDEX_DIR
	return guaranteeGlobEvents(glob(pattern))
}
|
2014-11-12 05:48:26 +03:00
|
|
|
|
|
|
|
|
|
2015-05-21 04:00:28 +03:00
|
|
|
// List preview images under base, i.e. any .jpg inside a '<res>px'
// resolution directory...
//
// Returns the glob emitter wrapped with guaranteeGlobEvents(..).
var listPreviews =
module.listPreviews =
function(base){
	var pattern = base +'/*px/*jpg'
	return guaranteeGlobEvents(glob(pattern))
}
|
|
|
|
|
|
|
|
|
|
|
2014-12-28 20:46:06 +03:00
|
|
|
// XXX return a promise rather than an event emitter (???)
|
2014-12-16 04:50:34 +03:00
|
|
|
// List .json files directly under path, optionally filtered by a glob
// pattern (defaults to '*')...
//
// NOTE: unlike listIndexes(..)/listPreviews(..) this helper is not
//		exported on module.
// XXX return a promise rather than an event emitter (???)
function listJSON(path, pattern){
	var full = path +'/'+ (pattern || '*') +'.json'
	return guaranteeGlobEvents(glob(full))
}
|
|
|
|
|
|
|
|
|
|
|
2014-12-31 05:42:28 +03:00
|
|
|
// Promisified fs.readFile: loadFile(path) -> Promise resolving to the
// file contents (Buffer -- presumably; verify against fs.extra docs)...
var loadFile = Promise.denodeify(fse.readFile)
|
2014-12-28 20:46:06 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
// XXX handle errors...
|
2014-12-16 04:50:34 +03:00
|
|
|
// Read the file at path and parse it as JSON, returning a promise for
// the parsed value...
//
// XXX handle errors...
function loadJSON(path){
	return loadFile(path)
		.then(function(text){
			return JSON.parse(text) })
}
|
|
|
|
|
|
|
|
|
|
|
2014-12-29 18:47:27 +03:00
|
|
|
// Load index(s)...
|
|
|
|
|
//
|
|
|
|
|
// loadIndex(path)
|
|
|
|
|
// -> data
|
|
|
|
|
//
|
|
|
|
|
// loadIndex(path, logger)
|
|
|
|
|
// -> data
|
|
|
|
|
//
|
|
|
|
|
//
|
|
|
|
|
// Procedure:
|
|
|
|
|
// - locate indexes in path given
|
|
|
|
|
// - per each index
|
|
|
|
|
// - get all .json files
|
|
|
|
|
// - get and load latest base file per keyword
|
|
|
|
|
// - merge all later than loaded base diff files per keyword
|
|
|
|
|
//
|
2015-05-21 04:00:28 +03:00
|
|
|
// Merging is done by copying the key-value pairs from diff to the
|
|
|
|
|
// resulting object.
|
|
|
|
|
//
|
2014-12-29 18:47:27 +03:00
|
|
|
//
|
|
|
|
|
// Index format (input):
|
|
|
|
|
// .ImageGrid/
|
|
|
|
|
// +- [<timestamp>-]<keyword>[-diff].json
|
|
|
|
|
// +- ...
|
|
|
|
|
//
|
|
|
|
|
//
|
|
|
|
|
// Output format:
|
|
|
|
|
// {
|
|
|
|
|
// // one per index found...
|
|
|
|
|
// <path>/<sub-path>: {
|
|
|
|
|
// <keyword>: <kw-data>,
|
|
|
|
|
// ...
|
|
|
|
|
// },
|
|
|
|
|
// ...
|
|
|
|
|
// }
|
|
|
|
|
//
|
2014-12-16 04:50:34 +03:00
|
|
|
//
|
2014-12-29 18:47:27 +03:00
|
|
|
// Events emitted on logger if passed:
|
2014-12-16 04:50:34 +03:00
|
|
|
// - queued <path> - json file path queued for loading
|
|
|
|
|
// - loaded <path> - done loading json file path
|
2014-12-28 23:49:42 +03:00
|
|
|
// - index <path> <data> - done loading index at path
|
|
|
|
|
// - error <err> - an error occurred...
|
2014-12-28 05:09:28 +03:00
|
|
|
//
|
2014-12-16 04:50:34 +03:00
|
|
|
//
|
2014-12-31 19:18:03 +03:00
|
|
|
// NOTE: this is fairly generic and does not care about the type of data
|
|
|
|
|
// or it's format as long as it's JSON and the file names comply
|
|
|
|
|
// with the scheme above...
|
2015-05-21 04:00:28 +03:00
|
|
|
// NOTE: this only loads the JSON data and does not import or process
|
|
|
|
|
// anything...
|
2014-12-31 19:18:03 +03:00
|
|
|
//
|
2014-12-28 03:47:44 +03:00
|
|
|
// XXX test with:
|
2014-12-28 04:33:57 +03:00
|
|
|
// requirejs(['file'],
|
|
|
|
|
// function(m){
|
|
|
|
|
// f = m.loadIndex("L:/mnt/hdd15 (photo)/NTFS1/media/img/others") })
|
|
|
|
|
// .done(function(d){ console.log(d) })
|
2014-12-31 19:18:03 +03:00
|
|
|
// XXX need to do better error handling -- stop when an error is not recoverable...
|
2014-12-28 23:49:42 +03:00
|
|
|
// XXX a bit overcomplicated (???), see if this can be split into more generic
|
2014-12-28 05:09:28 +03:00
|
|
|
// sections...
|
2014-12-28 03:47:44 +03:00
|
|
|
var loadIndex =
module.loadIndex =
function(path, logger){
	// split off the INDEX_DIR suffix -- if path ends in INDEX_DIR
	// (optionally followed by slashes) it IS an index; otherwise we
	// recursively search the sub-tree for indexes...
	var p = path.split(INDEX_DIR)
	var last = p.slice(-1)[0].trim()

	return new Promise(function(resolve, reject){
		// we've got an index...
		if(p.length > 1 && /^\/*$/.test(last)){
			listJSON(path)
				// XXX handle errors...
				.on('error', function(err){
					logger && logger.emit('error', err)
				})
				.on('end', function(files){
					var res = {}
					var index = {}
					var root = {}

					// group by keyword...
					//
					// this will build a structure in the following format:
					// {
					//	<keyword>: [
					//		// diff files...
					//		// NOTE: the first argument indicates
					//		//		if this is a diff or not, used to
					//		//		skip past the last base...
					//		[true, <filename>],
					//		...
					//
					//		// base file (non-diff)
					//		[false, <filename>]
					//	],
					//	...
					// }
					//
					// This is used to sequence, load and correctly merge
					// the found JSON files.
					//
					// NOTE: all files past the first non-diff are skipped.
					// NOTE: sort+reverse yields newest-first filename order
					//		(timestamps are lexicographically sortable --
					//		presumably; confirm against the writer code)...
					files
						.sort()
						.reverse()
						.forEach(function(n){
							var b = pathlib.basename(n)
							var s = b.split(/[-.]/g).slice(0, -1)

							// <keyword>.json / non-diff
							// NOTE: this is a special case, we add this to
							//		a separate index and then concat it to
							//		the final list if needed...
							if(s.length == 1){
								var k = s[0]
								root[k] = n
								return

							// <timestamp>-<keyword>[-diff].json / diff / non-diff
							} else {
								var k = s[1]
								var d = s[2] == 'diff'
							}

							// new keyword...
							if(index[k] == null){
								index[k] = [[d, n]]
								logger && logger.emit('queued', n)

							// do not add anything past the latest non-diff
							// for each keyword...
							} else if(index[k].slice(-1)[0][0] == true){
								index[k].push([d, n])
								logger && logger.emit('queued', n)
							}
						})

					// add base files back where needed...
					Object.keys(root)
						.forEach(function(k){
							var n = root[k]

							// no diffs...
							if(index[k] == null){
								index[k] = [[false, n]]
								logger && logger.emit('queued', n)

							// add root file if no base is found...
							} else if(index[k].slice(-1)[0][0] == true){
								index[k].push([false, n])
								logger && logger.emit('queued', n)
							}
						})

					// load...
					// NOTE: each index[k] list ends with its base file; the
					//		base is loaded first, then diffs are merged over
					//		it oldest-to-newest (hence the .reverse() below)...
					Promise
						.all(Object.keys(index).map(function(k){
							// get relevant paths...
							// NOTE: .splice(-1) mutates diffs -- it removes
							//		and returns the base entry, leaving only
							//		diff entries in diffs...
							var diffs = index[k]
							var latest = diffs.splice(-1)[0][1]

							// NOTE: so far I really do not like how nested and
							//		unreadable the Promise/Deferred code becomes
							//		even with a small rise in complexity...
							//		...for example, the following code is quite
							//		simple, but does not look the part.
							//
							//		Maybe it's a style thing...

							// load latest...
							return loadJSON(latest)
								.then(function(data){
									// handle diffs...
									return Promise
										.all(diffs
											.reverse()
											.map(function(p){
												p = p[1]
												// load diff...
												return loadJSON(p)
													// XXX handle errors...
													// XXX we should abort loading this index...
													.catch(function(err){
														logger && logger.emit('error', err)
													})
													.done(function(json){
														// merge...
														// NOTE: shallow merge -- diff
														//		key-value pairs simply
														//		overwrite base values...
														for(var k in json){
															data[k] = json[k]
														}

														logger && logger.emit('loaded', p)
													})
											}))
										.then(function(){
											res[k] = data

											logger && logger.emit('loaded', latest)
										})
								})
						}))
						.then(function(){
							logger && logger.emit('index', path, res)

							// resolve to { <path>: { <keyword>: <data>, .. } }...
							var d = {}
							d[path] = res

							resolve(d)
						})
				})

		// no explicit index given -- find all in sub tree...
		} else {
			var res = {}

			// XXX handle 'error' event...
			listIndexes(path)
				// XXX handle errors...
				.on('error', function(err){
					logger && logger.emit('error', err)
				})
				// collect the found indexes...
				// NOTE: the handler's path argument shadows the outer path --
				//		here it is the matched index directory...
				.on('match', function(path){
					loadIndex(path, logger)
						.done(function(obj){
							// NOTE: considering that all the paths within
							//		the index are relative to the preview
							//		dir (the parent dir to the index root)
							//		we do not need to include the index
							//		itself in the base path...
							var p = path.split(INDEX_DIR)[0]
							res[p] = obj[path]
						})
				})
				// done...
				// NOTE(review): this resolves when globbing ends, but the
				//		per-match loadIndex(..) promises may still be pending
				//		-- res can be incomplete at resolve time; confirm...
				.on('end', function(paths){
					resolve(res)
				})
		}
	})
}
|
|
|
|
|
|
|
|
|
|
|
2015-05-21 04:00:28 +03:00
|
|
|
// get/populate the previews...
|
|
|
|
|
//
|
|
|
|
|
// format:
|
|
|
|
|
// {
|
|
|
|
|
// <index-base>: {
|
|
|
|
|
// <gid>: {
|
|
|
|
|
// <resolution>: <local-path>,
|
|
|
|
|
// ...
|
|
|
|
|
// },
|
|
|
|
|
// ...
|
|
|
|
|
// },
|
|
|
|
|
// ...
|
|
|
|
|
// }
|
|
|
|
|
//
|
|
|
|
|
// XXX should this be compatible with loadIndex(..) data???
|
|
|
|
|
// XXX handle errors....
|
|
|
|
|
var loadPreviews =
module.loadPreviews =
function(base, previews, absolute_path){
	// NOTE: results are merged into the caller-supplied previews object
	//		if one is given (it is mutated in place)...
	// NOTE(review): absolute_path is accepted but never read in this
	//		body -- possibly unfinished; confirm intent...
	previews = previews || {}

	return new Promise(function(resolve, reject){
		listIndexes(base)
			// XXX handle errors....
			//.on('error', function(err){
			//})
			// NOTE: this handler's base argument shadows the outer base --
			//		here it is the matched index directory...
			.on('match', function(base){
				if(!(base in previews)){
					previews[base] = {}
				}

				var images = previews[base]

				listPreviews(base)
					// XXX handle errors....
					//.on('error', function(err){
					//})
					// preview name syntax:
					// 	<res>px/<gid> - <orig-filename>.jpg
					.on('match', function(path){
						// get the data we need...
						// gid: filename prefix up to ' - '; res: the
						// '<res>px' parent directory name...
						var gid = pathlib.basename(path).split(' - ')[0]
						var res = pathlib.basename(pathlib.dirname(path))

						// build the structure if it does not exist...
						if(!(gid in images)){
							images[gid] = {}
						}
						if(images[gid].preview == null){
							images[gid].preview = {}
						}

						// add a preview...
						// NOTE: this will overwrite a previews if they are found in
						//		several locations...
						// NOTE: the stored path is made relative to the index
						//		by splitting on INDEX_DIR and re-prefixing it...
						images[gid].preview[res] = INDEX_DIR +'/'+ path.split(INDEX_DIR)[1]
					})
			})
			// NOTE(review): resolves when index globbing ends -- the inner
			//		listPreviews(..) globs may still be running, so previews
			//		can be incomplete at resolve time; confirm...
			.on('end', function(){
				resolve(previews)
			})
	})
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-11-15 01:25:04 +03:00
|
|
|
// XXX move this to a better spot...
|
|
|
|
|
// Build runtime data/images objects from a raw loaded index...
//
// Takes the plain JSON index (as produced by loadIndex(..)) and an
// optional base path, and returns:
// 	{
// 		data: <data.Data>,
// 		images: <images.Images>,
// 	}
//
// XXX move this to a better spot...
var buildIndex =
module.buildIndex = function(index, base){
	var state = data.Data.fromJSON(index.data)

	// buildup the data object...
	// NOTE: this is mostly to attach stuff that is stored in separate files...

	// .tags + bookmarks + selection...
	state.tags = index.tags || {}
	state.tags.bookmark = index.bookmarked ? index.bookmarked[0] : []
	state.tags.selected = index.marked || []
	state.sortTags()

	// .current...
	state.current = index.current || state.current

	// images...
	// XXX there seems to be a problem with updated images...
	// 	- in the test set not all rotated manually images are loaded rotated...
	var imgs = images.Images(index.images)

	if(base){
		state.base_path = base

		// XXX STUB remove ASAP...
		// ...need a real way to handle base dir, possible
		// approaches:
		// 	1) .base attr in image, set on load and
		// 		do not save (or ignore on load)...
		// 		if exists prepend to all paths...
		// 		- more to do in view-time
		// 		+ more flexible
		// 	2) add/remove on load/save (approach below)
		// 		+ less to do in real time
		// 		- more processing on load/save
		imgs.forEach(function(_, image){ image.base = base })
	}

	return {
		data: state,
		images: imgs,
	}
}
|
|
|
|
|
|
|
|
|
|
|
2015-11-15 02:31:31 +03:00
|
|
|
// XXX
|
|
|
|
|
// XXX
// NOTE(review): unimplemented stub -- currently accepts (index, base)
//		and returns undefined; callers should not rely on it yet...
var mergeIndex =
module.mergeIndex = function(index, base){
	// XXX
}
|
|
|
|
|
|
2014-12-16 04:50:34 +03:00
|
|
|
|
2014-12-28 23:49:42 +03:00
|
|
|
/*********************************************************************/
|
|
|
|
|
// Writer...
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2014-12-16 04:50:34 +03:00
|
|
|
|
2014-11-12 05:48:26 +03:00
|
|
|
/**********************************************************************
|
|
|
|
|
* vim:set ts=4 sw=4 : */
|
|
|
|
|
return module })
|