finished most of buildcache.py...

Signed-off-by: Alex A. Naanou <alex.nanou@gmail.com>
This commit is contained in:
Alex A. Naanou 2013-05-22 21:44:34 +04:00
parent 4e76cbbb60
commit fd8e68abd7
3 changed files with 866 additions and 675 deletions

318
buildcache.old.py Executable file
View File

@ -0,0 +1,318 @@
#=======================================================================
__version__ = '''0.0.01'''
__sub_version__ = '''20130521225013'''
__copyright__ = '''(c) Alex A. Naanou 2012'''
#-----------------------------------------------------------------------
import os
import Image
import json
import sha
import urllib2
from pli.logictypes import OR
import gid
#-----------------------------------------------------------------------
# XXX fanatically cleanup and normalise paths...
# XXX use real uuid's...
#
# TODO:
# - load config from file...
# - accept a path on command-line
# - default path is cwd
# - support nested fav's for ribbons
#
# Long Term TODO:
# - support processed images
#
#
#-----------------------------------------------------------------------
# default configuration: cache layout, index file names and preview sizes...
config = {
    'format-version': '2.0',
    # cache subdirectory per preview size, relative to the scanned root...
    'cache-structure': {
        # XXX make these as close to standard as possible and keep
        # sane distances...
        '150px': '.ImageGridCache/150px/',
        '350px': '.ImageGridCache/350px/',
        '900px': '.ImageGridCache/900px/',
        '1080px': '.ImageGridCache/1080px/',
        '1920px': '.ImageGridCache/1920px/',
    },
    # gen1 format...
    'json': '.ImageGridCache/all.json',
    # gen3 format...
    'images': '.ImageGridCache/images.json',
    'data': '.ImageGridCache/data.json',
    'marked': '.ImageGridCache/marked.json',
    'error': '.ImageGridCache/error.log',
    # target size (longest side, in pixels) for each preview key above...
    'sizes': {
        '150px': 150,
        '350px': 350,
        '900px': 900,
        '1080px': 1080,
        '1920px': 1920,
    }
}
# mutable module-level index: a single ribbon (dict keyed by image gid)...
images = {
    'position': 0,
    'ribbons':[
        {}
    ]
}
# compares equal to any of the listed extensions (pli.logictypes.OR)...
IMAGE_EXT = OR(*(
    '.jpg', '.jpeg', '.JPG', '.JPEG',
))
# template for records appended to the error log...
ERR_LOG = '''\
ERROR: %(error)s
SOURCE: %(source-file)s
TARGET: %(target-file)s
'''
# file name pattern for cached preview files...
CACHE_FILE_NAME = '%(guid)s - %(name)s'
#-----------------------------------------------------------------------
def pathjoin(*p):
'''
'''
return ('/'.join(p)).replace('//', '/')
def log_err(path, e, source_file, target_file):
    '''
    Append a formatted error record to the cache error log under path.
    '''
    err_file = pathjoin(path, config['error'])
    # create the log on first use, append afterwards...
    mode = 'w' if not os.path.exists(err_file) else 'a'
    with open(err_file, mode) as err:
        err.write(ERR_LOG % {
            'error': e,
            'source-file': source_file,
            'target-file': target_file,
        })
# this should:
# 1) see if image is cached, if yes return the cached guid (if dates match)...
# 2) read the image file and get its guid
##!!!
def get_image_guid(path, force=False):
    '''
    Return a content guid for the image at path: the SHA-1 hex digest of
    the decoded pixel data.

    NOTE: force is currently unused -- presumably reserved for cache
    bypass (see TODO comments above); confirm before relying on it.
    '''
    im = Image.open(path)
    return sha.sha(im.tostring()).hexdigest()
    ## return sha.sha(open(path, 'r').read())
def build_cache_dirs(path, config=config):
    '''
    Create every configured preview cache directory under path.

    Directories that already exist are left untouched.
    '''
    for cache_path in config['cache-structure'].values():
        target = pathjoin(path, cache_path)
        if not os.path.exists(target):
            os.makedirs(target)
# image format:
# {
#     'id': <image-id>,
#     'preview': {
#         <resolution>: <cache-path>,
#         ...
#     },
#     'path': <image-path>
# }
#
def build_index(path, images=None, count=None):
    '''
    Scan path for supported images and build/extend the images index.

    count, when given, limits how many files are processed (debug aid).
    Returns the images structure (see format comment above); entries are
    keyed by the content hash in ribbon 0.
    '''
    dirs = config['cache-structure']
    sizes = config['sizes']
    n = -1
    if images == None:
        images = {
            'position': 0,
            'ribbons': [{}],
        }
    for name in os.listdir(path):
        # skip non-images...
        iid, ext = os.path.splitext(name)
        if ext != IMAGE_EXT:
            continue
        ##!!! this is here for debugging...
        n += 1
        if count != None and n >= count:
            break
        i = {
            'id': iid,
            'preview': {},
            ##!!! absolute paths???
            'path': 'file:///' + urllib2.quote(pathjoin(path, name), safe='/:'),
        }
        img = Image.open(pathjoin(path, name), 'r')
        try:
            # the content hash replaces the file-name stem as the id...
            iid = sha.sha(img.tostring()).hexdigest()
        except IOError, e:
            # unreadable/corrupt image: log and skip...
            print 'x',
            log_err(path, e, name, '-')
            continue
        if iid in images['ribbons'][0]:
            # duplicate content, already indexed...
            print '_',
            continue
        i['id'] = iid
        images['ribbons'][0][iid] = i
        print '.',
    return images
# XXX this will not overwrite existing files...
# XXX make this distinguish absolute and relative paths...
def make_cache_images(path, images, config=config):
    '''
    Generate the configured preview sizes for every image in path,
    record them in images and write the gen1 index (all.json).

    Returns the number of image files seen.
    '''
    dirs = config['cache-structure']
    sizes = config['sizes']
    n = 0
    for name in os.listdir(path):
        # skip non-images...
        iid, ext = os.path.splitext(name)
        source_path = pathjoin(path, name)
        if ext != IMAGE_EXT:
            continue
        n += 1
        i = {
            'id': iid,
            'preview': {},
            ## 'path': pathjoin(path, name),
            ##!!! absolute paths???
            'path': 'file:///' + urllib2.quote(pathjoin(path, name), safe='/:'),
            'ctime': os.path.getctime(source_path),
        }
        img = Image.open(source_path, 'r')
        try:
            ##!!! use a real gid -- gid.image_gid(path, ...)
            iid = sha.sha(img.tostring()).hexdigest()
        except IOError, e:
            print 'x',
            log_err(path, e, name, '-')
            i['error'] = 'IOError: ' + str(e)
            continue
        finally:
            # we need to know which images are dead: record the entry
            # even on error (under the file-name id in that case)...
            images['ribbons'][0][iid] = i
        i['id'] = iid
        # add original image to struct...
        ## i['preview'][str(max(*img.size)) + 'px'] = pathjoin(path, name)
        i['preview'][str(max(*img.size)) + 'px'] = 'file:///' + urllib2.quote(pathjoin(path, name), safe='/:')
        # previews...
        for k, spec in sizes.items():
            p = pathjoin(path, dirs[k], CACHE_FILE_NAME % {'guid': iid, 'name': name})
            # do not upscale images...
            if max(*img.size) <= spec:
                continue
            # add image to index...
            if not os.path.exists(p):
                scale = spec/float(max(*img.size))
                preview = img.resize((int(img.size[0]*scale), int(img.size[1]*scale)), Image.ANTIALIAS)
                preview.save(p)
                ##!!! metadata???
                ##!!!
                print '.',
            else:
                # indicate an image skip...
                print '_',
            ## i['preview'][str(spec) + 'px'] = p
            ##!!! absolute paths???
            i['preview'][str(spec) + 'px'] = 'file:///' + urllib2.quote(p, safe='/:')
        # put original image in cache...
        # NOTE(review): this reuses spec/p left over from the loop above
        # and looks redundant with the assignment inside it -- confirm.
        ## i['preview'][str(spec) + 'px'] = p
        i['preview'][str(spec) + 'px'] = 'file:///' + urllib2.quote(p, safe='/:')
    # remember a current position (arbitrary: whichever key comes first)...
    images['position'] = images['ribbons'][0].keys()[0]
    with open(pathjoin(path, config['json']), 'w') as f:
        json.dump(images, f, indent=4)
    ##!!! STUB...
    return n
#-----------------------------------------------------------------------
def build_local_cache(path):
'''
'''
import time
t0 = time.time()
build_cache_dirs(path)
n = make_cache_images(path, images)
t1 = time.time()
print
print 'Processed %s images in %s seconds.' % (n, t1-t0)
#-----------------------------------------------------------------------
if __name__ == '__main__':
    # minimal CLI: a single positional argument, the root path to cache...
    from optparse import OptionParser
    parser = OptionParser()
    ##!!! need to define the path so that it shows up in -h
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.print_usage()
    else:
        IN_PATH = args[0]
        # normalize windows-style separators...
        IN_PATH = IN_PATH.replace('\\', '/')
        build_local_cache(IN_PATH)
    ## PATH = 'images/cache-test/'
    ## PATH = 'L:/incoming/UNSORTED/Images/fav'
    ## build_local_cache(PATH)
    ## index = build_index(PATH, count=10)
    ##
    ## import IPython
    ## IPython.embed()
#=======================================================================
# vim:set ts=4 sw=4 nowrap :

View File

@ -1,318 +1,548 @@
#=======================================================================
__version__ = '''0.0.01'''
__sub_version__ = '''20130521225013'''
__copyright__ = '''(c) Alex A. Naanou 2012'''
#-----------------------------------------------------------------------
import os
import Image
import json
import sha
import urllib2
from pli.logictypes import OR
import gid
#-----------------------------------------------------------------------
# XXX fanatically cleanup and normalise paths...
# XXX use real uuid's...
#
# TODO:
# - load config from file...
# - accept a path on command-line
# - default path is cwd
# - support nested fav's for ribbons
#
# Long Term TODO:
# - support processed images
#
#
#-----------------------------------------------------------------------
# default configuration: cache layout, index file names and preview sizes...
config = {
    'format-version': '2.0',
    # cache subdirectory per preview size, relative to the scanned root...
    'cache-structure': {
        # XXX make these as close to standard as possible and keep
        # sane distances...
        '150px': '.ImageGridCache/150px/',
        '350px': '.ImageGridCache/350px/',
        '900px': '.ImageGridCache/900px/',
        '1080px': '.ImageGridCache/1080px/',
        '1920px': '.ImageGridCache/1920px/',
    },
    # gen1 format...
    'json': '.ImageGridCache/all.json',
    # gen3 format...
    'images': '.ImageGridCache/images.json',
    'data': '.ImageGridCache/data.json',
    'marked': '.ImageGridCache/marked.json',
    'error': '.ImageGridCache/error.log',
    # target size (longest side, in pixels) for each preview key above...
    'sizes': {
        '150px': 150,
        '350px': 350,
        '900px': 900,
        '1080px': 1080,
        '1920px': 1920,
    }
}
# mutable module-level index: a single ribbon (dict keyed by image gid)...
images = {
    'position': 0,
    'ribbons':[
        {}
    ]
}
# compares equal to any of the listed extensions (pli.logictypes.OR)...
IMAGE_EXT = OR(*(
    '.jpg', '.jpeg', '.JPG', '.JPEG',
))
# template for records appended to the error log...
ERR_LOG = '''\
ERROR: %(error)s
SOURCE: %(source-file)s
TARGET: %(target-file)s
'''
# file name pattern for cached preview files...
CACHE_FILE_NAME = '%(guid)s - %(name)s'
#-----------------------------------------------------------------------
def pathjoin(*p):
'''
'''
return ('/'.join(p)).replace('//', '/')
def log_err(path, e, source_file, target_file):
    '''
    Append a formatted error record to the cache error log under path.
    '''
    err_file = pathjoin(path, config['error'])
    # create the log on first use, append afterwards...
    mode = 'w' if not os.path.exists(err_file) else 'a'
    with open(err_file, mode) as err:
        err.write(ERR_LOG % {
            'error': e,
            'source-file': source_file,
            'target-file': target_file,
        })
# this should:
# 1) see if image is cached, if yes return the cached guid (if dates match)...
# 2) read the image file and get its guid
##!!!
def get_image_guid(path, force=False):
    '''
    Return a content guid for the image at path: the SHA-1 hex digest of
    the decoded pixel data.

    NOTE: force is currently unused -- presumably reserved for cache
    bypass (see TODO comments above); confirm before relying on it.
    '''
    im = Image.open(path)
    return sha.sha(im.tostring()).hexdigest()
    ## return sha.sha(open(path, 'r').read())
def build_cache_dirs(path, config=config):
    '''
    Create every configured preview cache directory under path.

    Directories that already exist are left untouched.
    '''
    for cache_path in config['cache-structure'].values():
        target = pathjoin(path, cache_path)
        if not os.path.exists(target):
            os.makedirs(target)
# image format:
# {
#     'id': <image-id>,
#     'preview': {
#         <resolution>: <cache-path>,
#         ...
#     },
#     'path': <image-path>
# }
#
def build_index(path, images=None, count=None):
    '''
    Scan path for supported images and build/extend the images index.

    count, when given, limits how many files are processed (debug aid).
    Returns the images structure (see format comment above); entries are
    keyed by the content hash in ribbon 0.
    '''
    dirs = config['cache-structure']
    sizes = config['sizes']
    n = -1
    if images == None:
        images = {
            'position': 0,
            'ribbons': [{}],
        }
    for name in os.listdir(path):
        # skip non-images...
        iid, ext = os.path.splitext(name)
        if ext != IMAGE_EXT:
            continue
        ##!!! this is here for debugging...
        n += 1
        if count != None and n >= count:
            break
        i = {
            'id': iid,
            'preview': {},
            ##!!! absolute paths???
            'path': 'file:///' + urllib2.quote(pathjoin(path, name), safe='/:'),
        }
        img = Image.open(pathjoin(path, name), 'r')
        try:
            # the content hash replaces the file-name stem as the id...
            iid = sha.sha(img.tostring()).hexdigest()
        except IOError, e:
            # unreadable/corrupt image: log and skip...
            print 'x',
            log_err(path, e, name, '-')
            continue
        if iid in images['ribbons'][0]:
            # duplicate content, already indexed...
            print '_',
            continue
        i['id'] = iid
        images['ribbons'][0][iid] = i
        print '.',
    return images
# XXX this will not overwrite existing files...
# XXX make this distinguish absolute and relative paths...
def make_cache_images(path, images, config=config):
    '''
    Generate the configured preview sizes for every image in path,
    record them in images and write the gen1 index (all.json).

    Returns the number of image files seen.
    '''
    dirs = config['cache-structure']
    sizes = config['sizes']
    n = 0
    for name in os.listdir(path):
        # skip non-images...
        iid, ext = os.path.splitext(name)
        source_path = pathjoin(path, name)
        if ext != IMAGE_EXT:
            continue
        n += 1
        i = {
            'id': iid,
            'preview': {},
            ## 'path': pathjoin(path, name),
            ##!!! absolute paths???
            'path': 'file:///' + urllib2.quote(pathjoin(path, name), safe='/:'),
            'ctime': os.path.getctime(source_path),
        }
        img = Image.open(source_path, 'r')
        try:
            ##!!! use a real gid -- gid.image_gid(path, ...)
            iid = sha.sha(img.tostring()).hexdigest()
        except IOError, e:
            print 'x',
            log_err(path, e, name, '-')
            i['error'] = 'IOError: ' + str(e)
            continue
        finally:
            # we need to know which images are dead: record the entry
            # even on error (under the file-name id in that case)...
            images['ribbons'][0][iid] = i
        i['id'] = iid
        # add original image to struct...
        ## i['preview'][str(max(*img.size)) + 'px'] = pathjoin(path, name)
        i['preview'][str(max(*img.size)) + 'px'] = 'file:///' + urllib2.quote(pathjoin(path, name), safe='/:')
        # previews...
        for k, spec in sizes.items():
            p = pathjoin(path, dirs[k], CACHE_FILE_NAME % {'guid': iid, 'name': name})
            # do not upscale images...
            if max(*img.size) <= spec:
                continue
            # add image to index...
            if not os.path.exists(p):
                scale = spec/float(max(*img.size))
                preview = img.resize((int(img.size[0]*scale), int(img.size[1]*scale)), Image.ANTIALIAS)
                preview.save(p)
                ##!!! metadata???
                ##!!!
                print '.',
            else:
                # indicate an image skip...
                print '_',
            ## i['preview'][str(spec) + 'px'] = p
            ##!!! absolute paths???
            i['preview'][str(spec) + 'px'] = 'file:///' + urllib2.quote(p, safe='/:')
        # put original image in cache...
        # NOTE(review): this reuses spec/p left over from the loop above
        # and looks redundant with the assignment inside it -- confirm.
        ## i['preview'][str(spec) + 'px'] = p
        i['preview'][str(spec) + 'px'] = 'file:///' + urllib2.quote(p, safe='/:')
    # remember a current position (arbitrary: whichever key comes first)...
    images['position'] = images['ribbons'][0].keys()[0]
    with open(pathjoin(path, config['json']), 'w') as f:
        json.dump(images, f, indent=4)
    ##!!! STUB...
    return n
#-----------------------------------------------------------------------
def build_local_cache(path):
'''
'''
import time
t0 = time.time()
build_cache_dirs(path)
n = make_cache_images(path, images)
t1 = time.time()
print
print 'Processed %s images in %s seconds.' % (n, t1-t0)
#-----------------------------------------------------------------------
if __name__ == '__main__':
    # minimal CLI: a single positional argument, the root path to cache...
    from optparse import OptionParser
    parser = OptionParser()
    ##!!! need to define the path so that it shows up in -h
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.print_usage()
    else:
        IN_PATH = args[0]
        # normalize windows-style separators...
        IN_PATH = IN_PATH.replace('\\', '/')
        build_local_cache(IN_PATH)
    ## PATH = 'images/cache-test/'
    ## PATH = 'L:/incoming/UNSORTED/Images/fav'
    ## build_local_cache(PATH)
    ## index = build_index(PATH, count=10)
    ##
    ## import IPython
    ## IPython.embed()
#=======================================================================
# vim:set ts=4 sw=4 nowrap :
#=======================================================================
__version__ = '''0.0.01'''
__sub_version__ = '''20130522213945'''
__copyright__ = '''(c) Alex A. Naanou 2011'''
#-----------------------------------------------------------------------
import os
import Image
import json
import sha
import urllib2
import time
from pli.logictypes import OR
import gid
#-----------------------------------------------------------------------
# default configuration: output mode, cache layout and preview sizes...
CONFIG = {
    # emit absolute file:/// URLs when True, cache-relative paths otherwise...
    'absolute-path': False,
    # this can be:
    # - original (default)
    # - preview size
    # - metadata
    'gid-source': 'original',
    # file name pattern for cached preview files...
    'cache-image-name': '%(guid)s - %(name)s',
    # the rest of the paths will be relative to this...
    'cache-dir': '.ImageGridCache',
    'images': 'images.json',
    'data': 'data.json',
    'marked': 'marked.json',
    'config': 'ImageGrid.cfg',
    'error': 'error.log',
    # cache subdirectory per preview size, relative to cache-dir...
    'cache-structure': {
        # make these as close to standard as possible and keep sane
        # distances...
        '150px': '150px/',
        '350px': '350px/',
        '900px': '900px/',
        '1080px': '1080px/',
        '1920px': '1920px/',
    },
    # target size (longest side, in pixels) for each preview key above...
    'sizes': {
        '150px': 150,
        '350px': 350,
        '900px': 900,
        '1080px': 1080,
        '1920px': 1920,
    }
}
# template for the data.json structure (copied and filled by build_data)...
DATA = {
    'version': '2.0',
    'current': None,
    'ribbons': (),
    'order': (),
    'image_file': None,
}
# compares equal to any of the listed extensions (pli.logictypes.OR)...
IMAGE_EXT = OR(*(
    '.jpg', '.jpeg', '.JPG', '.JPEG',
))
# template for records appended to the error log...
ERR_LOG = '''\
ERROR: %(error)s
SOURCE: %(source-file)s
TARGET: %(target-file)s
'''
#-----------------------------------------------------------------------
# Helpers...
#------------------------------------------------------------pathjoin---
def pathjoin(*p):
'''
'''
return ('/'.join(p)).replace('//', '/')
#-------------------------------------------------------------getpath---
def getpath(root, path, absolute=False):
    '''
    Build a URL-quoted reference to path.

    In absolute mode this is a file:/// URL rooted at root; otherwise
    the (normalized) relative path is returned and root is ignored.
    '''
    if absolute == True:
        full = pathjoin(root, path)
        return 'file:///' + urllib2.quote(full, safe='/:')
    return urllib2.quote(pathjoin(path), safe='/:')
#-------------------------------------------------------------log_err---
def log_err(path, e, source_file, target_file):
    '''
    Append a formatted error record to the cache error log under path.
    '''
    err_file = pathjoin(path, CONFIG['error'])
    # create the log on first use, append afterwards...
    mode = 'w' if not os.path.exists(err_file) else 'a'
    with open(err_file, mode) as err:
        err.write(ERR_LOG % {
            'error': e,
            'source-file': source_file,
            'target-file': target_file,
        })
#------------------------------------------------------------hash_gid---
def hash_gid(img, force=False):
    '''
    Generate gid based on preview file content.
    NOTE: img can be either a path or an Image.
    NOTE: force is currently unused -- confirm before relying on it.

    Returns the SHA-1 hex digest of the decoded pixel data.
    '''
    if type(img) in (str, unicode):
        img = Image.open(img)
    return sha.sha(img.tostring()).hexdigest()
#-----------------------------------------------------report_progress---
def report_progress(img, status):
'''
'''
# created all previews...
if False not in status:
print '.',
# created no previews...
elif True not in status:
print '-',
# created some previews...
else:
print 'p',
return img
#-----------------------------------------make_inline_report_progress---
def make_inline_report_progress(state=None):
    '''
    Build a progress reporter that rewrites a single status line in place.

    state (a dict) accumulates created/partial/skipped counts and the
    'started at'/'done at' timestamps; pass in your own dict to inspect
    the totals after the run.
    '''
    if state == None:
        state = {}
    if 'started at' not in state:
        state['started at'] = time.time()
    def _inline_report_progress(img, status):
        # status holds one boolean per preview considered...
        created = state.get('created', 0)
        skipped = state.get('skipped', 0)
        partial = state.get('partial', 0)
        # created all previews...
        if False not in status:
            created += 1
            state['created'] = created
        # created no previews...
        elif True not in status:
            skipped += 1
            state['skipped'] = skipped
        # created some previews...
        else:
            partial += 1
            state['partial'] = partial
        state['done at'] = time.time()
        # the trailing \r plus soft comma keep this on one updating line...
        print 'Previews created: %s partial: %s skipped: %s...\r' % (created, partial, skipped),
        return img
    return _inline_report_progress
#-----------------------------------------------------------------------
# API...
#----------------------------------------------------build_cache_dirs---
def build_cache_dirs(path, config=CONFIG, dry_run=False, verbosity=0):
'''
Build cache directory tree.
'''
if verbosity >= 1:
print 'Creating cache directory structure...'
cache_dir = config['cache-dir']
dirs = config['cache-structure']
for _, k in dirs.items():
p = pathjoin(path, cache_dir, k)
if not os.path.exists(p):
if not dry_run:
os.makedirs(p)
if verbosity >= 2:
print 'Creating directory: %s' % p
elif verbosity >= 2:
print 'Directory exists: %s' % p
#--------------------------------------------------------build_images---
def build_images(path, config=CONFIG, gid_generator=hash_gid, verbosity=0):
    '''
    Build image structures update images.json in cache.

    Yields one image dict per supported file found directly in path;
    ids come from gid_generator(source_path).
    '''
    absolute_path = config['absolute-path']
    for name in os.listdir(path):
        # skip non-image extensions...
        fname, ext = os.path.splitext(name)
        if ext != IMAGE_EXT:
            continue
        source_path = pathjoin(path, name)
        img = {
            'id': gid_generator(source_path),
            'name': name,
            'type': 'image',
            'state': 'single',
            'path': getpath(path, name, absolute_path),
            'ctime': os.path.getctime(source_path),
            'preview': {},
        }
        if verbosity >= 2:
            # pad with spaces to wipe the inline progress line first...
            print (' '*72) + '\rProcessing image: %s' % getpath(path, name, absolute_path)
        yield img
#------------------------------------------------------build_previews---
# NOTE: this will create images in the file system.
def build_previews(image, path=None, config=CONFIG, dry_run=True, verbosity=0):
    '''
    NOTE: this needs the cache directory structure present.

    Create the configured preview sizes for image (unless dry_run) and
    record their paths in image['preview'].

    Returns (image, status) where status holds one boolean per preview
    actually considered: True -> created, False -> already existed.
    '''
    status = []
    # config...
    absolute_path = config['absolute-path']
    cache_dir = config['cache-dir']
    dirs = config['cache-structure']
    sizes = config['sizes']
    cache_name = config['cache-image-name']
    # data...
    gid = image['id']
    img_name = image['name']
    name = os.path.splitext(img_name)[0]
    img_path = image['path']
    if absolute_path == False:
        source_path = os.path.join(path, img_path)
    else:
        # XXX is this the best way???
        # resolve the file:/// URL back to a local file name...
        o = urllib2.urlopen(img_path)
        source_path = o.fp.name
        o.close()
    img = Image.open(source_path, 'r')
    # biggest preview is the original image...
    image['preview'][str(max(*img.size)) + 'px'] = img_path
    # previews...
    for k, spec in sizes.items():
        if k in image['preview'].keys():
            continue
        # build the two paths: relative and full...
        n = pathjoin(cache_dir, dirs[k], cache_name % {'guid': gid, 'name': img_name})
        p = pathjoin(path, n)
        # do not upscale images...
        if max(*img.size) <= spec:
            continue
        # add image to index...
        if not os.path.exists(p):
            scale = spec/float(max(*img.size))
            preview = img.resize((int(img.size[0]*scale), int(img.size[1]*scale)), Image.ANTIALIAS)
            if not dry_run:
                preview.save(p)
            else:
                preview.close()
            ##!!! metadata???
            status += [True]
        # image exists...
        else:
            status += [False]
        image['preview'][str(spec) + 'px'] = getpath(path, n, absolute_path)
        if verbosity >= 2:
            print ' %s: %s' % ('C' if status[-1] else 'S', getpath(path, n, absolute_path))
    return image, status
#----------------------------------------------------------build_data---
##!!! add option to consider fav depth and build a correct ribbon structure...
def build_data(images, path, config=CONFIG):
    '''
    Aggregate an iterable of image dicts into the top-level data set.

    Returns (data, images_index, marked) where:
    - data: a copy of DATA with ribbons/order/current filled in,
    - images_index: gid -> image dict,
    - marked: list of marked gids (always empty here).

    The single ribbon is ordered by ctime, newest first.
    '''
    images_index = {}
    marked = []
    data = DATA.copy()
    ribbon = []
    for image in images:
        gid = image['id']
        images_index[gid] = image
        ribbon += [gid]
    # sort newest-first; the key-based form replaces the old py2-only
    # cmp-based sort and is stable in exactly the same way...
    ribbon.sort(key=lambda gid: images_index[gid]['ctime'], reverse=True)
    data['ribbons'] = [ribbon]
    data['order'] = ribbon[:]
    # guard the empty-directory case: the previous ribbon[0] raised
    # IndexError when no images were found...
    data['current'] = ribbon[0] if ribbon else None
    return data, images_index, marked
#-----------------------------------------------------------------------
# High-level API...
#---------------------------------------------------------build_cache---
##!!! DO NOT OVERWRITE EXISTING DATA...
def build_cache(path, config=CONFIG, gid_generator=hash_gid,
        report_progress=report_progress, dry_run=False, verbosity=0):
    '''
    Build the whole cache for path: directories, previews and the three
    json files (images/data/marked).

    Returns the filled-in data structure.
    '''
    cache_dir = config['cache-dir']
    absolute_path = config['absolute-path']
    build_cache_dirs(path, config, dry_run, verbosity)
    if report_progress == None:
        # no-op reporter...
        report_progress = lambda a, b: a
    # pipeline (lazy): image structs -> previews -> aggregated data...
    data, images, marked = build_data(
        (report_progress(
            *build_previews(img, path, config, dry_run=dry_run, verbosity=verbosity))
            for img in build_images(path, config, gid_generator, verbosity=verbosity)),
        path, config)
    images_file = pathjoin(cache_dir, config['images'])
    data_file = pathjoin(cache_dir, config['data'])
    marked_file = pathjoin(cache_dir, config['marked'])
    data['image_file'] = getpath(path, images_file, absolute_path)
    if verbosity >= 1:
        print
    for n, d in {images_file: images, data_file: data, marked_file: marked}.items():
        n = os.path.join(path, n)
        if verbosity >= 1:
            print 'Writing: %s' % n
        if not dry_run:
            ##!!! DO NOT OVERWRITE EXISTING DATA...
            with open(n, 'w') as f:
                json.dump(d, f, indent=4)
    return data
#-----------------------------------------------------------------------
if __name__ == '__main__':
    # CLI: parse options, layer configuration (defaults -> config files
    # -> command line), then either print config or build the cache...
    from optparse import OptionParser, OptionGroup
    parser = OptionParser(
        usage='Usage: %prog [options] ROOT',
        version='%prog ' + __version__,
        epilog='Notes: This script is still experimental. '
            'GID source default will change to "metadata" '
            'in the future.')
    parser.add_option('-q', '--quiet',
        dest='verbosity',
        action='store_const',
        const=0,
        default=1,
        help='Run quietly.')
    parser.add_option('-v', '--verbose',
        dest='verbosity',
        action='store_const',
        const=2,
        default=1,
        help='Do full reporting.')
    parser.add_option('--debug',
        dest='verbosity',
        action='store_const',
        const=3,
        default=1,
        help='Print debug data.')
    parser.add_option('--dry-run',
        action='store_true',
        default=False,
        help='Run but do not write anything to disk.')
    output_configuration = OptionGroup(parser, 'Output configuration')
    output_configuration.add_option('--path-mode',
        default='absolute' if CONFIG['absolute-path'] else 'relative',
        help='Path generation mode (default: "%default").')
    output_configuration.add_option('--gid-source',
        default=CONFIG['gid-source'],
        help='Source used for GID generation (default: "%default").')
    parser.add_option_group(output_configuration)
    configuration = OptionGroup(parser, 'Configuration options')
    configuration.add_option('--config-file',
        metavar='PATH',
        default=CONFIG['config'],
        help='Config file to search for (default: "%default").')
    configuration.add_option('--config-print',
        action='store_true',
        default=False,
        help='Print current configuration and exit.')
    configuration.add_option('--config-defaults-print',
        action='store_true',
        default=False,
        help='Print default configuration and exit.')
    configuration.add_option('--config-save-local',
        action='store_true',
        default=False,
        help='Save current configuration at the root location. '
            'this is a shorthand for: '
            '%prog ... --config-print > ROOT/CONFIG; %prog')
    parser.add_option_group(configuration)
    options, args = parser.parse_args()
    ##!!! test if we are missing something...
    ## if (len(args) != 1
    ##        and True not in (options.config_defaults_print, options.config_print)):
    ##    parser.print_usage()
    ##    raise SystemExit
    # prepare the path...
    if len(args) < 1:
        # default to the current directory...
        IN_PATH = '.'
    else:
        IN_PATH = args[0]
    # normalize windows-style separators...
    IN_PATH = IN_PATH.replace('\\', '/')
    # effective config: defaults overlaid by files, then by options...
    config = {}
    config.update(CONFIG)
    # load configuration files..
    config_name = options.config_file
    # local to script...
    if os.path.exists(config_name):
        with open(config_name) as f:
            config.update(json.load(f))
    # local to target...
    if os.path.exists(os.path.join(IN_PATH, config_name)):
        with open(os.path.join(IN_PATH, config_name)) as f:
            config.update(json.load(f))
    # update config according to set args...
    config.update({
        'gid-source': options.gid_source,
        'absolute-path': options.path_mode == 'absolute',
    })
    # a value from 0 through 2...
    verbosity = options.verbosity
    # bool...
    dry_run = options.dry_run
    # configuration stuff...
    # write a local configuration...
    if options.config_save_local:
        with file(os.path.join(IN_PATH, config_name), 'w') as f:
            f.write(json.dumps(config, sort_keys=True, indent=4))
    # print configuration data...
    if True in (options.config_defaults_print, options.config_print):
        # see if we need to print a prefix...
        print_prefix = False
        if len([ s for s in (options.config_defaults_print, options.config_print) if s]) > 1:
            print_prefix = True
        # do the printing...
        if options.config_print:
            if print_prefix:
                print 'Current Configuration:'
            print json.dumps(config, sort_keys=True, indent=4)
            print
        if options.config_defaults_print:
            if print_prefix:
                print 'Default Configuration:'
            print json.dumps(CONFIG, sort_keys=True, indent=4)
            print
    # do the actual work...
    # NOTE: we are not using verbosity 2 at this point...
    else:
        progress_state = {}
        if verbosity == 0:
            report = None
        elif verbosity >= 1:
            report = make_inline_report_progress(progress_state)
        # do the run...
        build_cache(IN_PATH,
            config,
            hash_gid,
            report,
            dry_run,
            verbosity)
        # report results...
        if verbosity >= 1:
            print
            print 'Time: %.1fm' % ((progress_state['done at'] - progress_state['started at'])/60)
#=======================================================================
# vim:set ts=4 sw=4 nowrap :

View File

@ -1,357 +0,0 @@
#=======================================================================
__version__ = '''0.0.01'''
__sub_version__ = '''20130522020247'''
__copyright__ = '''(c) Alex A. Naanou 2011'''
#-----------------------------------------------------------------------
import os
import Image
import json
import sha
import urllib2
import time
from pli.logictypes import OR
import gid
#-----------------------------------------------------------------------
# default configuration: output mode, cache layout and preview sizes...
CONFIG = {
    # emit absolute file:/// URLs when True, relative paths otherwise...
    'absolute-path': False,
    # file name pattern for cached preview files...
    'cache-image-name': '%(guid)s - %(name)s',
    'images': '.ImageGridCache/images.json',
    'data': '.ImageGridCache/data.json',
    'marked': '.ImageGridCache/marked.json',
    'error': '.ImageGridCache/error.log',
    # cache subdirectory per preview size, relative to the scanned root...
    'cache-structure': {
        # make these as close to standard as possible and keep sane
        # distances...
        '150px': '.ImageGridCache/150px/',
        '350px': '.ImageGridCache/350px/',
        '900px': '.ImageGridCache/900px/',
        '1080px': '.ImageGridCache/1080px/',
        '1920px': '.ImageGridCache/1920px/',
    },
    # target size (longest side, in pixels) for each preview key above...
    'sizes': {
        '150px': 150,
        '350px': 350,
        '900px': 900,
        '1080px': 1080,
        '1920px': 1920,
    }
}
# template for the data.json structure (copied and filled by build_data)...
DATA = {
    'version': '2.0',
    'current': None,
    'ribbons': (),
    'order': (),
    'image_file': None,
}
# compares equal to any of the listed extensions (pli.logictypes.OR)...
IMAGE_EXT = OR(*(
    '.jpg', '.jpeg', '.JPG', '.JPEG',
))
# template for records appended to the error log...
ERR_LOG = '''\
ERROR: %(error)s
SOURCE: %(source-file)s
TARGET: %(target-file)s
'''
#-----------------------------------------------------------------------
# Helpers...
#------------------------------------------------------------pathjoin---
def pathjoin(*p):
'''
'''
return ('/'.join(p)).replace('//', '/')
#-------------------------------------------------------------getpath---
def getpath(root, path, absolute=False):
    '''
    Return a URL-quoted reference to path: a file:/// URL rooted at root
    when absolute, else the normalized relative path (root is ignored).
    '''
    if absolute == True:
        return 'file:///' + urllib2.quote(pathjoin(root, path), safe='/:')
    else:
        return urllib2.quote(pathjoin(path), safe='/:')
#-------------------------------------------------------------log_err---
def log_err(path, e, source_file, target_file):
    '''
    Append a formatted error record to the cache error log under path.
    '''
    err_file = pathjoin(path, CONFIG['error'])
    # create the log on first use, append afterwards...
    if not os.path.exists(err_file):
        err = open(err_file, 'w')
    else:
        err = open(err_file, 'a')
    with err:
        err.write(ERR_LOG % {
            'source-file': source_file,
            'target-file': target_file,
            'error': e,
        })
#------------------------------------------------------------hash_gid---
def hash_gid(img, force=False):
    '''
    Generate gid based on preview file content.
    NOTE: img can be either a path or an Image.
    NOTE: force is currently unused -- confirm before relying on it.

    Returns the SHA-1 hex digest of the decoded pixel data.
    '''
    if type(img) in (str, unicode):
        img = Image.open(img)
    return sha.sha(img.tostring()).hexdigest()
#-----------------------------------------------------report_progress---
def report_progress(img, status):
    '''
    Print a one-character progress marker for img and pass img through.

    status holds one boolean per preview considered: True -> created,
    False -> skipped (already present).
    '''
    # created all previews...
    if False not in status:
        print '.',
    # created no previews...
    elif True not in status:
        print '-',
    # created some previews...
    else:
        print 'p',
    return img
#-----------------------------------------make_inline_report_progress---
def make_inline_report_progress(state=None):
    '''
    Build a progress reporter that rewrites a single status line in place.

    state (a dict) accumulates created/partial/skipped counts and the
    'started at'/'done at' timestamps; pass in your own dict to inspect
    the totals after the run.
    '''
    if state == None:
        state = {}
    if 'started at' not in state:
        state['started at'] = time.time()
    def _inline_report_progress(img, status):
        # status holds one boolean per preview considered...
        created = state.get('created', 0)
        skipped = state.get('skipped', 0)
        partial = state.get('partial', 0)
        # created all previews...
        if False not in status:
            created += 1
            state['created'] = created
        # created no previews...
        elif True not in status:
            skipped += 1
            state['skipped'] = skipped
        # created some previews...
        else:
            partial += 1
            state['partial'] = partial
        state['done at'] = time.time()
        # the trailing \r plus soft comma keep this on one updating line...
        print 'Previews created: %s partial: %s skipped: %s...\r' % (created, partial, skipped),
        return img
    return _inline_report_progress
#-----------------------------------------------------------------------
# API...
#----------------------------------------------------build_cache_dirs---
def build_cache_dirs(path, config=CONFIG):
    '''
    Build cache directory tree.

    Existing directories are left untouched.
    '''
    dirs = config['cache-structure']
    # note: _ is the size key, k is the directory path...
    for _, k in dirs.items():
        p = pathjoin(path, k)
        if not os.path.exists(p):
            os.makedirs(p)
#--------------------------------------------------------build_images---
def build_images(path, config=CONFIG, gid_generator=hash_gid):
    '''
    Build image structures update images.json in cache.

    Yields one image dict per supported file found directly in path;
    ids come from gid_generator(source_path).
    '''
    absolute_path = config['absolute-path']
    for name in os.listdir(path):
        # skip non-image extensions...
        fname, ext = os.path.splitext(name)
        if ext != IMAGE_EXT:
            continue
        source_path = pathjoin(path, name)
        img = {
            'id': gid_generator(source_path),
            'name': name,
            'type': 'image',
            'state': 'single',
            'path': getpath(path, name, absolute_path),
            'ctime': os.path.getctime(source_path),
            'preview': {},
        }
        yield img
#------------------------------------------------------build_previews---
# NOTE: this will create images in the file system.
def build_previews(image, path=None, config=CONFIG, dry_run=True):
    '''
    NOTE: this needs the cache directory structure present.

    Create the configured preview sizes for image (unless dry_run) and
    record their paths in image['preview'].

    Returns (image, status) where status holds one boolean per preview
    actually considered: True -> created, False -> already existed.
    '''
    status = []
    # config...
    absolute_path = config['absolute-path']
    dirs = config['cache-structure']
    sizes = config['sizes']
    cache_name = config['cache-image-name']
    # data...
    gid = image['id']
    img_name = image['name']
    name = os.path.splitext(img_name)[0]
    img_path = image['path']
    if absolute_path == False:
        source_path = os.path.join(path, img_path)
    else:
        # XXX is this the best way???
        # resolve the file:/// URL back to a local file name...
        o = urllib2.urlopen(img_path)
        source_path = o.fp.name
        o.close()
    img = Image.open(source_path, 'r')
    # biggest preview is the original image...
    image['preview'][str(max(*img.size)) + 'px'] = img_path
    # previews...
    for k, spec in sizes.items():
        if k in image['preview'].keys():
            continue
        # build the two paths: relative and full...
        n = pathjoin(dirs[k], cache_name % {'guid': gid, 'name': img_name})
        p = pathjoin(path, n)
        # do not upscale images...
        if max(*img.size) <= spec:
            continue
        # add image to index...
        if not os.path.exists(p):
            scale = spec/float(max(*img.size))
            preview = img.resize((int(img.size[0]*scale), int(img.size[1]*scale)), Image.ANTIALIAS)
            if not dry_run:
                preview.save(p)
            else:
                preview.close()
            ##!!! metadata???
            status += [True]
        # image exists...
        else:
            status += [False]
        image['preview'][str(spec) + 'px'] = getpath(path, n, absolute_path)
    return image, status
#----------------------------------------------------------build_data---
def build_data(images, path, config=CONFIG):
    '''
    Aggregate an iterable of image dicts into the top-level data set.

    Returns (data, images_index, marked) where data is a copy of DATA
    with ribbons/order/current filled in; the single ribbon is ordered
    by ctime, newest first.
    '''
    images_index = {}
    marked = []
    data = DATA.copy()
    ribbon = []
    for image in images:
        gid = image['id']
        images_index[gid] = image
        ribbon += [gid]
    # newest-first by file ctime...
    ribbon.sort(lambda a, b: cmp(images_index[b]['ctime'], images_index[a]['ctime']))
    data['ribbons'] = [ribbon]
    data['order'] = ribbon[:]
    # NOTE(review): this raises IndexError when no images were found...
    data['current'] = ribbon[0]
    return data, images_index, marked
#-----------------------------------------------------------------------
# High-level API...
#---------------------------------------------------------build_cache---
##!!! DO NOT OVERWRITE EXISTING DATA...
def build_cache(path, config=CONFIG, gid_generator=hash_gid,
        report_progress=report_progress, dry_run=False):
    '''
    Build the whole cache for path: directories, previews and the three
    json files (images/data/marked), the latter skipped under dry_run.

    Returns the filled-in data structure.
    '''
    absolute_path = config['absolute-path']
    build_cache_dirs(path, config)
    # pipeline (lazy): image structs -> previews -> aggregated data...
    data, images, marked = build_data(
        (report_progress(*build_previews(img, path, config, dry_run=dry_run))
            for img in build_images(path, config, gid_generator)),
        path, config)
    images_file = config['images']
    data_file = config['data']
    marked_file = config['marked']
    data['image_file'] = getpath(path, images_file, absolute_path)
    if not dry_run:
        ##!!! DO NOT OVERWRITE EXISTING DATA...
        with open(os.path.join(path, images_file), 'w') as f:
            json.dump(images, f, indent=4)
        with open(os.path.join(path, data_file), 'w') as f:
            json.dump(data, f, indent=4)
        with open(os.path.join(path, marked_file), 'w') as f:
            json.dump(marked, f, indent=4)
    return data
#-----------------------------------------------------------------------
if __name__ == '__main__':
    # library-only module at this point -- no CLI entry yet...
    pass
#=======================================================================
# vim:set ts=4 sw=4 nowrap :