From 65ec512fb94a11243e34f62a7c9d7fe282080802 Mon Sep 17 00:00:00 2001
From: Jason Michalski
Date: Wed, 10 Jan 2007 04:47:12 +0000
Subject: [PATCH] pyTivo - plugins/music/music.py, plugins/video/video.py using
 lrucache - plugin.py should no longer fall off the file list

---
 httpserver.py          |   1 -
 lrucache.py            | 213 +++++++++++++++++++++++++++++++++++++++++++++++++
 plugin.py              |  13 ++-
 plugins/music/music.py |  25 +++++-
 plugins/video/video.py |   5 +-
 5 files changed, 250 insertions(+), 7 deletions(-)
 create mode 100644 lrucache.py

diff --git a/httpserver.py b/httpserver.py
index 22b8d17..8f48a3b 100644
--- a/httpserver.py
+++ b/httpserver.py
@@ -52,7 +52,6 @@ class TivoHTTPHandler(BaseHTTPServer.BaseHTTPRequestHandler):
         if query.has_key('Container'):
             #Dispatch to the container plugin
             for name, container in self.server.containers.items():
-                print name, query['Container'][0]
                 if query['Container'][0].startswith(name):
                     plugin = GetPlugin(container['type'])
                     if hasattr(plugin,command):
diff --git a/lrucache.py b/lrucache.py
new file mode 100644
index 0000000..7f8752e
--- /dev/null
+++ b/lrucache.py
@@ -0,0 +1,213 @@
+# lrucache.py -- a simple LRU (Least-Recently-Used) cache class
+
+# Copyright 2004 Evan Prodromou
+# Licensed under the Academic Free License 2.1
+
+# arch-tag: LRU cache main module
+
+"""a simple LRU (Least-Recently-Used) cache module
+
+This module provides very simple LRU (Least-Recently-Used) cache
+functionality.
+
+An *in-memory cache* is useful for storing the results of an
+'expensive' process (one that takes a lot of time or resources) for
+later re-use. Typical examples are accessing data from the filesystem,
+a database, or a network location. If you know you'll need to re-read
+the data again, it can help to keep it in a cache.
+
+You *can* use a Python dictionary as a cache for some purposes.
+However, if the results you're caching are large, or you have a lot of
+possible results, this can be impractical memory-wise.
+
+An *LRU cache*, on the other hand, only keeps _some_ of the results in
+memory, which keeps you from overusing resources. The cache is bounded
+by a maximum size; if you try to add more values to the cache, it will
+automatically discard the values that you haven't read or written to
+in the longest time. In other words, the least-recently-used items are
+discarded. [1]_
+
+.. [1]: 'Discarded' here means 'removed from the cache'.
+
+"""
+
+from __future__ import generators
+import time
+from heapq import heappush, heappop, heapify
+
+__version__ = "0.2"
+__all__ = ['CacheKeyError', 'LRUCache', 'DEFAULT_SIZE']
+__docformat__ = 'reStructuredText en'
+
+DEFAULT_SIZE = 16
+"""Default size of a new LRUCache object, if no 'size' argument is given."""
+
+class CacheKeyError(KeyError):
+    """Error raised when cache requests fail
+
+    When a cache record is accessed which no longer exists (or never did),
+    this error is raised. To avoid it, you may want to check for the existence
+    of a cache record before reading or deleting it."""
+    pass
+
+class LRUCache(object):
+    """Least-Recently-Used (LRU) cache.
+
+    Instances of this class provide a least-recently-used (LRU) cache. They
+    emulate a Python mapping type. You can use an LRU cache more or less like
+    a Python dictionary, with the exception that objects you put into the
+    cache may be discarded before you take them out.
+
+    Some example usage::
+
+        cache = LRUCache(32) # new cache
+        cache['foo'] = get_file_contents('foo') # or whatever
+
+        if 'foo' in cache: # if it's still in cache...
+            # use cached version
+            contents = cache['foo']
+        else:
+            # recalculate
+            contents = get_file_contents('foo')
+            # store in cache for next time
+            cache['foo'] = contents
+
+        print cache.size # Maximum size
+
+        print len(cache) # 0 <= len(cache) <= cache.size
+
+        cache.size = 10 # Auto-shrink on size assignment
+
+        for i in range(50): # note: larger than cache size
+            cache[i] = i
+
+        if 0 not in cache: print 'Zero was discarded.'
+
+        if 42 in cache:
+            del cache[42] # Manual deletion
+
+        for j in cache: # iterate (in LRU order)
+            print j, cache[j] # iterator produces keys, not values
+    """
+
+    class __Node(object):
+        """Record of a cached value. Not for public consumption."""
+
+        def __init__(self, key, obj, timestamp):
+            object.__init__(self)
+            self.key = key
+            self.obj = obj
+            self.atime = timestamp
+            self.mtime = self.atime
+
+        def __cmp__(self, other):
+            return cmp(self.atime, other.atime)
+
+        def __repr__(self):
+            return "<%s %s => %s (%s)>" % \
+                   (self.__class__, self.key, self.obj, \
+                    time.asctime(time.localtime(self.atime)))
+
+    def __init__(self, size=DEFAULT_SIZE):
+        # Check arguments
+        if size <= 0:
+            raise ValueError, size
+        elif type(size) is not type(0):
+            raise TypeError, size
+        object.__init__(self)
+        self.__heap = []
+        self.__dict = {}
+        self.size = size
+        """Maximum size of the cache.
+        If more than 'size' elements are added to the cache,
+        the least-recently-used ones will be discarded."""
+
+    def __len__(self):
+        return len(self.__heap)
+
+    def __contains__(self, key):
+        return self.__dict.has_key(key)
+
+    def __setitem__(self, key, obj):
+        if self.__dict.has_key(key):
+            node = self.__dict[key]
+            node.obj = obj
+            node.atime = time.time()
+            node.mtime = node.atime
+            heapify(self.__heap)
+        else:
+            # size may have been reset, so we loop
+            while len(self.__heap) >= self.size:
+                lru = heappop(self.__heap)
+                del self.__dict[lru.key]
+            node = self.__Node(key, obj, time.time())
+            self.__dict[key] = node
+            heappush(self.__heap, node)
+
+    def __getitem__(self, key):
+        if not self.__dict.has_key(key):
+            raise CacheKeyError(key)
+        else:
+            node = self.__dict[key]
+            node.atime = time.time()
+            heapify(self.__heap)
+            return node.obj
+
+    def __delitem__(self, key):
+        if not self.__dict.has_key(key):
+            raise CacheKeyError(key)
+        else:
+            node = self.__dict[key]
+            del self.__dict[key]
+            self.__heap.remove(node)
+            heapify(self.__heap)
+            return node.obj
+
+    def __iter__(self):
+        copy = self.__heap[:]
+        while len(copy) > 0:
+            node = heappop(copy)
+            yield node.key
+        raise StopIteration
+
+    def __setattr__(self, name, value):
+        object.__setattr__(self, name, value)
+        # automagically shrink heap on resize
+        if name == 'size':
+            while len(self.__heap) > value:
+                lru = heappop(self.__heap)
+                del self.__dict[lru.key]
+
+    def __repr__(self):
+        return "<%s (%d elements)>" % (str(self.__class__), len(self.__heap))
+
+    def mtime(self, key):
+        """Return the last modification time for the cache record with key.
+        May be useful for cache instances where the stored values can get
+        'stale', such as caching file or network resource contents."""
+        if not self.__dict.has_key(key):
+            raise CacheKeyError(key)
+        else:
+            node = self.__dict[key]
+            return node.mtime
+
+if __name__ == "__main__":
+    cache = LRUCache(25)
+    print cache
+    for i in range(50):
+        cache[i] = str(i)
+    print cache
+    if 46 in cache:
+        del cache[46]
+    print cache
+    cache.size = 10
+    print cache
+    cache[46] = '46'
+    print cache
+    print len(cache)
+    for c in cache:
+        print c
+    print cache
+    print cache.mtime(46)
+    for c in cache:
+        print c
diff --git a/plugin.py b/plugin.py
index 5ad9ba1..e1bf9ab 100644
--- a/plugin.py
+++ b/plugin.py
@@ -112,9 +112,18 @@ class Plugin(object):
         if query.has_key('AnchorOffset'):
             index = index + int(query['AnchorOffset'][0])
 
+        #forward count
         if index < index + count:
-            files = files[max(index, 0):index + count ]
+            files = files[index:index + count ]
             return files, totalFiles, index
+        #backwards count
         else:
-            files = files[max(index + count, 0):index]
+            print 'index, count', index, count
+            print index + count
+            #off the start of the list
+            if index + count < 0:
+                print 0 - (index + count)
+                index += 0 - (index + count)
+            print index + count
+            files = files[index + count:index]
             return files, totalFiles, index + count
diff --git a/plugins/music/music.py b/plugins/music/music.py
index ee636f1..d78016d 100644
--- a/plugins/music/music.py
+++ b/plugins/music/music.py
@@ -3,6 +3,7 @@ from Cheetah.Template import Template
 from plugin import Plugin
 from urllib import unquote_plus, quote, unquote
 from xml.sax.saxutils import escape
+from lrucache import LRUCache
 import eyeD3
 
 SCRIPTDIR = os.path.dirname(__file__)
@@ -10,6 +11,9 @@ SCRIPTDIR = os.path.dirname(__file__)
 
 class music(Plugin):
 
     content_type = 'x-container/tivo-music'
+    playable_cache = {}
+    playable_cache = LRUCache(1000)
+    media_data_cache = LRUCache(100)
 
     def QueryContainer(self, handler, query):
@@ -25,15 +29,31 @@ class music(Plugin):
 
         def isdir(file):
             return os.path.isdir(os.path.join(path, file))
 
+        def AudioFileFilter(file):
+            full_path = os.path.join(path, file)
+
+            if full_path in self.playable_cache:
+                return self.playable_cache[full_path]
+            if os.path.isdir(full_path) or eyeD3.isMp3File(full_path):
+                self.playable_cache[full_path] = True
+                return True
+            else:
+                self.playable_cache[full_path] = False
+                return False
+
         def media_data(file):
             dict = {}
             dict['path'] = file
             file = os.path.join(path, file)
+            if file in self.media_data_cache:
+                return self.media_data_cache[file]
+
             if isdir(file) or not eyeD3.isMp3File(file):
+                self.media_data_cache[file] = dict
                 return dict
-            
+
             try:
                 audioFile = eyeD3.Mp3AudioFile(file)
                 dict['Duration'] = audioFile.getPlayTime() * 1000
@@ -50,13 +70,14 @@ class music(Plugin):
             except:
                 pass
 
+            self.media_data_cache[file] = dict
             return dict
 
         handler.send_response(200)
         handler.end_headers()
         t = Template(file=os.path.join(SCRIPTDIR,'templates', 'container.tmpl'))
         t.name = subcname
-        t.files, t.total, t.start = self.get_files(handler, query, lambda f: isdir(f) or eyeD3.isMp3File(os.path.join(path, f)))
+        t.files, t.total, t.start = self.get_files(handler, query, AudioFileFilter)
         t.files = map(media_data, t.files)
         t.isdir = isdir
         t.quote = quote
diff --git a/plugins/video/video.py b/plugins/video/video.py
index d75e654..cac78e6 100644
--- a/plugins/video/video.py
+++ b/plugins/video/video.py
@@ -4,6 +4,7 @@ from plugin import Plugin
 from urllib import unquote_plus, quote, unquote
 from urlparse import urlparse
 from xml.sax.saxutils import escape
+from lrucache import LRUCache
 
 SCRIPTDIR = os.path.dirname(__file__)
@@ -11,7 +12,7 @@ SCRIPTDIR = os.path.dirname(__file__)
 
 class video(Plugin):
 
     content_type = 'x-container/tivo-videos'
-    playable_cache = {}
+    playable_cache = LRUCache(1000)
 
     def SendFile(self, handler, container, name):
@@ -46,7 +47,7 @@ class video(Plugin):
         def VideoFileFilter(file):
             full_path = os.path.join(path, file)
 
-            if self.playable_cache.has_key(full_path):
+            if full_path in self.playable_cache:
                 return self.playable_cache[full_path]
             if os.path.isdir(full_path) or transcode.suported_format(full_path):
                 self.playable_cache[full_path] = True
-- 
2.11.4.GIT
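
Note on usage (not part of the patch above): the pattern both plugins adopt is a
class-level LRUCache, keyed by the file's full path, that is consulted before the
expensive playability probe (eyeD3.isMp3File for music, transcode.suported_format
for video). A minimal sketch of that pattern against the lrucache module added
here follows; probe_file and is_playable are hypothetical names used only for
illustration, not identifiers from pyTivo::

    import os
    from lrucache import LRUCache

    # Hypothetical stand-in for an expensive format check such as
    # eyeD3.isMp3File() or transcode.suported_format().
    def probe_file(path):
        return path.endswith('.mp3')

    playable_cache = LRUCache(1000)   # keeps at most 1000 entries

    def is_playable(path):
        full_path = os.path.abspath(path)
        if full_path in playable_cache:      # cache hit: skip the probe
            return playable_cache[full_path]
        result = os.path.isdir(full_path) or probe_file(full_path)
        playable_cache[full_path] = result   # store; least-recently-used entries
        return result                        # are evicted once the size is hit

Because LRUCache discards its least-recently-used records once it reaches its
size, the cache stays bounded even for containers holding far more than 1000
files, whereas the plain dict that video.py used previously would grow without
limit. For the plugin.py paging change: when a backwards request (count < 0)
would run off the start of the list, for example index = 3 and count = -8 give
index + count = -5, the index is shifted up by 5, so the slice becomes
files[0:8] and the returned anchor index is 0.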