/usr/share/pyshared/larch/lru.py is in python-larch 1.20131130-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file are shown below.
# Copyright 2010 Lars Wirzenius, Richard Braakman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import heapq
import logging


class LRUCache(object):

    '''A least-recently-used cache.

    This class caches objects, based on keys. The cache has a fixed size,
    in number of objects. When a new object is added to the cache, the
    least recently used old object is dropped. Each object is associated
    with a key, and use is defined as retrieval of the object using the key.

    Two hooks are provided: one for when an object is removed at the user's
    request, and one for when it is automatically removed due to cache
    overflow. Either hook is called with the key and object as arguments.

    '''
    def __init__(self, max_size, remove_hook=None, forget_hook=None):
        self.max_size = max_size
        # Together, obj_before and obj_after form a random access
        # double-linked sequence. None used as the sentinel on both ends.
        self.obj_before = dict()
        self.obj_after = dict()
        self.obj_before[None] = None
        self.obj_after[None] = None
        self.ids = dict()   # maps key to object
        self.objs = dict()  # maps object to key
        self.remove_hook = remove_hook
        self.forget_hook = forget_hook
        self.hits = 0
        self.misses = 0

    def log_stats(self):  # pragma: no cover
        logging.debug('LRUCache %s: hits=%s misses=%s' %
                      (self, self.hits, self.misses))

    def __len__(self):
        return len(self.ids)

    def keys(self):
        '''List keys for objects in cache.'''
        return self.ids.keys()

    def add(self, key, obj):
        '''Add new item to cache.'''
        if key in self.ids:
            self.remove(key)

        before = self.obj_before[None]
        self.obj_before[None] = obj
        self.obj_before[obj] = before
        self.obj_after[before] = obj
        self.obj_after[obj] = None

        self.ids[key] = obj
        self.objs[obj] = key

        while len(self.ids) > self.max_size:
            self._forget_oldest()

    def _forget_oldest(self):
        obj = self.obj_after[None]
        key = self.objs[obj]
        self._remove(key)
        if self.forget_hook:
            self.forget_hook(key, obj)

    def _remove(self, key):
        obj = self.ids[key]
        before = self.obj_before[obj]
        after = self.obj_after[obj]
        self.obj_before[after] = before
        self.obj_after[before] = after
        del self.obj_before[obj]
        del self.obj_after[obj]
        del self.ids[key]
        del self.objs[obj]

    def get(self, key):
        '''Retrieve item from cache.

        Return object associated with key, or None.

        '''
        if key in self.ids:
            self.hits += 1
            obj = self.ids[key]
            self.remove(key)
            self.add(key, obj)
            return obj
        else:
            self.misses += 1
            return None

    def remove(self, key):
        '''Remove an item from the cache.

        Return True if item was in cache, False otherwise.

        '''
        if key in self.ids:
            obj = self.ids[key]
            self._remove(key)
            if self.remove_hook:
                self.remove_hook(key, obj)
            return True
        else:
            return False

    def remove_oldest(self):
        '''Remove oldest object.

        Return key and object.

        '''
        obj = self.obj_after[None]
        key = self.objs[obj]
        self.remove(key)
        return key, obj
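For context, the following is a minimal usage sketch of the class above. It is not part of the packaged file: the import path is assumed from the file's location (larch/lru.py), and the keys, values, and hook callbacks are illustrative; only methods defined in the listing (add, get, remove, remove_oldest, keys) are used.

# Illustrative example only -- not part of larch/lru.py. The keys, values
# and hook callbacks are made up; the API calls are the ones defined above.
from larch.lru import LRUCache

evicted = []   # filled by forget_hook when the cache overflows
removed = []   # filled by remove_hook when an entry is removed

cache = LRUCache(2,
                 remove_hook=lambda key, obj: removed.append(key),
                 forget_hook=lambda key, obj: evicted.append(key))

cache.add('a', 'alpha')
cache.add('b', 'beta')
assert cache.get('a') == 'alpha'    # hit: 'a' becomes most recently used
cache.add('c', 'gamma')             # overflow: least recently used 'b' is dropped

assert evicted == ['b']             # forget_hook fired for the overflow victim
assert cache.get('b') is None       # miss: 'b' is gone
assert sorted(cache.keys()) == ['a', 'c']

assert cache.remove('a') is True            # remove_hook fires for 'a'
assert cache.remove_oldest() == ('c', 'gamma')

Two points worth noting from the listing itself: the obj_before/obj_after dictionaries form a doubly linked sequence keyed by the cached objects, with None as the sentinel at both ends, so adds, lookups and evictions are all O(1) dictionary operations; and get() refreshes recency by calling remove() and then add(), so remove_hook also fires on every cache hit and on remove_oldest(), while only overflow evictions go through forget_hook.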