diff options
author | Juan Batiz-Benet <jbenet@cs.stanford.edu> | 2011-07-10 05:52:52 -0700 |
---|---|---|
committer | Juan Batiz-Benet <jbenet@cs.stanford.edu> | 2011-07-10 05:56:02 -0700 |
commit | 9525ec85063786716ab61a57dd69f556586792d3 (patch) | |
tree | 65d71ac288b6326e5d406b11fe0c3b527756049b | |
parent | a96e83d7fb11c86349218eb2c22200d639665929 (diff) | |
download | pylru-9525ec85063786716ab61a57dd69f556586792d3.tar.gz |
Added keys, values, and items to allow iteration
-rw-r--r-- | README.txt | 20 | ||||
-rw-r--r-- | pylru.py | 17 | ||||
-rw-r--r-- | test.py | 22 |
3 files changed, 58 insertions, 1 deletions
@@ -24,6 +24,10 @@ An LRU cache object has a dictionary like interface and can be used in the same value = cache[key] # Lookup a value given its key del cache[key] # Remove a value given its key + cache.keys() # Returns the keys in the cache * + cache.values() # Returns the values in the cache * + cache.items() # Returns the key, value pairs in the cache * + cache.size() # Returns the size of the cache cache.size(x) # Changes the size of the cache. x MUST be greater than # zero. @@ -33,6 +37,8 @@ An LRU cache object has a dictionary like interface and can be used in the same cache.clear() # Remove all elements from the cache. +* These calls have no effect on the cache order. + The lrucache takes an optional callback function as a second argument. Since the cache has a fixed size, some operations, such as an insertion, may cause a key/value pair to be ejected. If the optional callback function is given, it will be called when this occurs. For example:: import pylru @@ -91,3 +97,17 @@ PyLRU also provides a function decorator:: return x*x # Now results of the square function are cached for future lookup. + +To iterate over the cache elements, use .items():: + + import pylru + + cache = pylru.lrucache(size, callback) + + # items() returns a list of key, value pairs without modifying order. + for key, val in cache.items(): + print key, val + + # WARNING: while keys() does not modify order, the cache[key] call does! 
+ for key in cache.keys(): + print key, cache[key] @@ -178,7 +178,23 @@ class lrucache(object): self.mtf(node) self.head = node.next + def items(self): + # return the (key, value) pairs (from most recent to least) + # without modifying the cache order + return zip(self.keys(), self.values()) + + def keys(self): + + # return the keys (from most recent to least) in the cache + # does not modify the cache order + return self.table.keys() + + def values(self): + + # return the values in the cache (from most recent to least) + # does not modify the cache order + return [node.obj for node in self.table.values()] def size(self, size=None): @@ -324,7 +340,6 @@ class lruwrap(object): except KeyError: pass - def sync(self): if self.writeback: for key in self.dirty: @@ -158,7 +158,28 @@ def testDecorator(): assert square(x) == x*x +def testItems(): + a = lrucache(128) + b = simplelrucache(128) + + for i in range(1000): + x = random.randint(0, 512) + y = random.randint(0, 512) + a[x] = y + b[x] = y + + for k, v in a.items(): + assert k in a + assert a[k] == v + assert b[k] == v + + # ensure the order returned in items() is correct. + items = a.items() + for k, v in reversed(items): + a[k] = v + # test the order is returned correctly. + assert items == a.items() if __name__ == '__main__': @@ -172,5 +193,6 @@ if __name__ == '__main__': wraptest2() wraptest3() testDecorator() + testItems() |