summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJay Hutchinson <jlhutch@gmail.com>2011-07-25 01:53:00 -0500
committerJay Hutchinson <jlhutch@gmail.com>2011-07-25 01:53:00 -0500
commitdc9d12ac9a95e653c72132dd0ddde28b6e72cedd (patch)
tree43b460d61e6dcaf9debb765c528a3bd6336b8509
parent5ea4a54bd03f1d7814fa0d24638b7b5260bb8901 (diff)
downloadpylru-dc9d12ac9a95e653c72132dd0ddde28b6e72cedd.tar.gz
Fixed the formatting of comments.
-rw-r--r--pylru.py188
-rw-r--r--test.py20
2 files changed, 105 insertions, 103 deletions
diff --git a/pylru.py b/pylru.py
index bb2a86a..cb98b4a 100644
--- a/pylru.py
+++ b/pylru.py
@@ -1,34 +1,35 @@
-# Cache implementaion with a Least Recently Used (LRU) replacement policy and a
-# basic dictionary interface.
+# Cache implementation with a Least Recently Used (LRU) replacement policy and
+# a basic dictionary interface.
-# Copyright (C) 2006, 2009, 2010, 2011 Jay Hutchinson
+# Copyright (C) 2006, 2009, 2010, 2011 Jay Hutchinson
-# This program is free software; you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation; either version 2 of the License, or (at your option) any later
-# version.
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 2 of the License, or (at your option)
+# any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+# more details.
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-# The cache is implemented using a combination of a python
-# dictionary (hash table) and a circular doubly linked list. Items in the cache are
-# stored in nodes. These nodes make up the linked list. The list is used to
-# efficiently maintain the order that the items have been used in. The front
-# or head of the list contains the most recently used item, the tail of
-# the list contains the least recently used item. When an item is used it
-# can easily (in a constant amount of time) be moved to the front of the list,
-# thus updating its position in the ordering. These nodes are also placed in
-# the hash table under their associated key. The hash table allows efficient
+# The cache is implemented using a combination of a python dictionary (hash
+# table) and a circular doubly linked list. Items in the cache are stored in
+# nodes. These nodes make up the linked list. The list is used to efficiently
+# maintain the order that the items have been used in. The front or head of
+# the list contains the most recently used item, the tail of the list
+# contains the least recently used item. When an item is used it can easily
+# (in a constant amount of time) be moved to the front of the list, thus
+# updating its position in the ordering. These nodes are also placed in the
+# hash table under their associated key. The hash table allows efficient
# lookup of values by key.
# Class for the node objects.
@@ -46,13 +47,13 @@ class lrucache(object):
self.table = {}
# Initialize the doubly linked list with one empty node. This is an
- # invariant. The cache size must always be greater than zero.
- # Each
- # node has a 'prev' and 'next' variable to hold the node that comes before
- # it and after it respectivly. Initially the two variables each point to
- # the head node itself, creating a circular doubly linked list of size one.
- # Then the size() method is used to adjust the list to the desired size.
-
+ # invariant. The cache size must always be greater than zero. Each
+ # node has a 'prev' and 'next' variable to hold the node that comes
+            # before it and after it respectively. Initially the two variables
+ # each point to the head node itself, creating a circular doubly
+ # linked list of size one. Then the size() method is used to adjust
+ # the list to the desired size.
+
self.head = _dlnode()
self.head.next = self.head
self.head.prev = self.head
@@ -89,9 +90,9 @@ class lrucache(object):
# Look up the node
node = self.table[key]
- # Update the list ordering. Move this node so that is directly proceeds
- # the head node. Then set the 'head' variable to it. This makes it the
- # new head of the list.
+        # Update the list ordering. Move this node so that it directly
+        # precedes the head node. Then set the 'head' variable to it. This
+ # makes it the new head of the list.
self.mtf(node)
self.head = node
@@ -116,22 +117,22 @@ class lrucache(object):
return
- # Ok, no value is currently stored under 'key' in the cache. We need to
- # choose a node to place the new item in. There are two cases. If the
- # cache is full some item will have to be pushed out of the cache. We
- # want to choose the node with the least recently used item. This is the
- # node at the tail of the list. If the cache is not full we want to choose
- # a node that is empty. Because of the way the list is managed, the empty
- # nodes are always together at the tail end of the list. Thus, in either
- # case, by chooseing the node at the tail of the list our conditions are
- # satisfied.
-
- # Since the list is circular, the tail node directly preceeds the 'head'
- # node.
+ # Ok, no value is currently stored under 'key' in the cache. We need
+ # to choose a node to place the new item in. There are two cases. If
+ # the cache is full some item will have to be pushed out of the
+ # cache. We want to choose the node with the least recently used
+ # item. This is the node at the tail of the list. If the cache is not
+ # full we want to choose a node that is empty. Because of the way the
+ # list is managed, the empty nodes are always together at the tail
+        # end of the list. Thus, in either case, by choosing the node at the
+ # tail of the list our conditions are satisfied.
+
+        # Since the list is circular, the tail node directly precedes the
+ # 'head' node.
node = self.head.prev
- # If the node already contains something we need to remove the old key from
- # the dictionary.
+ # If the node already contains something we need to remove the old
+ # key from the dictionary.
if not node.empty:
if self.callback is not None:
self.callback(node.key, node.value)
@@ -145,10 +146,10 @@ class lrucache(object):
# Add the node to the dictionary under the new key.
self.table[key] = node
- # We need to move the node to the head of the list. The node is the tail
- # node, so it directly preceeds the head node due to the list being
- # circular. Therefore, the ordering is already correct, we just need to
- # adjust the 'head' variable.
+ # We need to move the node to the head of the list. The node is the
+        # tail node, so it directly precedes the head node due to the list
+ # being circular. Therefore, the ordering is already correct, we just
+ # need to adjust the 'head' variable.
self.head = node
@@ -159,16 +160,17 @@ class lrucache(object):
del self.table[key]
node.empty = True
-
+
# Not strictly necessary.
node.key = None
node.value = None
- # Because this node is now empty we want to reuse it before any non-empty
- # node. To do that we want to move it to the tail of the list. We move it
- # so that it directly preceeds the 'head' node. This makes it the tail
- # node. The 'head' is then adjusted. This adjustment ensures correctness
- # even for the case where the 'node' is the 'head' node.
+ # Because this node is now empty we want to reuse it before any
+ # non-empty node. To do that we want to move it to the tail of the
+        # list. We move it so that it directly precedes the 'head' node. This
+ # makes it the tail node. The 'head' is then adjusted. This
+ # adjustment ensures correctness even for the case where the 'node'
+ # is the 'head' node.
self.mtf(node)
self.head = node.next
@@ -198,9 +200,9 @@ class lrucache(object):
def values(self):
- # Return an iterator that returns the values in the cache in order from
- # the most recently to least recently used. Does not modify the cache
- # order.
+ # Return an iterator that returns the values in the cache in order
+ # from the most recently to least recently used. Does not modify the
+ # cache order.
for node in self.dli():
yield node.value
@@ -215,8 +217,8 @@ class lrucache(object):
return self.listSize
- # Increases the size of the cache by inserting n empty nodes at the tail of
- # the list.
+ # Increases the size of the cache by inserting n empty nodes at the tail
+ # of the list.
def addTailNode(self, n):
for i in range(n):
node = _dlnode()
@@ -253,10 +255,10 @@ class lrucache(object):
self.listSize -= n
- # This method adjusts the ordering of the doubly linked list so that 'node'
- # directly precedes the 'head' node. Because of the order of operations, if
- # 'node' already directly precedes the 'head' node or if 'node' is the
- # 'head' node the order of the list will be unchanged.
+ # This method adjusts the ordering of the doubly linked list so that
+ # 'node' directly precedes the 'head' node. Because of the order of
+ # operations, if 'node' already directly precedes the 'head' node or if
+ # 'node' is the 'head' node the order of the list will be unchanged.
def mtf(self, node):
node.prev.next = node.next
node.next.prev = node.prev
@@ -267,8 +269,8 @@ class lrucache(object):
node.next.prev = node
node.prev.next = node
- # This method returns an iterator that iterates over the non-empty nodes in
- # the doubly linked list in order from the most recently to the least
+ # This method returns an iterator that iterates over the non-empty nodes
+ # in the doubly linked list in order from the most recently to the least
# recently used.
def dli(self):
node = self.head
@@ -277,7 +279,7 @@ class lrucache(object):
node = node.next
-
+
class WriteThroughCacheManager(object):
def __init__(self, store, size):
@@ -307,16 +309,16 @@ class WriteThroughCacheManager(object):
return False
def __getitem__(self, key):
- # First we try the cache. If successful we just return the value. If not
- # we catch KeyError and ignore it since that just means the key was not
- # in the cache.
+ # First we try the cache. If successful we just return the value. If
+ # not we catch KeyError and ignore it since that just means the key
+ # was not in the cache.
try:
return self.cache[key]
except KeyError:
pass
- # It wasn't in the cache. Look it up in the store, add the entry to the
- # cache, and return the value.
+ # It wasn't in the cache. Look it up in the store, add the entry to
+ # the cache, and return the value.
value = self.store[key]
self.cache[key] = value
return value
@@ -327,8 +329,8 @@ class WriteThroughCacheManager(object):
self.store[key] = value
def __delitem__(self, key):
- # Write-through behavior cache and store should be consistent.
- # Delete it from the store.
+ # Write-through behavior cache and store should be consistent. Delete
+ # it from the store.
del self.store[key]
try:
# Ok, delete from the store was successful. It might also be in
@@ -349,25 +351,25 @@ class WriteThroughCacheManager(object):
def items(self):
return self.store.items()
-
-
+
+
class WriteBackCacheManager(object):
def __init__(self, store, size):
self.store = store
-
+
# Create a set to hold the dirty keys.
self.dirty = set()
# Define a callback function to be called by the cache when a
# key/value pair is about to be ejected. This callback will check to
- # see if the key is in the dirty set. If so, then it will update
- # the store object and remove the key from the dirty set.
+ # see if the key is in the dirty set. If so, then it will update the
+ # store object and remove the key from the dirty set.
def callback(key, value):
if key in self.dirty:
self.store[key] = value
self.dirty.remove(key)
-
+
# Create a cache and give it the callback function.
self.cache = lrucache(size, callback)
@@ -379,7 +381,7 @@ class WriteBackCacheManager(object):
self.cache.clear()
self.dirty.clear()
self.store.clear()
-
+
def __contains__(self, key):
# Check the cache first, since if it is there we can return quickly.
if key in self.cache:
@@ -392,16 +394,16 @@ class WriteBackCacheManager(object):
return False
def __getitem__(self, key):
- # First we try the cache. If successful we just return the value. If not
- # we catch KeyError and ignore it since that just means the key was not
- # in the cache.
+ # First we try the cache. If successful we just return the value. If
+ # not we catch KeyError and ignore it since that just means the key
+ # was not in the cache.
try:
return self.cache[key]
except KeyError:
pass
- # It wasn't in the cache. Look it up in the store, add the entry to the
- # cache, and return the value.
+ # It wasn't in the cache. Look it up in the store, add the entry to
+ # the cache, and return the value.
value = self.store[key]
self.cache[key] = value
return value
@@ -410,9 +412,9 @@ class WriteBackCacheManager(object):
# Add the key/value pair to the cache.
self.cache[key] = value
self.dirty.add(key)
-
+
def __delitem__(self, key):
-
+
found = False
try:
del self.cache[key]
@@ -430,7 +432,7 @@ class WriteBackCacheManager(object):
if not found: # If not found in cache or store, raise error.
raise KeyError
-
+
def __iter__(self):
return self.keys()
@@ -438,7 +440,7 @@ class WriteBackCacheManager(object):
for key in self.store.keys():
if key not in self.dirty:
yield key
-
+
for key in self.dirty:
yield key
@@ -452,7 +454,7 @@ class WriteBackCacheManager(object):
for key, value in self.store.items():
if key not in self.dirty:
yield (key, value)
-
+
for key in self.dirty:
value = self.cache.peek(key)
yield (key, value)
@@ -487,9 +489,9 @@ def lruwrap(store, size, writeback=False):
return WriteBackCacheManager(store, size)
else:
return WriteThroughCacheManager(store, size)
-
-
-
+
+
+
class lrudecorator(object):
def __init__(self, size):
diff --git a/test.py b/test.py
index 1dee0f4..258bf3f 100644
--- a/test.py
+++ b/test.py
@@ -122,13 +122,13 @@ def wraptest():
assert p == x.store
for key, value in x.cache.items():
assert x.store[key] == value
-
+
tmp = list(x.items())
tmp.sort()
-
+
tmp2 = list(p.items())
tmp2.sort()
-
+
assert tmp == tmp2
p = dict()
@@ -145,20 +145,20 @@ def wraptest2():
for key, value in x.store.items():
if key not in x.dirty:
assert p[key] == value
-
+
for key in x.dirty:
assert x.cache.peek(key) == p[key]
-
+
for key, value in x.cache.items():
if key not in x.dirty:
assert x.store[key] == p[key] == value
-
+
tmp = list(x.items())
tmp.sort()
-
+
tmp2 = list(p.items())
tmp2.sort()
-
+
assert tmp == tmp2
p = dict()
@@ -176,10 +176,10 @@ def wraptest3():
for key, value in x.store.items():
if key not in x.dirty:
assert p[key] == value
-
+
for key in x.dirty:
assert x.cache.peek(key) == p[key]
-
+
for key, value in x.cache.items():
if key not in x.dirty:
assert x.store[key] == p[key] == value