summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSean Reifschneider <jafo00@gmail.com>2014-09-27 15:01:30 -0600
committerSean Reifschneider <jafo00@gmail.com>2014-09-27 15:01:30 -0600
commit1f7e8a68753cd558374200a1d925df2293cea126 (patch)
treec2db1fae42188e840859b1fc48665c979b243fca
parent32ce7ae948080758e1a81b76a053fe220b04bf42 (diff)
parent3635217eb8e6e7ab733ec04b6ce2ef941179d8df (diff)
downloadpython-memcached-1f7e8a68753cd558374200a1d925df2293cea126.tar.gz
Merge pull request #50 from cactus/support-alternate-compression
add support for pluggable compressor/decompressor
-rw-r--r--memcache.py30
1 file changed, 10 insertions, 20 deletions
diff --git a/memcache.py b/memcache.py
index 18950af..30120c9 100644
--- a/memcache.py
+++ b/memcache.py
@@ -72,18 +72,6 @@ def useOldServerHashFunction():
global serverHashFunction
serverHashFunction = binascii.crc32
-try:
- from zlib import compress, decompress
- _supports_compress = True
-except ImportError:
- _supports_compress = False
- # quickly define a decompress just in case we recv compressed data.
-
- def decompress(val):
- raise _Error(
- "Received compressed data but I don't support "
- "compression (import error)")
-
from io import BytesIO
try:
unicode
@@ -178,6 +166,7 @@ class Client(threading.local):
def __init__(self, servers, debug=0, pickleProtocol=0,
pickler=pickle.Pickler, unpickler=pickle.Unpickler,
+ compressor=zlib.compress, decompressor=zlib.decompress,
pload=None, pid=None,
server_max_key_length=None, server_max_value_length=None,
dead_retry=_DEAD_RETRY, socket_timeout=_SOCKET_TIMEOUT,
@@ -240,6 +229,8 @@ class Client(threading.local):
self.pickleProtocol = pickleProtocol
self.pickler = pickler
self.unpickler = unpickler
+ self.compressor = compressor
+ self.decompressor = decompressor
self.persistent_load = pload
self.persistent_id = pid
self.server_max_key_length = server_max_key_length
@@ -667,7 +658,7 @@ class Client(threading.local):
default to 0 == cache forever.
@param min_compress_len: The threshold length to kick in
- auto-compression of the value using the zlib.compress()
+ auto-compression of the value using the compressor
routine. If the value being cached is a string, then the
length of the string is measured, else if the value is an
object, then the length of the pickle result is measured. If
@@ -704,7 +695,7 @@ class Client(threading.local):
default to 0 == cache forever.
@param min_compress_len: The threshold length to kick in
- auto-compression of the value using the zlib.compress()
+ auto-compression of the value using the compressor
routine. If the value being cached is a string, then the
length of the string is measured, else if the value is an
object, then the length of the pickle result is measured. If
@@ -812,7 +803,7 @@ class Client(threading.local):
prefix not applied.
@param min_compress_len: The threshold length to kick in
- auto-compression of the value using the zlib.compress()
+ auto-compression of the value using the compressor
routine. If the value being cached is a string, then the
length of the string is measured, else if the value is an
object, then the length of the pickle result is
@@ -922,11 +913,10 @@ class Client(threading.local):
val = file.getvalue()
lv = len(val)
- # We should try to compress if min_compress_len > 0 and we
- # could import zlib and this string is longer than our min
- # threshold.
+ # We should try to compress if min_compress_len > 0
+ # and this string is longer than our min threshold.
if min_compress_len and lv > min_compress_len:
- comp_val = zlib.compress(val)
+ comp_val = self.compressor(val)
# Only retain the result if the compression result is smaller
# than the original.
if len(comp_val) < lv:
@@ -1182,7 +1172,7 @@ class Client(threading.local):
buf = buf[:-2] # strip \r\n
if flags & Client._FLAG_COMPRESSED:
- buf = zlib.decompress(buf)
+ buf = self.decompressor(buf)
if flags == 0 or flags == Client._FLAG_COMPRESSED:
# Either a bare string or a compressed string now decompressed...