author    Andy McCurdy <andy@andymccurdy.com>    2019-05-28 15:42:47 -0700
committer Andy McCurdy <andy@andymccurdy.com>    2019-05-28 15:42:47 -0700
commit    0f26aad99f55d2ead9936ac57a97693c0b501fcf (patch)
tree      9a574385168b33000c72b29618fd97a02ecf5f66
parent    b9f49c38f11965727d32f41e4a5b802f72fb60f6 (diff)
download  redis-py-remove_token.tar.gz
remove Token class in favor of bytestring (remove_token)
The Token class was needed when supporting Python 2.6. Now that we've dropped support for 2.6, we don't need it anymore. Fixes #1066
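For illustration only (not part of the commit), the pattern of the change: literal protocol tokens that used to be wrapped in Token are now plain bytestrings, which Encoder.encode() returns unchanged, so they are never re-encoded when the client is configured with a non-default encoding. A minimal sketch, assuming the Encoder constructor takes encoding, encoding_errors and decode_responses as in redis-py 3.x:

    from redis.connection import Encoder

    enc = Encoder(encoding='utf-16', encoding_errors='strict',
                  decode_responses=False)

    # Bytestrings pass straight through, so protocol tokens such as b'TYPE'
    # or b'LIMIT' always reach the server as plain ASCII.
    assert enc.encode(b'TYPE') == b'TYPE'

    # Ordinary str values are still encoded with the configured codec.
    assert enc.encode('value') == 'value'.encode('utf-16')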
-rwxr-xr-x  redis/client.py         160
-rwxr-xr-x  redis/connection.py      53
-rw-r--r--  tests/test_encoding.py    2
3 files changed, 89 insertions, 126 deletions
diff --git a/redis/client.py b/redis/client.py
index 15ec190..6ae868b 100755
--- a/redis/client.py
+++ b/redis/client.py
@@ -11,7 +11,7 @@ import hashlib
from redis._compat import (basestring, imap, iteritems, iterkeys,
itervalues, izip, long, nativestr, safe_unicode)
from redis.connection import (ConnectionPool, UnixDomainSocketConnection,
- SSLConnection, Token)
+ SSLConnection)
from redis.lock import Lock
from redis.exceptions import (
ConnectionError,
@@ -834,20 +834,18 @@ class Redis(object):
if str(_type).lower() not in client_types:
raise DataError("CLIENT KILL type must be one of %r" % (
client_types,))
- args.extend((Token.get_token('TYPE'), _type))
+ args.extend((b'TYPE', _type))
if skipme is not None:
if not isinstance(skipme, bool):
raise DataError("CLIENT KILL skipme must be a bool")
if skipme:
- args.extend((Token.get_token('SKIPME'),
- Token.get_token('YES')))
+ args.extend((b'SKIPME', b'YES'))
else:
- args.extend((Token.get_token('SKIPME'),
- Token.get_token('NO')))
+ args.extend((b'SKIPME', b'NO'))
if _id is not None:
- args.extend((Token.get_token('ID'), _id))
+ args.extend((b'ID', _id))
if addr is not None:
- args.extend((Token.get_token('ADDR'), addr))
+ args.extend((b'ADDR', addr))
if not args:
raise DataError("CLIENT KILL <filter> <value> ... ... <filter> "
"<value> must specify at least one filter")
@@ -866,8 +864,7 @@ class Redis(object):
if str(_type).lower() not in client_types:
raise DataError("CLIENT LIST _type must be one of %r" % (
client_types,))
- return self.execute_command('CLIENT LIST', Token.get_token('TYPE'),
- _type)
+ return self.execute_command('CLIENT LIST', b'TYPE', _type)
return self.execute_command('CLIENT LIST')
def client_getname(self):
@@ -891,7 +888,7 @@ class Redis(object):
"""
args = ['CLIENT UNBLOCK', int(client_id)]
if error:
- args.append(Token.get_token('ERROR'))
+ args.append(b'ERROR')
return self.execute_command(*args)
def client_pause(self, timeout):
@@ -940,7 +937,7 @@ class Redis(object):
"""
args = []
if asynchronous:
- args.append(Token.get_token('ASYNC'))
+ args.append(b'ASYNC')
return self.execute_command('FLUSHALL', *args)
def flushdb(self, asynchronous=False):
@@ -952,7 +949,7 @@ class Redis(object):
"""
args = []
if asynchronous:
- args.append(Token.get_token('ASYNC'))
+ args.append(b'ASYNC')
return self.execute_command('FLUSHDB', *args)
def swapdb(self, first, second):
@@ -1005,13 +1002,13 @@ class Redis(object):
raise DataError('MIGRATE requires at least one key')
pieces = []
if copy:
- pieces.append(Token.get_token('COPY'))
+ pieces.append(b'COPY')
if replace:
- pieces.append(Token.get_token('REPLACE'))
+ pieces.append(b'REPLACE')
if auth:
- pieces.append(Token.get_token('AUTH'))
+ pieces.append(b'AUTH')
pieces.append(auth)
- pieces.append(Token.get_token('KEYS'))
+ pieces.append(b'KEYS')
pieces.extend(keys)
return self.execute_command('MIGRATE', host, port, '', destination_db,
timeout, *pieces)
@@ -1031,7 +1028,7 @@ class Redis(object):
"""
args = []
if isinstance(samples, int):
- args.extend([Token.get_token('SAMPLES'), samples])
+ args.extend([b'SAMPLES', samples])
return self.execute_command('MEMORY USAGE', key, *args)
def memory_purge(self):
@@ -1115,8 +1112,7 @@ class Redis(object):
instance is promoted to a master instead.
"""
if host is None and port is None:
- return self.execute_command('SLAVEOF', Token.get_token('NO'),
- Token.get_token('ONE'))
+ return self.execute_command('SLAVEOF', b'NO', b'ONE')
return self.execute_command('SLAVEOF', host, port)
def slowlog_get(self, num=None):
@@ -1707,10 +1703,10 @@ class Redis(object):
pieces = [name]
if by is not None:
- pieces.append(Token.get_token('BY'))
+ pieces.append(b'BY')
pieces.append(by)
if start is not None and num is not None:
- pieces.append(Token.get_token('LIMIT'))
+ pieces.append(b'LIMIT')
pieces.append(start)
pieces.append(num)
if get is not None:
@@ -1719,18 +1715,18 @@ class Redis(object):
# values. We can't just iterate blindly because strings are
# iterable.
if isinstance(get, (bytes, basestring)):
- pieces.append(Token.get_token('GET'))
+ pieces.append(b'GET')
pieces.append(get)
else:
for g in get:
- pieces.append(Token.get_token('GET'))
+ pieces.append(b'GET')
pieces.append(g)
if desc:
- pieces.append(Token.get_token('DESC'))
+ pieces.append(b'DESC')
if alpha:
- pieces.append(Token.get_token('ALPHA'))
+ pieces.append(b'ALPHA')
if store is not None:
- pieces.append(Token.get_token('STORE'))
+ pieces.append(b'STORE')
pieces.append(store)
if groups:
@@ -1754,9 +1750,9 @@ class Redis(object):
"""
pieces = [cursor]
if match is not None:
- pieces.extend([Token.get_token('MATCH'), match])
+ pieces.extend([b'MATCH', match])
if count is not None:
- pieces.extend([Token.get_token('COUNT'), count])
+ pieces.extend([b'COUNT', count])
return self.execute_command('SCAN', *pieces)
def scan_iter(self, match=None, count=None):
@@ -1785,9 +1781,9 @@ class Redis(object):
"""
pieces = [name, cursor]
if match is not None:
- pieces.extend([Token.get_token('MATCH'), match])
+ pieces.extend([b'MATCH', match])
if count is not None:
- pieces.extend([Token.get_token('COUNT'), count])
+ pieces.extend([b'COUNT', count])
return self.execute_command('SSCAN', *pieces)
def sscan_iter(self, name, match=None, count=None):
@@ -1817,9 +1813,9 @@ class Redis(object):
"""
pieces = [name, cursor]
if match is not None:
- pieces.extend([Token.get_token('MATCH'), match])
+ pieces.extend([b'MATCH', match])
if count is not None:
- pieces.extend([Token.get_token('COUNT'), count])
+ pieces.extend([b'COUNT', count])
return self.execute_command('HSCAN', *pieces)
def hscan_iter(self, name, match=None, count=None):
@@ -1852,9 +1848,9 @@ class Redis(object):
"""
pieces = [name, cursor]
if match is not None:
- pieces.extend([Token.get_token('MATCH'), match])
+ pieces.extend([b'MATCH', match])
if count is not None:
- pieces.extend([Token.get_token('COUNT'), count])
+ pieces.extend([b'COUNT', count])
options = {'score_cast_func': score_cast_func}
return self.execute_command('ZSCAN', *pieces, **options)
@@ -1982,9 +1978,9 @@ class Redis(object):
if maxlen is not None:
if not isinstance(maxlen, (int, long)) or maxlen < 1:
raise DataError('XADD maxlen must be a positive integer')
- pieces.append(Token.get_token('MAXLEN'))
+ pieces.append(b'MAXLEN')
if approximate:
- pieces.append(Token.get_token('~'))
+ pieces.append(b'~')
pieces.append(str(maxlen))
pieces.append(id)
if not isinstance(fields, dict) or len(fields) == 0:
@@ -2032,24 +2028,24 @@ class Redis(object):
if idle is not None:
if not isinstance(idle, (int, long)):
raise DataError("XCLAIM idle must be an integer")
- pieces.extend((Token.get_token('IDLE'), str(idle)))
+ pieces.extend((b'IDLE', str(idle)))
if time is not None:
if not isinstance(time, (int, long)):
raise DataError("XCLAIM time must be an integer")
- pieces.extend((Token.get_token('TIME'), str(time)))
+ pieces.extend((b'TIME', str(time)))
if retrycount is not None:
if not isinstance(retrycount, (int, long)):
raise DataError("XCLAIM retrycount must be an integer")
- pieces.extend((Token.get_token('RETRYCOUNT'), str(retrycount)))
+ pieces.extend((b'RETRYCOUNT', str(retrycount)))
if force:
if not isinstance(force, bool):
raise DataError("XCLAIM force must be a boolean")
- pieces.append(Token.get_token('FORCE'))
+ pieces.append(b'FORCE')
if justid:
if not isinstance(justid, bool):
raise DataError("XCLAIM justid must be a boolean")
- pieces.append(Token.get_token('JUSTID'))
+ pieces.append(b'JUSTID')
kwargs['parse_justid'] = True
return self.execute_command('XCLAIM', *pieces, **kwargs)
@@ -2070,7 +2066,7 @@ class Redis(object):
"""
pieces = ['XGROUP CREATE', name, groupname, id]
if mkstream:
- pieces.append(Token.get_token('MKSTREAM'))
+ pieces.append(b'MKSTREAM')
return self.execute_command(*pieces)
def xgroup_delconsumer(self, name, groupname, consumername):
@@ -2180,7 +2176,7 @@ class Redis(object):
if count is not None:
if not isinstance(count, (int, long)) or count < 1:
raise DataError('XRANGE count must be a positive integer')
- pieces.append(Token.get_token('COUNT'))
+ pieces.append(b'COUNT')
pieces.append(str(count))
return self.execute_command('XRANGE', name, *pieces)
@@ -2198,16 +2194,16 @@ class Redis(object):
if block is not None:
if not isinstance(block, (int, long)) or block < 0:
raise DataError('XREAD block must be a non-negative integer')
- pieces.append(Token.get_token('BLOCK'))
+ pieces.append(b'BLOCK')
pieces.append(str(block))
if count is not None:
if not isinstance(count, (int, long)) or count < 1:
raise DataError('XREAD count must be a positive integer')
- pieces.append(Token.get_token('COUNT'))
+ pieces.append(b'COUNT')
pieces.append(str(count))
if not isinstance(streams, dict) or len(streams) == 0:
raise DataError('XREAD streams must be a non empty dict')
- pieces.append(Token.get_token('STREAMS'))
+ pieces.append(b'STREAMS')
keys, values = izip(*iteritems(streams))
pieces.extend(keys)
pieces.extend(values)
@@ -2226,23 +2222,23 @@ class Redis(object):
block: number of milliseconds to wait, if nothing already present.
noack: do not add messages to the PEL
"""
- pieces = [Token.get_token('GROUP'), groupname, consumername]
+ pieces = [b'GROUP', groupname, consumername]
if count is not None:
if not isinstance(count, (int, long)) or count < 1:
raise DataError("XREADGROUP count must be a positive integer")
- pieces.append(Token.get_token("COUNT"))
+ pieces.append(b'COUNT')
pieces.append(str(count))
if block is not None:
if not isinstance(block, (int, long)) or block < 0:
raise DataError("XREADGROUP block must be a non-negative "
"integer")
- pieces.append(Token.get_token("BLOCK"))
+ pieces.append(b'BLOCK')
pieces.append(str(block))
if noack:
- pieces.append(Token.get_token("NOACK"))
+ pieces.append(b'NOACK')
if not isinstance(streams, dict) or len(streams) == 0:
raise DataError('XREADGROUP streams must be a non empty dict')
- pieces.append(Token.get_token('STREAMS'))
+ pieces.append(b'STREAMS')
pieces.extend(streams.keys())
pieces.extend(streams.values())
return self.execute_command('XREADGROUP', *pieces)
@@ -2262,7 +2258,7 @@ class Redis(object):
if count is not None:
if not isinstance(count, (int, long)) or count < 1:
raise DataError('XREVRANGE count must be a positive integer')
- pieces.append(Token.get_token('COUNT'))
+ pieces.append(b'COUNT')
pieces.append(str(count))
return self.execute_command('XREVRANGE', name, *pieces)
@@ -2274,9 +2270,9 @@ class Redis(object):
maxlen: truncate old stream messages beyond this size
approximate: actual stream length may be slightly more than maxlen
"""
- pieces = [Token.get_token('MAXLEN')]
+ pieces = [b'MAXLEN']
if approximate:
- pieces.append(Token.get_token('~'))
+ pieces.append(b'~')
pieces.append(maxlen)
return self.execute_command('XTRIM', name, *pieces)
@@ -2315,13 +2311,13 @@ class Redis(object):
pieces = []
options = {}
if nx:
- pieces.append(Token.get_token('NX'))
+ pieces.append(b'NX')
if xx:
- pieces.append(Token.get_token('XX'))
+ pieces.append(b'XX')
if ch:
- pieces.append(Token.get_token('CH'))
+ pieces.append(b'CH')
if incr:
- pieces.append(Token.get_token('INCR'))
+ pieces.append(b'INCR')
options['as_score'] = True
for pair in iteritems(mapping):
pieces.append(pair[1])
@@ -2434,7 +2430,7 @@ class Redis(object):
score_cast_func)
pieces = ['ZRANGE', name, start, end]
if withscores:
- pieces.append(Token.get_token('WITHSCORES'))
+ pieces.append(b'WITHSCORES')
options = {
'withscores': withscores,
'score_cast_func': score_cast_func
@@ -2454,7 +2450,7 @@ class Redis(object):
raise DataError("``start`` and ``num`` must both be specified")
pieces = ['ZRANGEBYLEX', name, min, max]
if start is not None and num is not None:
- pieces.extend([Token.get_token('LIMIT'), start, num])
+ pieces.extend([b'LIMIT', start, num])
return self.execute_command(*pieces)
def zrevrangebylex(self, name, max, min, start=None, num=None):
@@ -2470,7 +2466,7 @@ class Redis(object):
raise DataError("``start`` and ``num`` must both be specified")
pieces = ['ZREVRANGEBYLEX', name, max, min]
if start is not None and num is not None:
- pieces.extend([Token.get_token('LIMIT'), start, num])
+ pieces.extend([b'LIMIT', start, num])
return self.execute_command(*pieces)
def zrangebyscore(self, name, min, max, start=None, num=None,
@@ -2492,9 +2488,9 @@ class Redis(object):
raise DataError("``start`` and ``num`` must both be specified")
pieces = ['ZRANGEBYSCORE', name, min, max]
if start is not None and num is not None:
- pieces.extend([Token.get_token('LIMIT'), start, num])
+ pieces.extend([b'LIMIT', start, num])
if withscores:
- pieces.append(Token.get_token('WITHSCORES'))
+ pieces.append(b'WITHSCORES')
options = {
'withscores': withscores,
'score_cast_func': score_cast_func
@@ -2552,7 +2548,7 @@ class Redis(object):
"""
pieces = ['ZREVRANGE', name, start, end]
if withscores:
- pieces.append(Token.get_token('WITHSCORES'))
+ pieces.append(b'WITHSCORES')
options = {
'withscores': withscores,
'score_cast_func': score_cast_func
@@ -2578,9 +2574,9 @@ class Redis(object):
raise DataError("``start`` and ``num`` must both be specified")
pieces = ['ZREVRANGEBYSCORE', name, max, min]
if start is not None and num is not None:
- pieces.extend([Token.get_token('LIMIT'), start, num])
+ pieces.extend([b'LIMIT', start, num])
if withscores:
- pieces.append(Token.get_token('WITHSCORES'))
+ pieces.append(b'WITHSCORES')
options = {
'withscores': withscores,
'score_cast_func': score_cast_func
@@ -2614,10 +2610,10 @@ class Redis(object):
weights = None
pieces.extend(keys)
if weights:
- pieces.append(Token.get_token('WEIGHTS'))
+ pieces.append(b'WEIGHTS')
pieces.extend(weights)
if aggregate:
- pieces.append(Token.get_token('AGGREGATE'))
+ pieces.append(b'AGGREGATE')
pieces.append(aggregate)
return self.execute_command(*pieces)
@@ -2900,27 +2896,33 @@ class Redis(object):
else:
pieces.append('m',)
- for token in ('withdist', 'withcoord', 'withhash'):
- if kwargs[token]:
- pieces.append(Token(token.upper()))
+ for arg_name, byte_repr in (
+ ('withdist', b'WITHDIST'),
+ ('withcoord', b'WITHCOORD'),
+ ('withhash', b'WITHHASH')):
+ if kwargs[arg_name]:
+ pieces.append(byte_repr)
if kwargs['count']:
- pieces.extend([Token('COUNT'), kwargs['count']])
+ pieces.extend([b'COUNT', kwargs['count']])
- if kwargs['sort'] and kwargs['sort'] not in ('ASC', 'DESC'):
- raise DataError("GEORADIUS invalid sort")
- elif kwargs['sort']:
- pieces.append(Token(kwargs['sort']))
+ if kwargs['sort']:
+ if kwargs['sort'] == 'ASC':
+ pieces.append(b'ASC')
+ elif kwargs['sort'] == 'DESC':
+ pieces.append(b'DESC')
+ else:
+ raise DataError("GEORADIUS invalid sort")
if kwargs['store'] and kwargs['store_dist']:
raise DataError("GEORADIUS store and store_dist cant be set"
" together")
if kwargs['store']:
- pieces.extend([Token('STORE'), kwargs['store']])
+ pieces.extend([b'STORE', kwargs['store']])
if kwargs['store_dist']:
- pieces.extend([Token('STOREDIST'), kwargs['store_dist']])
+ pieces.extend([b'STOREDIST', kwargs['store_dist']])
return self.execute_command(command, *pieces, **kwargs)
diff --git a/redis/connection.py b/redis/connection.py
index eff88b0..a42b33f 100755
--- a/redis/connection.py
+++ b/redis/connection.py
@@ -60,41 +60,6 @@ SYM_EMPTY = b''
SERVER_CLOSED_CONNECTION_ERROR = "Connection closed by server."
-class Token(object):
- """
- Literal strings in Redis commands, such as the command names and any
- hard-coded arguments are wrapped in this class so we know not to apply
- and encoding rules on them.
- """
-
- _cache = {}
-
- @classmethod
- def get_token(cls, value):
- "Gets a cached token object or creates a new one if not already cached"
-
- # Use try/except because after running for a short time most tokens
- # should already be cached
- try:
- return cls._cache[value]
- except KeyError:
- token = Token(value)
- cls._cache[value] = token
- return token
-
- def __init__(self, value):
- if isinstance(value, Token):
- value = value.value
- self.value = value
- self.encoded_value = value.encode()
-
- def __repr__(self):
- return self.value
-
- def __str__(self):
- return self.value
-
-
class Encoder(object):
"Encode strings to bytes and decode bytes to strings"
@@ -105,9 +70,7 @@ class Encoder(object):
def encode(self, value):
"Return a bytestring representation of the value"
- if isinstance(value, Token):
- return value.encoded_value
- elif isinstance(value, bytes):
+ if isinstance(value, bytes):
return value
elif isinstance(value, bool):
# special case bool since it is a subclass of int
@@ -647,14 +610,12 @@ class Connection(object):
# the client might have included 1 or more literal arguments in
# the command name, e.g., 'CONFIG GET'. The Redis server expects these
# arguments to be sent separately, so split the first argument
- # manually. All of these arguements get wrapped in the Token class
- # to prevent them from being encoded.
- command = args[0]
- if ' ' in command:
- args = tuple(Token.get_token(s)
- for s in command.split()) + args[1:]
- else:
- args = (Token.get_token(command),) + args[1:]
+ # manually. These arguments should be bytestrings so that they are
+ # not encoded.
+ if isinstance(args[0], unicode):
+ args = tuple(args[0].encode().split()) + args[1:]
+ elif b' ' in args[0]:
+ args = tuple(args[0].split()) + args[1:]
buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF))
diff --git a/tests/test_encoding.py b/tests/test_encoding.py
index 283fc6e..18219a6 100644
--- a/tests/test_encoding.py
+++ b/tests/test_encoding.py
@@ -25,7 +25,7 @@ class TestEncoding(object):
assert r.lrange('a', 0, -1) == result
-class TestCommandsAndTokensArentEncoded(object):
+class TestCommandsAreNotEncoded(object):
@pytest.fixture()
def r(self, request):
return _get_client(redis.Redis, request=request, encoding='utf-16')
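As context for the renamed test above: with Token gone, multi-word command names are split into bytestrings inside pack_command, so command names and literal arguments bypass the client encoding entirely. A minimal sketch of the behavior this test covers, assuming a local Redis server on the default port:

    import redis

    # Even with an exotic client encoding, the command name ('SET' -> b'SET')
    # is sent as raw bytes; only str keys and values go through the codec.
    r = redis.Redis(encoding='utf-16')
    assert r.set('foo', 'bar') is True
    assert r.get('foo') == 'bar'.encode('utf-16')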