summaryrefslogtreecommitdiff
path: root/kafka/producer/kafka.py
diff options
context:
space:
mode:
authorJeff Widman <jeff@jeffwidman.com>2018-11-13 11:57:45 -0800
committerJeff Widman <jeff@jeffwidman.com>2018-11-18 00:21:18 -0800
commit1d443638e22c2d360086b8d7cee8b5d930741d12 (patch)
tree9e60f53f4a791b306acd6f1a2223557ead1ed750 /kafka/producer/kafka.py
parentf3105a434f3bd2fb3f8899e4861e187e786b03da (diff)
downloadkafka-python-1d443638e22c2d360086b8d7cee8b5d930741d12.tar.gz
Be explicit with tuples for %s formatting
Fix #1633
Diffstat (limited to 'kafka/producer/kafka.py')
-rw-r--r--kafka/producer/kafka.py10
1 file changed, 5 insertions, 5 deletions
diff --git a/kafka/producer/kafka.py b/kafka/producer/kafka.py
index 45bb058..685c3f9 100644
--- a/kafka/producer/kafka.py
+++ b/kafka/producer/kafka.py
@@ -340,11 +340,11 @@ class KafkaProducer(object):
self.config[key] = configs.pop(key)
# Only check for extra config keys in top-level class
- assert not configs, 'Unrecognized configs: %s' % configs
+ assert not configs, 'Unrecognized configs: %s' % (configs,)
if self.config['client_id'] is None:
self.config['client_id'] = 'kafka-python-producer-%s' % \
- PRODUCER_CLIENT_ID_SEQUENCE.increment()
+ (PRODUCER_CLIENT_ID_SEQUENCE.increment(),)
if self.config['acks'] == 'all':
self.config['acks'] = -1
@@ -633,12 +633,12 @@ class KafkaProducer(object):
raise Errors.MessageSizeTooLargeError(
"The message is %d bytes when serialized which is larger than"
" the maximum request size you have configured with the"
- " max_request_size configuration" % size)
+ " max_request_size configuration" % (size,))
if size > self.config['buffer_memory']:
raise Errors.MessageSizeTooLargeError(
"The message is %d bytes when serialized which is larger than"
" the total memory buffer you have configured with the"
- " buffer_memory configuration." % size)
+ " buffer_memory configuration." % (size,))
def _wait_on_metadata(self, topic, max_wait):
"""
@@ -679,7 +679,7 @@ class KafkaProducer(object):
elapsed = time.time() - begin
if not metadata_event.is_set():
raise Errors.KafkaTimeoutError(
- "Failed to update metadata after %.1f secs." % max_wait)
+ "Failed to update metadata after %.1f secs." % (max_wait,))
elif topic in self._metadata.unauthorized_topics:
raise Errors.TopicAuthorizationFailedError(topic)
else: