Diffstat (limited to 'test')
-rw-r--r--  test/test_client.py               | 11
-rw-r--r--  test/test_client_async.py         |  4
-rw-r--r--  test/test_client_integration.py   |  8
-rw-r--r--  test/test_conn_legacy.py          |  3
-rw-r--r--  test/test_consumer.py             | 10
-rw-r--r--  test/test_consumer_group.py       |  2
-rw-r--r--  test/test_consumer_integration.py |  6
-rw-r--r--  test/test_context.py              |  2
-rw-r--r--  test/test_coordinator.py          |  5
-rw-r--r--  test/test_failover_integration.py |  6
-rw-r--r--  test/test_fetcher.py              |  5
-rw-r--r--  test/test_producer_integration.py |  6
-rw-r--r--  test/test_producer_legacy.py      |  8
-rw-r--r--  test/test_protocol.py             | 20
-rw-r--r--  test/test_util.py                 | 11
-rw-r--r--  test/testutil.py                  |  2
16 files changed, 51 insertions(+), 58 deletions(-)
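
For reference, a minimal sketch of the import split this change applies across the test suite: exception classes move from the old catch-all kafka.common module to kafka.errors, while the payload/metadata namedtuples move to kafka.structs. All module and class names below are taken from the hunks that follow; only the 'example-topic' string is an illustrative value.

# Old style, removed throughout the diff below:
#   from kafka.common import KafkaTimeoutError, TopicPartition
#   import kafka.common as Errors

# New style: exceptions now come from kafka.errors ...
from kafka.errors import KafkaTimeoutError, FailedPayloadsError
import kafka.errors as Errors

# ... and payload/metadata namedtuples come from kafka.structs.
from kafka.structs import TopicPartition, ProduceRequestPayload

# TopicPartition is still constructed the same way (see the
# test_util.py hunk below); 'example-topic' is just a placeholder.
tp = TopicPartition('example-topic', 0)
assert Errors.KafkaTimeoutError is KafkaTimeoutError
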
diff --git a/test/test_client.py b/test/test_client.py
index 6980434..42d7dbd 100644
--- a/test/test_client.py
+++ b/test/test_client.py
@@ -6,17 +6,14 @@ import six
from . import unittest
from kafka import SimpleClient
-from kafka.common import (
- ProduceRequestPayload,
- BrokerMetadata,
- TopicPartition, KafkaUnavailableError,
- LeaderNotAvailableError, UnknownTopicOrPartitionError,
- KafkaTimeoutError, ConnectionError, FailedPayloadsError
-)
from kafka.conn import KafkaConnection
+from kafka.errors import (
+ KafkaUnavailableError, LeaderNotAvailableError, KafkaTimeoutError,
+ UnknownTopicOrPartitionError, ConnectionError, FailedPayloadsError)
from kafka.future import Future
from kafka.protocol import KafkaProtocol, create_message
from kafka.protocol.metadata import MetadataResponse
+from kafka.structs import ProduceRequestPayload, BrokerMetadata, TopicPartition
from test.testutil import Timer
diff --git a/test/test_client_async.py b/test/test_client_async.py
index 884686d..eaac8e1 100644
--- a/test/test_client_async.py
+++ b/test/test_client_async.py
@@ -4,12 +4,12 @@ import socket
import pytest
from kafka.client_async import KafkaClient
-from kafka.common import BrokerMetadata
-import kafka.common as Errors
from kafka.conn import ConnectionStates
+import kafka.errors as Errors
from kafka.future import Future
from kafka.protocol.metadata import MetadataResponse, MetadataRequest
from kafka.protocol.produce import ProduceRequest
+from kafka.structs import BrokerMetadata
@pytest.mark.parametrize("bootstrap,expected_hosts", [
diff --git a/test/test_client_integration.py b/test/test_client_integration.py
index c5d3b58..742572d 100644
--- a/test/test_client_integration.py
+++ b/test/test_client_integration.py
@@ -1,10 +1,10 @@
import os
-from kafka.common import (
- FetchRequestPayload, OffsetCommitRequestPayload, OffsetFetchRequestPayload,
- KafkaTimeoutError, ProduceRequestPayload
-)
+from kafka.errors import KafkaTimeoutError
from kafka.protocol import create_message
+from kafka.structs import (
+ FetchRequestPayload, OffsetCommitRequestPayload, OffsetFetchRequestPayload,
+ ProduceRequestPayload)
from test.fixtures import ZookeeperFixture, KafkaFixture
from test.testutil import KafkaIntegrationTestCase, kafka_versions
diff --git a/test/test_conn_legacy.py b/test/test_conn_legacy.py
index f0ef8fb..347588e 100644
--- a/test/test_conn_legacy.py
+++ b/test/test_conn_legacy.py
@@ -5,9 +5,10 @@ from threading import Thread
import mock
from . import unittest
-from kafka.common import ConnectionError
+from kafka.errors import ConnectionError
from kafka.conn import KafkaConnection, collect_hosts, DEFAULT_SOCKET_TIMEOUT_SECONDS
+
class ConnTest(unittest.TestCase):
def setUp(self):
diff --git a/test/test_consumer.py b/test/test_consumer.py
index e664292..f3dad16 100644
--- a/test/test_consumer.py
+++ b/test/test_consumer.py
@@ -4,11 +4,11 @@ from mock import MagicMock, patch
from . import unittest
from kafka import SimpleConsumer, KafkaConsumer, MultiProcessConsumer
-from kafka.common import (
- KafkaConfigurationError, FetchResponsePayload, OffsetFetchResponsePayload,
- FailedPayloadsError, OffsetAndMessage,
- NotLeaderForPartitionError, UnknownTopicOrPartitionError
-)
+from kafka.errors import (
+ FailedPayloadsError, KafkaConfigurationError, NotLeaderForPartitionError,
+ UnknownTopicOrPartitionError)
+from kafka.structs import (
+ FetchResponsePayload, OffsetAndMessage, OffsetFetchResponsePayload)
class TestKafkaConsumer(unittest.TestCase):
diff --git a/test/test_consumer_group.py b/test/test_consumer_group.py
index 5fcfbe2..c02eddc 100644
--- a/test/test_consumer_group.py
+++ b/test/test_consumer_group.py
@@ -7,11 +7,11 @@ import pytest
import six
from kafka import SimpleClient
-from kafka.common import TopicPartition
from kafka.conn import ConnectionStates
from kafka.consumer.group import KafkaConsumer
from kafka.future import Future
from kafka.protocol.metadata import MetadataResponse
+from kafka.structs import TopicPartition
from test.conftest import version
from test.testutil import random_string
diff --git a/test/test_consumer_integration.py b/test/test_consumer_integration.py
index 1b60c95..4e081ce 100644
--- a/test/test_consumer_integration.py
+++ b/test/test_consumer_integration.py
@@ -7,11 +7,9 @@ from . import unittest
from kafka import (
KafkaConsumer, MultiProcessConsumer, SimpleConsumer, create_message
)
-from kafka.common import (
- ProduceRequestPayload, ConsumerFetchSizeTooSmall,
- OffsetOutOfRangeError, TopicPartition
-)
from kafka.consumer.base import MAX_FETCH_BUFFER_SIZE_BYTES
+from kafka.errors import ConsumerFetchSizeTooSmall, OffsetOutOfRangeError
+from kafka.structs import ProduceRequestPayload, TopicPartition
from test.fixtures import ZookeeperFixture, KafkaFixture
from test.testutil import (
diff --git a/test/test_context.py b/test/test_context.py
index da9b22f..3d41ba6 100644
--- a/test/test_context.py
+++ b/test/test_context.py
@@ -5,8 +5,8 @@ from . import unittest
from mock import MagicMock, patch
-from kafka.common import OffsetOutOfRangeError
from kafka.context import OffsetCommitContext
+from kafka.errors import OffsetOutOfRangeError
class TestOffsetCommitContext(unittest.TestCase):
diff --git a/test/test_coordinator.py b/test/test_coordinator.py
index 44db808..d6df983 100644
--- a/test/test_coordinator.py
+++ b/test/test_coordinator.py
@@ -4,7 +4,7 @@ from __future__ import absolute_import
import pytest
from kafka.client_async import KafkaClient
-from kafka.common import TopicPartition, OffsetAndMetadata
+from kafka.structs import TopicPartition, OffsetAndMetadata
from kafka.consumer.subscription_state import (
SubscriptionState, ConsumerRebalanceListener)
from kafka.coordinator.assignors.range import RangePartitionAssignor
@@ -13,6 +13,7 @@ from kafka.coordinator.consumer import ConsumerCoordinator
from kafka.coordinator.protocol import (
ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment)
from kafka.conn import ConnectionStates
+import kafka.errors as Errors
from kafka.future import Future
from kafka.protocol.commit import (
OffsetCommitRequest_v0, OffsetCommitRequest_v1, OffsetCommitRequest_v2,
@@ -21,8 +22,6 @@ from kafka.protocol.commit import (
from kafka.protocol.metadata import MetadataResponse
from kafka.util import WeakMethod
-import kafka.common as Errors
-
@pytest.fixture
def conn(mocker):
diff --git a/test/test_failover_integration.py b/test/test_failover_integration.py
index 9409241..58e9463 100644
--- a/test/test_failover_integration.py
+++ b/test/test_failover_integration.py
@@ -3,10 +3,10 @@ import os
import time
from kafka import SimpleClient, SimpleConsumer, KeyedProducer
-from kafka.common import (
- TopicPartition, FailedPayloadsError, ConnectionError, RequestTimedOutError
-)
+from kafka.errors import (
+ FailedPayloadsError, ConnectionError, RequestTimedOutError)
from kafka.producer.base import Producer
+from kafka.structs import TopicPartition
from test.fixtures import ZookeeperFixture, KafkaFixture
from test.testutil import KafkaIntegrationTestCase, random_string
diff --git a/test/test_fetcher.py b/test/test_fetcher.py
index a252f6c..cdd324f 100644
--- a/test/test_fetcher.py
+++ b/test/test_fetcher.py
@@ -4,13 +4,12 @@ from __future__ import absolute_import
import pytest
from kafka.client_async import KafkaClient
-from kafka.common import TopicPartition, OffsetAndMetadata
from kafka.consumer.fetcher import Fetcher
from kafka.consumer.subscription_state import SubscriptionState
+import kafka.errors as Errors
from kafka.future import Future
from kafka.protocol.fetch import FetchRequest
-
-import kafka.common as Errors
+from kafka.structs import TopicPartition, OffsetAndMetadata
@pytest.fixture
diff --git a/test/test_producer_integration.py b/test/test_producer_integration.py
index d631402..176c99e 100644
--- a/test/test_producer_integration.py
+++ b/test/test_producer_integration.py
@@ -10,11 +10,9 @@ from kafka import (
RoundRobinPartitioner, HashedPartitioner
)
from kafka.codec import has_snappy
-from kafka.common import (
- FetchRequestPayload, ProduceRequestPayload,
- UnknownTopicOrPartitionError, LeaderNotAvailableError
-)
+from kafka.errors import UnknownTopicOrPartitionError, LeaderNotAvailableError
from kafka.producer.base import Producer
+from kafka.structs import FetchRequestPayload, ProduceRequestPayload
from test.fixtures import ZookeeperFixture, KafkaFixture
from test.testutil import KafkaIntegrationTestCase, kafka_versions
diff --git a/test/test_producer_legacy.py b/test/test_producer_legacy.py
index 850cb80..9b87c76 100644
--- a/test/test_producer_legacy.py
+++ b/test/test_producer_legacy.py
@@ -9,12 +9,12 @@ from mock import MagicMock, patch
from . import unittest
from kafka import SimpleClient, SimpleProducer, KeyedProducer
-from kafka.common import (
- AsyncProducerQueueFull, FailedPayloadsError, NotLeaderForPartitionError,
- ProduceResponsePayload, RetryOptions, TopicPartition
-)
+from kafka.errors import (
+ AsyncProducerQueueFull, FailedPayloadsError, NotLeaderForPartitionError)
from kafka.producer.base import Producer, _send_upstream
from kafka.protocol import CODEC_NONE
+from kafka.structs import (
+ ProduceResponsePayload, RetryOptions, TopicPartition)
from six.moves import queue, xrange
diff --git a/test/test_protocol.py b/test/test_protocol.py
index 1d91e7d..d705e3a 100644
--- a/test/test_protocol.py
+++ b/test/test_protocol.py
@@ -7,21 +7,21 @@ from mock import patch, sentinel
from . import unittest
from kafka.codec import has_snappy, gzip_decode, snappy_decode
-from kafka.common import (
+from kafka.errors import (
+ ChecksumError, KafkaUnavailableError, UnsupportedCodecError,
+ ConsumerFetchSizeTooSmall, ProtocolError)
+from kafka.protocol import (
+ ATTRIBUTE_CODEC_MASK, CODEC_NONE, CODEC_GZIP, CODEC_SNAPPY, KafkaProtocol,
+ create_message, create_gzip_message, create_snappy_message,
+ create_message_set)
+from kafka.structs import (
OffsetRequestPayload, OffsetResponsePayload,
OffsetCommitRequestPayload, OffsetCommitResponsePayload,
OffsetFetchRequestPayload, OffsetFetchResponsePayload,
ProduceRequestPayload, ProduceResponsePayload,
FetchRequestPayload, FetchResponsePayload,
- Message, ChecksumError, OffsetAndMessage, BrokerMetadata,
- KafkaUnavailableError, UnsupportedCodecError, ConsumerFetchSizeTooSmall,
- ProtocolError, ConsumerMetadataResponse
-)
-from kafka.protocol import (
- ATTRIBUTE_CODEC_MASK, CODEC_NONE, CODEC_GZIP, CODEC_SNAPPY, KafkaProtocol,
- create_message, create_gzip_message, create_snappy_message,
- create_message_set
-)
+ Message, OffsetAndMessage, BrokerMetadata, ConsumerMetadataResponse)
+
class TestProtocol(unittest.TestCase):
def test_create_message(self):
diff --git a/test/test_util.py b/test/test_util.py
index 7f0432b..5fc3f69 100644
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -4,8 +4,9 @@ import struct
import six
from . import unittest
-import kafka.common
+import kafka.errors
import kafka.util
+import kafka.structs
class UtilTest(unittest.TestCase):
@@ -48,7 +49,7 @@ class UtilTest(unittest.TestCase):
self.assertEqual(kafka.util.read_int_string(b'\x00\x00\x00\x0bsome string', 0), (b'some string', 15))
def test_read_int_string__insufficient_data(self):
- with self.assertRaises(kafka.common.BufferUnderflowError):
+ with self.assertRaises(kafka.errors.BufferUnderflowError):
kafka.util.read_int_string(b'\x00\x00\x00\x021', 0)
def test_write_short_string(self):
@@ -90,7 +91,7 @@ class UtilTest(unittest.TestCase):
self.assertEqual(kafka.util.read_short_string(b'\x00\x0bsome string', 0), (b'some string', 13))
def test_read_int_string__insufficient_data2(self):
- with self.assertRaises(kafka.common.BufferUnderflowError):
+ with self.assertRaises(kafka.errors.BufferUnderflowError):
kafka.util.read_int_string('\x00\x021', 0)
def test_relative_unpack2(self):
@@ -100,11 +101,11 @@ class UtilTest(unittest.TestCase):
)
def test_relative_unpack3(self):
- with self.assertRaises(kafka.common.BufferUnderflowError):
+ with self.assertRaises(kafka.errors.BufferUnderflowError):
kafka.util.relative_unpack('>hh', '\x00', 0)
def test_group_by_topic_and_partition(self):
- t = kafka.common.TopicPartition
+ t = kafka.structs.TopicPartition
l = [
t("a", 1),
diff --git a/test/testutil.py b/test/testutil.py
index 1d1f6ea..a6f4421 100644
--- a/test/testutil.py
+++ b/test/testutil.py
@@ -12,7 +12,7 @@ from six.moves import xrange
from . import unittest
from kafka import SimpleClient
-from kafka.common import OffsetRequestPayload
+from kafka.structs import OffsetRequestPayload
__all__ = [
'random_string',