summary refs log tree commit diff
path: root/test/test_util.py
diff options
context:
space:
mode:
author: Dana Powers <dana.powers@gmail.com> 2016-04-05 09:34:48 -0700
committer: Dana Powers <dana.powers@gmail.com> 2016-04-05 09:35:45 -0700
commit5a14bd8c947251d1a8f848175cc3cf2b07af3411 (patch)
treea251ddbc60c84405762365429de9b04727653e6c /test/test_util.py
parent221f56d8a05cdc2d37f85018e4af352b4b2a95c5 (diff)
downloadkafka-python-5a14bd8c947251d1a8f848175cc3cf2b07af3411.tar.gz
Update imports from kafka.common -> kafka.errors / kafka.structs
Diffstat (limited to 'test/test_util.py')
-rw-r--r-- test/test_util.py | 11
1 files changed, 6 insertions, 5 deletions
diff --git a/test/test_util.py b/test/test_util.py
index 7f0432b..5fc3f69 100644
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -4,8 +4,9 @@ import struct
import six
from . import unittest
-import kafka.common
+import kafka.errors
import kafka.util
+import kafka.structs
class UtilTest(unittest.TestCase):
@@ -48,7 +49,7 @@ class UtilTest(unittest.TestCase):
self.assertEqual(kafka.util.read_int_string(b'\x00\x00\x00\x0bsome string', 0), (b'some string', 15))
def test_read_int_string__insufficient_data(self):
- with self.assertRaises(kafka.common.BufferUnderflowError):
+ with self.assertRaises(kafka.errors.BufferUnderflowError):
kafka.util.read_int_string(b'\x00\x00\x00\x021', 0)
def test_write_short_string(self):
@@ -90,7 +91,7 @@ class UtilTest(unittest.TestCase):
self.assertEqual(kafka.util.read_short_string(b'\x00\x0bsome string', 0), (b'some string', 13))
def test_read_int_string__insufficient_data2(self):
- with self.assertRaises(kafka.common.BufferUnderflowError):
+ with self.assertRaises(kafka.errors.BufferUnderflowError):
kafka.util.read_int_string('\x00\x021', 0)
def test_relative_unpack2(self):
@@ -100,11 +101,11 @@ class UtilTest(unittest.TestCase):
)
def test_relative_unpack3(self):
- with self.assertRaises(kafka.common.BufferUnderflowError):
+ with self.assertRaises(kafka.errors.BufferUnderflowError):
kafka.util.relative_unpack('>hh', '\x00', 0)
def test_group_by_topic_and_partition(self):
- t = kafka.common.TopicPartition
+ t = kafka.structs.TopicPartition
l = [
t("a", 1),