diff options
author | Bruno Renié <brutasse@gmail.com> | 2014-08-29 15:23:48 +0200 |
---|---|---|
committer | Mark Roberts <wizzat@fb.com> | 2014-09-03 09:55:45 -0700 |
commit | 81f51b9b284d750f5e5added2f2c4bd773acd604 (patch) | |
tree | 235104f553fd91d97b84acc07dec681a06b8b642 /test | |
parent | ab80fa8283dc938e354d094e34fb0e86b5316ea4 (diff) | |
download | kafka-python-81f51b9b284d750f5e5added2f2c4bd773acd604.tar.gz |
Fix more tests, only multiprocessing consumer ones remaining
Diffstat (limited to 'test')
-rw-r--r-- | test/test_client_integration.py | 10 | ||||
-rw-r--r-- | test/test_consumer_integration.py | 5 | ||||
-rw-r--r-- | test/test_failover_integration.py | 2 | ||||
-rw-r--r-- | test/test_producer_integration.py | 30 | ||||
-rw-r--r-- | test/test_protocol.py | 1 | ||||
-rw-r--r-- | test/testutil.py | 4 |
6 files changed, 32 insertions, 20 deletions
diff --git a/test/test_client_integration.py b/test/test_client_integration.py index 3eb917f..029301e 100644 --- a/test/test_client_integration.py +++ b/test/test_client_integration.py @@ -56,7 +56,7 @@ class TestKafkaClientIntegration(KafkaIntegrationTestCase): # ensure_topic_exists should fail with KafkaTimeoutError with self.assertRaises(KafkaTimeoutError): - self.client.ensure_topic_exists("this_topic_doesnt_exist", timeout=0) + self.client.ensure_topic_exists(b"this_topic_doesnt_exist", timeout=0) #################### # Offset Tests # @@ -64,12 +64,12 @@ class TestKafkaClientIntegration(KafkaIntegrationTestCase): @kafka_versions("0.8.1", "0.8.1.1") def test_commit_fetch_offsets(self): - req = OffsetCommitRequest(self.topic, 0, 42, "metadata") - (resp,) = self.client.send_offset_commit_request("group", [req]) + req = OffsetCommitRequest(self.topic, 0, 42, b"metadata") + (resp,) = self.client.send_offset_commit_request(b"group", [req]) self.assertEquals(resp.error, 0) req = OffsetFetchRequest(self.topic, 0) - (resp,) = self.client.send_offset_fetch_request("group", [req]) + (resp,) = self.client.send_offset_fetch_request(b"group", [req]) self.assertEquals(resp.error, 0) self.assertEquals(resp.offset, 42) - self.assertEquals(resp.metadata, "") # Metadata isn't stored for now + self.assertEquals(resp.metadata, b"") # Metadata isn't stored for now diff --git a/test/test_consumer_integration.py b/test/test_consumer_integration.py index 2e8e859..1b2c73e 100644 --- a/test/test_consumer_integration.py +++ b/test/test_consumer_integration.py @@ -4,6 +4,9 @@ from datetime import datetime from kafka import * # noqa from kafka.common import * # noqa from kafka.consumer import MAX_FETCH_BUFFER_SIZE_BYTES + +from six.moves import xrange + from .fixtures import ZookeeperFixture, KafkaFixture from .testutil import * @@ -267,7 +270,7 @@ class TestConsumerIntegration(KafkaIntegrationTestCase): kwargs.setdefault('auto_commit', True) consumer_class = kwargs.pop('consumer', 
SimpleConsumer) - group = kwargs.pop('group', self.id()) + group = kwargs.pop('group', self.id().encode('utf-8')) topic = kwargs.pop('topic', self.topic) if consumer_class == SimpleConsumer: diff --git a/test/test_failover_integration.py b/test/test_failover_integration.py index d3121d6..ee84aad 100644 --- a/test/test_failover_integration.py +++ b/test/test_failover_integration.py @@ -60,7 +60,7 @@ class TestFailover(KafkaIntegrationTestCase): while not recovered and (time.time() - started) < timeout: try: logging.debug("attempting to send 'success' message after leader killed") - producer.send_messages(topic, partition, 'success') + producer.send_messages(topic, partition, b'success') logging.debug("success!") recovered = True except (FailedPayloadsError, ConnectionError): diff --git a/test/test_producer_integration.py b/test/test_producer_integration.py index c39d6b5..674cc58 100644 --- a/test/test_producer_integration.py +++ b/test/test_producer_integration.py @@ -32,13 +32,15 @@ class TestKafkaProducerIntegration(KafkaIntegrationTestCase): start_offset = self.current_offset(self.topic, 0) self.assert_produce_request( - [ create_message("Test message %d" % i) for i in range(100) ], + [create_message(("Test message %d" % i).encode('utf-8')) + for i in range(100)], start_offset, 100, ) self.assert_produce_request( - [ create_message("Test message %d" % i) for i in range(100) ], + [create_message(("Test message %d" % i).encode('utf-8')) + for i in range(100)], start_offset+100, 100, ) @@ -48,7 +50,8 @@ class TestKafkaProducerIntegration(KafkaIntegrationTestCase): start_offset = self.current_offset(self.topic, 0) self.assert_produce_request( - [ create_message("Test message %d" % i) for i in range(10000) ], + [create_message(("Test message %d" % i).encode('utf-8')) + for i in range(10000)], start_offset, 10000, ) @@ -57,8 +60,10 @@ class TestKafkaProducerIntegration(KafkaIntegrationTestCase): def test_produce_many_gzip(self): start_offset = 
self.current_offset(self.topic, 0) - message1 = create_gzip_message(["Gzipped 1 %d" % i for i in range(100)]) - message2 = create_gzip_message(["Gzipped 2 %d" % i for i in range(100)]) + message1 = create_gzip_message([ + ("Gzipped 1 %d" % i).encode('utf-8') for i in range(100)]) + message2 = create_gzip_message([ + ("Gzipped 2 %d" % i).encode('utf-8') for i in range(100)]) self.assert_produce_request( [ message1, message2 ], @@ -85,8 +90,9 @@ class TestKafkaProducerIntegration(KafkaIntegrationTestCase): msg_count = 1+100 messages = [ - create_message("Just a plain message"), - create_gzip_message(["Gzipped %d" % i for i in range(100)]), + create_message(b"Just a plain message"), + create_gzip_message([ + ("Gzipped %d" % i).encode('utf-8') for i in range(100)]), ] # All snappy integration tests fail with nosnappyjava @@ -101,14 +107,18 @@ class TestKafkaProducerIntegration(KafkaIntegrationTestCase): start_offset = self.current_offset(self.topic, 0) self.assert_produce_request([ - create_gzip_message(["Gzipped batch 1, message %d" % i for i in range(50000)]) + create_gzip_message([ + ("Gzipped batch 1, message %d" % i).encode('utf-8') + for i in range(50000)]) ], start_offset, 50000, ) self.assert_produce_request([ - create_gzip_message(["Gzipped batch 1, message %d" % i for i in range(50000)]) + create_gzip_message([ + ("Gzipped batch 1, message %d" % i).encode('utf-8') + for i in range(50000)]) ], start_offset+50000, 50000, @@ -144,7 +154,7 @@ class TestKafkaProducerIntegration(KafkaIntegrationTestCase): @kafka_versions("all") def test_produce__new_topic_fails_with_reasonable_error(self): - new_topic = 'new_topic_{guid}'.format(guid = str(uuid.uuid4())) + new_topic = 'new_topic_{guid}'.format(guid = str(uuid.uuid4())).encode('utf-8') producer = SimpleProducer(self.client) # At first it doesn't exist diff --git a/test/test_protocol.py b/test/test_protocol.py index 2107f82..a4f6f64 100644 --- a/test/test_protocol.py +++ b/test/test_protocol.py @@ -108,7 +108,6 @@ 
class TestProtocol(unittest.TestCase): def test_encode_message(self): message = create_message(b"test", b"key") encoded = KafkaProtocol._encode_message(message) - print("CRC", -1427009701) expect = b"".join([ struct.pack(">i", -1427009701), # CRC struct.pack(">bb", 0, 0), # Magic, flags diff --git a/test/testutil.py b/test/testutil.py index 114dff9..9262fca 100644 --- a/test/testutil.py +++ b/test/testutil.py @@ -59,8 +59,8 @@ class KafkaIntegrationTestCase(unittest.TestCase): return if not self.topic: - self.topic = "%s-%s" % (self.id()[self.id().rindex(".") + 1:], random_string(10).decode('utf-8')) - self.topic = self.topic.encode('utf-8') + topic = "%s-%s" % (self.id()[self.id().rindex(".") + 1:], random_string(10).decode('utf-8')) + self.topic = topic.encode('utf-8') if self.create_client: self.client = KafkaClient('%s:%d' % (self.server.host, self.server.port)) |