Diffstat (limited to 'README.md')
-rw-r--r--  README.md  26
1 file changed, 13 insertions, 13 deletions
diff --git a/README.md b/README.md
index 9bf3a03..fbccc1e 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@ Copyright 2013, David Arthur under Apache License, v2.0. See `LICENSE`
# Status
-The current version of this package is **0.9.0** and is compatible with
+The current version of this package is **0.9.0** and is compatible with
Kafka brokers running version **0.8.1**.
# Usage
@@ -32,24 +32,24 @@ from kafka.producer import SimpleProducer, KeyedProducer
kafka = KafkaClient("localhost", 9092)
# To send messages synchronously
-producer = SimpleProducer(kafka, "my-topic")
-producer.send_messages("some message")
-producer.send_messages("this method", "is variadic")
+producer = SimpleProducer(kafka)
+producer.send_messages("my-topic", "some message")
+producer.send_messages("my-topic", "this method", "is variadic")
# To send messages asynchronously
-producer = SimpleProducer(kafka, "my-topic", async=True)
-producer.send_messages("async message")
+producer = SimpleProducer(kafka, async=True)
+producer.send_messages("my-topic", "async message")
# To wait for acknowledgements
# ACK_AFTER_LOCAL_WRITE : server will wait until the data is written to
# a local log before sending a response
# ACK_AFTER_CLUSTER_COMMIT : server will block until the message is committed
# by all in-sync replicas before sending a response
-producer = SimpleProducer(kafka, "my-topic", async=False,
+producer = SimpleProducer(kafka, async=False,
req_acks=SimpleProducer.ACK_AFTER_LOCAL_WRITE,
ack_timeout=2000)
-response = producer.send_messages("async message")
+response = producer.send_messages("my-topic", "async message")
if response:
print(response[0].error)
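For illustration (not part of the commit): a minimal sketch of the cluster-commit acknowledgement mode described in the comments above, assuming `SimpleProducer.ACK_AFTER_CLUSTER_COMMIT` is exposed alongside `ACK_AFTER_LOCAL_WRITE` and that the new `send_messages(topic, ...)` signature applies.

```python
from kafka.client import KafkaClient
from kafka.producer import SimpleProducer

kafka = KafkaClient("localhost", 9092)

# Block until all in-sync replicas have committed the message
producer = SimpleProducer(kafka, async=False,
                          req_acks=SimpleProducer.ACK_AFTER_CLUSTER_COMMIT,
                          ack_timeout=2000)

response = producer.send_messages("my-topic", "fully replicated message")
if response:
    print(response[0].error)
```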
@@ -62,7 +62,7 @@ if response:
# Notes:
# * If the producer dies before the messages are sent, those messages will be lost
# * Call producer.stop() to send the pending messages and clean up
-producer = SimpleProducer(kafka, "my-topic", batch_send=True,
+producer = SimpleProducer(kafka, batch_send=True,
batch_send_every_n=20,
batch_send_every_t=60)
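For illustration (not part of the commit): a sketch of the batched producer in use, assuming the same `send_messages(topic, ...)` signature introduced above; `producer.stop()` flushes the buffer as the notes recommend.

```python
from kafka.client import KafkaClient
from kafka.producer import SimpleProducer

kafka = KafkaClient("localhost", 9092)

# Buffer messages and flush every 20 messages or every 60 seconds
producer = SimpleProducer(kafka, batch_send=True,
                          batch_send_every_n=20,
                          batch_send_every_t=60)

for i in range(100):
    producer.send_messages("my-topic", "batched message %d" % i)

# Flush whatever is still buffered and shut down the sender
producer.stop()
```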
@@ -83,11 +83,11 @@ from kafka.partitioner import HashedPartitioner, RoundRobinPartitioner
kafka = KafkaClient("localhost", 9092)
# HashedPartitioner is default
-producer = KeyedProducer(kafka, "my-topic")
-producer.send("key1", "some message")
-producer.send("key2", "this methode")
+producer = KeyedProducer(kafka)
+producer.send("my-topic", "key1", "some message")
+producer.send("my-topic", "key2", "this methode")
-producer = KeyedProducer(kafka, "my-topic", partitioner=RoundRobinPartitioner)
+producer = KeyedProducer(kafka, partitioner=RoundRobinPartitioner)
```
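For illustration (not part of the commit): a short sketch of the keyed producer with the round-robin partitioner, assuming the `send(topic, key, msg)` signature shown above.

```python
from kafka.client import KafkaClient
from kafka.producer import KeyedProducer
from kafka.partitioner import RoundRobinPartitioner

kafka = KafkaClient("localhost", 9092)

# Partitions are assigned round-robin rather than by hashing the key
producer = KeyedProducer(kafka, partitioner=RoundRobinPartitioner)
producer.send("my-topic", "key1", "some message")
producer.send("my-topic", "key2", "another message")
```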
## Multiprocess consumer