
Commit 35ec0f2

Use the simpler initialization API
1 parent 7fc3cfe commit 35ec0f2
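
The change is mechanical throughout: Kafka.new now takes the list of seed brokers as a positional argument instead of the seed_brokers: keyword, while the remaining options stay as keywords. A minimal before/after sketch (client_id is a placeholder value, not taken from the diff):

  # Before: brokers passed via the seed_brokers keyword.
  kafka = Kafka.new(seed_brokers: ["kafka1:9092", "kafka2:9092"], client_id: "my-app")

  # After: brokers passed positionally; other options remain keywords.
  kafka = Kafka.new(["kafka1:9092", "kafka2:9092"], client_id: "my-app")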

9 files changed, +15 -20 lines


lib/kafka/client.rb

Lines changed: 1 addition & 1 deletion
@@ -481,7 +481,7 @@ def delete_topic(name, timeout: 30)
     # @note This is an alpha level API and is subject to change.
     #
     # @example Describing the cleanup policy config of a topic
-    #   kafka = Kafka.new(seed_brokers: ["kafka1:9092"])
+    #   kafka = Kafka.new(["kafka1:9092"])
     #   kafka.describe_topic("my-topic", ["cleanup.policy"])
     #   #=> { "cleanup.policy" => "delete" }
     #

lib/kafka/consumer.rb

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ module Kafka
   #
   #     require "kafka"
   #
-  #     kafka = Kafka.new(seed_brokers: ["kafka1:9092", "kafka2:9092"])
+  #     kafka = Kafka.new(["kafka1:9092", "kafka2:9092"])
   #
   #     # Create a new Consumer instance in the group `my-group`:
   #     consumer = kafka.consumer(group_id: "my-group")
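
For reference, a minimal consumer loop using the new positional-brokers form (topic name, group id, and client_id are placeholder values; subscribe and each_message are the library's standard consumer calls):

  require "kafka"

  kafka = Kafka.new(["kafka1:9092", "kafka2:9092"], client_id: "my-app")

  consumer = kafka.consumer(group_id: "my-group")
  consumer.subscribe("greetings")

  # Process messages one at a time; offsets are committed for the group.
  consumer.each_message do |message|
    puts "#{message.topic}/#{message.partition}@#{message.offset}: #{message.value}"
  end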

lib/kafka/producer.rb

Lines changed: 2 additions & 7 deletions
@@ -14,7 +14,7 @@ module Kafka
   # do it for you, e.g.
   #
   #     # Will instantiate Kafka::Client
-  #     kafka = Kafka.new(...)
+  #     kafka = Kafka.new(["kafka1:9092", "kafka2:9092"])
   #
   #     # Will instantiate Kafka::Producer
   #     producer = kafka.producer
@@ -106,12 +106,7 @@ module Kafka
   #     # cluster to auto-create topics.
   #     topic = "random-messages"
   #
-  #     kafka = Kafka.new(
-  #       seed_brokers: brokers,
-  #       client_id: "simple-producer",
-  #       logger: logger,
-  #     )
-  #
+  #     kafka = Kafka.new(brokers, client_id: "simple-producer", logger: logger)
   #     producer = kafka.producer
   #
   #     begin
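
Along the same lines, a short end-to-end producer sketch with the positional form (broker list, topic, and message body are placeholders; produce, deliver_messages, and shutdown are the standard producer calls):

  require "kafka"
  require "logger"

  logger = Logger.new($stderr)

  # Brokers are passed positionally; client_id and logger stay as keywords.
  kafka = Kafka.new(["kafka1:9092", "kafka2:9092"], client_id: "simple-producer", logger: logger)
  producer = kafka.producer

  begin
    producer.produce("hello, world", topic: "random-messages")
    producer.deliver_messages
  ensure
    # Always release buffers and sockets held by the producer.
    producer.shutdown
  end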

spec/functional/batch_consumer_spec.rb

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@
   topic = create_random_topic(num_partitions: 15)

   Thread.new do
-    kafka = Kafka.new(seed_brokers: kafka_brokers, client_id: "test")
+    kafka = Kafka.new(kafka_brokers, client_id: "test")
     producer = kafka.producer

     messages.each do |i|
@@ -23,7 +23,7 @@

   threads = 2.times.map do |thread_id|
     t = Thread.new do
-      kafka = Kafka.new(seed_brokers: kafka_brokers, client_id: "test", logger: logger)
+      kafka = Kafka.new(kafka_brokers, client_id: "test", logger: logger)
       consumer = kafka.consumer(group_id: group_id, offset_retention_time: offset_retention_time)
       consumer.subscribe(topic)

spec/functional/client_spec.rb

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@

   example "listing available topics in the cluster" do
     # Use a clean Kafka instance to avoid hitting caches.
-    kafka = Kafka.new(seed_brokers: KAFKA_BROKERS, logger: LOGGER)
+    kafka = Kafka.new(KAFKA_BROKERS, logger: LOGGER)

     topics = kafka.topics

spec/functional/consumer_group_spec.rb

Lines changed: 4 additions & 4 deletions
@@ -6,7 +6,7 @@
   messages = (1..1000).to_a

   begin
-    kafka = Kafka.new(seed_brokers: kafka_brokers, client_id: "test")
+    kafka = Kafka.new(kafka_brokers, client_id: "test")
     producer = kafka.producer

     messages.each do |i|
@@ -22,7 +22,7 @@
   received_messages = []

   consumers = 2.times.map do
-    kafka = Kafka.new(seed_brokers: kafka_brokers, client_id: "test", logger: logger)
+    kafka = Kafka.new(kafka_brokers, client_id: "test", logger: logger)
     consumer = kafka.consumer(group_id: group_id, offset_retention_time: offset_retention_time)
     consumer.subscribe(topic)
     consumer
@@ -60,7 +60,7 @@
   var = ConditionVariable.new

   Thread.new do
-    kafka = Kafka.new(seed_brokers: kafka_brokers, client_id: "test")
+    kafka = Kafka.new(kafka_brokers, client_id: "test")
     producer = kafka.producer

     messages.each do |i|
@@ -88,7 +88,7 @@

   threads = 2.times.map do |thread_id|
     t = Thread.new do
-      kafka = Kafka.new(seed_brokers: kafka_brokers, client_id: "test", logger: logger)
+      kafka = Kafka.new(kafka_brokers, client_id: "test", logger: logger)
       consumer = kafka.consumer(group_id: group_id, offset_retention_time: offset_retention_time)
       consumer.subscribe(topic)

spec/fuzz/consumer_group_spec.rb

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,7 @@
 before do
   logger.level = Logger::INFO

-  kafka = Kafka.new(seed_brokers: kafka_brokers, logger: logger)
+  kafka = Kafka.new(kafka_brokers, logger: logger)
   producer = kafka.producer(max_buffer_size: num_messages)

   messages.each do |i|
@@ -66,7 +66,7 @@
 def start_consumer(group_id, result_queue)
   thread = Thread.new do
     kafka = Kafka.new(
-      seed_brokers: kafka_brokers,
+      kafka_brokers,
       logger: logger,
       socket_timeout: 20,
       connect_timeout: 20,

spec/fuzz/producer_spec.rb

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 describe "Producing a lot of messages with an unreliable cluster", fuzz: true do
   let(:logger) { LOGGER }
-  let(:kafka) { Kafka.new(seed_brokers: kafka_brokers, client_id: "test", logger: logger) }
+  let(:kafka) { Kafka.new(kafka_brokers, client_id: "test", logger: logger) }
   let(:producer) { kafka.producer(max_retries: 20, retry_backoff: 5) }

   before do

spec/spec_helper.rb

Lines changed: 1 addition & 1 deletion
@@ -69,7 +69,7 @@ def self.included(base)
 base.class_eval do
   let(:logger) { LOGGER }
   let(:kafka_brokers) { KAFKA_BROKERS }
-  let(:kafka) { Kafka.new(seed_brokers: kafka_brokers, client_id: "test", logger: logger) }
+  let(:kafka) { Kafka.new(kafka_brokers, client_id: "test", logger: logger) }

   after { kafka.close rescue nil }
 end
