Skip to content

Commit 7ebc807

Browse files
authored
Merge pull request #260 from confluentinc/kafkatest_fixes
Kafkatest fixes
2 parents 40fd30c + 3994b87 commit 7ebc807

File tree

5 files changed

+208
-82
lines changed

5 files changed

+208
-82
lines changed

confluent_kafka/kafkatest/deploy.sh

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -72,12 +72,13 @@ function setup_virtualenv {
7272
echo "Installing and creating virtualenv"
7373
which virtualenv || sudo apt-get install -y python-virtualenv
7474
virtualenv $DIR/venv
75+
source $DIR/venv/bin/activate
76+
# Upgrade pip
77+
pip install -U pip
7578
else
7679
echo "Reusing existing virtualenv"
80+
source $DIR/venv/bin/activate
7781
fi
78-
source $DIR/venv/bin/activate
79-
# Upgrade pip
80-
pip install -U pip
8182

8283
}
8384

@@ -113,9 +114,13 @@ setup_virtualenv
113114
# librdkafka is bundled with the wheel, if not, install it here:
114115
#install_librdkafka
115116

116-
install_client
117-
118-
verify_client
117+
if ! verify_client ; then
118+
echo "Client not installed, installing..."
119+
install_client
120+
verify_client
121+
else
122+
echo "Client already installed"
123+
fi
119124

120125

121126

confluent_kafka/kafkatest/verifiable_consumer.py

Lines changed: 44 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616
#
1717

1818
import argparse
19+
import os
20+
import time
1921
from confluent_kafka import Consumer, KafkaError, KafkaException
2022
from verifiable_client import VerifiableClient
2123

@@ -95,10 +97,10 @@ def on_revoke(self, consumer, partitions):
9597
# Send final consumed records prior to rebalancing to make sure
9698
# latest consumed is on par with what is going to be committed.
9799
self.send_records_consumed(immediate=True)
100+
self.do_commit(immediate=True, async=False)
98101
self.assignment = list()
99102
self.assignment_dict = dict()
100103
self.send_assignment('revoked', partitions)
101-
self.do_commit(immediate=True)
102104

103105
def on_commit(self, err, partitions):
104106
""" Offsets Committed callback """
@@ -125,6 +127,10 @@ def on_commit(self, err, partitions):
125127
pd['error'] = str(p.error)
126128
d['offsets'].append(pd)
127129

130+
if len(self.assignment) == 0:
131+
self.dbg('Not sending offsets_committed: No current assignment: would be: %s' % d)
132+
return
133+
128134
self.send(d)
129135

130136
def do_commit(self, immediate=False, async=None):
@@ -149,15 +155,33 @@ def do_commit(self, immediate=False, async=None):
149155
(self.consumed_msgs - self.consumed_msgs_at_last_commit,
150156
async_mode))
151157

152-
try:
153-
self.consumer.commit(async=async_mode)
154-
except KafkaException as e:
155-
if e.args[0].code() == KafkaError._WAIT_COORD:
156-
self.dbg('Ignoring commit failure, still waiting for coordinator')
157-
elif e.args[0].code() == KafkaError._NO_OFFSET:
158-
self.dbg('No offsets to commit')
159-
else:
160-
raise
158+
retries = 3
159+
while True:
160+
try:
161+
self.dbg('Commit')
162+
offsets = self.consumer.commit(async=async_mode)
163+
self.dbg('Commit done: offsets %s' % offsets)
164+
165+
if not async_mode:
166+
self.on_commit(None, offsets)
167+
168+
break
169+
170+
except KafkaException as e:
171+
if e.args[0].code() == KafkaError._NO_OFFSET:
172+
self.dbg('No offsets to commit')
173+
break
174+
elif e.args[0].code() in (KafkaError.REQUEST_TIMED_OUT,
175+
KafkaError.NOT_COORDINATOR_FOR_GROUP,
176+
KafkaError._WAIT_COORD):
177+
self.dbg('Commit failed: %s (%d retries)' % (str(e), retries))
178+
if retries <= 0:
179+
raise
180+
retries -= 1
181+
time.sleep(1)
182+
continue
183+
else:
184+
raise
161185

162186
self.consumed_msgs_at_last_commit = self.consumed_msgs
163187

@@ -168,7 +192,7 @@ def msg_consume(self, msg):
168192
# ignore EOF
169193
pass
170194
else:
171-
self.err('Consume failed: %s' % msg.error(), term=True)
195+
self.err('Consume failed: %s' % msg.error(), term=False)
172196
return
173197

174198
if False:
@@ -192,6 +216,7 @@ def msg_consume(self, msg):
192216

193217
self.consumed_msgs += 1
194218

219+
self.consumer.store_offsets(message=msg)
195220
self.send_records_consumed(immediate=False)
196221
self.do_commit(immediate=False)
197222

@@ -229,7 +254,11 @@ def to_dict(self):
229254
args = vars(parser.parse_args())
230255

231256
conf = {'broker.version.fallback': '0.9.0',
232-
'default.topic.config': dict()}
257+
'default.topic.config': dict(),
258+
# Do explicit manual offset stores to avoid race conditions
259+
# where a message is consumed from librdkafka but not yet handled
260+
# by the Python code that keeps track of last consumed offset.
261+
'enable.auto.offset.store': False}
233262

234263
VerifiableClient.set_config(conf, args)
235264

@@ -239,6 +268,7 @@ def to_dict(self):
239268
vc.use_auto_commit = args['enable.auto.commit']
240269
vc.max_msgs = args['max_messages']
241270

271+
vc.dbg('Pid %d' % os.getpid())
242272
vc.dbg('Using config: %s' % conf)
243273

244274
vc.dbg('Subscribing to %s' % args['topic'])
@@ -261,6 +291,8 @@ def to_dict(self):
261291
vc.msg_consume(msg)
262292

263293
except KeyboardInterrupt:
294+
vc.dbg('KeyboardInterrupt')
295+
vc.run = False
264296
pass
265297

266298
vc.dbg('Closing consumer')

confluent_kafka/kafkatest/verifiable_producer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -131,10 +131,10 @@ def dr_cb(self, err, msg):
131131
# Flush remaining messages to broker.
132132
vp.dbg('Flushing')
133133
try:
134-
vp.producer.flush()
134+
vp.producer.flush(5)
135135
except KeyboardInterrupt:
136136
pass
137137

138-
vp.send({'name': 'shutdown_complete'})
138+
vp.send({'name': 'shutdown_complete', '_qlen': len(vp.producer)})
139139

140140
vp.dbg('All done')

0 commit comments

Comments (0)