
Commit a2be27b

PR comments
1 parent d643392 commit a2be27b

3 files changed: +19 -31 lines changed


examples/adminapi.py

Lines changed: 2 additions & 2 deletions

@@ -899,7 +899,7 @@ def example_list_offsets(a, args):
 
     # Create Admin client
     a = AdminClient({'bootstrap.servers': broker},
-                    logger=logger)
+                    logger=logger)
 
     opsmap = {'create_topics': example_create_topics,
               'delete_topics': example_delete_topics,
@@ -930,4 +930,4 @@ def example_list_offsets(a, args):
     opsmap[operation](a, args)
 
     # Log messages through custom logger if provided
-    a.poll(0)
+    a.poll(0)
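
Note: the example constructs the AdminClient with a logger keyword argument and later drains queued log messages with poll(0). A minimal standalone sketch of the same pattern, assuming a locally reachable broker (the broker address and logger name below are placeholders):

import logging

from confluent_kafka.admin import AdminClient

# Placeholder logger; any logging.Logger instance works here.
logger = logging.getLogger('AdminExample')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())

# Pass the logger as a constructor argument, as in the example above.
a = AdminClient({'bootstrap.servers': 'localhost:9092'}, logger=logger)

# Log messages are delivered through the custom logger on poll().
a.poll(0)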

src/confluent_kafka/admin/__init__.py

Lines changed: 2 additions & 4 deletions

@@ -115,12 +115,10 @@ def __init__(self, conf, logger=None):
 
         The AdminClient is a standard Kafka protocol client, supporting
         the standard librdkafka configuration properties as specified at
-        https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
+        https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md
 
-        :param dict config: Configuration properties. At a minimum ``bootstrap.servers`` **should** be set\n"
+        :param dict conf: Configuration properties. At a minimum ``bootstrap.servers`` **should** be set\n"
         :param Logger logger: Optional Logger instance to use as a custom log messages handler.
-
-        At least 'bootstrap.servers' should be configured.
         """
         if logger is not None:
             conf['logger'] = logger
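
Note: the docstring now names the actual parameter (conf), and the constructor body above copies an explicitly passed logger into conf['logger']. The two invocation styles are therefore equivalent; a hedged sketch with placeholder broker address and logger name:

import logging

from confluent_kafka.admin import AdminClient

logger = logging.getLogger('Admin')  # placeholder logger name

# Style 1: logger supplied inside the configuration dict.
a1 = AdminClient({'bootstrap.servers': 'localhost:9092', 'logger': logger})

# Style 2: logger supplied as a keyword argument; __init__ copies it
# into conf['logger'] as shown in the diff above.
a2 = AdminClient({'bootstrap.servers': 'localhost:9092'}, logger=logger)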

tests/test_log.py

Lines changed: 15 additions & 25 deletions

@@ -18,6 +18,16 @@ def filter(self, record):
         print(record)
 
 
+def _setup_string_buffer_logger():
+    stringBuffer = StringIO()
+    logger = logging.getLogger('Producer')
+    logger.setLevel(logging.DEBUG)
+    handler = logging.StreamHandler(stringBuffer)
+    handler.setFormatter(logging.Formatter('%(name)s Logger | %(message)s'))
+    logger.addHandler(handler)
+    return stringBuffer,logger
+
+
 def test_logging_consumer():
     """ Tests that logging works """
 
@@ -121,12 +131,7 @@ def test_logging_constructor():
 def test_producer_logger_logging_in_given_format():
     """Test that asserts that logging is working by matching part of the log message"""
 
-    stringBuffer = StringIO()
-    logger = logging.getLogger('Producer')
-    logger.setLevel(logging.DEBUG)
-    handler = logging.StreamHandler(stringBuffer)
-    handler.setFormatter(logging.Formatter('%(name)s Logger | %(message)s'))
-    logger.addHandler(handler)
+    stringBuffer, logger = _setup_string_buffer_logger()
 
     p = confluent_kafka.Producer(
         {"bootstrap.servers": "test", "logger": logger, "debug": "msg"})
@@ -143,12 +148,7 @@ def test_producer_logger_logging_in_given_format():
 def test_consumer_logger_logging_in_given_format():
     """Test that asserts that logging is working by matching part of the log message"""
 
-    stringBuffer = StringIO()
-    logger = logging.getLogger('Consumer')
-    logger.setLevel(logging.DEBUG)
-    handler = logging.StreamHandler(stringBuffer)
-    handler.setFormatter(logging.Formatter('%(name)s Logger | %(message)s'))
-    logger.addHandler(handler)
+    stringBuffer, logger = _setup_string_buffer_logger()
 
     c = confluent_kafka.Consumer(
         {"bootstrap.servers": "test", "group.id": "test", "logger": logger, "debug": "msg"})
@@ -164,12 +164,7 @@ def test_consumer_logger_logging_in_given_format():
 def test_admin_logger_logging_in_given_format_when_provided_in_conf():
     """Test that asserts that logging is working by matching part of the log message"""
 
-    stringBuffer = StringIO()
-    logger = logging.getLogger('Admin')
-    logger.setLevel(logging.DEBUG)
-    handler = logging.StreamHandler(stringBuffer)
-    handler.setFormatter(logging.Formatter('%(name)s Logger | %(message)s'))
-    logger.addHandler(handler)
+    stringBuffer, logger = _setup_string_buffer_logger()
 
     admin_client = confluent_kafka.admin.AdminClient(
         {"bootstrap.servers": "test", "logger": logger, "debug": "admin"})
@@ -184,12 +179,7 @@ def test_admin_logger_logging_in_given_format_when_provided_in_conf():
 def test_admin_logger_logging_in_given_format_when_provided_as_admin_client_argument():
     """Test that asserts that logging is working by matching part of the log message"""
 
-    stringBuffer = StringIO()
-    logger = logging.getLogger('Admin')
-    logger.setLevel(logging.DEBUG)
-    handler = logging.StreamHandler(stringBuffer)
-    handler.setFormatter(logging.Formatter('%(name)s Logger | %(message)s'))
-    logger.addHandler(handler)
+    stringBuffer, logger = _setup_string_buffer_logger()
 
     admin_client = confluent_kafka.admin.AdminClient(
         {"bootstrap.servers": "test", "debug": "admin"}, logger=logger)
@@ -198,4 +188,4 @@ def test_admin_logger_logging_in_given_format_when_provided_as_admin_client_argu
     logMessage = stringBuffer.getvalue().strip()
     stringBuffer.close()
 
-    assert "Admin Logger | INIT" in logMessage
+    assert "Admin Logger | INIT" in logMessage
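
Note: the tests now share the extracted _setup_string_buffer_logger() helper instead of repeating the buffer-backed logger setup. A hedged sketch of the resulting test pattern, reusing the placeholder config values from the tests above and assuming the module's existing imports (confluent_kafka, StringIO, logging) are in scope:

stringBuffer, logger = _setup_string_buffer_logger()

p = confluent_kafka.Producer(
    {"bootstrap.servers": "test", "logger": logger, "debug": "msg"})
p.poll(0)  # log callbacks are served from poll(), filling the buffer

logMessage = stringBuffer.getvalue().strip()
stringBuffer.close()

# The helper's Formatter is '%(name)s Logger | %(message)s', so producer
# log lines carry the 'Producer Logger |' prefix matched here.
assert "Producer Logger |" in logMessage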
