Skip to content
This repository was archived by the owner on Nov 14, 2022. It is now read-only.

Commit b7a56bb

Browse files
authored
Merge pull request #1454 from autoreduction/update-utils-kafka
Refactor consumer to use kafka_utils config in autoreduce_utils
2 parents 953ec68 + d7d8417 commit b7a56bb

File tree

3 files changed

+20
-32
lines changed

3 files changed

+20
-32
lines changed

.github/workflows/run-tests.yml

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
name: Tests
2-
on: [ push ]
2+
on: [push]
33

44
jobs:
55
pytest:
@@ -8,7 +8,7 @@ jobs:
88
strategy:
99
fail-fast: false
1010
matrix:
11-
python-version: ['3.8']
11+
python-version: ["3.8"]
1212

1313
steps:
1414
- uses: actions/checkout@v2
@@ -34,14 +34,14 @@ jobs:
3434
with:
3535
repository: "autoreduction/autoreduce-workspace"
3636

37-
- name: Run unit tests
38-
run: >-
39-
RUNNING_VIA_PYTEST=true pytest --ignore=autoreduce_qp/systemtests --cov=autoreduce_qp --cov-report=xml -v autoreduce_qp
40-
4137
- name: Run system tests
4238
run: |
4339
RUNNING_VIA_PYTEST=true pytest autoreduce_qp/systemtests --cov-append -v
4440
41+
- name: Run unit tests
42+
run: >-
43+
RUNNING_VIA_PYTEST=true pytest --ignore=autoreduce_qp/systemtests --cov=autoreduce_qp --cov-report=xml -v autoreduce_qp
44+
4545
- uses: codecov/codecov-action@v2
4646
with:
4747
files: ./coverage.xml
@@ -66,7 +66,7 @@ jobs:
6666
strategy:
6767
fail-fast: false
6868
matrix:
69-
python-version: ['3.6', '3.8']
69+
python-version: ["3.6", "3.8"]
7070

7171
steps:
7272
- uses: actions/checkout@v2
@@ -95,7 +95,7 @@ jobs:
9595
strategy:
9696
fail-fast: false
9797
matrix:
98-
python-version: ['3.8']
98+
python-version: ["3.8"]
9999

100100
steps:
101101
- uses: actions/checkout@v2
@@ -119,4 +119,3 @@ jobs:
119119
- uses: autoreduction/autoreduce-actions/code_inspection@main
120120
with:
121121
package_name: autoreduce_qp
122-

autoreduce_qp/queue_processor/confluent_consumer.py

Lines changed: 9 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -7,15 +7,15 @@
77
from pydantic import ValidationError
88
from confluent_kafka import DeserializingConsumer, KafkaException
99
from confluent_kafka.serialization import StringDeserializer
10-
from confluent_kafka.admin import AdminClient, NewTopic
1110
from autoreduce_utils.clients.connection_exception import ConnectionException
1211
from autoreduce_utils.message.message import Message
1312
from autoreduce_utils.clients.producer import Publisher
13+
from autoreduce_utils.clients.kafka_utils import kafka_config_from_env
1414
from autoreduce_qp.queue_processor.handle_message import HandleMessage
1515

1616
TRANSACTIONS_TOPIC = os.getenv('KAFKA_TOPIC')
1717
KAFKA_BROKER_URL = os.getenv("KAFKA_BROKER_URL")
18-
GROUP_ID = 'mygroup'
18+
GROUP_ID = 'data_ready-group'
1919

2020

2121
class Consumer(threading.Thread):
@@ -26,16 +26,6 @@ def __init__(self, consumer=None):
2626
self.logger = logging.getLogger(__package__)
2727
self.logger.debug("Initializing the consumer")
2828

29-
kafka_broker = {'bootstrap.servers': KAFKA_BROKER_URL}
30-
admin_client = AdminClient(kafka_broker)
31-
topics = admin_client.list_topics().topics
32-
33-
if not topics:
34-
# Create the topic
35-
self.logger.info("Creating the topic '%s'", TRANSACTIONS_TOPIC)
36-
new_topic = NewTopic(TRANSACTIONS_TOPIC, num_partitions=1, replication_factor=1)
37-
admin_client.create_topics([new_topic])
38-
3929
self.consumer = consumer
4030
self.message_handler = HandleMessage()
4131
self._stop_event = threading.Event()
@@ -49,14 +39,13 @@ def __init__(self, consumer=None):
4939
try:
5040
self.logger.debug("Getting the kafka consumer")
5141

52-
config = {
53-
'bootstrap.servers': KAFKA_BROKER_URL,
54-
'group.id': GROUP_ID,
55-
'auto.offset.reset': "earliest",
56-
"on_commit": self.on_commit,
57-
'key.deserializer': StringDeserializer('utf_8'),
58-
'value.deserializer': StringDeserializer('utf_8')
59-
}
42+
config = kafka_config_from_env()
43+
44+
config['key.deserializer'] = StringDeserializer('utf_8')
45+
config['value.deserializer'] = StringDeserializer('utf_8')
46+
config['on_commit'] = self.on_commit
47+
config['group.id'] = GROUP_ID
48+
config['auto.offset.reset'] = 'earliest'
6049
self.consumer = DeserializingConsumer(config)
6150
except KafkaException as err:
6251
self.logger.error("Could not initialize the consumer: %s", err)

setup.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,13 @@
77

88
setup(
99
name="autoreduce_qp",
10-
version="22.0.0.dev35", # when updating the version here make sure to also update qp_mantid_python36.D
10+
version="22.0.0.dev36", # when updating the version here make sure to also update qp_mantid_python36.D
1111
description="ISIS Autoreduction queue processor",
1212
author="ISIS Autoreduction Team",
1313
url="https://github.com/autoreduction/autoreduce/",
1414
install_requires=[
15-
"autoreduce_db==22.0.0.dev34", "Django>=3.2.10", "fire==0.4.0", "plotly==5.3.1", "kaleido==0.2.1", "stomp.py",
16-
"docker==5.0.3", "confluent-kafka==1.8.2"
15+
"autoreduce_db==22.0.0.dev35", "Django>=3.2.10", "fire==0.4.0", "plotly==5.3.1", "kaleido==0.2.1", "stomp.py",
16+
"docker==5.0.3", "confluent-kafka"
1717
],
1818
packages=find_packages(),
1919
entry_points={

0 commit comments

Comments (0)