-
Notifications
You must be signed in to change notification settings - Fork 164
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
bridge: Add kafka receiver output (#1345)
… that is, support converting incoming webhooks to Kafka messages. Closes svix/monorepo-private#8508.
- Loading branch information
Showing
7 changed files
with
252 additions
and
70 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,9 +1,14 @@ | ||
mod config; | ||
mod error; | ||
mod input; | ||
mod output; | ||
|
||
pub use self::{ | ||
config::{into_sender_input, KafkaInputOpts, KafkaSecurityProtocol}, | ||
config::{ | ||
into_receiver_output, into_sender_input, KafkaInputOpts, KafkaOutputOpts, | ||
KafkaSecurityProtocol, | ||
}, | ||
error::{Error, Result}, | ||
input::KafkaConsumer, | ||
output::KafkaProducer, | ||
}; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,48 @@ | ||
use rdkafka::{ | ||
error::KafkaError, | ||
producer::{FutureProducer, FutureRecord}, | ||
util::Timeout, | ||
}; | ||
use svix_bridge_types::{async_trait, BoxError, ForwardRequest, ReceiverOutput}; | ||
|
||
use crate::config::KafkaOutputOpts; | ||
|
||
/// Forwards webhook payloads to kafka. | ||
pub struct KafkaProducer { | ||
name: String, | ||
topic: String, | ||
producer: FutureProducer, | ||
} | ||
|
||
impl KafkaProducer { | ||
pub fn new(name: String, opts: KafkaOutputOpts) -> Result<Self, KafkaError> { | ||
let topic = opts.topic.clone(); | ||
let producer = opts.create_producer()?; | ||
|
||
Ok(Self { | ||
name, | ||
topic, | ||
producer, | ||
}) | ||
} | ||
} | ||
|
||
#[async_trait] | ||
impl ReceiverOutput for KafkaProducer { | ||
fn name(&self) -> &str { | ||
&self.name | ||
} | ||
|
||
async fn handle(&self, request: ForwardRequest) -> Result<(), BoxError> { | ||
self.producer | ||
.send( | ||
FutureRecord::<(), _>::to(&self.topic) | ||
.payload(&serde_json::to_vec(&request.payload)?), | ||
Timeout::Never, | ||
) | ||
.await | ||
.map_err(|(e, _msg)| e)?; | ||
|
||
Ok(()) | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
64 changes: 64 additions & 0 deletions
64
bridge/svix-bridge-plugin-kafka/tests/it/kafka_producer.rs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,64 @@ | ||
use std::{sync::Arc, time::Duration}; | ||
|
||
use rdkafka::{ | ||
consumer::{Consumer, StreamConsumer}, | ||
ClientConfig, Message, | ||
}; | ||
use serde_json::json; | ||
use svix_bridge_plugin_kafka::{KafkaOutputOpts, KafkaProducer}; | ||
use svix_bridge_types::{ForwardRequest, ReceiverOutput as _}; | ||
|
||
use crate::{create_topic, delete_topic, kafka_admin_client, BROKER_HOST}; | ||
|
||
/// Time to wait for the consumer to be properly listening.
const LISTEN_WAIT_TIME: Duration = Duration::from_secs(5);
|
||
#[tokio::test] | ||
async fn test_produce_ok() { | ||
let topic = unique_topic_name!(); | ||
let admin_client = kafka_admin_client(); | ||
create_topic(&admin_client, topic).await; | ||
|
||
// Start listening for messages | ||
let consumer: StreamConsumer = ClientConfig::new() | ||
.set("bootstrap.servers", BROKER_HOST) | ||
.set("group.id", "svix_bridge_test_group_id") | ||
.create() | ||
.unwrap(); | ||
|
||
consumer.subscribe(&[topic]).unwrap(); | ||
|
||
let consumer = Arc::new(consumer); | ||
let recv_join_hdl = tokio::spawn({ | ||
let consumer = consumer.clone(); | ||
async move { consumer.recv().await.unwrap().detach() } | ||
}); | ||
tokio::time::sleep(LISTEN_WAIT_TIME).await; | ||
|
||
let payload = json!({ "test": "payload" }); | ||
let payload_s = payload.to_string(); | ||
|
||
// Only then actually send a message | ||
let producer = KafkaProducer::new( | ||
"test".into(), | ||
KafkaOutputOpts { | ||
bootstrap_brokers: BROKER_HOST.to_owned(), | ||
topic: topic.to_owned(), | ||
security_protocol: svix_bridge_plugin_kafka::KafkaSecurityProtocol::Plaintext, | ||
debug_contexts: None, | ||
}, | ||
) | ||
.unwrap(); | ||
producer.handle(ForwardRequest { payload }).await.unwrap(); | ||
|
||
// Assert that the message is received | ||
let msg = recv_join_hdl.await.unwrap(); | ||
assert_eq!(msg.payload(), Some(payload_s.as_bytes())); | ||
|
||
// Assert that no further messages are received in the next second | ||
tokio::time::timeout(Duration::from_secs(1), consumer.recv()) | ||
.await | ||
.expect_err("there must be no further messages"); | ||
|
||
delete_topic(&admin_client, topic).await; | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1 +1,54 @@ | ||
use rdkafka::{ | ||
admin::{AdminClient, NewTopic, TopicReplication}, | ||
client::DefaultClientContext, | ||
types::RDKafkaErrorCode, | ||
ClientConfig, | ||
}; | ||
|
||
/// These tests assume a "vanilla" Kafka instance reachable on this address,
/// using default creds and settings.
const BROKER_HOST: &str = "localhost:9094";
|
||
fn kafka_admin_client() -> AdminClient<DefaultClientContext> { | ||
// create does block I/O, but we don't care in tests | ||
ClientConfig::new() | ||
.set("bootstrap.servers", BROKER_HOST) | ||
.create() | ||
.unwrap() | ||
} | ||
|
||
async fn create_topic(admin_client: &AdminClient<DefaultClientContext>, topic: &str) { | ||
let new_topic = NewTopic::new(topic, 1, TopicReplication::Fixed(1)); | ||
if let Err(e) = admin_client | ||
.create_topics(&[new_topic], &Default::default()) | ||
.await | ||
{ | ||
if e.rdkafka_error_code() != Some(RDKafkaErrorCode::TopicAlreadyExists) { | ||
panic!("{e}"); | ||
} | ||
} | ||
} | ||
|
||
async fn delete_topic(admin_client: &AdminClient<DefaultClientContext>, topic: &str) { | ||
admin_client | ||
.delete_topics(&[topic], &Default::default()) | ||
.await | ||
.unwrap(); | ||
} | ||
|
||
/// Expands to a `&String` of the form `test_<file_stem>_<line>`, unique per
/// invocation site so concurrently-running tests don't share a topic.
macro_rules! unique_topic_name {
    () => {
        &format!(
            "test_{}_{}",
            // Last path segment of the source file, minus the `.rs` suffix.
            file!()
                .split('/')
                .next_back()
                .unwrap()
                .strip_suffix(".rs")
                .unwrap(),
            // `line!` expands at the invocation site, distinguishing
            // multiple uses within the same file.
            line!()
        )
    };
}
|
||
mod kafka_consumer; | ||
mod kafka_producer; |
Oops, something went wrong.