11package tech.figure.kafka.coroutines.retry.flow
22
3- import tech.figure.coroutines.retry.store.RetryRecord
4- import tech.figure.coroutines.retry.flow.FlowRetry
5- import tech.figure.coroutines.retry.store.RetryRecordStore
6- import tech.figure.kafka.coroutines.retry.DEFAULT_RECORD_REPROCESS_GROUP_SIZE
7- import tech.figure.kafka.coroutines.retry.KAFKA_RETRY_ATTEMPTS_HEADER
8- import tech.figure.kafka.coroutines.retry.toByteArray
93import java.time.OffsetDateTime
104import kotlinx.coroutines.flow.asFlow
115import mu.KotlinLogging
126import org.apache.kafka.clients.consumer.ConsumerRecord
137import org.apache.kafka.common.header.Header
148import org.apache.kafka.common.header.Headers
159import org.apache.kafka.common.header.internals.RecordHeader
10+ import tech.figure.coroutines.retry.flow.FlowRetry
11+ import tech.figure.coroutines.retry.store.RetryRecord
12+ import tech.figure.coroutines.retry.store.RetryRecordStore
13+ import tech.figure.kafka.coroutines.retry.DEFAULT_RECORD_REPROCESS_GROUP_SIZE
14+ import tech.figure.kafka.coroutines.retry.KAFKA_RETRY_ATTEMPTS_HEADER
15+ import tech.figure.kafka.coroutines.retry.toByteArray
1616
/**
1818 * Retry a flow of kafka records.
@@ -22,10 +22,10 @@ import org.apache.kafka.common.header.internals.RecordHeader
2222 * @param groupSize Process a max of this many elements each poll loop.
2323 */
2424open class KafkaFlowRetry <K , V >(
25- private val handlers : Map <String , suspend (ConsumerRecord <K , V >) -> Unit>,
26- private val store: RetryRecordStore <ConsumerRecord <K , V >>,
25+ private val handlers : Map <String , suspend (List < ConsumerRecord <K , V > >) -> Unit>,
26+ private val store: RetryRecordStore <List < ConsumerRecord <K , V > >>,
2727 private val groupSize: Int = DEFAULT_RECORD_REPROCESS_GROUP_SIZE ,
28- ) : FlowRetry <ConsumerRecord <K , V >> {
28+ ) : FlowRetry <List < ConsumerRecord <K , V > >> {
2929 private val log = KotlinLogging .logger {}
3030
3131 override suspend fun hasNext (): Boolean = ! store.isEmpty()
@@ -34,40 +34,52 @@ open class KafkaFlowRetry<K, V>(
3434 attemptRange : IntRange ,
3535 olderThan : OffsetDateTime ,
3636 limit : Int ,
37- ) = store.select(attemptRange, olderThan, limit).sortedByDescending { it.lastAttempted }.asFlow()
37+ ) =
38+ store
39+ .select(attemptRange, olderThan, limit)
40+ .sortedByDescending { it.lastAttempted }
41+ .asFlow()
3842
39- override suspend fun send (
40- item : ConsumerRecord <K , V >,
41- e : Throwable
42- ) {
43- log.debug { " adding record to retry queue key:${item.key()} source:${item.topic()} -${item.partition()} " }
43+ override suspend fun send (item : List <ConsumerRecord <K , V >>, e : Throwable ) {
44+ log.debug {
45+ " adding record to retry queue count:${item.size} sources:${item.map { " ${it.topic()} -${it.partition()} " } } "
46+ }
4447 store.insert(item, e)
4548 }
4649
47- override suspend fun onSuccess (
48- item : RetryRecord < ConsumerRecord < K , V >>
49- ) {
50- log.debug { " successful reprocess attempt: ${item.attempt} key: ${item.data.key()} source: ${item.data.topic()} - ${item.data.partition()} " }
50+ override suspend fun onSuccess (item : RetryRecord < List < ConsumerRecord < K , V >>>) {
51+ log.debug {
52+ " successful reprocess attempt: ${item.attempt} count: ${item.data.size} sources: ${item.data.map { " ${it.topic()} - ${it.partition()} " } } "
53+ }
5154 store.remove(item.data)
5255 }
5356
54- override suspend fun onFailure (item : RetryRecord <ConsumerRecord <K , V >>, e : Throwable ) {
55- log.debug { " failed reprocess attempt:${item.attempt} Error: ${item.lastException} key:${item.data.key()} source:${item.data.topic()} -${item.data.partition()} " }
57+ override suspend fun onFailure (item : RetryRecord <List <ConsumerRecord <K , V >>>, e : Throwable ) {
58+ log.debug {
59+ " failed reprocess attempt:${item.attempt} Error: ${item.lastException} count:${item.data.size} sources:${item.data.map { " ${it.topic()} -${it.partition()} " } } "
60+ }
5661 store.update(item.data, e)
5762 }
5863
5964 override suspend fun process (
60- item : ConsumerRecord <K , V >,
65+ item : List < ConsumerRecord <K , V > >,
6166 attempt : Int ,
6267 ) {
63- val topic = item.topic()
64- val handler = handlers[topic] ? : throw RuntimeException (" topic '$topic ' not handled by this retry handler" )
68+ val topic = item.firstOrNull()?.topic() ? : return
69+ val handler =
70+ handlers[topic]
71+ ? : throw RuntimeException (" topic '$topic ' not handled by this retry handler" )
6572
66- log.debug { " processing key:${item.key()} attempt:$attempt source:${item.topic()} -${item.partition()} " }
67- handler(item.setHeader(KAFKA_RETRY_ATTEMPTS_HEADER , attempt.toByteArray()))
73+ log.debug {
74+ " processing count:${item.size} sources:${item.map { " ${it.topic()} -${it.partition()} " } } "
75+ }
76+ handler(item.map { it.setHeader(KAFKA_RETRY_ATTEMPTS_HEADER , attempt.toByteArray()) })
6877 }
6978
70- private fun <K , V > ConsumerRecord <K , V >.setHeader (key : String , value : ByteArray ): ConsumerRecord <K , V > = apply {
79+ private fun <K , V > ConsumerRecord <K , V >.setHeader (
80+ key : String ,
81+ value : ByteArray
82+ ): ConsumerRecord <K , V > = apply {
7183 fun Headers.addOrUpdate (header : Header ): Headers {
7284 val h = find { it.key() == header.key() }
7385 if (h == null ) {
0 commit comments