Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for external Avro Schema Registry #411

Merged
merged 13 commits into from
Jul 7, 2023
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
using System;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using Microsoft.Azure.WebJobs.Extensions.Kafka;
using Avro.Generic;
using System.Collections.Generic;


namespace KafkaFunctionSample
{
/// <summary>
/// Demonstrate using generic avro support and integrating a Confluent schema registry
/// In this scenario the schema is retrieved from a registry.
/// An instance of GenericRecord will be available in the KafkaEventData.Value property.
///
/// This function is disabled by default so it does not interfere with the execution when
/// no schema registry is present. To test this, install a schema registry locally
/// with <see href="https://docs.confluent.io/platform/current/platform-quickstart.html#cp-quick-start-docker">the tutorial from Confluent</see>
/// and uncomment the function name attribute in this file.
/// </summary>
///
public class AvroGenericTriggersWithSchemaRegistry
{
// Disabled by default (see class remarks); uncomment to run against a local schema registry.
// [FunctionName(nameof(PageViewsSchemaRegistry))]
public static void PageViewsSchemaRegistry(
[KafkaTrigger("LocalBroker", "pageviews", ConsumerGroup = "azfunc", SchemaRegistryUrl = "localhost:8081")]
KafkaEventData<string, GenericRecord>[] kafkaEvents,
long[] offsetArray,
int[] partitionArray,
string[] topicArray,
DateTime[] timestampArray,
ILogger logger)
{
// The metadata arrays (offset/partition/topic/timestamp) are positionally
// aligned with kafkaEvents, so iterate by index rather than foreach.
for (var index = 0; index < kafkaEvents.Length; index++)
{
if (kafkaEvents[index].Value is GenericRecord genericRecord)
{
logger.LogInformation(
$"[{timestampArray[index]}] {topicArray[index]} / {partitionArray[index]} / {offsetArray[index]}: {GenericToJson(genericRecord)}");
}
}
}

/// <summary>
/// Serializes an Avro <see cref="GenericRecord"/> to a JSON string by copying
/// each schema field that has a value into a dictionary.
/// </summary>
public static string GenericToJson(GenericRecord record)
{
var fields = new Dictionary<string, object>();
foreach (var schemaField in record.Schema.Fields)
{
// TryGetValue skips fields that are absent from this record instance.
if (record.TryGetValue(schemaField.Name, out var fieldValue))
{
fields[schemaField.Name] = fieldValue;
}
}

return JsonConvert.SerializeObject(fields);
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
using System;
using System.Threading.Tasks;
using Avro;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Azure.WebJobs.Extensions.Kafka;
using Avro.Generic;

namespace KafkaFunctionSample
{
public static class AvroProduceStringTopicFunction
{
// Avro schema for the records produced by this sample; must match the
// schema the consumer side expects for topic "avroTopic".
const string PageViewsSchema = @"{
""type"": ""record"",
""name"": ""pageviews"",
""namespace"": ""ksql"",
""fields"": [
{
""name"": ""viewtime"",
""type"": ""long""
},
{
""name"": ""userid"",
""type"": ""string""
},
{
""name"": ""pageid"",
""type"": ""string""
}
]
}";

/// <summary>
/// Produces a single Avro-encoded record to the "avroTopic" topic using the
/// inline <see cref="PageViewsSchema"/> (no schema registry involved).
/// To execute posting an object, execute:
/// curl -X POST http://localhost:7071/api/AvroProduceStringTopic
/// </summary>
/// <param name="req">HTTP trigger request; payload is ignored, the record is hard-coded.</param>
/// <param name="events">Kafka output collector bound with the inline Avro schema.</param>
/// <param name="log">Function logger.</param>
/// <returns>200 OK when the record was handed to the producer.</returns>
[FunctionName(nameof(AvroProduceStringTopic))]
public static async Task<IActionResult> AvroProduceStringTopic(
[HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)]
HttpRequest req,
[Kafka("LocalBroker", "avroTopic", AvroSchema = PageViewsSchema)]
IAsyncCollector<KafkaEventData<GenericRecord>> events,
ILogger log)
{
try
{
var genericRecord = new GenericRecord((RecordSchema)Schema.Parse(PageViewsSchema));
genericRecord.Add("viewtime", 4711L);
genericRecord.Add("userid", "User4711");
genericRecord.Add("pageid", "Page4711");
var kafkaEvent = new KafkaEventData<GenericRecord>()
{
Value = genericRecord,
};

await events.AddAsync(kafkaEvent);
}
catch (Exception ex)
{
// Fixed: the previous message referred to 'stringTopic', but this
// function produces to 'avroTopic' (see the Kafka attribute above).
throw new Exception(
"Are you sure the topic 'avroTopic' exists? To create it using the Confluent Docker quickstart run this command: 'docker-compose exec broker kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 10 --topic avroTopic'",
ex);
}

return new OkResult();
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
using System;
using System.Threading.Tasks;
using Avro;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Azure.WebJobs.Extensions.Kafka;
using Avro.Generic;

namespace KafkaFunctionSample
{
public static class AvroProduceStringTopicFunctionWithSchemaRegistry
{
// Avro schema for the records produced by this sample; the same schema
// must be registered for topic "avroTopic" in the schema registry.
const string PageViewsSchema = @"{
""type"": ""record"",
""name"": ""pageviews"",
""namespace"": ""ksql"",
""fields"": [
{
""name"": ""viewtime"",
""type"": ""long""
},
{
""name"": ""userid"",
""type"": ""string""
},
{
""name"": ""pageid"",
""type"": ""string""
}
]
}";

/// <summary>
/// Produces a single Avro-encoded record to the "avroTopic" topic, resolving the
/// schema from an external Confluent schema registry instead of an inline schema.
/// To execute posting an object, execute:
/// curl http://localhost:7071/api/AvroProduceStringTopicSchemaRegistry
///
/// To test this, install a schema registry locally with
/// <see href="https://docs.confluent.io/platform/current/platform-quickstart.html#cp-quick-start-docker">the tutorial from Confluent</see>.
/// You also have to create a topic called "avroTopic" and add a schema via the Confluent web UI.
/// The schema that you have to add can be copied from <see cref="PageViewsSchema"/>.
/// </summary>
/// <param name="req">HTTP trigger request; payload is ignored, the record is hard-coded.</param>
/// <param name="events">Kafka output collector bound to the schema registry.</param>
/// <param name="log">Function logger.</param>
/// <returns>200 OK when the record was handed to the producer.</returns>
[FunctionName(nameof(AvroProduceStringTopicSchemaRegistry))]
public static async Task<IActionResult> AvroProduceStringTopicSchemaRegistry(
[HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)]
HttpRequest req,
[Kafka("LocalBroker", "avroTopic", SchemaRegistryUrl = "localhost:8081")]
IAsyncCollector<KafkaEventData<GenericRecord>> events,
ILogger log)
{
try
{
var genericRecord = new GenericRecord((RecordSchema)Schema.Parse(PageViewsSchema));
genericRecord.Add("viewtime", 4711L);
genericRecord.Add("userid", "User4711");
genericRecord.Add("pageid", "Page4711");
var kafkaEvent = new KafkaEventData<GenericRecord>()
{
Value = genericRecord,
};

await events.AddAsync(kafkaEvent);
}
catch (Exception ex)
{
// Fixed: the previous message referred to 'stringTopic', but this
// function produces to 'avroTopic' (see the Kafka attribute above).
throw new Exception(
"Are you sure the topic 'avroTopic' exists and has a registered schema? To create it using the Confluent Docker quickstart run this command: 'docker-compose exec broker kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 10 --topic avroTopic'",
ex);
}

return new OkResult();
}
}
}
60 changes: 0 additions & 60 deletions samples/dotnet/KafkaFunctionSample/PageViews.cs

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@
// Licensed under the MIT License. See License.txt in the project root for license information.

using System;
using Avro.Specific;
using Confluent.Kafka;
using Microsoft.Azure.WebJobs.Description;

namespace Microsoft.Azure.WebJobs.Extensions.Kafka
Expand Down Expand Up @@ -145,5 +143,20 @@ public KafkaAttribute()
/// being sent to cluster. Larger value allows more batching results in high throughput.
/// </summary>
public int LingerMs { get; set; } = 5;

/// <summary>
/// URL of the external Avro Schema Registry used to resolve schemas
/// (e.g. "localhost:8081"). When unset, no registry is contacted.
/// </summary>
public string SchemaRegistryUrl { get; set; }

/// <summary>
/// Username used to authenticate against the Avro Schema Registry.
/// Optional; leave unset for registries without authentication.
/// </summary>
public string SchemaRegistryUsername { get; set; }

/// <summary>
/// Password used to authenticate against the Avro Schema Registry.
/// Optional; leave unset for registries without authentication.
/// </summary>
public string SchemaRegistryPassword { get; set; }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ internal class KafkaAttributeBinding : IBinding
private readonly INameResolver nameResolver;

public KafkaAttributeBinding(
string parameterName,
string parameterName,
KafkaAttribute attribute,
IKafkaProducerFactory kafkaProducerFactory,
IArgumentBinding<KafkaProducerEntity> argumentBinding,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

using Confluent.Kafka;
using Microsoft.Azure.WebJobs.Host.Bindings;
using Microsoft.Extensions.Configuration;
using System;
Expand Down Expand Up @@ -44,7 +43,6 @@ public Task<IBinding> TryCreateAsync(BindingProviderContext context)
{
return Task.FromResult<IBinding>(null);
}


var argumentBinding = InnerProvider.TryCreate(parameter);
var keyAndValueTypes = SerializationHelper.GetKeyAndValueTypes(attribute.AvroSchema, parameter.ParameterType, typeof(string));
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,10 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

using Confluent.Kafka;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Xml;

namespace Microsoft.Azure.WebJobs.Extensions.Kafka
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,10 @@

using System;
using System.Collections.Concurrent;
using System.Reflection;
using System.Text;
using Avro.Generic;
using Avro.Specific;
using Confluent.Kafka;
using Microsoft.Azure.WebJobs.Host;
using Microsoft.Azure.WebJobs.Logging;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace Microsoft.Azure.WebJobs.Extensions.Kafka
{
Expand Down Expand Up @@ -88,7 +82,7 @@ private IKafkaProducer Create(Handle producerBaseHandle, KafkaProducerEntity ent
var valueType = entity.ValueType ?? typeof(byte[]);
var keyType = entity.KeyType ?? typeof(Null);

var valueSerializer = SerializationHelper.ResolveValueSerializer(valueType, entity.AvroSchema);
var valueSerializer = SerializationHelper.ResolveValueSerializer(valueType, entity.AvroSchema, entity.Attribute.SchemaRegistryUrl, entity.Attribute.SchemaRegistryUsername, entity.Attribute.SchemaRegistryPassword);

return (IKafkaProducer)Activator.CreateInstance(
typeof(KafkaProducer<,>).MakeGenericType(keyType, valueType),
Expand Down
Loading