
Commit 8536f76

dockerize dotnet app
1 parent 84313b5 commit 8536f76


5 files changed: +160 -124 lines changed

5 files changed

+160
-124
lines changed

docker-compose.yml (+9)

@@ -105,6 +105,15 @@ services:
       - 5601:5601
     depends_on:
       - elasticsearch
+  dotnetapp:
+    container_name: cdc-dotnet_app
+    build: ./netcore/
+    ports:
+      - "5000:80"
+    depends_on:
+      - elasticsearch
+      - kafka
+
 
 volumes:
   elasticsearch-data:
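
With this service added, running docker-compose up from the project root should build the image from ./netcore/ and expose the app on host port 5000 (container port 80). Note that depends_on only orders container start-up after elasticsearch and kafka; it does not wait for those services to be ready.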

netcore/.dockerignore (+4)

@@ -0,0 +1,4 @@
+bin/
+obj/
+.vs/
+ClientApp/node_modules/

+30 -29 lines changed

@@ -1,29 +1,30 @@
-using Microsoft.AspNetCore.Mvc;
-using Microsoft.Extensions.Logging;
-using Nest;
-using netCoreClient.Models;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading.Tasks;
-
-namespace netCoreClient.Controllers
-{
-    [ApiController]
-    [Route("[controller]")]
-    public class QuestionController : ControllerBase
-    {
-        [HttpGet]
-        public IEnumerable<Question> Get()
-        {
-            var settings = new ConnectionSettings().DefaultIndex("question");
-            var client = new ElasticClient(settings);
-            var searchResponse = client.Search<Question>(s => s
-                .Query(q => q.MatchAll())
-                .Sort(q => q.Descending(question => question.Id))
-                .Size(20)
-            );
-            return searchResponse.Documents;
-        }
-    }
-}
+using Microsoft.AspNetCore.Mvc;
+using Microsoft.Extensions.Logging;
+using Nest;
+using netCoreClient.Models;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace netCoreClient.Controllers
+{
+    [ApiController]
+    [Route("[controller]")]
+    public class QuestionController : ControllerBase
+    {
+        [HttpGet]
+        public IEnumerable<Question> Get()
+        {
+            var uri = new Uri("http://elasticsearch:9200");
+            var settings = new ConnectionSettings(uri).DefaultIndex("question");
+            var client = new ElasticClient(settings);
+            var searchResponse = client.Search<Question>(s => s
+                .Query(q => q.MatchAll())
+                .Sort(q => q.Descending(question => question.Id))
+                .Size(20)
+            );
+            return searchResponse.Documents;
+        }
+    }
+}
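
The controller's NEST client now targets the elasticsearch service name from docker-compose.yml instead of the parameterless ConnectionSettings() default of localhost:9200. One possible variation, not part of this commit, is to read the address from an environment variable so the same image also runs outside Compose; a minimal sketch, where ELASTICSEARCH_URL is an illustrative name this project does not actually define:

    // Hypothetical: fall back to the Compose service name when no override is set.
    var address = Environment.GetEnvironmentVariable("ELASTICSEARCH_URL")
                  ?? "http://elasticsearch:9200";
    var settings = new ConnectionSettings(new Uri(address)).DefaultIndex("question");
    var client = new ElasticClient(settings);

The variable could then be supplied under the dotnetapp service's environment: key in docker-compose.yml.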

netcore/Dockerfile (+21)

@@ -0,0 +1,21 @@
+# syntax=docker/dockerfile:1
+FROM mcr.microsoft.com/dotnet/sdk:3.1 AS build-env
+WORKDIR /app
+
+# Install nodejs for client-side react
+RUN curl -sL https://deb.nodesource.com/setup_10.x | bash -
+RUN apt-get install -y nodejs
+
+# Copy csproj and restore as distinct layers
+COPY netCoreClient.csproj ./
+RUN dotnet restore
+
+# Copy everything else and build
+COPY ./ ./
+RUN dotnet publish -c Release -o out
+
+# Build runtime image
+FROM mcr.microsoft.com/dotnet/aspnet:3.1
+WORKDIR /app
+COPY --from=build-env /app/out .
+ENTRYPOINT ["dotnet", "netCoreClient.dll"]
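
Copying netCoreClient.csproj and running dotnet restore before the rest of the source is copied lets Docker cache the restore layer, so rebuilds that only touch source code skip the package download; the final stage is based on the smaller aspnet:3.1 runtime image and contains only the published output.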

+96 -95 lines changed

@@ -1,95 +1,96 @@
-using QuestionProto = Com.Hus.Cdc.Question;
-using Confluent.Kafka;
-using Confluent.Kafka.SyncOverAsync;
-using Confluent.SchemaRegistry.Serdes;
-using Microsoft.Extensions.Hosting;
-using Nest;
-using System;
-using System.Threading;
-using System.Threading.Tasks;
-using netCoreClient.Models;
-
-namespace netCoreClient.Services
-{
-    public class QuestionConsumerService : BackgroundService
-    {
-        private readonly string topic;
-        private readonly IConsumer<string, QuestionProto> kafkaConsumer;
-
-        public QuestionConsumerService()
-        {
-            topic = "outbox.event.question";
-            var consumerConfig = new ConsumerConfig
-            {
-                BootstrapServers = "host.docker.internal:9092",
-                GroupId = "hus-dotnet-consumer",
-                AutoOffsetReset = AutoOffsetReset.Earliest,
-            };
-            kafkaConsumer = new ConsumerBuilder<string, QuestionProto>(consumerConfig)
-                .SetValueDeserializer(new MyDeserializer())
-                .SetErrorHandler((_, e) => Console.WriteLine($"Consumer Error at SetErrorHandler: {e.Reason}"))
-                .Build();
-        }
-
-        protected override Task ExecuteAsync(CancellationToken stoppingToken)
-        {
-            new Thread(() => StartConsumerLoop(stoppingToken)).Start();
-            return Task.CompletedTask;
-        }
-
-        private void StartConsumerLoop(CancellationToken cancellationToken)
-        {
-            kafkaConsumer.Subscribe(topic);
-
-            while (!cancellationToken.IsCancellationRequested)
-            {
-                try
-                {
-                    var cr = kafkaConsumer.Consume(cancellationToken);
-                    Console.WriteLine($"{cr.Message.Key}: {cr.Message.Value.QuestionText}");
-                    SaveMessage(cr.Message.Value);
-                }
-                catch (OperationCanceledException)
-                {
-                    break;
-                }
-                catch (ConsumeException e)
-                {
-                    Console.WriteLine($"Consume error at try/catch: {e.Error.Reason}");
-
-                    if (e.Error.IsFatal)
-                    {
-                        break;
-                    }
-                }
-                catch (Exception e)
-                {
-                    Console.WriteLine($"Unexpected error: {e}");
-                    break;
-                }
-            }
-        }
-
-        void SaveMessage(QuestionProto val)
-        {
-            var question = new Question
-            {
-                Id = val.Id,
-                QuestionText = val.QuestionText,
-                PubDate = val.PubDate?.ToDateTime()
-            };
-            var settings = new ConnectionSettings().DefaultIndex("question");
-            var client = new ElasticClient(settings);
-            client.IndexDocument(question);
-        }
-
-        public override void Dispose()
-        {
-            // Commit offsets and leave the group cleanly.
-            kafkaConsumer.Close();
-            kafkaConsumer.Dispose();
-
-            base.Dispose();
-        }
-    }
-}
+using QuestionProto = Com.Hus.Cdc.Question;
+using Confluent.Kafka;
+using Confluent.Kafka.SyncOverAsync;
+using Confluent.SchemaRegistry.Serdes;
+using Microsoft.Extensions.Hosting;
+using Nest;
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+using netCoreClient.Models;
+
+namespace netCoreClient.Services
+{
+    public class QuestionConsumerService : BackgroundService
+    {
+        private readonly string topic;
+        private readonly IConsumer<string, QuestionProto> kafkaConsumer;
+
+        public QuestionConsumerService()
+        {
+            topic = "outbox.event.question";
+            var consumerConfig = new ConsumerConfig
+            {
+                BootstrapServers = "host.docker.internal:9092",
+                GroupId = "hus-dotnet-consumer",
+                AutoOffsetReset = AutoOffsetReset.Earliest,
+            };
+            kafkaConsumer = new ConsumerBuilder<string, QuestionProto>(consumerConfig)
+                .SetValueDeserializer(new MyDeserializer())
+                .SetErrorHandler((_, e) => Console.WriteLine($"Consumer Error at SetErrorHandler: {e.Reason}"))
+                .Build();
+        }
+
+        protected override Task ExecuteAsync(CancellationToken stoppingToken)
+        {
+            new Thread(() => StartConsumerLoop(stoppingToken)).Start();
+            return Task.CompletedTask;
+        }
+
+        private void StartConsumerLoop(CancellationToken cancellationToken)
+        {
+            kafkaConsumer.Subscribe(topic);
+
+            while (!cancellationToken.IsCancellationRequested)
+            {
+                try
+                {
+                    var cr = kafkaConsumer.Consume(cancellationToken);
+                    Console.WriteLine($"{cr.Message.Key}: {cr.Message.Value.QuestionText}");
+                    SaveMessage(cr.Message.Value);
+                }
+                catch (OperationCanceledException)
+                {
+                    break;
+                }
+                catch (ConsumeException e)
+                {
+                    Console.WriteLine($"Consume error at try/catch: {e.Error.Reason}");
+
+                    if (e.Error.IsFatal)
+                    {
+                        break;
+                    }
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine($"Unexpected error: {e}");
+                    break;
+                }
+            }
+        }
+
+        void SaveMessage(QuestionProto val)
+        {
+            var question = new Question
+            {
+                Id = val.Id,
+                QuestionText = val.QuestionText,
+                PubDate = val.PubDate?.ToDateTime()
+            };
+            var uri = new Uri("http://elasticsearch:9200");
+            var settings = new ConnectionSettings(uri).DefaultIndex("question");
+            var client = new ElasticClient(settings);
+            client.IndexDocument(question);
+        }
+
+        public override void Dispose()
+        {
+            // Commit offsets and leave the group cleanly.
+            kafkaConsumer.Close();
+            kafkaConsumer.Dispose();
+
+            base.Dispose();
+        }
+    }
+}
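
The NEST client in SaveMessage now targets the elasticsearch service, while BootstrapServers still points at host.docker.internal:9092, a hostname Docker Desktop provides for reaching the host machine; whether the broker could instead be addressed by its Compose service name depends on how its advertised listeners are configured. As an illustrative sketch (not part of this commit), both endpoints could be read from environment variables; the variable names below are assumptions:

    // Hypothetical environment-driven endpoints; defaults mirror the values in this commit.
    var consumerConfig = new ConsumerConfig
    {
        BootstrapServers = Environment.GetEnvironmentVariable("KAFKA_BOOTSTRAP_SERVERS")
                           ?? "host.docker.internal:9092",
        GroupId = "hus-dotnet-consumer",
        AutoOffsetReset = AutoOffsetReset.Earliest,
    };

    var esUrl = Environment.GetEnvironmentVariable("ELASTICSEARCH_URL")
                ?? "http://elasticsearch:9200";
    var settings = new ConnectionSettings(new Uri(esUrl)).DefaultIndex("question");

Both variables could then be set under the dotnetapp service's environment: key in docker-compose.yml.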
