greenhouseCRUD #7

Merged
mfnefd merged 18 commits from greenhouseCRUD into dev 2024-12-15 20:52:57 +04:00
3 changed files with 56 additions and 23 deletions
Showing only changes of commit 7b751b7072

View File

@@ -20,7 +20,7 @@ using Cloud.Middlewares;
var builder = WebApplication.CreateBuilder(args);
// Add services to the container.
builder.Services.AddTransient<IBrokerService, KafkaService>();
builder.Services.AddSingleton<IBrokerService, KafkaService>();
builder.Services.AddTransient<IGreenhouseService, GreenhouseService>();
//Redis configuration
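
The change above swaps the IBrokerService registration from AddTransient to AddSingleton. A reasonable reading (not stated in the PR) is that KafkaService holds long-lived Kafka client objects, so one shared instance should serve the whole application instead of being rebuilt on every resolution. A minimal sketch of the resulting composition root, assuming the same service names:

var builder = WebApplication.CreateBuilder(args);

// One KafkaService instance (and its underlying broker connections) for the app's lifetime.
builder.Services.AddSingleton<IBrokerService, KafkaService>();
// GreenhouseService stays transient: a fresh, cheap instance per resolution.
builder.Services.AddTransient<IGreenhouseService, GreenhouseService>();

var app = builder.Build();
app.Run();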

View File

@ -1,6 +1,7 @@
using Cloud.Services.Broker.Support;
using Confluent.Kafka;
using System.Diagnostics;
using System.Text.Json;
namespace Cloud.Services.Broker.Implement.Kafka
@@ -20,29 +21,61 @@ namespace Cloud.Services.Broker.Implement.Kafka
public IEnumerable<T>? WaitMessages<T>(string topic)
where T : IBrokerResponse
{
try
{
_consumer.Subscribe(topic);
List<T> res = new();
List<PartitionMetadata> partitions;
var consumeResult = _consumer.Consume(TimeSpan.FromMilliseconds(1000));
Console.WriteLine($"================ Received message: {consumeResult?.Message.Value}");
if (consumeResult == null)
{
// No message received from Kafka within the specified timeout.
return default;
}
using var adminClient = new AdminClientBuilder(new AdminClientConfig { BootstrapServers = _config["KAFKA_URL"] }).Build();
var meta = adminClient.GetMetadata(TimeSpan.FromSeconds(20));
var currentTopic = meta.Topics.SingleOrDefault(t => t.Topic == topic)
?? throw new Exception($"Topic {topic} not found");
partitions = currentTopic.Partitions;
_consumer.Subscribe(topic);
foreach (var partition in partitions)
{
var topicPartition = new TopicPartition(topic, partition.PartitionId);
_consumer.Assign(topicPartition);
T? message = _consume<T>();
if (message == null) return null;
res.Add(message);
return JsonSerializer.Deserialize<IEnumerable<T>>(consumeResult.Message.Value);
}
_consumer.Unassign();
_consumer.Unsubscribe();
return res;
}
}
catch (Exception ex)
{
throw;
}
finally
{
_consumer.Close();
}
private T? _consume<T>() where T : IBrokerResponse
{
var sw = new Stopwatch();
sw.Start();
try
{
while (true)
{
var consumeResult = _consumer.Consume(TimeSpan.FromMinutes(1));
if (consumeResult?.Message?.Value == null)
{
// Time limit reached
if (sw.Elapsed > TimeSpan.FromMinutes(1))
{
return default;
}
continue;
}
string jsonObj = consumeResult.Message.Value;
return JsonSerializer.Deserialize<T>(jsonObj);
}
}
catch (Exception ex)
{
_consumer.Close();
throw;
}
}
public void ChangeBrokerIp(string ip)
@@ -53,7 +86,7 @@ namespace Cloud.Services.Broker.Implement.Kafka
GroupId = _config["Kafka:GroupId"],
AutoOffsetReset = AutoOffsetReset.Earliest,
};
_consumer?.Close();
_consumer = new ConsumerBuilder<string, string>(consumerConfig).Build();
}
}
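
Taken together, the KafkaConsumer changes rework WaitMessages<T>: the topic's partitions are discovered through an admin client, the consumer is assigned to each partition in turn, and the timed polling moves into the new _consume<T> helper, which gives up after roughly a minute using a Stopwatch. ChangeBrokerIp also appears to close the previous consumer before building a new one. Below is a self-contained sketch of the same read-one-message-per-partition pattern with Confluent.Kafka; the bootstrap address, group id, and method name are placeholders rather than values from this repository.

using System;
using System.Collections.Generic;
using Confluent.Kafka;

class PartitionReader
{
    // Read at most one raw message from every partition of a topic.
    public static List<string> ReadOnePerPartition(string bootstrapServers, string topic)
    {
        // Discover the topic's partitions through the admin client.
        using var admin = new AdminClientBuilder(
            new AdminClientConfig { BootstrapServers = bootstrapServers }).Build();
        var meta = admin.GetMetadata(topic, TimeSpan.FromSeconds(20));
        var partitions = meta.Topics[0].Partitions;

        var config = new ConsumerConfig
        {
            BootstrapServers = bootstrapServers,
            GroupId = "example-group",              // placeholder group id
            AutoOffsetReset = AutoOffsetReset.Earliest,
        };

        var result = new List<string>();
        using var consumer = new ConsumerBuilder<Ignore, string>(config).Build();
        foreach (var partition in partitions)
        {
            // Pin the consumer to a single partition, then poll with a bounded timeout.
            consumer.Assign(new TopicPartition(topic, partition.PartitionId));
            var consumeResult = consumer.Consume(TimeSpan.FromSeconds(10));
            if (consumeResult?.Message?.Value != null)
            {
                result.Add(consumeResult.Message.Value);
            }
        }
        consumer.Close();
        return result;
    }
}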

View File

@ -12,7 +12,7 @@ namespace Cloud.Services.Broker.Implement.Kafka
public KafkaProducer(IConfiguration configuration)
{
_config = configuration;
Console.WriteLine($"KafkaConsumer created. IP:" + _config["KAFKA_URL"]);
Console.WriteLine($"KafkaProducer created. IP:" + _config["KAFKA_URL"]);
ChangeBrokerIp(_config["KAFKA_URL"]);
}
public async Task ProduceAsync(string topic, Command command)
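
The KafkaProducer hunk corrects the startup log text, which previously said "KafkaConsumer created" inside the producer's constructor. For context, ProduceAsync in this kind of wrapper typically serializes the command to JSON and publishes it through a Confluent.Kafka producer; the sketch below illustrates that pattern under that assumption and is not the repository's actual class.

using System;
using System.Text.Json;
using System.Threading.Tasks;
using Confluent.Kafka;

class SimpleProducer
{
    private readonly IProducer<Null, string> _producer;

    public SimpleProducer(string bootstrapServers)
    {
        // Placeholder bootstrap address; the project takes it from _config["KAFKA_URL"].
        _producer = new ProducerBuilder<Null, string>(
            new ProducerConfig { BootstrapServers = bootstrapServers }).Build();
    }

    // Serialize a payload to JSON and publish it to the given topic.
    public async Task ProduceAsync<T>(string topic, T payload)
    {
        var json = JsonSerializer.Serialize(payload);
        var delivery = await _producer.ProduceAsync(
            topic, new Message<Null, string> { Value = json });
        Console.WriteLine($"Delivered to {delivery.TopicPartitionOffset}");
    }
}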