Merge branch 'dev' into greenhouseCRUD

This commit is contained in:
mfnefd 2024-11-19 23:35:40 +04:00
commit 9b770d131a
12 changed files with 342 additions and 121 deletions


@@ -0,0 +1,42 @@
using Cloud.Requests;
using Cloud.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using System.ComponentModel.DataAnnotations;
using System.Text.Json;

namespace Cloud.Controllers
{
    [Authorize]
    [ApiController]
    [Route("api")]
    public class ValveController : ControllerBase
    {
        // Valve controller
        private readonly ProducerService _producerService;

        public ValveController(ProducerService producerService)
        {
            _producerService = producerService;
        }

        [HttpPost("farm/{farmId}/greenhouse/{ghId}/watering")]
        public async Task<IActionResult> InteractValve([FromBody] ValveRequest request, int farmId, int ghId)
        {
            var kafkaRequest = new
            {
                FarmId = farmId,
                GreenHouseId = ghId,
                SomeAction = request.Action,
            };

            var message = JsonSerializer.Serialize(kafkaRequest);

            return Ok(kafkaRequest);

            // Producing to Kafka is left commented out for now; the endpoint currently echoes the request back.
            /*await _producerService.ProduceMessageAsync("ValvesHeatersRequest", message);
            return Ok($"Valve status is {request.Action}");*/
        }
    }
}
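
For reference, a minimal client-side sketch of calling this endpoint. The base address, port, and the way the JWT is obtained are assumptions for illustration, not part of this commit:

using System.Net.Http.Headers;
using System.Net.Http.Json;

// Hypothetical host/port; the [Authorize] attribute means a valid JWT is required.
var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000") };
string jwtToken = "<token issued by the API's login endpoint>"; // assumption
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", jwtToken);

// Action must be a ValveEnum name: "Open", "Close" or "Auto".
var response = await client.PostAsJsonAsync("api/farm/1/greenhouse/2/watering", new { Action = "Open" });
Console.WriteLine(await response.Content.ReadAsStringAsync());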

Cloud/Enums/ValveEnum.cs Normal file

@@ -0,0 +1,9 @@
namespace Cloud.Enums
{
    public enum ValveEnum
    {
        Open,
        Close,
        Auto
    }
}


@@ -9,6 +9,7 @@ using Cloud.Validation;
using StackExchange.Redis;
using Cloud.Services.Broker.Implement.Kafka;
using Cloud.Services.Broker;
+using Cloud.Services;

var builder = WebApplication.CreateBuilder(args);

@@ -21,6 +22,15 @@ builder.Services.AddSingleton<IConnectionMultiplexer>(sp =>
    return ConnectionMultiplexer.Connect(configuration);
});

+//Kafka producer service
+builder.Services.AddSingleton<ProducerService, ProducerService>();
+//Kafka consumer service
+builder.Services.AddSingleton<ConsumerService, ConsumerService>();
+//Add the BackgroundWorkerService
+builder.Services.AddHostedService<BackgroundWorkerService>();
+
//Jwt configuration
var jwtIssuer = builder.Configuration.GetSection("Jwt:Issuer").Get<string>();
var jwtKey = builder.Configuration.GetSection("Jwt:Key").Get<string>();

@@ -60,6 +70,7 @@ builder.Services.AddFluentValidationClientsideAdapters();
builder.Services.AddValidatorsFromAssemblyContaining<LoginValidator>();
builder.Services.AddValidatorsFromAssemblyContaining<RegisterValidator>();
builder.Services.AddValidatorsFromAssemblyContaining<FarmValidator>();
+builder.Services.AddValidatorsFromAssemblyContaining<ValveValidator>();

// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
builder.Services.AddEndpointsApiExplorer();
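
Since both Kafka registrations map a concrete type to itself, the two-argument AddSingleton form is redundant; the single-type overload gives the same singleton lifetime. A possible simplification (behavior unchanged):

builder.Services.AddSingleton<ProducerService>();
builder.Services.AddSingleton<ConsumerService>();
builder.Services.AddHostedService<BackgroundWorkerService>();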


@@ -0,0 +1,7 @@
namespace Cloud.Requests
{
    public class ValveRequest
    {
        public string Action { get; set; }
    }
}


@@ -0,0 +1,39 @@
namespace Cloud.Services
{
    public class BackgroundWorkerService : BackgroundService
    {
        private readonly ILogger<BackgroundWorkerService> _logger;
        private readonly ConsumerService _consumerService;

        public BackgroundWorkerService(ILogger<BackgroundWorkerService> logger, ConsumerService consumer)
        {
            _logger = logger;
            _consumerService = consumer;
        }

        // Background service; this will run continuously.
        protected override async Task ExecuteAsync(CancellationToken stoppingToken)
        {
            try
            {
                while (!stoppingToken.IsCancellationRequested)
                {
                    //_logger.LogInformation("Background Service is Running at : {time}", DateTimeOffset.Now);
                    string request = await _consumerService.WaitMessage("ValvesHeatersRequest"); // Consume the Kafka message
                    // Once a request has been consumed, it can be processed
                    if (!string.IsNullOrEmpty(request))
                        _logger.LogInformation("Valves-Heaters Request : {value}", request);

                    await Task.Delay(1000, stoppingToken);
                }
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "BackgroundWorkerService - Exception");
            }
        }
    }
}
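
One caveat with the loop above: on host shutdown, Task.Delay(1000, stoppingToken) throws OperationCanceledException, which a generic catch logs as an error. A sketch of one way to let a clean shutdown exit quietly while still logging real failures:

protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    try
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            string request = await _consumerService.WaitMessage("ValvesHeatersRequest");
            if (!string.IsNullOrEmpty(request))
                _logger.LogInformation("Valves-Heaters Request : {value}", request);

            await Task.Delay(1000, stoppingToken);
        }
    }
    catch (OperationCanceledException)
    {
        // Expected during shutdown: stoppingToken was cancelled while waiting.
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "BackgroundWorkerService - Exception");
    }
}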


@@ -0,0 +1,51 @@
using Confluent.Kafka;

namespace Cloud.Services
{
    public class ConsumerService : IDisposable
    {
        private readonly IConsumer<string, string> _consumer;

        public ConsumerService(IConfiguration configuration)
        {
            var consumerConfig = new ConsumerConfig
            {
                BootstrapServers = configuration["Kafka:BootstrapServers"],
                GroupId = configuration["Kafka:GroupId"],
                AutoOffsetReset = AutoOffsetReset.Earliest,
            };
            _consumer = new ConsumerBuilder<string, string>(consumerConfig).Build();
        }

        // Consume method: returns the next message from the topic, or "" on timeout or error.
        public Task<string> WaitMessage(string topic)
        {
            try
            {
                _consumer.Subscribe(topic);
                var consumeResult = _consumer.Consume(TimeSpan.FromMilliseconds(1000));
                // A null result means no message was received from Kafka within the timeout.
                return Task.FromResult(consumeResult?.Message.Value ?? "");
            }
            catch (Exception)
            {
                return Task.FromResult("");
            }
            // Note: the consumer must not be Close()d here; closing it after every poll
            // would leave the group and break all subsequent calls. Dispose() handles shutdown.
        }

        public void Dispose() => _consumer.Close();
    }
}
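
Usage sketch: WaitMessage polls for up to a second and returns an empty string when nothing arrived, so callers can poll in a loop without blocking indefinitely. The service is normally resolved from DI; it is constructed by hand here only for illustration:

using Microsoft.Extensions.Configuration;

IConfiguration configuration = new ConfigurationBuilder()
    .AddJsonFile("appsettings.json")   // the file shown further down in this commit
    .Build();

var consumer = new ConsumerService(configuration);

string message = await consumer.WaitMessage("ValvesHeatersRequest");
Console.WriteLine(message == ""
    ? "No message within the 1s timeout."
    : $"Received: {message}");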


@@ -0,0 +1,33 @@
using Confluent.Kafka;

namespace Cloud.Services
{
    public class ProducerService
    {
        private readonly IProducer<string, string> _producer;

        public ProducerService(IConfiguration configuration)
        {
            var producerConfig = new ProducerConfig
            {
                BootstrapServers = configuration["Kafka:BootstrapServers"]
            };
            // Build the producer
            _producer = new ProducerBuilder<string, string>(producerConfig).Build();
        }

        // Produces a message to the given Kafka topic.
        public async Task ProduceMessageAsync(string topic, string value)
        {
            var kafkaMessage = new Message<string, string>
            {
                Key = Guid.NewGuid().ToString(),
                Value = value
            };
            // Produce the message
            await _producer.ProduceAsync(topic, kafkaMessage);
        }
    }
}
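
Confluent.Kafka's ProduceAsync completes once the broker acknowledges the write, so delivery metadata is available if the service ever needs to surface it. A hypothetical variant (not part of this commit):

// Returns where the message landed; result.Status reports persistence.
public async Task<string> ProduceWithReportAsync(string topic, string value)
{
    var result = await _producer.ProduceAsync(topic,
        new Message<string, string> { Key = Guid.NewGuid().ToString(), Value = value });
    return $"Delivered to {result.TopicPartitionOffset} (status: {result.Status})";
}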


@@ -0,0 +1,16 @@
using Cloud.Enums;
using Cloud.Requests;
using FluentValidation;

namespace Cloud.Validation
{
    public class ValveValidator : AbstractValidator<ValveRequest>
    {
        public ValveValidator()
        {
            RuleFor(request => request.Action)
                .NotEmpty().WithMessage("Action can't be empty")
                .IsEnumName(typeof(ValveEnum)).WithMessage("Action is not correct");
        }
    }
}
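
The rule can be exercised without the ASP.NET pipeline; a minimal sketch (note that IsEnumName matches case-sensitively unless told otherwise, if I read FluentValidation's default correctly):

var validator = new ValveValidator();

// "Open" matches a ValveEnum name, so this passes.
Console.WriteLine(validator.Validate(new ValveRequest { Action = "Open" }).IsValid);   // True

// Wrong casing or unknown values fail validation.
Console.WriteLine(validator.Validate(new ValveRequest { Action = "open" }).IsValid);   // False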


@@ -5,6 +5,10 @@
      "Microsoft.AspNetCore": "Warning"
    }
  },
+  "Kafka": {
+    "BootstrapServers": "localhost:9092",
+    "GroupId": "ValvesHeaters"
+  },
  "AllowedHosts": "*",
  "Jwt": {
    "Key": "m7TyhE20s0dVtUDAr9EnFdPZnAG8maxgBTaiW5j6kO6RQhWDAGxYmXyu0suDnE0o",


@@ -1,13 +1,12 @@
-from random import random
-from turtledemo.penrose import start
-from kafka import KafkaProducer, KafkaConsumer
-import kafka
-import socket
-from json import dumps, loads
import time
import random as rnd
+from flask import Flask
+import requests
+import threading

+app = Flask(__name__)

class Detector:
    def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower):
        self.id = id
@@ -18,29 +17,21 @@ class Detector:
        self.moisture = 0
        self.temp = 0

-        self.producer = KafkaProducer(
-            bootstrap_servers=['localhost:9092'],
-            client_id=f'detector{self.id}',
-            value_serializer=lambda v: dumps(v).encode('utf-8')
-        )
-
-    def sendData(self):
-        message = {'id' : self.id,
-                   'moisture': self.moisture,
-                   'temperature' : self.temp }
-        self.producer.send('dataDetectors', message)

    def cycle(self):
        self.moisture += rnd.random() / 100
        self.temp += (rnd.random() - 0.5) / 100

+    def sendData(self):
+        data = {"moisture": self.moisture,
+                "temp": self.temp}
+        requests.post(f"http://127.0.0.1:20002/webhook?id={self.id}", json=data)

detector1 = Detector(1, 0.6, 0.2, 40, 20)
-detector2 = Detector(2, 0.7, 0.3, 40, 20)
-detector3 = Detector(3, 0.9, 0.6, 40, 20)

-detectors = [detector1, detector2, detector3]
+detectors = [detector1]

-while True:
-    for detector in detectors:
-        detector.cycle()
-        detector.sendData()
+if __name__ == "__main__":
+    while True:
+        for detector in detectors:
+            detector.cycle()
+            detector.sendData()


@@ -2,20 +2,20 @@ from kafka import KafkaProducer, KafkaConsumer
import kafka
import socket
from json import dumps, loads
+from flask import Flask, request
import time
from enum import Enum
-from GreenhouseDetector.detector import Detector
+import threading

-class Status(Enum):
-    UNKNOWN = -1
-    OFF = 0
-    ON = 1
+app = Flask(__name__)

+def start_manager():
+    return

class Manager:
-    def __init__(self, id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed",
+    def __init__(self, _id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed",
                 heater_state: str = "off"):
-        self.id = id
+        self._id = _id
        self.moisture = moisture
        self.temp = temp
        self.isAutoOn = isAutoOn
@@ -24,49 +24,29 @@ class Manager:
        self.dataPublisher = KafkaProducer(
            bootstrap_servers=['localhost:9092'],
-            client_id=f'manager{id}_producer',
+            client_id=f'manager{self._id}_producer',
            value_serializer=lambda v: dumps(v).encode('utf-8')
        )
-        # self.detectorConsumer = KafkaConsumer(
-        #     'dataDetectors',
-        #     bootstrap_servers=['localhost:9092'],
-        #     auto_offset_reset='earliest',
-        #     enable_auto_commit=True,
-        #     consumer_timeout_ms=1000,
-        #     group_id=f'manager{id}',
-        #     value_deserializer=lambda x: loads(x.decode('utf-8'))
-        # )
        self.controllerConsumer = KafkaConsumer(
            'commands',
            bootstrap_servers=['localhost:9092'],
            auto_offset_reset='earliest',
            enable_auto_commit=True,
            consumer_timeout_ms=2000,
-            group_id=f'manager{id}',
+            group_id=f'manager{self._id}',
            value_deserializer=lambda x: loads(x.decode('utf-8'))
        )
        self.controllerConsumerResponse = KafkaProducer(
            bootstrap_servers=['localhost:9092'],
-            client_id=f'manager{id}_producer',
+            client_id=f'manager{self._id}_producer',
            value_serializer=lambda v: dumps(v).encode('utf-8')
        )
-    def update(self):
-        for message in self.detectorConsumer:
-            print(f"Manager {self.id} received message: ")
-            print(message.value)
-            self.moisture = message.value['moisture']
-            self.temp = message.value['temperature']
-            print("Updating info...\n")
-        self.sendData()

    def sendData(self):
        print("sending data...")
        message = {
-            'id': self.id,
+            'id': self._id,
            'moisture': self.moisture,
            'temp': self.temp,
            'valveStatus': str(self.valve_state),
@@ -76,6 +56,8 @@ class Manager:
        print(message)
        self.dataPublisher.send('data', message)
+        self.dataPublisher.flush()

    def toggle_device(self, device, request_id, greenhouse_id):
@@ -122,23 +104,33 @@ class Manager:
            for tp, msgs in messages.items():
                for message in msgs:
-                    print(f"Manager {self.id} received message: ")
+                    print(f"Manager {self._id} received message: ")
                    print(message.value)
                    self.request_id = message.value['request_id']
                    self.greenhouse_id = message.value['greenhouse_id']
                    self.command = message.value['command']
                    self.toggle_device(self.command, self.request_id, self.greenhouse_id)

+@app.route(f'/webhook', methods=['POST'])
+def webhook():
+    print("received webhook", request.args.get('id'))
+    for manager in managers:
+        print()
+        if int(request.args.get('id')) == manager._id and request.method == 'POST':
+            print("Data received from Webhook is", request.json)
+            body = request.json
+            for key, value in body.items():
+                setattr(manager, key, value)
+            manager.sendData()
+            return f"Webhook received for manager {manager._id}"
+    return "Webhook ignored"

-manager1 = Manager(id=1)
+t1 = threading.Thread(target=start_manager)
+manager1 = Manager(_id=1)
managers = [manager1]

-while True:
-    time.sleep(5)
-    manager1.sendData()
-    for manager in managers:
-        manager.getCommand()
+if __name__ == "__main__":
+    threading.Thread(target=lambda: app.run(host="0.0.0.0", port=20002, debug=True, use_reloader=False)).start()


@ -1,3 +1,12 @@
networks:
vpn:
name: kafkaVPN
driver: bridge
ipam:
config:
- subnet: "192.168.2.0/24"
gateway: "192.168.2.1"
services: services:
cloud: cloud:
build: ./Cloud/ build: ./Cloud/
@ -30,7 +39,19 @@ services:
- ping - ping
retries: 3 retries: 3
timeout: 5s timeout: 5s
zookeeper:
networks:
- vpn
image: confluentinc/cp-zookeeper:7.4.0
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
ports:
- 2181:2181
kafka: kafka:
networks:
vpn:
ipv4_address: 192.168.2.10
image: confluentinc/cp-kafka:7.4.0 image: confluentinc/cp-kafka:7.4.0
ports: ports:
- 9092:9092 - 9092:9092
@ -40,8 +61,10 @@ services:
environment: environment:
KAFKA_BROKER_ID: 1 KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092 KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000 KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000
@ -49,6 +72,8 @@ services:
depends_on: depends_on:
- zookeeper - zookeeper
init-kafka: init-kafka:
networks:
- vpn
image: confluentinc/cp-kafka:7.4.0 image: confluentinc/cp-kafka:7.4.0
depends_on: depends_on:
- kafka - kafka
@ -61,13 +86,14 @@ services:
echo -e 'Creating kafka topics' echo -e 'Creating kafka topics'
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1 kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic dataDetectors --replication-factor 1 --partitions 1
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1
echo -e 'Successfully created the following topics:' echo -e 'Successfully created the following topics:'
kafka-topics --bootstrap-server kafka:29092 --list kafka-topics --bootstrap-server kafka:29092 --list
" "
kafka-ui: kafka-ui:
networks:
- vpn
container_name: kafka-ui container_name: kafka-ui
image: provectuslabs/kafka-ui:latest image: provectuslabs/kafka-ui:latest
ports: ports:
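
With the two listeners above, clients inside the compose network reach the broker via the DOCKER listener, while host-side clients use the advertised HOST address; 192.168.1.5 looks like a machine-specific LAN IP. Note that Cloud/appsettings.json points at localhost:9092, which fits an API run on the host; a containerized API would need kafka:29092 instead. A sketch of the two client configurations (Confluent.Kafka):

using Confluent.Kafka;

// Inside the compose network (e.g. the cloud container): use the DOCKER listener.
var internalConfig = new ProducerConfig { BootstrapServers = "kafka:29092" };

// From the host machine: use the advertised HOST listener (machine-specific address).
var hostConfig = new ProducerConfig { BootstrapServers = "192.168.1.5:9092" };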