From 46df076f96a8b415faa9f9a66e324f5b41ff57ce Mon Sep 17 00:00:00 2001 From: "m.zargarov" Date: Mon, 28 Oct 2024 00:02:57 +0400 Subject: [PATCH 01/49] add .gitignore --- .gitignore | 399 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 399 insertions(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..26212b2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,399 @@ +# ---> VisualStudio +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. 
Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. +!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) +*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) 
+*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files +*.ncb +*.aps + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml From c58d07731c9022740ec062a6efcd7546309e80a5 Mon Sep 17 00:00:00 2001 From: "m.zargarov" Date: Mon, 28 Oct 2024 00:25:51 +0400 Subject: [PATCH 02/49] add dev --- .vscode/settings.json | 3 ++ Cloud.sln | 22 +++++++++++++ Cloud/Cloud.csproj | 13 ++++++++ .../Controllers/WeatherForecastController.cs | 32 +++++++++++++++++++ Cloud/Program.cs | 25 +++++++++++++++ Cloud/Properties/launchSettings.json | 31 ++++++++++++++++++ Cloud/WeatherForecast.cs | 12 +++++++ Cloud/appsettings.Development.json | 8 +++++ Cloud/appsettings.json | 9 ++++++ docker-compose.yml | 16 ++++++++++ 10 files changed, 171 insertions(+) create mode 100644 .vscode/settings.json create mode 100644 Cloud.sln create mode 100644 Cloud/Cloud.csproj create mode 100644 Cloud/Controllers/WeatherForecastController.cs create mode 100644 Cloud/Program.cs create mode 100644 Cloud/Properties/launchSettings.json create mode 100644 Cloud/WeatherForecast.cs create mode 100644 Cloud/appsettings.Development.json create mode 100644 Cloud/appsettings.json create mode 100644 docker-compose.yml diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..c17f084 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "dotnet.defaultSolution": "Cloud.sln" +} \ No newline at end of file diff --git a/Cloud.sln b/Cloud.sln new file mode 100644 index 0000000..215c0ae --- /dev/null +++ b/Cloud.sln @@ -0,0 +1,22 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cloud", "Cloud\Cloud.csproj", 
"{D279AAA9-D4F8-4C7B-B34D-44A7429C87AA}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D279AAA9-D4F8-4C7B-B34D-44A7429C87AA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D279AAA9-D4F8-4C7B-B34D-44A7429C87AA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D279AAA9-D4F8-4C7B-B34D-44A7429C87AA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D279AAA9-D4F8-4C7B-B34D-44A7429C87AA}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection +EndGlobal diff --git a/Cloud/Cloud.csproj b/Cloud/Cloud.csproj new file mode 100644 index 0000000..2c33b1c --- /dev/null +++ b/Cloud/Cloud.csproj @@ -0,0 +1,13 @@ + + + + net6.0 + enable + enable + + + + + + + diff --git a/Cloud/Controllers/WeatherForecastController.cs b/Cloud/Controllers/WeatherForecastController.cs new file mode 100644 index 0000000..5f76bcd --- /dev/null +++ b/Cloud/Controllers/WeatherForecastController.cs @@ -0,0 +1,32 @@ +using Microsoft.AspNetCore.Mvc; + +namespace Cloud.Controllers; + +[ApiController] +[Route("[controller]")] +public class WeatherForecastController : ControllerBase +{ + private static readonly string[] Summaries = new[] + { + "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" + }; + + private readonly ILogger _logger; + + public WeatherForecastController(ILogger logger) + { + _logger = logger; + } + + [HttpGet(Name = "GetWeatherForecast")] + public IEnumerable Get() + { + return Enumerable.Range(1, 5).Select(index => new WeatherForecast + { + Date = DateTime.Now.AddDays(index), + TemperatureC = Random.Shared.Next(-20, 55), + Summary = Summaries[Random.Shared.Next(Summaries.Length)] + }) + .ToArray(); + } +} diff --git a/Cloud/Program.cs b/Cloud/Program.cs new file mode 100644 index 0000000..48863a6 --- /dev/null +++ b/Cloud/Program.cs @@ -0,0 +1,25 @@ +var builder = WebApplication.CreateBuilder(args); + +// Add services to the container. + +builder.Services.AddControllers(); +// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle +builder.Services.AddEndpointsApiExplorer(); +builder.Services.AddSwaggerGen(); + +var app = builder.Build(); + +// Configure the HTTP request pipeline. 
+if (app.Environment.IsDevelopment()) +{ + app.UseSwagger(); + app.UseSwaggerUI(); +} + +app.UseHttpsRedirection(); + +app.UseAuthorization(); + +app.MapControllers(); + +app.Run(); diff --git a/Cloud/Properties/launchSettings.json b/Cloud/Properties/launchSettings.json new file mode 100644 index 0000000..2e9e32f --- /dev/null +++ b/Cloud/Properties/launchSettings.json @@ -0,0 +1,31 @@ +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "iisSettings": { + "windowsAuthentication": false, + "anonymousAuthentication": true, + "iisExpress": { + "applicationUrl": "http://localhost:43967", + "sslPort": 44304 + } + }, + "profiles": { + "Cloud": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": true, + "launchUrl": "swagger", + "applicationUrl": "https://localhost:7113;http://localhost:5124", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "IIS Express": { + "commandName": "IISExpress", + "launchBrowser": true, + "launchUrl": "swagger", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/Cloud/WeatherForecast.cs b/Cloud/WeatherForecast.cs new file mode 100644 index 0000000..d787653 --- /dev/null +++ b/Cloud/WeatherForecast.cs @@ -0,0 +1,12 @@ +namespace Cloud; + +public class WeatherForecast +{ + public DateTime Date { get; set; } + + public int TemperatureC { get; set; } + + public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); + + public string? Summary { get; set; } +} diff --git a/Cloud/appsettings.Development.json b/Cloud/appsettings.Development.json new file mode 100644 index 0000000..0c208ae --- /dev/null +++ b/Cloud/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/Cloud/appsettings.json b/Cloud/appsettings.json new file mode 100644 index 0000000..10f68b8 --- /dev/null +++ b/Cloud/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..6915ce1 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,16 @@ +services: + postgres: + image: postgres:14 + container_name: cucumber_database + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: 12345 + POSTGRES_DB: main_database + ports: + - "5438:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + postgres_data: + driver: local \ No newline at end of file From 0c94c926a50f6242c146113537d7580fcae0319e Mon Sep 17 00:00:00 2001 From: "m.zargarov" Date: Mon, 28 Oct 2024 02:09:55 +0400 Subject: [PATCH 03/49] Add users table --- Cloud/ApplicationContext.cs | 29 ++++++++++ Cloud/Cloud.csproj | 6 +++ ...0241027220558_CreateUsersTable.Designer.cs | 53 +++++++++++++++++++ .../20241027220558_CreateUsersTable.cs | 34 ++++++++++++ .../ApplicationContextModelSnapshot.cs | 51 ++++++++++++++++++ Cloud/Models/User.cs | 11 ++++ Cloud/Program.cs | 6 +++ 7 files changed, 190 insertions(+) create mode 100644 Cloud/ApplicationContext.cs create mode 100644 Cloud/Migrations/20241027220558_CreateUsersTable.Designer.cs create mode 100644 Cloud/Migrations/20241027220558_CreateUsersTable.cs create mode 100644 Cloud/Migrations/ApplicationContextModelSnapshot.cs create mode 100644 Cloud/Models/User.cs diff --git a/Cloud/ApplicationContext.cs b/Cloud/ApplicationContext.cs new file mode 100644 index 0000000..e91cc00 
--- /dev/null +++ b/Cloud/ApplicationContext.cs @@ -0,0 +1,29 @@ +using Cloud.Models; +using Microsoft.EntityFrameworkCore; + +namespace Cloud; +public class ApplicationContext : DbContext +{ + public DbSet Users { get; set; } + + public ApplicationContext(DbContextOptions options) + : base(options) + { + } + + public ApplicationContext() + : base(new DbContextOptionsBuilder() + .UseNpgsql("Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345") + .Options) + { + Database.EnsureCreated(); + } + + protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder) + { + if (!optionsBuilder.IsConfigured) + { + optionsBuilder.UseNpgsql("Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345"); + } + } +} diff --git a/Cloud/Cloud.csproj b/Cloud/Cloud.csproj index 2c33b1c..d9f0064 100644 --- a/Cloud/Cloud.csproj +++ b/Cloud/Cloud.csproj @@ -7,6 +7,12 @@ + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + diff --git a/Cloud/Migrations/20241027220558_CreateUsersTable.Designer.cs b/Cloud/Migrations/20241027220558_CreateUsersTable.Designer.cs new file mode 100644 index 0000000..b021b5d --- /dev/null +++ b/Cloud/Migrations/20241027220558_CreateUsersTable.Designer.cs @@ -0,0 +1,53 @@ +// +using Cloud; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; + +#nullable disable + +namespace Cloud.Migrations +{ + [DbContext(typeof(ApplicationContext))] + [Migration("20241027220558_CreateUsersTable")] + partial class CreateUsersTable + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "6.0.14") + .HasAnnotation("Relational:MaxIdentifierLength", 63); + + NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); + + modelBuilder.Entity("Cloud.Models.User", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("integer"); + + NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); + + b.Property("Email") + .IsRequired() + .HasColumnType("text"); + + b.Property("Name") + .IsRequired() + .HasColumnType("text"); + + b.Property("Password") + .IsRequired() + .HasColumnType("text"); + + b.HasKey("Id"); + + b.ToTable("Users"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/Cloud/Migrations/20241027220558_CreateUsersTable.cs b/Cloud/Migrations/20241027220558_CreateUsersTable.cs new file mode 100644 index 0000000..65e5500 --- /dev/null +++ b/Cloud/Migrations/20241027220558_CreateUsersTable.cs @@ -0,0 +1,34 @@ +using Microsoft.EntityFrameworkCore.Migrations; +using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; + +#nullable disable + +namespace Cloud.Migrations +{ + public partial class CreateUsersTable : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.CreateTable( + name: "Users", + columns: table => new + { + Id = table.Column(type: "integer", nullable: false) + .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn), + Name = table.Column(type: "text", nullable: false), + Email = table.Column(type: "text", nullable: false), + Password = table.Column(type: "text", nullable: false) + }, + constraints: table => + { + table.PrimaryKey("PK_Users", x => 
x.Id); + }); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropTable( + name: "Users"); + } + } +} diff --git a/Cloud/Migrations/ApplicationContextModelSnapshot.cs b/Cloud/Migrations/ApplicationContextModelSnapshot.cs new file mode 100644 index 0000000..fc15a07 --- /dev/null +++ b/Cloud/Migrations/ApplicationContextModelSnapshot.cs @@ -0,0 +1,51 @@ +// +using Cloud; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; + +#nullable disable + +namespace Cloud.Migrations +{ + [DbContext(typeof(ApplicationContext))] + partial class ApplicationContextModelSnapshot : ModelSnapshot + { + protected override void BuildModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "6.0.14") + .HasAnnotation("Relational:MaxIdentifierLength", 63); + + NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); + + modelBuilder.Entity("Cloud.Models.User", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("integer"); + + NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); + + b.Property("Email") + .IsRequired() + .HasColumnType("text"); + + b.Property("Name") + .IsRequired() + .HasColumnType("text"); + + b.Property("Password") + .IsRequired() + .HasColumnType("text"); + + b.HasKey("Id"); + + b.ToTable("Users"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/Cloud/Models/User.cs b/Cloud/Models/User.cs new file mode 100644 index 0000000..27c8ee4 --- /dev/null +++ b/Cloud/Models/User.cs @@ -0,0 +1,11 @@ +namespace Cloud.Models; +public class User +{ + public int Id { get; set; } + + public string Name { get; set; } + + public string Email { get; set; } + + public string Password { get; set; } +} \ No newline at end of file diff --git a/Cloud/Program.cs b/Cloud/Program.cs index 48863a6..2b9379b 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -1,7 +1,13 @@ +using Cloud; +using Microsoft.EntityFrameworkCore; + var builder = WebApplication.CreateBuilder(args); // Add services to the container. 
+builder.Services.AddDbContext(options => + options.UseNpgsql("Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345")); + builder.Services.AddControllers(); // Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle builder.Services.AddEndpointsApiExplorer(); From 3fa35ba61714f3eb28686a87ff977383b32f91ec Mon Sep 17 00:00:00 2001 From: the Date: Mon, 28 Oct 2024 13:14:46 +0400 Subject: [PATCH 04/49] init --- GreenhouseDetector/detector.py | 0 GreenhouseManager/manager.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 GreenhouseDetector/detector.py create mode 100644 GreenhouseManager/manager.py diff --git a/GreenhouseDetector/detector.py b/GreenhouseDetector/detector.py new file mode 100644 index 0000000..e69de29 diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py new file mode 100644 index 0000000..e69de29 From d16968bc988158596272610307fe9a4d9a91abfc Mon Sep 17 00:00:00 2001 From: the Date: Mon, 28 Oct 2024 14:47:20 +0400 Subject: [PATCH 05/49] compose + kafka ui --- GreenhouseManager/manager.py | 7 +++++ docker-compose.yml | 58 ++++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) create mode 100644 docker-compose.yml diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index e69de29..72b3332 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -0,0 +1,7 @@ +from kafka import KafkaProducer, KafkaConsumer +import kafka +import socket + +consumer = KafkaConsumer(bootstrap_servers = ['localhost:9092']) + +print(consumer.topics()) diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..0683811 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,58 @@ +services: + zookeeper: + image: confluentinc/cp-zookeeper:7.4.0 + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + ports: + - 2181:2181 + + kafka: + image: confluentinc/cp-kafka:7.4.0 + ports: + - 9092:9092 + - 9997:9997 + expose: + - 29092:29092 + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + depends_on: + - zookeeper + + init-kafka: + image: confluentinc/cp-kafka:7.4.0 + depends_on: + - kafka + entrypoint: [ '/bin/sh', '-c' ] + command: | + " + # blocks until kafka is reachable + kafka-topics --bootstrap-server kafka:29092 --list + + echo -e 'Creating kafka topics' + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 + + echo -e 'Successfully created the following topics:' + kafka-topics --bootstrap-server kafka:29092 --list + " + + kafka-ui: + container_name: kafka-ui + image: provectuslabs/kafka-ui:latest + ports: + - 8080:8080 + depends_on: + - kafka + environment: + KAFKA_CLUSTERS_0_NAME: local + KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 + KAFKA_CLUSTERS_0_METRICS_PORT: 9997 \ No newline at end of file From 
0bf58236529de584f84e63a5f0e0b47d332ad66f Mon Sep 17 00:00:00 2001 From: "m.zargarov" Date: Mon, 28 Oct 2024 18:39:13 +0400 Subject: [PATCH 06/49] add jwt auth --- Cloud/Cloud.csproj | 3 + Cloud/Controllers/AuthController.cs | 104 ++++++++++++++++++++++++++ Cloud/Program.cs | 66 +++++++++++++++- Cloud/Requests/LoginRequest.cs | 13 ++++ Cloud/Requests/RegisterRequest.cs | 15 ++++ Cloud/Validation/LoginValidator.cs | 18 +++++ Cloud/Validation/RegisterValidator.cs | 22 ++++++ Cloud/appsettings.json | 8 +- 8 files changed, 245 insertions(+), 4 deletions(-) create mode 100644 Cloud/Controllers/AuthController.cs create mode 100644 Cloud/Requests/LoginRequest.cs create mode 100644 Cloud/Requests/RegisterRequest.cs create mode 100644 Cloud/Validation/LoginValidator.cs create mode 100644 Cloud/Validation/RegisterValidator.cs diff --git a/Cloud/Cloud.csproj b/Cloud/Cloud.csproj index d9f0064..93e4190 100644 --- a/Cloud/Cloud.csproj +++ b/Cloud/Cloud.csproj @@ -7,6 +7,8 @@ + + runtime; build; native; contentfiles; analyzers; buildtransitive @@ -14,6 +16,7 @@ + diff --git a/Cloud/Controllers/AuthController.cs b/Cloud/Controllers/AuthController.cs new file mode 100644 index 0000000..c63a89c --- /dev/null +++ b/Cloud/Controllers/AuthController.cs @@ -0,0 +1,104 @@ +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Identity; +using Cloud.Models; +using Cloud.Requests; +using Microsoft.IdentityModel.Tokens; +using System.IdentityModel.Tokens.Jwt; +using System.Text; +using Microsoft.EntityFrameworkCore; +using Microsoft.AspNetCore.Authorization; +using System.Security.Claims; + +namespace Cloud.Controllers; + +[ApiController] +[Route("api/[controller]")] +public class AuthController : ControllerBase +{ + private PasswordHasher _passwordHasher; + private IConfiguration _config; + private ApplicationContext _context; + + public AuthController(IConfiguration config, ApplicationContext context) + { + _passwordHasher = new PasswordHasher(); + _config = config; + _context = context; + } + + [HttpPost("register")] + public async Task Register([FromBody] RegisterRequest request) + { + var existUser = await _context.Users.SingleOrDefaultAsync(u => u.Email == request.Email); + + if (existUser != null) { + return BadRequest("Пользователь с такой эл. почтой уже существует"); + } + + var user = new User + { + Name = request.Name, + Email = request.Email, + Password = _passwordHasher.HashPassword(null, request.Password) + }; + + _context.Users.Add(user); + await _context.SaveChangesAsync(); + + return Ok("Пользователь успешно зарегистрирован"); + } + + [HttpPost("login")] + public async Task Login([FromBody] LoginRequest request) + { + var user = await _context.Users.SingleOrDefaultAsync(u => u.Email == request.Email); + + if (user == null) { + return Unauthorized("Пользователя с такой эл. 
почтой не существует"); + } + + var verificationResult = _passwordHasher.VerifyHashedPassword(null, user.Password, request.Password); + + if (verificationResult == PasswordVerificationResult.Failed) { + return Unauthorized("Неверный пароль"); + } + + var securityKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(_config["Jwt:Key"])); + var credentials = new SigningCredentials(securityKey, SecurityAlgorithms.HmacSha256); + + var claims = new[] + { + new Claim(ClaimTypes.Name, user.Email), + }; + + var Sectoken = new JwtSecurityToken(_config["Jwt:Issuer"], + _config["Jwt:Issuer"], + claims: claims, + expires: DateTime.Now.AddMinutes(120), + signingCredentials: credentials); + + var token = new JwtSecurityTokenHandler().WriteToken(Sectoken); + + return Ok(token); + } + + [Authorize] + [HttpGet("user")] + public async Task GetAuthUser() + { + var userEmail = User.Identity.Name; + + var user = await _context.Users.SingleOrDefaultAsync(u => u.Email == userEmail); + + if (user == null) { + return NotFound("Пользователь не найден"); + } + + return Ok(new + { + user.Id, + user.Name, + user.Email + }); + } +} \ No newline at end of file diff --git a/Cloud/Program.cs b/Cloud/Program.cs index 2b9379b..1c10d82 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -1,17 +1,73 @@ using Cloud; using Microsoft.EntityFrameworkCore; +using Microsoft.AspNetCore.Authentication.JwtBearer; +using Microsoft.IdentityModel.Tokens; +using System.Text; +using FluentValidation; +using FluentValidation.AspNetCore; +using Cloud.Validation; var builder = WebApplication.CreateBuilder(args); // Add services to the container. +//Jwt configuration starts here +var jwtIssuer = builder.Configuration.GetSection("Jwt:Issuer").Get(); +var jwtKey = builder.Configuration.GetSection("Jwt:Key").Get(); + +builder.Services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) + .AddJwtBearer(options => + { + options.TokenValidationParameters = new TokenValidationParameters + { + ValidateIssuer = true, + ValidateAudience = true, + ValidateLifetime = true, + ValidateIssuerSigningKey = true, + ValidIssuer = jwtIssuer, + ValidAudience = jwtIssuer, + IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(jwtKey)) + }; + }); + builder.Services.AddDbContext(options => options.UseNpgsql("Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345")); builder.Services.AddControllers(); +builder.Services.AddFluentValidationAutoValidation(); +builder.Services.AddFluentValidationClientsideAdapters(); +builder.Services.AddValidatorsFromAssemblyContaining(); +builder.Services.AddValidatorsFromAssemblyContaining(); + // Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle builder.Services.AddEndpointsApiExplorer(); -builder.Services.AddSwaggerGen(); +builder.Services.AddSwaggerGen(c => +{ + c.SwaggerDoc("v1", new Microsoft.OpenApi.Models.OpenApiInfo { Title = "Cloud API", Version = "v1" }); + + c.AddSecurityDefinition("Bearer", new Microsoft.OpenApi.Models.OpenApiSecurityScheme + { + Description = "Введите ваш Bearer токен", + Name = "Authorization", + In = Microsoft.OpenApi.Models.ParameterLocation.Header, + Type = Microsoft.OpenApi.Models.SecuritySchemeType.ApiKey + }); + + c.AddSecurityRequirement(new Microsoft.OpenApi.Models.OpenApiSecurityRequirement + { + { + new Microsoft.OpenApi.Models.OpenApiSecurityScheme + { + Reference = new Microsoft.OpenApi.Models.OpenApiReference + { + Type = Microsoft.OpenApi.Models.ReferenceType.SecurityScheme, + Id = "Bearer" + } + }, + new 
string[] {} + } + }); +}); var app = builder.Build(); @@ -19,11 +75,17 @@ var app = builder.Build(); if (app.Environment.IsDevelopment()) { app.UseSwagger(); - app.UseSwaggerUI(); + app.UseSwaggerUI(c => + { + c.SwaggerEndpoint("/swagger/v1/swagger.json", "Cloud API V1"); + c.RoutePrefix = string.Empty; + }); } app.UseHttpsRedirection(); +app.UseAuthentication(); + app.UseAuthorization(); app.MapControllers(); diff --git a/Cloud/Requests/LoginRequest.cs b/Cloud/Requests/LoginRequest.cs new file mode 100644 index 0000000..472da63 --- /dev/null +++ b/Cloud/Requests/LoginRequest.cs @@ -0,0 +1,13 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; + +namespace Cloud.Requests; + +public class LoginRequest +{ + public string Email { get; set; } + + public string Password { get; set; } +} \ No newline at end of file diff --git a/Cloud/Requests/RegisterRequest.cs b/Cloud/Requests/RegisterRequest.cs new file mode 100644 index 0000000..a92e7a8 --- /dev/null +++ b/Cloud/Requests/RegisterRequest.cs @@ -0,0 +1,15 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; + +namespace Cloud.Requests; + +public class RegisterRequest +{ + public string Name { get; set; } + + public string Email { get; set; } + + public string Password { get; set; } +} \ No newline at end of file diff --git a/Cloud/Validation/LoginValidator.cs b/Cloud/Validation/LoginValidator.cs new file mode 100644 index 0000000..19f6ec0 --- /dev/null +++ b/Cloud/Validation/LoginValidator.cs @@ -0,0 +1,18 @@ +using Cloud.Requests; +using FluentValidation; + +namespace Cloud.Validation; + +public class LoginValidator : AbstractValidator +{ + public LoginValidator() + { + RuleFor(request => request.Email) + .NotEmpty().WithMessage("Email обязателен для заполнения") + .EmailAddress().WithMessage("Некорректный формат Email"); + + RuleFor(request => request.Password) + .NotEmpty().WithMessage("Пароль обязателен для заполнения") + .MinimumLength(8).WithMessage("Пароль должен быть не менее 8 символов"); + } +} diff --git a/Cloud/Validation/RegisterValidator.cs b/Cloud/Validation/RegisterValidator.cs new file mode 100644 index 0000000..1cfcb8d --- /dev/null +++ b/Cloud/Validation/RegisterValidator.cs @@ -0,0 +1,22 @@ +using Cloud.Requests; +using FluentValidation; + +namespace Cloud.Validation; + +public class RegisterValidator : AbstractValidator +{ + public RegisterValidator() + { + RuleFor(user => user.Name) + .NotEmpty().WithMessage("Имя обязательно для заполнения") + .MaximumLength(50).WithMessage("Имя должно быть не более 50 символов"); + + RuleFor(user => user.Email) + .NotEmpty().WithMessage("Email обязателен для заполнения") + .EmailAddress().WithMessage("Некорректный формат Email"); + + RuleFor(user => user.Password) + .NotEmpty().WithMessage("Пароль обязателен для заполнения") + .MinimumLength(8).WithMessage("Пароль должен быть не менее 8 символов"); + } +} \ No newline at end of file diff --git a/Cloud/appsettings.json b/Cloud/appsettings.json index 10f68b8..b272a9c 100644 --- a/Cloud/appsettings.json +++ b/Cloud/appsettings.json @@ -5,5 +5,9 @@ "Microsoft.AspNetCore": "Warning" } }, - "AllowedHosts": "*" -} + "AllowedHosts": "*", + "Jwt": { + "Key": "m7TyhE20s0dVtUDAr9EnFdPZnAG8maxgBTaiW5j6kO6RQhWDAGxYmXyu0suDnE0o", + "Issuer": "localhost" + } +} \ No newline at end of file From 720cf4bd60153d776f563138b226ba28b8a64d81 Mon Sep 17 00:00:00 2001 From: the Date: Mon, 28 Oct 2024 19:43:13 +0400 Subject: [PATCH 07/49] connection test --- 
GreenhouseManager/manager.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index 72b3332..74fed03 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -1,7 +1,27 @@ from kafka import KafkaProducer, KafkaConsumer import kafka import socket +from json import dumps, loads +import time -consumer = KafkaConsumer(bootstrap_servers = ['localhost:9092']) +consumer = KafkaConsumer( + 'commands', + bootstrap_servers=['localhost:9092'], + auto_offset_reset='earliest', + enable_auto_commit=True, + group_id='my-group', + value_deserializer=lambda x: loads(x.decode('utf-8'))) print(consumer.topics()) +consumer.subscribe(['commands']) +producer = KafkaProducer(bootstrap_servers = ['localhost:9092'], + value_serializer=lambda x: + dumps(x).encode('utf-8')) + +data = {'message' : 'hello'} +producer.send('commands', value=data) + +while True: + for message in consumer: + print(message) + time.sleep(1) \ No newline at end of file From 33c3a074dab97188b6efe8da4f6f30c0003f61d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=90=D1=80=D1=82=D0=B5=D0=BC=20=D0=A5=D0=B0=D1=80=D0=BB?= =?UTF-8?q?=D0=B0=D0=BC=D0=BE=D0=B2?= Date: Tue, 29 Oct 2024 00:38:46 +0400 Subject: [PATCH 08/49] Make Farm CRUD --- Cloud/ApplicationContext.cs | 3 +- ...0241027220558_CreateUsersTable.Designer.cs | 60 +++++++++---------- .../20241027220558_CreateUsersTable.cs | 50 ++++++++-------- .../ApplicationContextModelSnapshot.cs | 42 +++++++++++++ Cloud/Models/User.cs | 2 + Cloud/Program.cs | 1 + 6 files changed, 102 insertions(+), 56 deletions(-) diff --git a/Cloud/ApplicationContext.cs b/Cloud/ApplicationContext.cs index e91cc00..6a60cf1 100644 --- a/Cloud/ApplicationContext.cs +++ b/Cloud/ApplicationContext.cs @@ -4,7 +4,8 @@ using Microsoft.EntityFrameworkCore; namespace Cloud; public class ApplicationContext : DbContext { - public DbSet Users { get; set; } + public DbSet Users { get; set; } = null!; + public DbSet Farms { get; set; } = null!; public ApplicationContext(DbContextOptions options) : base(options) diff --git a/Cloud/Migrations/20241027220558_CreateUsersTable.Designer.cs b/Cloud/Migrations/20241027220558_CreateUsersTable.Designer.cs index b021b5d..d1c7360 100644 --- a/Cloud/Migrations/20241027220558_CreateUsersTable.Designer.cs +++ b/Cloud/Migrations/20241027220558_CreateUsersTable.Designer.cs @@ -10,44 +10,44 @@ using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; namespace Cloud.Migrations { - [DbContext(typeof(ApplicationContext))] - [Migration("20241027220558_CreateUsersTable")] - partial class CreateUsersTable - { - protected override void BuildTargetModel(ModelBuilder modelBuilder) - { + [DbContext(typeof(ApplicationContext))] + [Migration("20241027220558_CreateUsersTable")] + partial class CreateUsersTable + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { #pragma warning disable 612, 618 - modelBuilder - .HasAnnotation("ProductVersion", "6.0.14") - .HasAnnotation("Relational:MaxIdentifierLength", 63); + modelBuilder + .HasAnnotation("ProductVersion", "6.0.14") + .HasAnnotation("Relational:MaxIdentifierLength", 63); - NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); + NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); - modelBuilder.Entity("Cloud.Models.User", b => - { - b.Property("Id") - .ValueGeneratedOnAdd() - .HasColumnType("integer"); + modelBuilder.Entity("Cloud.Models.User", b => + { + b.Property("Id") + 
.ValueGeneratedOnAdd() + .HasColumnType("integer"); - NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); + NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); - b.Property("Email") - .IsRequired() - .HasColumnType("text"); + b.Property("Email") + .IsRequired() + .HasColumnType("text"); - b.Property("Name") - .IsRequired() - .HasColumnType("text"); + b.Property("Name") + .IsRequired() + .HasColumnType("text"); - b.Property("Password") - .IsRequired() - .HasColumnType("text"); + b.Property("Password") + .IsRequired() + .HasColumnType("text"); - b.HasKey("Id"); + b.HasKey("Id"); - b.ToTable("Users"); - }); + b.ToTable("Users"); + }); #pragma warning restore 612, 618 - } - } + } + } } diff --git a/Cloud/Migrations/20241027220558_CreateUsersTable.cs b/Cloud/Migrations/20241027220558_CreateUsersTable.cs index 65e5500..4c9a4dd 100644 --- a/Cloud/Migrations/20241027220558_CreateUsersTable.cs +++ b/Cloud/Migrations/20241027220558_CreateUsersTable.cs @@ -5,30 +5,30 @@ using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; namespace Cloud.Migrations { - public partial class CreateUsersTable : Migration - { - protected override void Up(MigrationBuilder migrationBuilder) - { - migrationBuilder.CreateTable( - name: "Users", - columns: table => new - { - Id = table.Column(type: "integer", nullable: false) - .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn), - Name = table.Column(type: "text", nullable: false), - Email = table.Column(type: "text", nullable: false), - Password = table.Column(type: "text", nullable: false) - }, - constraints: table => - { - table.PrimaryKey("PK_Users", x => x.Id); - }); - } + public partial class CreateUsersTable : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.CreateTable( + name: "Users", + columns: table => new + { + Id = table.Column(type: "integer", nullable: false) + .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn), + Name = table.Column(type: "text", nullable: false), + Email = table.Column(type: "text", nullable: false), + Password = table.Column(type: "text", nullable: false) + }, + constraints: table => + { + table.PrimaryKey("PK_Users", x => x.Id); + }); + } - protected override void Down(MigrationBuilder migrationBuilder) - { - migrationBuilder.DropTable( - name: "Users"); - } - } + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropTable( + name: "Users"); + } + } } diff --git a/Cloud/Migrations/ApplicationContextModelSnapshot.cs b/Cloud/Migrations/ApplicationContextModelSnapshot.cs index fc15a07..c499a41 100644 --- a/Cloud/Migrations/ApplicationContextModelSnapshot.cs +++ b/Cloud/Migrations/ApplicationContextModelSnapshot.cs @@ -21,6 +21,32 @@ namespace Cloud.Migrations NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); + modelBuilder.Entity("Cloud.Models.Farm", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("integer"); + + NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); + + b.Property("Name") + .IsRequired() + .HasColumnType("text"); + + b.Property("RaspberryMacAddr") + .IsRequired() + .HasColumnType("text"); + + b.Property("UserId") + .HasColumnType("integer"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("Farms"); + }); + modelBuilder.Entity("Cloud.Models.User", b => { b.Property("Id") @@ -45,6 +71,22 @@ namespace 
Cloud.Migrations b.ToTable("Users"); }); + + modelBuilder.Entity("Cloud.Models.Farm", b => + { + b.HasOne("Cloud.Models.User", "User") + .WithMany("Farms") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("User"); + }); + + modelBuilder.Entity("Cloud.Models.User", b => + { + b.Navigation("Farms"); + }); #pragma warning restore 612, 618 } } diff --git a/Cloud/Models/User.cs b/Cloud/Models/User.cs index 27c8ee4..269dec4 100644 --- a/Cloud/Models/User.cs +++ b/Cloud/Models/User.cs @@ -8,4 +8,6 @@ public class User public string Email { get; set; } public string Password { get; set; } + + public List Farms { get; set; } = new(); } \ No newline at end of file diff --git a/Cloud/Program.cs b/Cloud/Program.cs index 1c10d82..6f77533 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -38,6 +38,7 @@ builder.Services.AddFluentValidationAutoValidation(); builder.Services.AddFluentValidationClientsideAdapters(); builder.Services.AddValidatorsFromAssemblyContaining(); builder.Services.AddValidatorsFromAssemblyContaining(); +builder.Services.AddValidatorsFromAssemblyContaining(); // Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle builder.Services.AddEndpointsApiExplorer(); From c4899d4a11b283e1f491bcf1d04d7a6bcac950e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=90=D1=80=D1=82=D0=B5=D0=BC=20=D0=A5=D0=B0=D1=80=D0=BB?= =?UTF-8?q?=D0=B0=D0=BC=D0=BE=D0=B2?= Date: Tue, 29 Oct 2024 00:40:58 +0400 Subject: [PATCH 09/49] Make Farm CRUD --- Cloud/Controllers/FarmController.cs | 128 ++++++++++++++++++ ...0241028192806_CreateFarmsTable.Designer.cs | 95 +++++++++++++ .../20241028192806_CreateFarmsTable.cs | 45 ++++++ Cloud/Models/Farm.cs | 12 ++ Cloud/Requests/FarmRequest.cs | 8 ++ Cloud/Validation/FarmValidator.cs | 18 +++ 6 files changed, 306 insertions(+) create mode 100644 Cloud/Controllers/FarmController.cs create mode 100644 Cloud/Migrations/20241028192806_CreateFarmsTable.Designer.cs create mode 100644 Cloud/Migrations/20241028192806_CreateFarmsTable.cs create mode 100644 Cloud/Models/Farm.cs create mode 100644 Cloud/Requests/FarmRequest.cs create mode 100644 Cloud/Validation/FarmValidator.cs diff --git a/Cloud/Controllers/FarmController.cs b/Cloud/Controllers/FarmController.cs new file mode 100644 index 0000000..a1e7b73 --- /dev/null +++ b/Cloud/Controllers/FarmController.cs @@ -0,0 +1,128 @@ +using Cloud.Models; +using Cloud.Requests; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Mvc; +using Microsoft.EntityFrameworkCore; + +namespace Cloud.Controllers +{ + [Authorize] + [ApiController] + [Route("api/user")] + public class FarmController : ControllerBase + { + private IConfiguration _config; + private ApplicationContext _context; + + public FarmController(IConfiguration config, ApplicationContext context) + { + _config = config; + _context = context; + } + + [HttpGet("{userId}/farm")] + public async Task>> Index (int userId) + { + try + { + List farms = await + _context.Farms.Where(x => x.UserId == userId).AsNoTracking().ToListAsync(); + if (!farms.Any()) + return NotFound("Farms is not found"); + + return Ok(farms); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } + + [HttpGet("{userId}/farm/{farmId}")] + public async Task> Show(int userId, int farmId) + { + try + { + Farm? 
farm = await + _context.Farms.FirstOrDefaultAsync(x => x.UserId == userId && x.Id == farmId); + + if (farm == null) + return NotFound("Farm is not found"); + + return Ok(farm); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } + + [HttpPost("{userId}/farm")] + public async Task> Create([FromBody] FarmRequest farmRequest, int userId) + { + try + { + var farm = new Farm { + Name = farmRequest.Name, + UserId = userId, + RaspberryMacAddr = farmRequest.RaspberryMacAddr, + }; + + Farm? farmCreated = _context.Farms.Add(farm).Entity; + await _context.SaveChangesAsync(); + + return Ok(farmCreated); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } + + [HttpPut("{userId}/farm/{farmId}")] + public async Task> Update([FromBody] FarmRequest farmRequest, int userId, int farmId) + { + try + { + Farm? farm = await _context.Farms.FirstOrDefaultAsync(x => x.Id == farmId && x.UserId == userId); + + if (farm == null) + return NotFound("Farm is not found"); + + farm.Name = farmRequest.Name; + farm.RaspberryMacAddr = farmRequest.RaspberryMacAddr; + + _context.Farms.Update(farm); + await _context.SaveChangesAsync(); + + return Ok(farm); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } + + [HttpDelete("{userId}/farm/{farmId}")] + public async Task Delete(int userId, int farmId) + { + try + { + Farm? farm = await _context.Farms.FirstOrDefaultAsync(x => x.Id == farmId && x.UserId == userId); + + if (farm == null) + return NotFound("Farm is not found"); + + _context.Farms.Remove(farm); + await _context.SaveChangesAsync(); + + return Ok("Farm deleted successfully"); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } + + } +} diff --git a/Cloud/Migrations/20241028192806_CreateFarmsTable.Designer.cs b/Cloud/Migrations/20241028192806_CreateFarmsTable.Designer.cs new file mode 100644 index 0000000..4bc7f80 --- /dev/null +++ b/Cloud/Migrations/20241028192806_CreateFarmsTable.Designer.cs @@ -0,0 +1,95 @@ +// +using Cloud; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; + +#nullable disable + +namespace Cloud.Migrations +{ + [DbContext(typeof(ApplicationContext))] + [Migration("20241028192806_CreateFarmsTable")] + partial class CreateFarmsTable + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "6.0.14") + .HasAnnotation("Relational:MaxIdentifierLength", 63); + + NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); + + modelBuilder.Entity("Cloud.Models.Farm", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("integer"); + + NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); + + b.Property("Name") + .IsRequired() + .HasColumnType("text"); + + b.Property("RaspberryMacAddr") + .IsRequired() + .HasColumnType("text"); + + b.Property("UserId") + .HasColumnType("integer"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("Farms"); + }); + + modelBuilder.Entity("Cloud.Models.User", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("integer"); + + NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); + + b.Property("Email") + .IsRequired() + .HasColumnType("text"); + + b.Property("Name") + .IsRequired() + 
.HasColumnType("text"); + + b.Property("Password") + .IsRequired() + .HasColumnType("text"); + + b.HasKey("Id"); + + b.ToTable("Users"); + }); + + modelBuilder.Entity("Cloud.Models.Farm", b => + { + b.HasOne("Cloud.Models.User", "User") + .WithMany("Farms") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("User"); + }); + + modelBuilder.Entity("Cloud.Models.User", b => + { + b.Navigation("Farms"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/Cloud/Migrations/20241028192806_CreateFarmsTable.cs b/Cloud/Migrations/20241028192806_CreateFarmsTable.cs new file mode 100644 index 0000000..a297728 --- /dev/null +++ b/Cloud/Migrations/20241028192806_CreateFarmsTable.cs @@ -0,0 +1,45 @@ +using Microsoft.EntityFrameworkCore.Migrations; +using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; + +#nullable disable + +namespace Cloud.Migrations +{ + public partial class CreateFarmsTable : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.CreateTable( + name: "Farms", + columns: table => new + { + Id = table.Column(type: "integer", nullable: false) + .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn), + Name = table.Column(type: "text", nullable: false), + UserId = table.Column(type: "integer", nullable: false), + RaspberryMacAddr = table.Column(type: "text", nullable: false) + }, + constraints: table => + { + table.PrimaryKey("PK_Farms", x => x.Id); + table.ForeignKey( + name: "FK_Farms_Users_UserId", + column: x => x.UserId, + principalTable: "Users", + principalColumn: "Id", + onDelete: ReferentialAction.Cascade); + }); + + migrationBuilder.CreateIndex( + name: "IX_Farms_UserId", + table: "Farms", + column: "UserId"); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropTable( + name: "Farms"); + } + } +} diff --git a/Cloud/Models/Farm.cs b/Cloud/Models/Farm.cs new file mode 100644 index 0000000..a17d3bc --- /dev/null +++ b/Cloud/Models/Farm.cs @@ -0,0 +1,12 @@ +namespace Cloud.Models +{ + public class Farm + { + public int Id { get; set; } + public string Name { get; set; } + public int UserId { get; set; } + public User? 
User { get; set; } + public string RaspberryMacAddr { get; set; } + + } +} diff --git a/Cloud/Requests/FarmRequest.cs b/Cloud/Requests/FarmRequest.cs new file mode 100644 index 0000000..c032dfa --- /dev/null +++ b/Cloud/Requests/FarmRequest.cs @@ -0,0 +1,8 @@ +namespace Cloud.Requests +{ + public class FarmRequest + { + public string Name { get; set; } + public string RaspberryMacAddr { get; set; } + } +} diff --git a/Cloud/Validation/FarmValidator.cs b/Cloud/Validation/FarmValidator.cs new file mode 100644 index 0000000..4c262fb --- /dev/null +++ b/Cloud/Validation/FarmValidator.cs @@ -0,0 +1,18 @@ +using Cloud.Requests; +using FluentValidation; + +namespace Cloud.Validation +{ + public class FarmValidator : AbstractValidator + { + public FarmValidator() + { + RuleFor(request => request.RaspberryMacAddr) + .NotEmpty().WithMessage("MAC address can't be empty") + .Matches("^([0-9A-Fa-f]{2}[:-]?){5}([0-9A-Fa-f]{2})$").WithMessage("MAC address is not valid"); + + RuleFor(request => request.Name) + .NotEmpty().WithMessage("Name can't be empty"); + } + } +} From 08ee12aa8bef22ec1e987965a8a606e45b8d597a Mon Sep 17 00:00:00 2001 From: the Date: Tue, 29 Oct 2024 16:07:02 +0400 Subject: [PATCH 10/49] simple detector + manager --- GreenhouseDetector/detector.py | 40 ++++++++++++++++++++++++++++++++++ GreenhouseManager/manager.py | 18 ++++----------- 2 files changed, 44 insertions(+), 14 deletions(-) diff --git a/GreenhouseDetector/detector.py b/GreenhouseDetector/detector.py index e69de29..1ecb671 100644 --- a/GreenhouseDetector/detector.py +++ b/GreenhouseDetector/detector.py @@ -0,0 +1,40 @@ +from random import random +from turtledemo.penrose import start + +from kafka import KafkaProducer, KafkaConsumer +import kafka +import socket +from json import dumps, loads +import time +import random as rnd + +class Detector: + def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower): + self.id = id + self.moistureThresholdUpper = moistureThresholdUpper + self.moistureThresholdLower = moistureThresholdLower + self.tempThresholdUpper = tempThresholdUpper + self.tempThresholdLower = tempThresholdLower + self.moisture = 0 + self.temp = 0 + + self.producer = KafkaProducer( + bootstrap_servers=['localhost:9092'], + client_id=f'detector{self.id}', + value_serializer=lambda v: dumps(v).encode('utf-8') + ) + + def sendData(self): + message = {'id' : self.id, + 'moisture': self.moisture, + 'temperature' : self.temp } + self.producer.send('data', message) + + def cycle(self): + self.moisture += rnd.random() / 100 + self.temp += (rnd.random() - 0.5) / 100 + + +while True: + time.sleep(1) + diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index 74fed03..dbda11b 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -4,24 +4,14 @@ import socket from json import dumps, loads import time -consumer = KafkaConsumer( - 'commands', +dataConsumer = KafkaConsumer( + 'data', bootstrap_servers=['localhost:9092'], auto_offset_reset='earliest', enable_auto_commit=True, group_id='my-group', value_deserializer=lambda x: loads(x.decode('utf-8'))) -print(consumer.topics()) -consumer.subscribe(['commands']) -producer = KafkaProducer(bootstrap_servers = ['localhost:9092'], - value_serializer=lambda x: - dumps(x).encode('utf-8')) - -data = {'message' : 'hello'} -producer.send('commands', value=data) - while True: - for message in consumer: - print(message) - time.sleep(1) \ No newline at end of file + for message in dataConsumer: + 
print(message) \ No newline at end of file From 6291bb483c4fef9806732b330e5f9e7eafda94ed Mon Sep 17 00:00:00 2001 From: the Date: Tue, 29 Oct 2024 17:31:47 +0400 Subject: [PATCH 11/49] I MUST JOB --- GreenhouseDetector/detector.py | 10 +++++- GreenhouseManager/manager.py | 65 +++++++++++++++++++++++++++++----- docker-compose.yml | 1 + 3 files changed, 66 insertions(+), 10 deletions(-) diff --git a/GreenhouseDetector/detector.py b/GreenhouseDetector/detector.py index 1ecb671..728ec1d 100644 --- a/GreenhouseDetector/detector.py +++ b/GreenhouseDetector/detector.py @@ -28,13 +28,21 @@ class Detector: message = {'id' : self.id, 'moisture': self.moisture, 'temperature' : self.temp } - self.producer.send('data', message) + self.producer.send('dataDetectors', message) def cycle(self): self.moisture += rnd.random() / 100 self.temp += (rnd.random() - 0.5) / 100 +detector1 = Detector(1, 0.6, 0.2, 40, 20) +detector2 = Detector(2, 0.7, 0.3, 40, 20) +detector3 = Detector(3, 0.9, 0.6, 40, 20) + +detectors = [detector1, detector2, detector3] while True: + for detector in detectors: + detector.cycle() + detector.sendData() time.sleep(1) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index dbda11b..4d1d836 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -3,15 +3,62 @@ import kafka import socket from json import dumps, loads import time +from enum import Enum -dataConsumer = KafkaConsumer( - 'data', - bootstrap_servers=['localhost:9092'], - auto_offset_reset='earliest', - enable_auto_commit=True, - group_id='my-group', - value_deserializer=lambda x: loads(x.decode('utf-8'))) +class Status(Enum): + UNKNOWN = -1 + OFF = 0 + ON = 1 + +class Manager: + def __init__(self, id : int, moisture : float = 0, temp : float = 20, valveStatus : Status = Status.UNKNOWN, heaterStatus : Status = Status.UNKNOWN, isAutoOn : bool = False): + self.id = id + self.moisture = moisture + self.temp = temp + self.valveStatus = valveStatus + self.heaterStatus = heaterStatus + self.isAutoOn = isAutoOn + + self.dataPublisher = KafkaProducer( + bootstrap_servers=['localhost:9092'], + client_id=f'manager{id}', + value_serializer=lambda v: dumps(v).encode('utf-8') + ) + + self.detectorConsumer = KafkaConsumer( + 'dataDetectors', + bootstrap_servers=['localhost:9092'], + auto_offset_reset='earliest', + enable_auto_commit=True, + group_id=f'manager{id}', + value_deserializer=lambda x: loads(x.decode('utf-8')) + ) + + def update(self): + for message in self.detectorConsumer: + if message.value['id'] == self.id: + print(f"Manager {self.id} received message: ") + print(message.value) + print("Updating info...\n") + self.moisture = message.value['moisture'] + self.temp = message.value['temperature'] + + dataMessage = { + 'id' : self.id, + 'moisture' : self.moisture, + 'temp' : self.temp, + 'valveStatus': str(self.valveStatus), + 'heaterStatus': str(self.heaterStatus), + 'isAutoOn' : self.isAutoOn + } + + self.dataPublisher.send('data', dataMessage) + +manager1 = Manager(id = 1) + +managers = [manager1] while True: - for message in dataConsumer: - print(message) \ No newline at end of file + time.sleep(1) + for manager in managers: + manager.update() \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 0683811..995a9e2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -39,6 +39,7 @@ services: echo -e 'Creating kafka topics' kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1 
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic dataDetectors --replication-factor 1 --partitions 1 kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 echo -e 'Successfully created the following topics:' From 7f88f87722aa05768a11fd3df72fc13e29fa0860 Mon Sep 17 00:00:00 2001 From: the Date: Tue, 29 Oct 2024 17:51:20 +0400 Subject: [PATCH 12/49] fix, no more infinite loops for consumer --- GreenhouseManager/manager.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index 4d1d836..5c2b975 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -21,7 +21,7 @@ class Manager: self.dataPublisher = KafkaProducer( bootstrap_servers=['localhost:9092'], - client_id=f'manager{id}', + client_id=f'manager{id}_producer', value_serializer=lambda v: dumps(v).encode('utf-8') ) @@ -30,6 +30,7 @@ class Manager: bootstrap_servers=['localhost:9092'], auto_offset_reset='earliest', enable_auto_commit=True, + consumer_timeout_ms = 1000, group_id=f'manager{id}', value_deserializer=lambda x: loads(x.decode('utf-8')) ) @@ -39,20 +40,25 @@ class Manager: if message.value['id'] == self.id: print(f"Manager {self.id} received message: ") print(message.value) - print("Updating info...\n") self.moisture = message.value['moisture'] self.temp = message.value['temperature'] + print("Updating info...\n") - dataMessage = { - 'id' : self.id, - 'moisture' : self.moisture, - 'temp' : self.temp, + self.sendData() + + def sendData(self): + print("sending data...") + message = { + 'id': self.id, + 'moisture': self.moisture, + 'temp': self.temp, 'valveStatus': str(self.valveStatus), 'heaterStatus': str(self.heaterStatus), - 'isAutoOn' : self.isAutoOn + 'isAutoOn': self.isAutoOn } - self.dataPublisher.send('data', dataMessage) + print(message) + self.dataPublisher.send('data', message) manager1 = Manager(id = 1) From 091dcbd3a3bbda0af7b0b00ca439483c69e10476 Mon Sep 17 00:00:00 2001 From: dimazhelovanov Date: Tue, 29 Oct 2024 19:49:33 +0400 Subject: [PATCH 13/49] =?UTF-8?q?=D0=A3=D0=BF=D1=80=D0=B0=D0=B2=D0=BB?= =?UTF-8?q?=D0=B5=D0=BD=D0=B8=D0=B5=20=D0=B2=D0=B5=D0=BD=D1=82=D0=B8=D0=BB?= =?UTF-8?q?=D1=8F=D0=BC=D0=B8/=D0=BD=D0=B0=D0=B3=D1=80=D0=B5=D0=B2=D0=B0?= =?UTF-8?q?=D1=82=D0=B5=D0=BB=D1=8F=D0=BC=D0=B8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- GreenhouseController/ManageController.py | 47 ++++++++++++++++++++++++ GreenhouseManager/manager.py | 3 +- 2 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 GreenhouseController/ManageController.py diff --git a/GreenhouseController/ManageController.py b/GreenhouseController/ManageController.py new file mode 100644 index 0000000..37f60b5 --- /dev/null +++ b/GreenhouseController/ManageController.py @@ -0,0 +1,47 @@ +from json import dumps + + + +from json import dumps + +class ManageController: + def __init__(self, producer, topic='commands'): + self.valve_state = "closed" + self.heater_state = "off" + self.producer = producer + self.topic = topic + + def toggle_device(self, device, request_id, greenhouse_id): + + + if device == 'valve': + + if self.valve_state == 'closed': + self.valve_state = 'open' + print("Valve opened") + else: + self.valve_state = 'closed' + print("Valve 
closed") + + elif device == 'heater': + + if self.heater_state == 'off': + self.heater_state = 'on' + print("Heater turned on") + else: + self.heater_state = 'off' + print("Heater turned off") + + + self.send_status(request_id, greenhouse_id) + + def send_status(self, request_id, greenhouse_id): + + status = { + 'request_id': request_id, + 'greenhouse_id': greenhouse_id, + 'valve_state': self.valve_state, + 'heater_state': self.heater_state + } + self.producer.send(self.topic, value=status) + print(f"Sent device status: {status}") \ No newline at end of file diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index 74fed03..9e96558 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -3,7 +3,8 @@ import kafka import socket from json import dumps, loads import time - +from ManageController import ManagerController +greenhouse_controller = GreenhouseController(producer) consumer = KafkaConsumer( 'commands', bootstrap_servers=['localhost:9092'], From 3f5bb31646a19fa7126f0a1a581dd5f1e64cac76 Mon Sep 17 00:00:00 2001 From: dimazhelovanov Date: Wed, 30 Oct 2024 11:10:39 +0400 Subject: [PATCH 14/49] =?UTF-8?q?=D0=9C=D0=B5=D0=BD=D0=B5=D0=B4=D0=B6?= =?UTF-8?q?=D0=B5=D1=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- GreenhouseController/ManageController.py | 5 +- GreenhouseManager/manager.py | 98 +++++++++++++++++++++--- 2 files changed, 89 insertions(+), 14 deletions(-) diff --git a/GreenhouseController/ManageController.py b/GreenhouseController/ManageController.py index 37f60b5..2d9ad10 100644 --- a/GreenhouseController/ManageController.py +++ b/GreenhouseController/ManageController.py @@ -43,5 +43,6 @@ class ManageController: 'valve_state': self.valve_state, 'heater_state': self.heater_state } - self.producer.send(self.topic, value=status) - print(f"Sent device status: {status}") \ No newline at end of file + + print(f"Sent device status: {status}") + return status \ No newline at end of file diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index 5c2b975..c082d04 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -4,20 +4,23 @@ import socket from json import dumps, loads import time from enum import Enum +from GreenhouseDetector.detector import Detector class Status(Enum): UNKNOWN = -1 OFF = 0 ON = 1 + class Manager: - def __init__(self, id : int, moisture : float = 0, temp : float = 20, valveStatus : Status = Status.UNKNOWN, heaterStatus : Status = Status.UNKNOWN, isAutoOn : bool = False): + def __init__(self, id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed", + heater_state: str = "off"): self.id = id self.moisture = moisture self.temp = temp - self.valveStatus = valveStatus - self.heaterStatus = heaterStatus self.isAutoOn = isAutoOn + self.valve_state = valve_state + self.heater_state = heater_state self.dataPublisher = KafkaProducer( bootstrap_servers=['localhost:9092'], @@ -25,19 +28,33 @@ class Manager: value_serializer=lambda v: dumps(v).encode('utf-8') ) - self.detectorConsumer = KafkaConsumer( - 'dataDetectors', + # self.detectorConsumer = KafkaConsumer( + # 'dataDetectors', + # bootstrap_servers=['localhost:9092'], + # auto_offset_reset='earliest', + # enable_auto_commit=True, + # consumer_timeout_ms=1000, + #group_id=f'manager{id}', + # value_deserializer=lambda x: loads(x.decode('utf-8')) + #) + self.controllerConsumer = KafkaConsumer( + 'commands', 
bootstrap_servers=['localhost:9092'], auto_offset_reset='earliest', enable_auto_commit=True, - consumer_timeout_ms = 1000, + consumer_timeout_ms=2000, group_id=f'manager{id}', value_deserializer=lambda x: loads(x.decode('utf-8')) ) + self.controllerConsumerResponse = KafkaProducer( + bootstrap_servers=['localhost:9092'], + client_id=f'manager{id}_producer', + value_serializer=lambda v: dumps(v).encode('utf-8') + ) def update(self): for message in self.detectorConsumer: - if message.value['id'] == self.id: + print(f"Manager {self.id} received message: ") print(message.value) self.moisture = message.value['moisture'] @@ -52,19 +69,76 @@ class Manager: 'id': self.id, 'moisture': self.moisture, 'temp': self.temp, - 'valveStatus': str(self.valveStatus), - 'heaterStatus': str(self.heaterStatus), + 'valveStatus': str(self.valve_state), + 'heaterStatus': str(self.heater_state), 'isAutoOn': self.isAutoOn } print(message) self.dataPublisher.send('data', message) -manager1 = Manager(id = 1) + def toggle_device(self, device, request_id, greenhouse_id): + + if device == 'valve': + + if self.valve_state == 'closed': + self.valve_state = 'open' + print("Valve opened") + else: + self.valve_state = 'closed' + print("Valve closed") + + elif device == 'heater': + + if self.heater_state == 'off': + self.heater_state = 'on' + print("Heater turned on") + else: + self.heater_state = 'off' + print("Heater turned off") + + self.send_status(request_id, greenhouse_id) + + def send_status(self, request_id, greenhouse_id): + + status = { + 'request_id': request_id, + 'greenhouse_id': greenhouse_id, + 'valve_state': self.valve_state, + 'heater_state': self.heater_state + } + self.sendDataCommand(status) + print("Updating info...\n") + + def sendDataCommand(self, message): + print("sending data...") + + self.dataPublisher.send('response', message) + + def getCommand(self): + messages = self.controllerConsumer.poll(timeout_ms=1000) + + # Проверяем, есть ли сообщения + + for tp, msgs in messages.items(): + for message in msgs: + print(f"Manager {self.id} received message: ") + print(message.value) + self.request_id = message.value['request_id'] + self.greenhouse_id = message.value['greenhouse_id'] + self.command = message.value['command'] + self.toggle_device(self.command, self.request_id, self.greenhouse_id) + + + +manager1 = Manager(id=1) managers = [manager1] + while True: - time.sleep(1) + time.sleep(5) + manager1.sendData() for manager in managers: - manager.update() \ No newline at end of file + + manager.getCommand() From 5d7beec9c13eeeea1f345e87fe645bdf37c38875 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=90=D1=80=D0=B8=D1=88=D0=B8=D0=BD=D0=B0=29?= <Аришина)@DESKTOP-OUE59OV> Date: Wed, 30 Oct 2024 15:33:08 +0400 Subject: [PATCH 15/49] =?UTF-8?q?=D0=94=D0=BE=D0=B1=D0=B0=D0=B2=D0=B8?= =?UTF-8?q?=D0=BB=20cors=20program.cs=20=D0=B4=D0=BB=D1=8F=20=D1=84=D1=80?= =?UTF-8?q?=D0=BE=D0=BD=D1=82=D0=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../20241030111034_front.Designer.cs | 95 +++++++++++++++++++ Cloud/Migrations/20241030111034_front.cs | 19 ++++ Cloud/Program.cs | 14 +++ 3 files changed, 128 insertions(+) create mode 100644 Cloud/Migrations/20241030111034_front.Designer.cs create mode 100644 Cloud/Migrations/20241030111034_front.cs diff --git a/Cloud/Migrations/20241030111034_front.Designer.cs b/Cloud/Migrations/20241030111034_front.Designer.cs new file mode 100644 index 0000000..017938b --- /dev/null +++ b/Cloud/Migrations/20241030111034_front.Designer.cs @@ -0,0 
+1,95 @@ +// +using Cloud; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; + +#nullable disable + +namespace Cloud.Migrations +{ + [DbContext(typeof(ApplicationContext))] + [Migration("20241030111034_front")] + partial class front + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "6.0.14") + .HasAnnotation("Relational:MaxIdentifierLength", 63); + + NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); + + modelBuilder.Entity("Cloud.Models.Farm", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("integer"); + + NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); + + b.Property("Name") + .IsRequired() + .HasColumnType("text"); + + b.Property("RaspberryMacAddr") + .IsRequired() + .HasColumnType("text"); + + b.Property("UserId") + .HasColumnType("integer"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("Farms"); + }); + + modelBuilder.Entity("Cloud.Models.User", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("integer"); + + NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id")); + + b.Property("Email") + .IsRequired() + .HasColumnType("text"); + + b.Property("Name") + .IsRequired() + .HasColumnType("text"); + + b.Property("Password") + .IsRequired() + .HasColumnType("text"); + + b.HasKey("Id"); + + b.ToTable("Users"); + }); + + modelBuilder.Entity("Cloud.Models.Farm", b => + { + b.HasOne("Cloud.Models.User", "User") + .WithMany("Farms") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("User"); + }); + + modelBuilder.Entity("Cloud.Models.User", b => + { + b.Navigation("Farms"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/Cloud/Migrations/20241030111034_front.cs b/Cloud/Migrations/20241030111034_front.cs new file mode 100644 index 0000000..9ca6bc8 --- /dev/null +++ b/Cloud/Migrations/20241030111034_front.cs @@ -0,0 +1,19 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace Cloud.Migrations +{ + public partial class front : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + + } + } +} diff --git a/Cloud/Program.cs b/Cloud/Program.cs index 6f77533..d1e6521 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -33,6 +33,17 @@ builder.Services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) builder.Services.AddDbContext(options => options.UseNpgsql("Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345")); +// Настройка CORS +builder.Services.AddCors(options => +{ + options.AddPolicy("AllowFrontendLocalhost", builder => + { + builder.WithOrigins("http://localhost:3000") // фронтенд + .AllowAnyHeader() + .AllowAnyMethod(); + }); +}); + builder.Services.AddControllers(); builder.Services.AddFluentValidationAutoValidation(); builder.Services.AddFluentValidationClientsideAdapters(); @@ -85,6 +96,9 @@ if (app.Environment.IsDevelopment()) app.UseHttpsRedirection(); +// Включение CORS +app.UseCors("AllowFrontendLocalhost"); + app.UseAuthentication(); app.UseAuthorization(); From a4dae6211f8200c61f7d3345b370d594270b6d7a Mon 
Sep 17 00:00:00 2001 From: "m.zargarov" Date: Sun, 10 Nov 2024 13:35:23 +0400 Subject: [PATCH 16/49] add redis --- Cloud/Cloud.csproj | 1 + Cloud/Program.cs | 10 +++++++++- docker-compose.yml | 15 +++++++++++++++ 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/Cloud/Cloud.csproj b/Cloud/Cloud.csproj index 93e4190..6a5fc81 100644 --- a/Cloud/Cloud.csproj +++ b/Cloud/Cloud.csproj @@ -15,6 +15,7 @@ all + diff --git a/Cloud/Program.cs b/Cloud/Program.cs index d1e6521..c02b6fb 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -6,12 +6,20 @@ using System.Text; using FluentValidation; using FluentValidation.AspNetCore; using Cloud.Validation; +using StackExchange.Redis; var builder = WebApplication.CreateBuilder(args); // Add services to the container. -//Jwt configuration starts here +//Redis configuration +builder.Services.AddSingleton(sp => +{ + var configuration = ConfigurationOptions.Parse("localhost:6379"); + return ConnectionMultiplexer.Connect(configuration); +}); + +//Jwt configuration var jwtIssuer = builder.Configuration.GetSection("Jwt:Issuer").Get(); var jwtKey = builder.Configuration.GetSection("Jwt:Key").Get(); diff --git a/docker-compose.yml b/docker-compose.yml index 6915ce1..f13b1b9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,6 +11,21 @@ services: volumes: - postgres_data:/var/lib/postgresql/data + redis: + image: 'redis:latest' + ports: + - '6379:6379' + volumes: + - 'cloud-redis:/data' + healthcheck: + test: + - CMD + - redis-cli + - ping + retries: 3 + timeout: 5s volumes: postgres_data: + driver: local + cloud-redis: driver: local \ No newline at end of file From 7f5262575e81e24965fc48f3b9b89cec0d4640d1 Mon Sep 17 00:00:00 2001 From: the Date: Tue, 12 Nov 2024 16:18:14 +0400 Subject: [PATCH 17/49] webhook test --- GreenhouseDetector/detector.py | 46 ++++++++++++++-------------------- GreenhouseManager/manager.py | 34 ++++++++++--------------- 2 files changed, 32 insertions(+), 48 deletions(-) diff --git a/GreenhouseDetector/detector.py b/GreenhouseDetector/detector.py index 728ec1d..57545e9 100644 --- a/GreenhouseDetector/detector.py +++ b/GreenhouseDetector/detector.py @@ -1,13 +1,19 @@ -from random import random -from turtledemo.penrose import start - -from kafka import KafkaProducer, KafkaConsumer -import kafka -import socket -from json import dumps, loads import time import random as rnd +from flask import Flask, jsonify +import requests +import threading + +app = Flask(__name__) + +def start_detector(): + while True: + for detector in detectors: + detector.cycle() + detector.sendData() + time.sleep(1) + class Detector: def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower): self.id = id @@ -18,31 +24,17 @@ class Detector: self.moisture = 0 self.temp = 0 - self.producer = KafkaProducer( - bootstrap_servers=['localhost:9092'], - client_id=f'detector{self.id}', - value_serializer=lambda v: dumps(v).encode('utf-8') - ) - - def sendData(self): - message = {'id' : self.id, - 'moisture': self.moisture, - 'temperature' : self.temp } - self.producer.send('dataDetectors', message) - def cycle(self): self.moisture += rnd.random() / 100 self.temp += (rnd.random() - 0.5) / 100 detector1 = Detector(1, 0.6, 0.2, 40, 20) -detector2 = Detector(2, 0.7, 0.3, 40, 20) -detector3 = Detector(3, 0.9, 0.6, 40, 20) -detectors = [detector1, detector2, detector3] +detectors = [detector1] -while True: - for detector in detectors: - detector.cycle() - detector.sendData() - time.sleep(1) +t1 = 
threading.Thread(target=start_detector) + +if __name__ =="__main__": + t1.start() + app.run(host='0.0.0.0', port=20001, debug=True) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index c082d04..b3d21bf 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -2,15 +2,15 @@ from kafka import KafkaProducer, KafkaConsumer import kafka import socket from json import dumps, loads +from flask import Flask, request import time from enum import Enum -from GreenhouseDetector.detector import Detector +import threading -class Status(Enum): - UNKNOWN = -1 - OFF = 0 - ON = 1 +app = Flask(__name__) +def start_manager(): + return class Manager: def __init__(self, id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed", @@ -28,15 +28,6 @@ class Manager: value_serializer=lambda v: dumps(v).encode('utf-8') ) - # self.detectorConsumer = KafkaConsumer( - # 'dataDetectors', - # bootstrap_servers=['localhost:9092'], - # auto_offset_reset='earliest', - # enable_auto_commit=True, - # consumer_timeout_ms=1000, - #group_id=f'manager{id}', - # value_deserializer=lambda x: loads(x.decode('utf-8')) - #) self.controllerConsumer = KafkaConsumer( 'commands', bootstrap_servers=['localhost:9092'], @@ -129,16 +120,17 @@ class Manager: self.command = message.value['command'] self.toggle_device(self.command, self.request_id, self.greenhouse_id) - +@app.route('/webhook', methods=['POST']) +def webhook(): + if request.method == 'POST': + print("Data received from Webhook is") + return "Webhook received" manager1 = Manager(id=1) managers = [manager1] +t1 = threading.Thread(target=start_manager) -while True: - time.sleep(5) - manager1.sendData() - for manager in managers: - - manager.getCommand() +if __name__ == "__main__": + app.run(host="0.0.0.0", port=20002) \ No newline at end of file From b7f4aa3f9f145a1e5c14fa63a97cf9af1a50b975 Mon Sep 17 00:00:00 2001 From: the Date: Tue, 12 Nov 2024 16:19:37 +0400 Subject: [PATCH 18/49] webhook fix --- GreenhouseManager/manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index b3d21bf..1152700 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -123,7 +123,7 @@ class Manager: @app.route('/webhook', methods=['POST']) def webhook(): if request.method == 'POST': - print("Data received from Webhook is") + print("Data received from Webhook is", request.json) return "Webhook received" manager1 = Manager(id=1) From f82c8daa92541b2fdc60878a64ba978c8535e92a Mon Sep 17 00:00:00 2001 From: the Date: Tue, 12 Nov 2024 17:04:24 +0400 Subject: [PATCH 19/49] detector webhooks --- GreenhouseDetector/detector.py | 23 +++++++++---------- GreenhouseManager/manager.py | 41 +++++++++++++++------------------- 2 files changed, 29 insertions(+), 35 deletions(-) diff --git a/GreenhouseDetector/detector.py b/GreenhouseDetector/detector.py index 57545e9..d092373 100644 --- a/GreenhouseDetector/detector.py +++ b/GreenhouseDetector/detector.py @@ -1,19 +1,12 @@ import time import random as rnd -from flask import Flask, jsonify +from flask import Flask import requests import threading app = Flask(__name__) -def start_detector(): - while True: - for detector in detectors: - detector.cycle() - detector.sendData() - time.sleep(1) - class Detector: def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower): self.id = id @@ -28,13 +21,19 @@ class Detector: self.moisture 
+= rnd.random() / 100 self.temp += (rnd.random() - 0.5) / 100 + def sendData(self): + data = {"moisture": self.moisture, + "temp": self.temp} + requests.post(f"http://127.0.0.1:20002/webhook?id={self.id}", json=data) + detector1 = Detector(1, 0.6, 0.2, 40, 20) detectors = [detector1] -t1 = threading.Thread(target=start_detector) - if __name__ =="__main__": - t1.start() - app.run(host='0.0.0.0', port=20001, debug=True) + while True: + for detector in detectors: + detector.cycle() + detector.sendData() + time.sleep(1) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index 1152700..457c483 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -13,9 +13,9 @@ def start_manager(): return class Manager: - def __init__(self, id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed", + def __init__(self, _id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed", heater_state: str = "off"): - self.id = id + self._id = _id self.moisture = moisture self.temp = temp self.isAutoOn = isAutoOn @@ -24,7 +24,7 @@ class Manager: self.dataPublisher = KafkaProducer( bootstrap_servers=['localhost:9092'], - client_id=f'manager{id}_producer', + client_id=f'manager{self._id}_producer', value_serializer=lambda v: dumps(v).encode('utf-8') ) @@ -34,30 +34,19 @@ class Manager: auto_offset_reset='earliest', enable_auto_commit=True, consumer_timeout_ms=2000, - group_id=f'manager{id}', + group_id=f'manager{self._id}', value_deserializer=lambda x: loads(x.decode('utf-8')) ) self.controllerConsumerResponse = KafkaProducer( bootstrap_servers=['localhost:9092'], - client_id=f'manager{id}_producer', + client_id=f'manager{self._id}_producer', value_serializer=lambda v: dumps(v).encode('utf-8') ) - def update(self): - for message in self.detectorConsumer: - - print(f"Manager {self.id} received message: ") - print(message.value) - self.moisture = message.value['moisture'] - self.temp = message.value['temperature'] - print("Updating info...\n") - - self.sendData() - def sendData(self): print("sending data...") message = { - 'id': self.id, + 'id': self._id, 'moisture': self.moisture, 'temp': self.temp, 'valveStatus': str(self.valve_state), @@ -113,21 +102,27 @@ class Manager: for tp, msgs in messages.items(): for message in msgs: - print(f"Manager {self.id} received message: ") + print(f"Manager {self._id} received message: ") print(message.value) self.request_id = message.value['request_id'] self.greenhouse_id = message.value['greenhouse_id'] self.command = message.value['command'] self.toggle_device(self.command, self.request_id, self.greenhouse_id) -@app.route('/webhook', methods=['POST']) +@app.route(f'/webhook', methods=['POST']) def webhook(): - if request.method == 'POST': - print("Data received from Webhook is", request.json) - return "Webhook received" + for manager in managers: + if request.args.get('id') == manager._id and request.method == 'POST': + print("Data received from Webhook is", request.json) -manager1 = Manager(id=1) + body = request.json + for key, value in body.items(): + setattr(manager, key, value) + return f"Webhook received for manager {manager._id}" + return "Webhook ignored" + +manager1 = Manager(_id=1) managers = [manager1] t1 = threading.Thread(target=start_manager) From 4747f975c53d19befc73d28da032cac98f0ae69f Mon Sep 17 00:00:00 2001 From: the Date: Tue, 12 Nov 2024 18:03:45 +0400 Subject: [PATCH 20/49] kafka network --- docker-compose.yml | 34 
++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 995a9e2..e05fdef 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,7 @@ services: zookeeper: + networks: + - vpn image: confluentinc/cp-zookeeper:7.4.0 environment: ZOOKEEPER_CLIENT_PORT: 2181 @@ -8,17 +10,18 @@ services: - 2181:2181 kafka: + networks: + - vpn image: confluentinc/cp-kafka:7.4.0 ports: - - 9092:9092 - - 9997:9997 - expose: - 29092:29092 environment: KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT - KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092 + KAFKA_LISTENERS: EXTERNAL_SAME_HOST://:29092,EXTERNAL_DIFFERENT_HOST://:29093,INTERNAL://:9092 + KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_SAME_HOST://localhost:29092,EXTERNAL_DIFFERENT_HOST://157.245.80.232:29093 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL_SAME_HOST:PLAINTEXT,EXTERNAL_DIFFERENT_HOST:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000 @@ -27,6 +30,8 @@ services: - zookeeper init-kafka: + networks: + - vpn image: confluentinc/cp-kafka:7.4.0 depends_on: - kafka @@ -34,19 +39,20 @@ services: command: | " # blocks until kafka is reachable - kafka-topics --bootstrap-server kafka:29092 --list + kafka-topics --bootstrap-server kafka:9092 --list echo -e 'Creating kafka topics' - kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1 - kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 - kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic dataDetectors --replication-factor 1 --partitions 1 - kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:9092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:9092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:9092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 echo -e 'Successfully created the following topics:' - kafka-topics --bootstrap-server kafka:29092 --list + kafka-topics --bootstrap-server kafka:9092 --list " kafka-ui: + networks: + - vpn container_name: kafka-ui image: provectuslabs/kafka-ui:latest ports: @@ -56,4 +62,8 @@ services: environment: KAFKA_CLUSTERS_0_NAME: local KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 - KAFKA_CLUSTERS_0_METRICS_PORT: 9997 \ No newline at end of file + KAFKA_CLUSTERS_0_METRICS_PORT: 9997 + +networks: + vpn: + name: kafkaVPN \ No newline at end of file From 03302065abf2dc911c9533c207b9c2f592831ec1 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Tue, 12 Nov 2024 19:18:37 +0400 Subject: [PATCH 21/49] =?UTF-8?q?fix:=20mac=20=D1=82=D0=B5=D0=BF=D0=B5?= =?UTF-8?q?=D1=80=D1=8C=20=D0=BD=D0=B5=20=D0=B8=D0=BF=D0=BE=D0=BB=D1=8C?= =?UTF-8?q?=D0=B7=D1=83=D0=B5=D1=82=D1=81=D1=8F.=20ip=20=D0=BD=D0=B0=D1=88?= =?UTF-8?q?=D0=B5=20=D0=B2=D1=81=D0=B5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
Cloud/Controllers/FarmController.cs | 204 ++++++++++++++-------------- Cloud/Models/Farm.cs | 19 ++- Cloud/Requests/FarmRequest.cs | 12 +- Cloud/Validation/FarmValidator.cs | 24 ++-- 4 files changed, 129 insertions(+), 130 deletions(-) diff --git a/Cloud/Controllers/FarmController.cs b/Cloud/Controllers/FarmController.cs index a1e7b73..b5cf078 100644 --- a/Cloud/Controllers/FarmController.cs +++ b/Cloud/Controllers/FarmController.cs @@ -6,123 +6,123 @@ using Microsoft.EntityFrameworkCore; namespace Cloud.Controllers { - [Authorize] - [ApiController] - [Route("api/user")] - public class FarmController : ControllerBase - { - private IConfiguration _config; - private ApplicationContext _context; + [Authorize] + [ApiController] + [Route("api/user")] + public class FarmController : ControllerBase + { + private IConfiguration _config; + private ApplicationContext _context; - public FarmController(IConfiguration config, ApplicationContext context) - { - _config = config; - _context = context; - } + public FarmController(IConfiguration config, ApplicationContext context) + { + _config = config; + _context = context; + } - [HttpGet("{userId}/farm")] - public async Task>> Index (int userId) - { - try - { - List farms = await - _context.Farms.Where(x => x.UserId == userId).AsNoTracking().ToListAsync(); - if (!farms.Any()) - return NotFound("Farms is not found"); + [HttpGet("{userId}/farm")] + public async Task>> Index(int userId) + { + try + { + List farms = await + _context.Farms.Where(x => x.UserId == userId).AsNoTracking().ToListAsync(); + if (!farms.Any()) + return NotFound("Farms is not found"); - return Ok(farms); - } - catch (Exception ex) - { - return BadRequest(ex.Message); - } - } + return Ok(farms); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } - [HttpGet("{userId}/farm/{farmId}")] - public async Task> Show(int userId, int farmId) - { - try - { - Farm? farm = await - _context.Farms.FirstOrDefaultAsync(x => x.UserId == userId && x.Id == farmId); + [HttpGet("{userId}/farm/{farmId}")] + public async Task> Show(int userId, int farmId) + { + try + { + Farm? farm = await + _context.Farms.FirstOrDefaultAsync(x => x.UserId == userId && x.Id == farmId); - if (farm == null) - return NotFound("Farm is not found"); + if (farm == null) + return NotFound("Farm is not found"); - return Ok(farm); - } - catch (Exception ex) - { - return BadRequest(ex.Message); - } - } + return Ok(farm); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } - [HttpPost("{userId}/farm")] - public async Task> Create([FromBody] FarmRequest farmRequest, int userId) - { - try - { - var farm = new Farm { - Name = farmRequest.Name, - UserId = userId, - RaspberryMacAddr = farmRequest.RaspberryMacAddr, - }; + [HttpPost("{userId}/farm")] + public async Task> Create([FromBody] FarmRequest farmRequest, int userId) + { + try + { + var farm = new Farm + { + Name = farmRequest.Name, + UserId = userId, + RaspberryIP = farmRequest.RaspberryIP, + }; - Farm? farmCreated = _context.Farms.Add(farm).Entity; - await _context.SaveChangesAsync(); + Farm? farmCreated = _context.Farms.Add(farm).Entity; + await _context.SaveChangesAsync(); - return Ok(farmCreated); - } - catch (Exception ex) - { - return BadRequest(ex.Message); - } - } + return Ok(farmCreated); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } - [HttpPut("{userId}/farm/{farmId}")] - public async Task> Update([FromBody] FarmRequest farmRequest, int userId, int farmId) - { - try - { - Farm? 
farm = await _context.Farms.FirstOrDefaultAsync(x => x.Id == farmId && x.UserId == userId); + [HttpPut("{userId}/farm/{farmId}")] + public async Task> Update([FromBody] FarmRequest farmRequest, int userId, int farmId) + { + try + { + Farm? farm = await _context.Farms.FirstOrDefaultAsync(x => x.Id == farmId && x.UserId == userId); - if (farm == null) - return NotFound("Farm is not found"); + if (farm == null) + return NotFound("Farm is not found"); - farm.Name = farmRequest.Name; - farm.RaspberryMacAddr = farmRequest.RaspberryMacAddr; + farm.Name = farmRequest.Name; + farm.RaspberryIP = farmRequest.RaspberryIP; - _context.Farms.Update(farm); - await _context.SaveChangesAsync(); + _context.Farms.Update(farm); + await _context.SaveChangesAsync(); - return Ok(farm); - } - catch (Exception ex) - { - return BadRequest(ex.Message); - } - } + return Ok(farm); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } - [HttpDelete("{userId}/farm/{farmId}")] - public async Task Delete(int userId, int farmId) - { - try - { - Farm? farm = await _context.Farms.FirstOrDefaultAsync(x => x.Id == farmId && x.UserId == userId); + [HttpDelete("{userId}/farm/{farmId}")] + public async Task Delete(int userId, int farmId) + { + try + { + Farm? farm = await _context.Farms.FirstOrDefaultAsync(x => x.Id == farmId && x.UserId == userId); - if (farm == null) - return NotFound("Farm is not found"); + if (farm == null) + return NotFound("Farm is not found"); - _context.Farms.Remove(farm); - await _context.SaveChangesAsync(); + _context.Farms.Remove(farm); + await _context.SaveChangesAsync(); - return Ok("Farm deleted successfully"); - } - catch (Exception ex) - { - return BadRequest(ex.Message); - } - } - - } -} + return Ok("Farm deleted successfully"); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } + } +} \ No newline at end of file diff --git a/Cloud/Models/Farm.cs b/Cloud/Models/Farm.cs index a17d3bc..5cc1ac8 100644 --- a/Cloud/Models/Farm.cs +++ b/Cloud/Models/Farm.cs @@ -1,12 +1,11 @@ namespace Cloud.Models { - public class Farm - { - public int Id { get; set; } - public string Name { get; set; } - public int UserId { get; set; } - public User? User { get; set; } - public string RaspberryMacAddr { get; set; } - - } -} + public class Farm + { + public int Id { get; set; } + public string Name { get; set; } + public int UserId { get; set; } + public User? 
User { get; set; } + public string RaspberryIP { get; set; } + } +} \ No newline at end of file diff --git a/Cloud/Requests/FarmRequest.cs b/Cloud/Requests/FarmRequest.cs index c032dfa..3371488 100644 --- a/Cloud/Requests/FarmRequest.cs +++ b/Cloud/Requests/FarmRequest.cs @@ -1,8 +1,8 @@ namespace Cloud.Requests { - public class FarmRequest - { - public string Name { get; set; } - public string RaspberryMacAddr { get; set; } - } -} + public class FarmRequest + { + public string Name { get; set; } + public string RaspberryIP { get; set; } + } +} \ No newline at end of file diff --git a/Cloud/Validation/FarmValidator.cs b/Cloud/Validation/FarmValidator.cs index 4c262fb..c4df29e 100644 --- a/Cloud/Validation/FarmValidator.cs +++ b/Cloud/Validation/FarmValidator.cs @@ -3,16 +3,16 @@ using FluentValidation; namespace Cloud.Validation { - public class FarmValidator : AbstractValidator - { - public FarmValidator() - { - RuleFor(request => request.RaspberryMacAddr) - .NotEmpty().WithMessage("MAC address can't be empty") - .Matches("^([0-9A-Fa-f]{2}[:-]?){5}([0-9A-Fa-f]{2})$").WithMessage("MAC address is not valid"); + public class FarmValidator : AbstractValidator + { + public FarmValidator() + { + RuleFor(request => request.RaspberryIP) + .NotEmpty().WithMessage("IP address can't be empty") + .Matches(@"^((25[0-5]|(2[0-4]|1\d|[1-9]|)\d)\.?\b){4}$").WithMessage("IP address is not valid"); - RuleFor(request => request.Name) - .NotEmpty().WithMessage("Name can't be empty"); - } - } -} + RuleFor(request => request.Name) + .NotEmpty().WithMessage("Name can't be empty"); + } + } +} \ No newline at end of file From 488c91d2b173b3b509c45a98620534cc8c18f8fd Mon Sep 17 00:00:00 2001 From: mfnefd Date: Tue, 12 Nov 2024 19:48:58 +0400 Subject: [PATCH 22/49] =?UTF-8?q?del:=20=D1=83=D0=B4=D0=B0=D0=BB=D0=B5?= =?UTF-8?q?=D0=BD=D1=8B=20=D0=BD=D0=B5=D0=BD=D1=83=D0=B6=D0=BD=D1=8B=D0=B5?= =?UTF-8?q?=20=D1=84=D0=B0=D0=B9=D0=BB=D1=8B=20=D0=B6=D0=B5=D1=81=D1=82?= =?UTF-8?q?=D1=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Controllers/WeatherForecastController.cs | 32 ------------------- Cloud/WeatherForecast.cs | 12 ------- 2 files changed, 44 deletions(-) delete mode 100644 Cloud/Controllers/WeatherForecastController.cs delete mode 100644 Cloud/WeatherForecast.cs diff --git a/Cloud/Controllers/WeatherForecastController.cs b/Cloud/Controllers/WeatherForecastController.cs deleted file mode 100644 index 5f76bcd..0000000 --- a/Cloud/Controllers/WeatherForecastController.cs +++ /dev/null @@ -1,32 +0,0 @@ -using Microsoft.AspNetCore.Mvc; - -namespace Cloud.Controllers; - -[ApiController] -[Route("[controller]")] -public class WeatherForecastController : ControllerBase -{ - private static readonly string[] Summaries = new[] - { - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" - }; - - private readonly ILogger _logger; - - public WeatherForecastController(ILogger logger) - { - _logger = logger; - } - - [HttpGet(Name = "GetWeatherForecast")] - public IEnumerable Get() - { - return Enumerable.Range(1, 5).Select(index => new WeatherForecast - { - Date = DateTime.Now.AddDays(index), - TemperatureC = Random.Shared.Next(-20, 55), - Summary = Summaries[Random.Shared.Next(Summaries.Length)] - }) - .ToArray(); - } -} diff --git a/Cloud/WeatherForecast.cs b/Cloud/WeatherForecast.cs deleted file mode 100644 index d787653..0000000 --- a/Cloud/WeatherForecast.cs +++ /dev/null @@ -1,12 +0,0 @@ -namespace Cloud; - -public class 
WeatherForecast -{ - public DateTime Date { get; set; } - - public int TemperatureC { get; set; } - - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); - - public string? Summary { get; set; } -} From 5adec563ac3f4553b779d89f3ffa2a03ff7d844d Mon Sep 17 00:00:00 2001 From: mfnefd Date: Tue, 12 Nov 2024 19:57:44 +0400 Subject: [PATCH 23/49] =?UTF-8?q?add:=20dockerfile=20=D0=B8=20=D0=B8=D0=B7?= =?UTF-8?q?=D0=BC=D0=B5=D0=BD=D0=B5=D0=BD=20=D1=87=D1=83=D1=82=D0=BA=D0=B0?= =?UTF-8?q?=20docker-compose?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .dockerignore | 25 +++++++++++++++++++++++++ .vscode/launch.json | 35 +++++++++++++++++++++++++++++++++++ .vscode/tasks.json | 41 +++++++++++++++++++++++++++++++++++++++++ Cloud/Dockerfile | 28 ++++++++++++++++++++++++++++ docker-compose.yml | 6 ++++++ 5 files changed, 135 insertions(+) create mode 100644 .dockerignore create mode 100644 .vscode/launch.json create mode 100644 .vscode/tasks.json create mode 100644 Cloud/Dockerfile diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..3dbbcf3 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,25 @@ +**/.classpath +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/bin +**/charts +**/docker-compose* +**/compose* +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +LICENSE +README.md diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..e8a7501 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,35 @@ +{ + "version": "0.2.0", + "configurations": [ + { + // Use IntelliSense to find out which attributes exist for C# debugging + // Use hover for the description of the existing attributes + // For further information visit https://github.com/dotnet/vscode-csharp/blob/main/debugger-launchjson.md. + "name": ".NET Core Launch (web)", + "type": "coreclr", + "request": "launch", + "preLaunchTask": "build", + // If you have changed target frameworks, make sure to update the program path. + "program": "${workspaceFolder}/Cloud/bin/Debug/net6.0/Cloud.dll", + "args": [], + "cwd": "${workspaceFolder}/Cloud", + "stopAtEntry": false, + // Enable launching a web browser when ASP.NET Core starts. 
For more information: https://aka.ms/VSCode-CS-LaunchJson-WebBrowser + "serverReadyAction": { + "action": "openExternally", + "pattern": "\\bNow listening on:\\s+(https?://\\S+)" + }, + "env": { + "ASPNETCORE_ENVIRONMENT": "Development" + }, + "sourceFileMap": { + "/Views": "${workspaceFolder}/Views" + } + }, + { + "name": ".NET Core Attach", + "type": "coreclr", + "request": "attach" + } + ] +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..d410234 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "build", + "command": "dotnet", + "type": "process", + "args": [ + "build", + "${workspaceFolder}/Cloud.sln", + "/property:GenerateFullPaths=true", + "/consoleloggerparameters:NoSummary;ForceNoAlign" + ], + "problemMatcher": "$msCompile" + }, + { + "label": "publish", + "command": "dotnet", + "type": "process", + "args": [ + "publish", + "${workspaceFolder}/Cloud.sln", + "/property:GenerateFullPaths=true", + "/consoleloggerparameters:NoSummary;ForceNoAlign" + ], + "problemMatcher": "$msCompile" + }, + { + "label": "watch", + "command": "dotnet", + "type": "process", + "args": [ + "watch", + "run", + "--project", + "${workspaceFolder}/Cloud.sln" + ], + "problemMatcher": "$msCompile" + } + ] +} \ No newline at end of file diff --git a/Cloud/Dockerfile b/Cloud/Dockerfile new file mode 100644 index 0000000..6de9e42 --- /dev/null +++ b/Cloud/Dockerfile @@ -0,0 +1,28 @@ +FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS base +WORKDIR /app +EXPOSE 5124 + +ENV ASPNETCORE_URLS=http://+:5124 + +# Creates a non-root user with an explicit UID and adds permission to access the /app folder +# For more info, please refer to https://aka.ms/vscode-docker-dotnet-configure-containers +RUN adduser -u 5678 --disabled-password --gecos "" appuser && chown -R appuser /app +USER appuser + +FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:6.0 AS build +ARG configuration=Release +WORKDIR /src +COPY ["Cloud/Cloud.csproj", "Cloud/"] +RUN dotnet restore "Cloud/Cloud.csproj" +COPY . . +WORKDIR "/src/Cloud" +RUN dotnet build "Cloud.csproj" -c $configuration -o /app/build + +FROM build AS publish +ARG configuration=Release +RUN dotnet publish "Cloud.csproj" -c $configuration -o /app/publish /p:UseAppHost=false + +FROM base AS final +WORKDIR /app +COPY --from=publish /app/publish . 
+ENTRYPOINT ["dotnet", "Cloud.dll"] diff --git a/docker-compose.yml b/docker-compose.yml index 6915ce1..532e4cb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,10 @@ services: + cloud: + build: ./Cloud/ + ports: + - "5124:5124" + depends_on: + - postgres postgres: image: postgres:14 container_name: cucumber_database From ce3f3a4dc64ed5a987f4194f6707f0e6bbea85a5 Mon Sep 17 00:00:00 2001 From: the Date: Tue, 12 Nov 2024 22:31:57 +0400 Subject: [PATCH 24/49] kafka network fix --- docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index e05fdef..f17fc16 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,8 +18,8 @@ services: environment: KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_LISTENERS: EXTERNAL_SAME_HOST://:29092,EXTERNAL_DIFFERENT_HOST://:29093,INTERNAL://:9092 - KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_SAME_HOST://localhost:29092,EXTERNAL_DIFFERENT_HOST://157.245.80.232:29093 + KAFKA_LISTENERS: EXTERNAL_SAME_HOST://:29092,EXTERNAL_DIFFERENT_HOST://:29092,INTERNAL://:9092 + KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_SAME_HOST://localhost:29092,EXTERNAL_DIFFERENT_HOST://157.245.80.232:29092 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL_SAME_HOST:PLAINTEXT,EXTERNAL_DIFFERENT_HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 From 57e05aba909768bf6518fd37849150b6c52b6f86 Mon Sep 17 00:00:00 2001 From: the Date: Tue, 12 Nov 2024 22:34:06 +0400 Subject: [PATCH 25/49] kafka network unfix --- docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index f17fc16..e05fdef 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,8 +18,8 @@ services: environment: KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_LISTENERS: EXTERNAL_SAME_HOST://:29092,EXTERNAL_DIFFERENT_HOST://:29092,INTERNAL://:9092 - KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_SAME_HOST://localhost:29092,EXTERNAL_DIFFERENT_HOST://157.245.80.232:29092 + KAFKA_LISTENERS: EXTERNAL_SAME_HOST://:29092,EXTERNAL_DIFFERENT_HOST://:29093,INTERNAL://:9092 + KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_SAME_HOST://localhost:29092,EXTERNAL_DIFFERENT_HOST://157.245.80.232:29093 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL_SAME_HOST:PLAINTEXT,EXTERNAL_DIFFERENT_HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 From ffef39d409773dde7850fdd759f5ac5aaf2ba007 Mon Sep 17 00:00:00 2001 From: the Date: Wed, 13 Nov 2024 00:30:33 +0400 Subject: [PATCH 26/49] =?UTF-8?q?=D0=A1=D0=A2=D0=9E=D0=9B=D0=AC=D0=9A?= =?UTF-8?q?=D0=9E=20=D0=9C=D0=A3=D0=A7=D0=95=D0=9D=D0=98=D0=99=20=D0=A0?= =?UTF-8?q?=D0=90=D0=94=D0=98=20=D0=9E=D0=94=D0=9D=D0=9E=D0=99=20=D0=A1?= =?UTF-8?q?=D0=A2=D0=A0=D0=9E=D0=A7=D0=9A=D0=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- GreenhouseManager/manager.py | 3 +++ docker-compose.yml | 47 ++++++++++++++++++++---------------- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index 457c483..794fbff 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -56,6 +56,7 @@ class Manager: print(message) self.dataPublisher.send('data', message) + self.dataPublisher.flush() def 
toggle_device(self, device, request_id, greenhouse_id): @@ -119,6 +120,8 @@ def webhook(): for key, value in body.items(): setattr(manager, key, value) + manager.sendData() + return f"Webhook received for manager {manager._id}" return "Webhook ignored" diff --git a/docker-compose.yml b/docker-compose.yml index e05fdef..a0a4521 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ services: zookeeper: - networks: - - vpn +# networks: +# - vpn image: confluentinc/cp-zookeeper:7.4.0 environment: ZOOKEEPER_CLIENT_PORT: 2181 @@ -10,18 +10,23 @@ services: - 2181:2181 kafka: - networks: - - vpn +# networks: +# - vpn image: confluentinc/cp-kafka:7.4.0 ports: - - 29092:29092 + - 9092:9092 + - 9997:9997 + + expose: + - 29092:29092 + environment: KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_LISTENERS: EXTERNAL_SAME_HOST://:29092,EXTERNAL_DIFFERENT_HOST://:29093,INTERNAL://:9092 - KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_SAME_HOST://localhost:29092,EXTERNAL_DIFFERENT_HOST://157.245.80.232:29093 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL_SAME_HOST:PLAINTEXT,EXTERNAL_DIFFERENT_HOST:PLAINTEXT - KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL + KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092 + KAFKA_ADVERTISED_LISTENERS: HOST://localhost:9092,DOCKER://kafka:29092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000 @@ -30,8 +35,8 @@ services: - zookeeper init-kafka: - networks: - - vpn +# networks: +# - vpn image: confluentinc/cp-kafka:7.4.0 depends_on: - kafka @@ -39,20 +44,20 @@ services: command: | " # blocks until kafka is reachable - kafka-topics --bootstrap-server kafka:9092 --list + kafka-topics --bootstrap-server kafka:29092 --list echo -e 'Creating kafka topics' - kafka-topics --bootstrap-server kafka:9092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1 - kafka-topics --bootstrap-server kafka:9092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 - kafka-topics --bootstrap-server kafka:9092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 echo -e 'Successfully created the following topics:' - kafka-topics --bootstrap-server kafka:9092 --list + kafka-topics --bootstrap-server kafka:29092 --list " kafka-ui: - networks: - - vpn +# networks: +# - vpn container_name: kafka-ui image: provectuslabs/kafka-ui:latest ports: @@ -64,6 +69,6 @@ services: KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 KAFKA_CLUSTERS_0_METRICS_PORT: 9997 -networks: - vpn: - name: kafkaVPN \ No newline at end of file +#networks: +# vpn: +# name: kafkaVPN \ No newline at end of file From 73961934f0081de042370a133fd2fee49f94b2aa Mon Sep 17 00:00:00 2001 From: the Date: Wed, 13 Nov 2024 00:31:21 +0400 Subject: [PATCH 27/49] =?UTF-8?q?=D0=A1=D0=A2=D0=9E=D0=9B=D0=AC=D0=9A?= =?UTF-8?q?=D0=9E=20=D0=9C=D0=A3=D0=A7=D0=95=D0=9D=D0=98=D0=99=20=D0=A0?= 
=?UTF-8?q?=D0=90=D0=94=D0=98=20=D0=9E=D0=94=D0=9D=D0=9E=D0=99=20=D0=A1?= =?UTF-8?q?=D0=A2=D0=A0=D0=9E=D0=A7=D0=9A=D0=98,=20=D0=95=D0=A9=D0=81=20?= =?UTF-8?q?=D0=98=20=D0=9D=D0=95=D0=A2=D0=92=D0=9E=D0=A0=D0=9A=20=D0=9D?= =?UTF-8?q?=D0=90=20=D0=A1=D0=90=D0=9C=D0=9E=D0=9C=20=D0=94=D0=95=D0=9B?= =?UTF-8?q?=D0=95=20=D0=9D=D0=95=20=D0=A0=D0=90=D0=91=D0=9E=D0=A2=D0=90?= =?UTF-8?q?=D0=95=D0=A2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- GreenhouseManager/manager.py | 1 + 1 file changed, 1 insertion(+) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index 794fbff..efee642 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -58,6 +58,7 @@ class Manager: self.dataPublisher.send('data', message) self.dataPublisher.flush() + def toggle_device(self, device, request_id, greenhouse_id): if device == 'valve': From b009ebdd0ced34f1628dc368c28b8aaa88318797 Mon Sep 17 00:00:00 2001 From: the Date: Wed, 13 Nov 2024 00:36:31 +0400 Subject: [PATCH 28/49] =?UTF-8?q?=D0=90=D0=90=D0=90=D0=90=D0=90=D0=90?= =?UTF-8?q?=D0=90=D0=90=D0=90=D0=90=D0=90=D0=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index a0a4521..56659ba 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,7 +24,7 @@ services: KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092 - KAFKA_ADVERTISED_LISTENERS: HOST://localhost:9092,DOCKER://kafka:29092 + KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 From 83aec339c9875de463d6d3fadd72519d0ae528d9 Mon Sep 17 00:00:00 2001 From: the Date: Wed, 13 Nov 2024 00:53:31 +0400 Subject: [PATCH 29/49] =?UTF-8?q?=D1=84=D0=B8=D0=BA=D1=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- GreenhouseManager/manager.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index efee642..d9772d1 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -113,8 +113,10 @@ class Manager: @app.route(f'/webhook', methods=['POST']) def webhook(): + print("received webhook", request.args.get('id')) for manager in managers: - if request.args.get('id') == manager._id and request.method == 'POST': + print() + if int(request.args.get('id')) == manager._id and request.method == 'POST': print("Data received from Webhook is", request.json) body = request.json @@ -126,10 +128,9 @@ def webhook(): return f"Webhook received for manager {manager._id}" return "Webhook ignored" +t1 = threading.Thread(target=start_manager) manager1 = Manager(_id=1) managers = [manager1] -t1 = threading.Thread(target=start_manager) - if __name__ == "__main__": - app.run(host="0.0.0.0", port=20002) \ No newline at end of file + threading.Thread(target=lambda: app.run(host="0.0.0.0", port=20002, debug=True, use_reloader=False)).start() \ No newline at end of file From c0322536999c239e455efbadcb1724d174c8c6c7 Mon Sep 17 00:00:00 2001 From: the Date: Wed, 13 Nov 2024 01:00:16 +0400 Subject: [PATCH 30/49] =?UTF-8?q?=D1=84=D0=B8=D0=BA=D1=812?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- docker-compose.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 56659ba..32dc5a9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ services: zookeeper: -# networks: -# - vpn + networks: + - vpn image: confluentinc/cp-zookeeper:7.4.0 environment: ZOOKEEPER_CLIENT_PORT: 2181 @@ -10,8 +10,8 @@ services: - 2181:2181 kafka: -# networks: -# - vpn + networks: + - vpn image: confluentinc/cp-kafka:7.4.0 ports: - 9092:9092 @@ -35,8 +35,8 @@ services: - zookeeper init-kafka: -# networks: -# - vpn + networks: + - vpn image: confluentinc/cp-kafka:7.4.0 depends_on: - kafka @@ -56,8 +56,8 @@ services: " kafka-ui: -# networks: -# - vpn + networks: + - vpn container_name: kafka-ui image: provectuslabs/kafka-ui:latest ports: @@ -69,6 +69,6 @@ services: KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 KAFKA_CLUSTERS_0_METRICS_PORT: 9997 -#networks: -# vpn: -# name: kafkaVPN \ No newline at end of file +networks: + vpn: + name: kafkaVPN \ No newline at end of file From 5fa9c76b996d3b79ed60ee3d4c6623271400b6fa Mon Sep 17 00:00:00 2001 From: the Date: Wed, 13 Nov 2024 01:14:32 +0400 Subject: [PATCH 31/49] test changes --- docker-compose.yml | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 32dc5a9..a1e5c42 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,3 +1,12 @@ +networks: + vpn: + name: kafkaVPN + driver: bridge + ipam: + config: + - subnet: "192.168.2.0/24" + gateway: "192.168.2.1" + services: zookeeper: networks: @@ -11,7 +20,8 @@ services: kafka: networks: - - vpn + vpn: + ipv4_address: 192.168.2.10 image: confluentinc/cp-kafka:7.4.0 ports: - 9092:9092 @@ -68,7 +78,3 @@ services: KAFKA_CLUSTERS_0_NAME: local KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 KAFKA_CLUSTERS_0_METRICS_PORT: 9997 - -networks: - vpn: - name: kafkaVPN \ No newline at end of file From 3ce4d6baf2845981909203480e72fdcd10fdab87 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 13 Nov 2024 01:49:13 +0400 Subject: [PATCH 32/49] =?UTF-8?q?fix:=20dockerfile=20=D0=B8=20docker-compo?= =?UTF-8?q?se?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/Dockerfile | 13 +++++++------ docker-compose.yml | 17 ++++++++--------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Cloud/Dockerfile b/Cloud/Dockerfile index 6de9e42..084ab62 100644 --- a/Cloud/Dockerfile +++ b/Cloud/Dockerfile @@ -9,20 +9,21 @@ ENV ASPNETCORE_URLS=http://+:5124 RUN adduser -u 5678 --disabled-password --gecos "" appuser && chown -R appuser /app USER appuser -FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:6.0 AS build +FROM mcr.microsoft.com/dotnet/sdk:6.0 AS build ARG configuration=Release WORKDIR /src -COPY ["Cloud/Cloud.csproj", "Cloud/"] -RUN dotnet restore "Cloud/Cloud.csproj" +COPY ["Cloud.csproj", "."] +RUN dotnet restore "./Cloud.csproj" COPY . . -WORKDIR "/src/Cloud" -RUN dotnet build "Cloud.csproj" -c $configuration -o /app/build +WORKDIR "/src/." +RUN dotnet build "./Cloud.csproj" -c $configuration -o /app/build FROM build AS publish ARG configuration=Release -RUN dotnet publish "Cloud.csproj" -c $configuration -o /app/publish /p:UseAppHost=false +RUN dotnet publish "./Cloud.csproj" -c $configuration -o /app/publish /p:UseAppHost=false FROM base AS final WORKDIR /app COPY --from=publish /app/publish . 
ENTRYPOINT ["dotnet", "Cloud.dll"] + diff --git a/docker-compose.yml b/docker-compose.yml index 532e4cb..8503dba 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,22 +1,21 @@ services: cloud: build: ./Cloud/ - ports: + ports: - "5124:5124" depends_on: - postgres postgres: - image: postgres:14 + image: postgres:14 container_name: cucumber_database environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: 12345 - POSTGRES_DB: main_database + POSTGRES_USER: postgres + POSTGRES_PASSWORD: 12345 + POSTGRES_DB: main_database ports: - - "5438:5432" + - "5438:5432" volumes: - - postgres_data:/var/lib/postgresql/data - + - postgres_data:/var/lib/postgresql/data volumes: postgres_data: - driver: local \ No newline at end of file + driver: local \ No newline at end of file From fbfde769b1b3e4de023c59111105886eb7a9aecd Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 13 Nov 2024 03:31:43 +0400 Subject: [PATCH 33/49] =?UTF-8?q?add:=20=D1=81=D1=83=D1=89=D0=BD=D0=BE?= =?UTF-8?q?=D1=81=D1=82=D1=8C=20=D1=82=D0=B5=D0=BF=D0=BB=D0=B8=D1=86=D1=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/ApplicationContext.cs | 1 + Cloud/Models/Farm.cs | 1 + Cloud/Models/Greenhouse.cs | 14 ++++++++++++++ Cloud/Models/Support/HeatingMode.cs | 8 ++++++++ Cloud/Models/Support/WateringMode.cs | 8 ++++++++ 5 files changed, 32 insertions(+) create mode 100644 Cloud/Models/Greenhouse.cs create mode 100644 Cloud/Models/Support/HeatingMode.cs create mode 100644 Cloud/Models/Support/WateringMode.cs diff --git a/Cloud/ApplicationContext.cs b/Cloud/ApplicationContext.cs index 6a60cf1..681040d 100644 --- a/Cloud/ApplicationContext.cs +++ b/Cloud/ApplicationContext.cs @@ -6,6 +6,7 @@ public class ApplicationContext : DbContext { public DbSet Users { get; set; } = null!; public DbSet Farms { get; set; } = null!; + public DbSet Greenhouses { get; set; } = null!; public ApplicationContext(DbContextOptions options) : base(options) diff --git a/Cloud/Models/Farm.cs b/Cloud/Models/Farm.cs index 5cc1ac8..48fd855 100644 --- a/Cloud/Models/Farm.cs +++ b/Cloud/Models/Farm.cs @@ -7,5 +7,6 @@ public int UserId { get; set; } public User? User { get; set; } public string RaspberryIP { get; set; } + List Greenhouses { get; set; } = new(); } } \ No newline at end of file diff --git a/Cloud/Models/Greenhouse.cs b/Cloud/Models/Greenhouse.cs new file mode 100644 index 0000000..ec2f54a --- /dev/null +++ b/Cloud/Models/Greenhouse.cs @@ -0,0 +1,14 @@ +using Cloud.Models.Support; + +namespace Cloud.Models +{ + public class Greenhouse + { + public int Id { get; set; } + public int RecomendedTemperature { get; set; } + public WateringMode WateringMode { get; set; } + public HeatingMode HeatingMode { get; set; } + public int FarmId { get; set; } + public Farm? 
Farm { get; set; } + } +} \ No newline at end of file diff --git a/Cloud/Models/Support/HeatingMode.cs b/Cloud/Models/Support/HeatingMode.cs new file mode 100644 index 0000000..81f082e --- /dev/null +++ b/Cloud/Models/Support/HeatingMode.cs @@ -0,0 +1,8 @@ +namespace Cloud.Models.Support +{ + public enum HeatingMode + { + Manual, + Auto + } +} \ No newline at end of file diff --git a/Cloud/Models/Support/WateringMode.cs b/Cloud/Models/Support/WateringMode.cs new file mode 100644 index 0000000..3e58fe4 --- /dev/null +++ b/Cloud/Models/Support/WateringMode.cs @@ -0,0 +1,8 @@ +namespace Cloud.Models.Support +{ + public enum WateringMode + { + Manual, + Auto + } +} \ No newline at end of file From 1e2bd0566769ffdbed36dd2831a4131f64fba0fb Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 13 Nov 2024 03:32:30 +0400 Subject: [PATCH 34/49] =?UTF-8?q?add:=20=D0=B8=D0=BD=D1=82=D0=B5=D1=80?= =?UTF-8?q?=D1=84=D0=B5=D0=B9=D1=81=D1=8B=20=D0=B1=D1=80=D0=BE=D0=BA=D0=B5?= =?UTF-8?q?=D1=80=D0=B0,=20=D1=81=D1=83=D1=89=D0=BD=D0=BE=D1=81=D1=82?= =?UTF-8?q?=D0=B8=20=D1=82=D0=BE=D0=BF=D0=B8=D0=BA=D0=BE=D0=B2=20=D0=B1?= =?UTF-8?q?=D1=80=D0=BE=D0=BA=D0=B5=D1=80=D0=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/Services/Broker/IBrokerConsumer.cs | 7 +++++++ Cloud/Services/Broker/IBrokerProdurcer.cs | 9 +++++++++ Cloud/Services/Broker/IBrokerService.cs | 10 ++++++++++ .../Broker/Implement/Kafka/KafkaService.cs | 17 +++++++++++++++++ Cloud/Services/Broker/Support/Command.cs | 9 +++++++++ Cloud/Services/Broker/Support/CommandResult.cs | 9 +++++++++ Cloud/Services/Broker/Support/GreenhouseInfo.cs | 12 ++++++++++++ 7 files changed, 73 insertions(+) create mode 100644 Cloud/Services/Broker/IBrokerConsumer.cs create mode 100644 Cloud/Services/Broker/IBrokerProdurcer.cs create mode 100644 Cloud/Services/Broker/IBrokerService.cs create mode 100644 Cloud/Services/Broker/Implement/Kafka/KafkaService.cs create mode 100644 Cloud/Services/Broker/Support/Command.cs create mode 100644 Cloud/Services/Broker/Support/CommandResult.cs create mode 100644 Cloud/Services/Broker/Support/GreenhouseInfo.cs diff --git a/Cloud/Services/Broker/IBrokerConsumer.cs b/Cloud/Services/Broker/IBrokerConsumer.cs new file mode 100644 index 0000000..a421f6d --- /dev/null +++ b/Cloud/Services/Broker/IBrokerConsumer.cs @@ -0,0 +1,7 @@ +namespace Cloud.Services.Broker +{ + public interface IBrokerConsumer + { + // TODO: добавить методы для получения данных + } +} \ No newline at end of file diff --git a/Cloud/Services/Broker/IBrokerProdurcer.cs b/Cloud/Services/Broker/IBrokerProdurcer.cs new file mode 100644 index 0000000..ac3b974 --- /dev/null +++ b/Cloud/Services/Broker/IBrokerProdurcer.cs @@ -0,0 +1,9 @@ +using Cloud.Services.Broker.Support; + +namespace Cloud.Services.Broker +{ + public interface IBrokerProdurcer + { + Task ProduceAsync(string topic, Command command); + } +} \ No newline at end of file diff --git a/Cloud/Services/Broker/IBrokerService.cs b/Cloud/Services/Broker/IBrokerService.cs new file mode 100644 index 0000000..910a25e --- /dev/null +++ b/Cloud/Services/Broker/IBrokerService.cs @@ -0,0 +1,10 @@ +using Cloud.Services.Broker.Support; + +namespace Cloud.Services.Broker +{ + public interface IBrokerService + { + Task Produce(Command command); + Task Consume(string topic); + } +} \ No newline at end of file diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs new file mode 100644 index 0000000..44e099b --- 
/dev/null +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs @@ -0,0 +1,17 @@ +using Cloud.Services.Broker.Support; + +namespace Cloud.Services.Broker.Implement.Kafka +{ + public class KafkaService : IBrokerService + { + public Task Consume(string topic) + { + throw new NotImplementedException(); + } + + public Task Produce(Command command) + { + throw new NotImplementedException(); + } + } +} \ No newline at end of file diff --git a/Cloud/Services/Broker/Support/Command.cs b/Cloud/Services/Broker/Support/Command.cs new file mode 100644 index 0000000..ef2a44b --- /dev/null +++ b/Cloud/Services/Broker/Support/Command.cs @@ -0,0 +1,9 @@ +namespace Cloud.Services.Broker.Support +{ + public class Command + { + public int Id { get; set; } + public int GreenhouseId { get; set; } + public string CommandName { get; set; } = null!; + } +} \ No newline at end of file diff --git a/Cloud/Services/Broker/Support/CommandResult.cs b/Cloud/Services/Broker/Support/CommandResult.cs new file mode 100644 index 0000000..06d997d --- /dev/null +++ b/Cloud/Services/Broker/Support/CommandResult.cs @@ -0,0 +1,9 @@ +namespace Cloud.Services.Broker.Support +{ + public class CommandResult + { + public int CommandId { get; set; } + public int GreenhouseId { get; set; } + public string ResultMessage { get; set; } = string.Empty; + } +} \ No newline at end of file diff --git a/Cloud/Services/Broker/Support/GreenhouseInfo.cs b/Cloud/Services/Broker/Support/GreenhouseInfo.cs new file mode 100644 index 0000000..c5a2140 --- /dev/null +++ b/Cloud/Services/Broker/Support/GreenhouseInfo.cs @@ -0,0 +1,12 @@ +namespace Cloud.Services.Broker.Support +{ + public class GreenhouseInfo + { + public int Id { get; set; } + public int PercentWater { get; set; } + public int SoilTemperature { get; set; } + public bool PumpStatus { get; set; } + public bool HeatingStatus { get; set; } + public bool AutoWateringStatus { get; set; } + } +} \ No newline at end of file From 2a9508f737adb387a2772a4152dda48fb91159b3 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 13 Nov 2024 03:33:03 +0400 Subject: [PATCH 35/49] =?UTF-8?q?add:=20=D0=BA=D0=BE=D0=BD=D1=82=D1=80?= =?UTF-8?q?=D0=BE=D0=BB=D0=BB=D0=B5=D1=80=20=D1=82=D0=B5=D0=BF=D0=BB=D0=B8?= =?UTF-8?q?=D1=86=D1=8B,=20=D0=B4=D0=BE=D0=B1=D0=B0=D0=B2=D0=BB=D0=B5?= =?UTF-8?q?=D0=BD=20=D1=81=D0=B5=D1=80=D0=B2=D0=B8=D1=81=20=D0=B1=D1=80?= =?UTF-8?q?=D0=BE=D0=BA=D0=B5=D1=80=D0=B0=20=D0=B2=20DI?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/Controllers/GreengouseController.cs | 79 +++++++++++++++++++++++ Cloud/Program.cs | 4 +- Cloud/Requests/GreenhouseRequest.cs | 11 ++++ 3 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 Cloud/Controllers/GreengouseController.cs create mode 100644 Cloud/Requests/GreenhouseRequest.cs diff --git a/Cloud/Controllers/GreengouseController.cs b/Cloud/Controllers/GreengouseController.cs new file mode 100644 index 0000000..473f292 --- /dev/null +++ b/Cloud/Controllers/GreengouseController.cs @@ -0,0 +1,79 @@ +using Cloud.Models; +using Cloud.Requests; +using Cloud.Services.Broker; +using Cloud.Services.Broker.Support; +using Microsoft.AspNetCore.Mvc; + +namespace Cloud.Controllers +{ + [ApiController] + [Route("api/user/{userId}/farm/{farmId}/greenhouse")] + public class GreenhouseController : ControllerBase + { + private readonly IBrokerService _brokerService; + private readonly ApplicationContext _context; + private readonly IConfiguration _config; + public GreenhouseController(IConfiguration 
config, ApplicationContext context, + IBrokerService brokerService) + { + _brokerService = brokerService; + _context = context; + _config = config; + } + + /** + * Возвращает текущую информацию о всех теплицах пользователя + */ + [HttpGet] + public async Task>> GetAll(int userId, int farmId) + { + throw new NotImplementedException(); + } + + /** + * Возвращает текущую информацию о конкретной теплице + */ + [HttpGet("{greenhouseId}")] + public async Task> Get(int userId, int farmId, int greenhouseId) + { + throw new NotImplementedException(); + } + + /** + * Возвращает сохраненные данные для автоматизации теплицы + */ + [HttpGet("{greenhouseId}/settings")] + public async Task> GetGreenhouse(int userId, int farmId, int greenhouseId) + { + throw new NotImplementedException(); + } + + /** + * Сохраняет в базе данных API данные для автоматизации теплицы + */ + [HttpPost] + public async Task> SaveToDatabase(int userId, int farmId, GreenhouseRequest greenhouse) + { + throw new NotImplementedException(); + } + + /** + * Обновляет в базе данных API данные для автоматизации теплицы + */ + [HttpPut("{greenhouseId}/settings")] + public async Task> Update(int userId, int farmId, int greenhouseId, GreenhouseRequest greenhouse) + { + throw new NotImplementedException(); + } + + /** + * Удаляет из базы данных API запись настроек автоматизации теплицы + */ + [HttpDelete("{greenhouseId}")] + public async Task Delete(int userId, int farmId, int greenhouseId) + { + + throw new NotImplementedException(); + } + } +} \ No newline at end of file diff --git a/Cloud/Program.cs b/Cloud/Program.cs index c02b6fb..7ab00f6 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -7,11 +7,13 @@ using FluentValidation; using FluentValidation.AspNetCore; using Cloud.Validation; using StackExchange.Redis; +using Cloud.Services.Broker.Implement.Kafka; +using Cloud.Services.Broker; var builder = WebApplication.CreateBuilder(args); // Add services to the container. - +builder.Services.AddSingleton(); //Redis configuration builder.Services.AddSingleton(sp => { diff --git a/Cloud/Requests/GreenhouseRequest.cs b/Cloud/Requests/GreenhouseRequest.cs new file mode 100644 index 0000000..7f2fe80 --- /dev/null +++ b/Cloud/Requests/GreenhouseRequest.cs @@ -0,0 +1,11 @@ +using Cloud.Models.Support; + +namespace Cloud.Requests +{ + public class GreenhouseRequest + { + public int RecomendedTemperature { get; set; } + public WateringMode WateringMode { get; set; } + public HeatingMode HeatingMode { get; set; } + } +} \ No newline at end of file From 5687949f96e5e7642a30ab8aeabab748bec0fc12 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 13 Nov 2024 03:33:49 +0400 Subject: [PATCH 36/49] =?UTF-8?q?todo:=20=D0=B2=20=D0=B4=D0=BE=D0=BA=D0=B5?= =?UTF-8?q?=D1=80=D1=84=D0=B0=D0=B9=D0=BB=20=D0=B4=D0=BE=D0=B1=D0=B0=D0=B2?= =?UTF-8?q?=D0=B8=D1=82=D1=8C=20=D0=B8=D0=BD=D0=B8=D1=86=D0=B8=D0=B0=D0=BB?= =?UTF-8?q?=D0=B8=D0=B7=D0=B0=D1=86=D0=B8=D1=8E=20=D0=B1=D0=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Cloud/Dockerfile b/Cloud/Dockerfile index 084ab62..7a518b4 100644 --- a/Cloud/Dockerfile +++ b/Cloud/Dockerfile @@ -25,5 +25,6 @@ RUN dotnet publish "./Cloud.csproj" -c $configuration -o /app/publish /p:UseAppH FROM base AS final WORKDIR /app COPY --from=publish /app/publish . 
+# TODO: Добавить инициализацию базы данных с помощью миграции ENTRYPOINT ["dotnet", "Cloud.dll"] From 7c310d21f7aa316abc15a01bdcc37b3ebb5b5cc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=90=D1=80=D1=82=D0=B5=D0=BC=20=D0=A5=D0=B0=D1=80=D0=BB?= =?UTF-8?q?=D0=B0=D0=BC=D0=BE=D0=B2?= Date: Wed, 13 Nov 2024 15:24:47 +0400 Subject: [PATCH 37/49] Add broker service --- Cloud/Cloud.csproj | 1 + Cloud/Controllers/ValveController.cs | 41 ++++++++++++++++++ Cloud/Enums/ValveEnum.cs | 9 ++++ Cloud/Program.cs | 11 +++++ Cloud/Requests/ValveRequest.cs | 7 ++++ Cloud/Services/BackgroundWorkerService.cs | 39 +++++++++++++++++ Cloud/Services/ConsumerService.cs | 51 +++++++++++++++++++++++ Cloud/Services/ProducerService.cs | 33 +++++++++++++++ Cloud/Validation/ValveValidator.cs | 16 +++++++ Cloud/appsettings.json | 4 ++ 10 files changed, 212 insertions(+) create mode 100644 Cloud/Controllers/ValveController.cs create mode 100644 Cloud/Enums/ValveEnum.cs create mode 100644 Cloud/Requests/ValveRequest.cs create mode 100644 Cloud/Services/BackgroundWorkerService.cs create mode 100644 Cloud/Services/ConsumerService.cs create mode 100644 Cloud/Services/ProducerService.cs create mode 100644 Cloud/Validation/ValveValidator.cs diff --git a/Cloud/Cloud.csproj b/Cloud/Cloud.csproj index 6a5fc81..e74339d 100644 --- a/Cloud/Cloud.csproj +++ b/Cloud/Cloud.csproj @@ -7,6 +7,7 @@ + diff --git a/Cloud/Controllers/ValveController.cs b/Cloud/Controllers/ValveController.cs new file mode 100644 index 0000000..5708820 --- /dev/null +++ b/Cloud/Controllers/ValveController.cs @@ -0,0 +1,41 @@ +using Cloud.Requests; +using Cloud.Services; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Mvc; +using System.ComponentModel.DataAnnotations; +using System.Text.Json; + +namespace Cloud.Controllers +{ + [Authorize] + [ApiController] + [Route("api")] + public class ValveController : ControllerBase + { + //Контроллер вентиля + + private readonly ProducerService _producerService; + + public ValveController(ProducerService producerService) + { + _producerService = producerService; + } + + [HttpPost("farm/{farmId}/greenhouse/{greenhouseId}/watering")] + public async Task interactValve([FromBody] ValveRequest request, int farmId, int ghId) + { + var kafkaRequest = new + { + FarmId = farmId, + GreenHouseId = ghId, + SomeAction = request.Action, + }; + + var message = JsonSerializer.Serialize(kafkaRequest); + + await _producerService.ProduceMessageAsync("InventoryUpdates", message); + + return Ok($"Valve status is {request.Action}"); + } + } +} diff --git a/Cloud/Enums/ValveEnum.cs b/Cloud/Enums/ValveEnum.cs new file mode 100644 index 0000000..f04d64a --- /dev/null +++ b/Cloud/Enums/ValveEnum.cs @@ -0,0 +1,9 @@ +namespace Cloud.Enums +{ + public enum ValveEnum + { + Open, + Close, + Auto + } +} diff --git a/Cloud/Program.cs b/Cloud/Program.cs index c02b6fb..1c3e3a8 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -7,6 +7,7 @@ using FluentValidation; using FluentValidation.AspNetCore; using Cloud.Validation; using StackExchange.Redis; +using Cloud.Services; var builder = WebApplication.CreateBuilder(args); @@ -19,6 +20,15 @@ builder.Services.AddSingleton(sp => return ConnectionMultiplexer.Connect(configuration); }); +//Kafka producer service +builder.Services.AddSingleton(); + +//Kafka consumer service +builder.Services.AddSingleton(); + +//Add the BackgroundWorkerService +builder.Services.AddHostedService(); + //Jwt configuration var jwtIssuer = builder.Configuration.GetSection("Jwt:Issuer").Get(); var jwtKey = 
builder.Configuration.GetSection("Jwt:Key").Get(); @@ -58,6 +68,7 @@ builder.Services.AddFluentValidationClientsideAdapters(); builder.Services.AddValidatorsFromAssemblyContaining(); builder.Services.AddValidatorsFromAssemblyContaining(); builder.Services.AddValidatorsFromAssemblyContaining(); +builder.Services.AddValidatorsFromAssemblyContaining(); // Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle builder.Services.AddEndpointsApiExplorer(); diff --git a/Cloud/Requests/ValveRequest.cs b/Cloud/Requests/ValveRequest.cs new file mode 100644 index 0000000..dee6a5a --- /dev/null +++ b/Cloud/Requests/ValveRequest.cs @@ -0,0 +1,7 @@ +namespace Cloud.Requests +{ + public class ValveRequest + { + public string Action { get; set; } + } +} diff --git a/Cloud/Services/BackgroundWorkerService.cs b/Cloud/Services/BackgroundWorkerService.cs new file mode 100644 index 0000000..f7fbf34 --- /dev/null +++ b/Cloud/Services/BackgroundWorkerService.cs @@ -0,0 +1,39 @@ +namespace Cloud.Services +{ + public class BackgroundWorkerService : BackgroundService + { + public readonly ILogger _logger; + private readonly ConsumerService _consumerService; + + public BackgroundWorkerService(ILogger logger, ConsumerService consumer) + { + _logger = logger; + _consumerService = consumer; + } + + //Backghround Service, This will run continuously + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + try + { + while (!stoppingToken.IsCancellationRequested) + { + //_logger.LogInformation("Background Service is Runing at : {time}", DateTimeOffset.Now); + + string request = await _consumerService.WaitMessage("ValvesHeatersRequest"); //Consume the Kafka Message + + //After Consume the Order Request Can process the order + if (!string.IsNullOrEmpty(request)) + _logger.LogInformation("Valves-Heaters Request : {value}", request); + + + await Task.Delay(1000, stoppingToken); + } + } + catch (Exception ex) + { + _logger.LogError($"BackgroundWorkerService - Exception {ex}"); + } + } + } +} diff --git a/Cloud/Services/ConsumerService.cs b/Cloud/Services/ConsumerService.cs new file mode 100644 index 0000000..82d5bb2 --- /dev/null +++ b/Cloud/Services/ConsumerService.cs @@ -0,0 +1,51 @@ +using Confluent.Kafka; + +namespace Cloud.Services +{ + public class ConsumerService + { + private IConsumer _consumer; + private ConsumerConfig consumerConfig; + public ConsumerService(IConfiguration configuration) + { + consumerConfig = new ConsumerConfig + { + BootstrapServers = configuration["Kafka:BootstrapServers"], + GroupId = configuration["Kafka:GroupId"], + AutoOffsetReset = AutoOffsetReset.Earliest, + }; + + _consumer = new ConsumerBuilder(consumerConfig).Build(); + } + + //Consume Method + public async TaskWaitMessage(string topic) + { + try + { + _consumer.Subscribe(topic); + + var consumeResult = _consumer.Consume(TimeSpan.FromMilliseconds(1000)); + + if (consumeResult != null) + { + return consumeResult.Message.Value; + } + else + { + //No message received from Kafka within the specified timeout. 
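+ // Falling through returns an empty string; BackgroundWorkerService treats an empty
+ // result as "no message" and simply polls again after its one-second delay.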
+ } + return ""; + + } + catch (Exception ex) + { + return ""; + } + finally + { + _consumer.Close(); + } + } + } +} diff --git a/Cloud/Services/ProducerService.cs b/Cloud/Services/ProducerService.cs new file mode 100644 index 0000000..073bdc6 --- /dev/null +++ b/Cloud/Services/ProducerService.cs @@ -0,0 +1,33 @@ +using Confluent.Kafka; + +namespace Cloud.Services +{ + public class ProducerService + { + private readonly IProducer _producer; + + public ProducerService(IConfiguration configuration) + { + var producerConfig = new ProducerConfig + { + BootstrapServers = configuration["Kafka:BootstrapServers"] + }; + + //Build the Producer + _producer = new ProducerBuilder(producerConfig).Build(); + } + + //Method for Produce the Message to Kafka Topic + public async Task ProduceMessageAsync(string topic, string value) + { + var kafkaMessage = new Message + { + Key = Guid.NewGuid().ToString(), + Value = value + }; + + //Produce the Message + await _producer.ProduceAsync(topic, kafkaMessage); + } + } +} diff --git a/Cloud/Validation/ValveValidator.cs b/Cloud/Validation/ValveValidator.cs new file mode 100644 index 0000000..4a19f58 --- /dev/null +++ b/Cloud/Validation/ValveValidator.cs @@ -0,0 +1,16 @@ +using Cloud.Enums; +using Cloud.Requests; +using FluentValidation; + +namespace Cloud.Validation +{ + public class ValveValidator : AbstractValidator + { + public ValveValidator() { + + RuleFor(request => request.Action) + .NotEmpty().WithMessage("Action can't be empty"). + IsEnumName(typeof (ValveEnum)).WithMessage("Action is not correct"); + } + } +} diff --git a/Cloud/appsettings.json b/Cloud/appsettings.json index b272a9c..e80f5b5 100644 --- a/Cloud/appsettings.json +++ b/Cloud/appsettings.json @@ -5,6 +5,10 @@ "Microsoft.AspNetCore": "Warning" } }, + "Kafka": { + "BootstrapServers": "localhost:9092", + "GroupId": "ValvesHeaters" + }, "AllowedHosts": "*", "Jwt": { "Key": "m7TyhE20s0dVtUDAr9EnFdPZnAG8maxgBTaiW5j6kO6RQhWDAGxYmXyu0suDnE0o", From 5e73961ad593fba43cf1f82bd390f74d32842978 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=90=D1=80=D1=82=D0=B5=D0=BC=20=D0=A5=D0=B0=D1=80=D0=BB?= =?UTF-8?q?=D0=B0=D0=BC=D0=BE=D0=B2?= Date: Tue, 19 Nov 2024 19:36:05 +0400 Subject: [PATCH 38/49] Add Valve Controller --- Cloud/Controllers/ValveController.cs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Cloud/Controllers/ValveController.cs b/Cloud/Controllers/ValveController.cs index 5708820..dbc081a 100644 --- a/Cloud/Controllers/ValveController.cs +++ b/Cloud/Controllers/ValveController.cs @@ -21,7 +21,7 @@ namespace Cloud.Controllers _producerService = producerService; } - [HttpPost("farm/{farmId}/greenhouse/{greenhouseId}/watering")] + [HttpPost("farm/{farmId}/greenhouse/{ghId}/watering")] public async Task interactValve([FromBody] ValveRequest request, int farmId, int ghId) { var kafkaRequest = new @@ -32,10 +32,11 @@ namespace Cloud.Controllers }; var message = JsonSerializer.Serialize(kafkaRequest); + return Ok(kafkaRequest); - await _producerService.ProduceMessageAsync("InventoryUpdates", message); + /*await _producerService.ProduceMessageAsync("ValvesHeatersRequest", message); - return Ok($"Valve status is {request.Action}"); + return Ok($"Valve status is {request.Action}");*/ } } } From c230d86404a3e07eb5c2ac3a89df1cb5f6c7edcd Mon Sep 17 00:00:00 2001 From: mfnefd Date: Tue, 19 Nov 2024 23:23:21 +0400 Subject: [PATCH 39/49] =?UTF-8?q?add:=20=D1=81=D0=B5=D1=80=D0=B2=D0=B8?= =?UTF-8?q?=D1=81=20=D0=B1=D1=80=D0=BE=D0=BA=D0=B5=D1=80=D0=B0,=20=D1=81?= 
=?UTF-8?q?=D1=83=D1=89=D0=BD=D0=BE=D1=81=D1=82=D0=B8=20=D0=B1=D1=80=D0=BE?= =?UTF-8?q?=D0=BA=D0=B5=D1=80=D0=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/Cloud.csproj | 1 + Cloud/Services/Broker/IBrokerConsumer.cs | 4 +- ...IBrokerProdurcer.cs => IBrokerProducer.cs} | 2 +- Cloud/Services/Broker/IBrokerService.cs | 4 +- .../Broker/Implement/Kafka/KafkaConsumer.cs | 51 +++++++++++++++++++ .../Broker/Implement/Kafka/KafkaProducer.cs | 28 ++++++++++ .../Broker/Implement/Kafka/KafkaService.cs | 17 ++++--- Cloud/Services/Broker/Support/Command.cs | 7 +-- .../Services/Broker/Support/CommandResult.cs | 2 +- .../Services/Broker/Support/GreenhouseInfo.cs | 2 +- .../Broker/Support/IBrokerResponse.cs | 6 +++ 11 files changed, 109 insertions(+), 15 deletions(-) rename Cloud/Services/Broker/{IBrokerProdurcer.cs => IBrokerProducer.cs} (78%) create mode 100644 Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs create mode 100644 Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs create mode 100644 Cloud/Services/Broker/Support/IBrokerResponse.cs diff --git a/Cloud/Cloud.csproj b/Cloud/Cloud.csproj index 6a5fc81..042333f 100644 --- a/Cloud/Cloud.csproj +++ b/Cloud/Cloud.csproj @@ -7,6 +7,7 @@ + diff --git a/Cloud/Services/Broker/IBrokerConsumer.cs b/Cloud/Services/Broker/IBrokerConsumer.cs index a421f6d..7997ad0 100644 --- a/Cloud/Services/Broker/IBrokerConsumer.cs +++ b/Cloud/Services/Broker/IBrokerConsumer.cs @@ -1,7 +1,9 @@ +using Cloud.Services.Broker.Support; + namespace Cloud.Services.Broker { public interface IBrokerConsumer { - // TODO: добавить методы для получения данных + T? WaitMessage(string topic) where T : IBrokerResponse; } } \ No newline at end of file diff --git a/Cloud/Services/Broker/IBrokerProdurcer.cs b/Cloud/Services/Broker/IBrokerProducer.cs similarity index 78% rename from Cloud/Services/Broker/IBrokerProdurcer.cs rename to Cloud/Services/Broker/IBrokerProducer.cs index ac3b974..abed579 100644 --- a/Cloud/Services/Broker/IBrokerProdurcer.cs +++ b/Cloud/Services/Broker/IBrokerProducer.cs @@ -2,7 +2,7 @@ using Cloud.Services.Broker.Support; namespace Cloud.Services.Broker { - public interface IBrokerProdurcer + public interface IBrokerProducer { Task ProduceAsync(string topic, Command command); } diff --git a/Cloud/Services/Broker/IBrokerService.cs b/Cloud/Services/Broker/IBrokerService.cs index 910a25e..f19aeb4 100644 --- a/Cloud/Services/Broker/IBrokerService.cs +++ b/Cloud/Services/Broker/IBrokerService.cs @@ -4,7 +4,7 @@ namespace Cloud.Services.Broker { public interface IBrokerService { - Task Produce(Command command); - Task Consume(string topic); + Task Produce(Command command); + T? 
Consume(string topic) where T : IBrokerResponse; } } \ No newline at end of file diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs new file mode 100644 index 0000000..2b31499 --- /dev/null +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs @@ -0,0 +1,51 @@ + +using Cloud.Services.Broker.Support; +using Confluent.Kafka; +using System.Text.Json; + +namespace Cloud.Services.Broker.Implement.Kafka +{ + public class KafkaConsumer : IBrokerConsumer + { + private readonly IConsumer _consumer; + + public KafkaConsumer(IConfiguration config) + { + var consumerConfig = new ConsumerConfig() + { + BootstrapServers = config["Kafka:BootstrapServers"], + GroupId = config["Kafka:GroupId"], + AutoOffsetReset = AutoOffsetReset.Earliest, + }; + + _consumer = new ConsumerBuilder(consumerConfig).Build(); + } + + public T? WaitMessage(string topic) + where T : IBrokerResponse + { + try + { + _consumer.Subscribe(topic); + + var consumeResult = _consumer.Consume(TimeSpan.FromMilliseconds(1000)); + + if (consumeResult == null) + { + // No message received from Kafka within the specified timeout. + return default; + } + return JsonSerializer.Deserialize(consumeResult.Message.Value); + + } + catch (Exception ex) + { + throw; + } + finally + { + _consumer.Close(); + } + } + } +} \ No newline at end of file diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs new file mode 100644 index 0000000..b301084 --- /dev/null +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs @@ -0,0 +1,28 @@ +using Cloud.Services.Broker.Support; +using Confluent.Kafka; + +namespace Cloud.Services.Broker.Implement.Kafka +{ + public class KafkaProducer : IBrokerProducer + { + private readonly IProducer _producer; + + public KafkaProducer(IConfiguration configuration) + { + var producerConfig = new ProducerConfig + { + BootstrapServers = configuration["Kafka:BootstrapServers"] + }; + + //Build the Producer + _producer = new ProducerBuilder(producerConfig).Build(); + } + public async Task ProduceAsync(string topic, Command command) + { + var message = new Message { Key = Guid.NewGuid(), Value = command }; + + //Produce the Message + await _producer.ProduceAsync(topic, message); + } + } +} \ No newline at end of file diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs index 44e099b..c0b90ba 100644 --- a/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs @@ -4,14 +4,19 @@ namespace Cloud.Services.Broker.Implement.Kafka { public class KafkaService : IBrokerService { - public Task Consume(string topic) + private readonly KafkaProducer _producer; + private readonly KafkaConsumer _consumer; + + public KafkaService(IConfiguration configuration) { - throw new NotImplementedException(); + _producer = new KafkaProducer(configuration); + _consumer = new KafkaConsumer(configuration); } - public Task Produce(Command command) - { - throw new NotImplementedException(); - } + public T? 
Consume(string topic) + where T : IBrokerResponse => _consumer.WaitMessage(topic); + + public async Task Produce(Command command) + => await _producer.ProduceAsync("commands", command); } } \ No newline at end of file diff --git a/Cloud/Services/Broker/Support/Command.cs b/Cloud/Services/Broker/Support/Command.cs index ef2a44b..2debae3 100644 --- a/Cloud/Services/Broker/Support/Command.cs +++ b/Cloud/Services/Broker/Support/Command.cs @@ -1,9 +1,10 @@ +using System.Text.Json; + namespace Cloud.Services.Broker.Support { - public class Command + public class Command { - public int Id { get; set; } - public int GreenhouseId { get; set; } + public Guid GreenhouseId { get; set; } public string CommandName { get; set; } = null!; } } \ No newline at end of file diff --git a/Cloud/Services/Broker/Support/CommandResult.cs b/Cloud/Services/Broker/Support/CommandResult.cs index 06d997d..e66004d 100644 --- a/Cloud/Services/Broker/Support/CommandResult.cs +++ b/Cloud/Services/Broker/Support/CommandResult.cs @@ -1,6 +1,6 @@ namespace Cloud.Services.Broker.Support { - public class CommandResult + public class CommandResult : IBrokerResponse { public int CommandId { get; set; } public int GreenhouseId { get; set; } diff --git a/Cloud/Services/Broker/Support/GreenhouseInfo.cs b/Cloud/Services/Broker/Support/GreenhouseInfo.cs index c5a2140..bea028b 100644 --- a/Cloud/Services/Broker/Support/GreenhouseInfo.cs +++ b/Cloud/Services/Broker/Support/GreenhouseInfo.cs @@ -1,6 +1,6 @@ namespace Cloud.Services.Broker.Support { - public class GreenhouseInfo + public class GreenhouseInfo : IBrokerResponse { public int Id { get; set; } public int PercentWater { get; set; } diff --git a/Cloud/Services/Broker/Support/IBrokerResponse.cs b/Cloud/Services/Broker/Support/IBrokerResponse.cs new file mode 100644 index 0000000..e13b65f --- /dev/null +++ b/Cloud/Services/Broker/Support/IBrokerResponse.cs @@ -0,0 +1,6 @@ +namespace Cloud.Services.Broker.Support +{ + public interface IBrokerResponse + { + } +} \ No newline at end of file From fed96d5b86b42399c3921a1193e85f60a2d35b0c Mon Sep 17 00:00:00 2001 From: mfnefd Date: Tue, 19 Nov 2024 23:42:12 +0400 Subject: [PATCH 40/49] =?UTF-8?q?del:=20=D1=81=D0=B5=D1=80=D0=B2=D0=B8?= =?UTF-8?q?=D1=81=D1=8B=20=D0=B1=D1=80=D0=BE=D0=BA=D0=B5=D1=80=D0=B0=20?= =?UTF-8?q?=D0=BE=D0=BD=D0=B8=20=D1=83=D0=B6=D0=B5=20=D0=B5=D1=81=D1=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/Services/BackgroundWorkerService.cs | 39 ----------------- Cloud/Services/ConsumerService.cs | 51 ----------------------- Cloud/Services/ProducerService.cs | 33 --------------- 3 files changed, 123 deletions(-) delete mode 100644 Cloud/Services/BackgroundWorkerService.cs delete mode 100644 Cloud/Services/ConsumerService.cs delete mode 100644 Cloud/Services/ProducerService.cs diff --git a/Cloud/Services/BackgroundWorkerService.cs b/Cloud/Services/BackgroundWorkerService.cs deleted file mode 100644 index f7fbf34..0000000 --- a/Cloud/Services/BackgroundWorkerService.cs +++ /dev/null @@ -1,39 +0,0 @@ -namespace Cloud.Services -{ - public class BackgroundWorkerService : BackgroundService - { - public readonly ILogger _logger; - private readonly ConsumerService _consumerService; - - public BackgroundWorkerService(ILogger logger, ConsumerService consumer) - { - _logger = logger; - _consumerService = consumer; - } - - //Backghround Service, This will run continuously - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - try - { - while 
(!stoppingToken.IsCancellationRequested) - { - //_logger.LogInformation("Background Service is Runing at : {time}", DateTimeOffset.Now); - - string request = await _consumerService.WaitMessage("ValvesHeatersRequest"); //Consume the Kafka Message - - //After Consume the Order Request Can process the order - if (!string.IsNullOrEmpty(request)) - _logger.LogInformation("Valves-Heaters Request : {value}", request); - - - await Task.Delay(1000, stoppingToken); - } - } - catch (Exception ex) - { - _logger.LogError($"BackgroundWorkerService - Exception {ex}"); - } - } - } -} diff --git a/Cloud/Services/ConsumerService.cs b/Cloud/Services/ConsumerService.cs deleted file mode 100644 index 82d5bb2..0000000 --- a/Cloud/Services/ConsumerService.cs +++ /dev/null @@ -1,51 +0,0 @@ -using Confluent.Kafka; - -namespace Cloud.Services -{ - public class ConsumerService - { - private IConsumer _consumer; - private ConsumerConfig consumerConfig; - public ConsumerService(IConfiguration configuration) - { - consumerConfig = new ConsumerConfig - { - BootstrapServers = configuration["Kafka:BootstrapServers"], - GroupId = configuration["Kafka:GroupId"], - AutoOffsetReset = AutoOffsetReset.Earliest, - }; - - _consumer = new ConsumerBuilder(consumerConfig).Build(); - } - - //Consume Method - public async TaskWaitMessage(string topic) - { - try - { - _consumer.Subscribe(topic); - - var consumeResult = _consumer.Consume(TimeSpan.FromMilliseconds(1000)); - - if (consumeResult != null) - { - return consumeResult.Message.Value; - } - else - { - //No message received from Kafka within the specified timeout. - } - return ""; - - } - catch (Exception ex) - { - return ""; - } - finally - { - _consumer.Close(); - } - } - } -} diff --git a/Cloud/Services/ProducerService.cs b/Cloud/Services/ProducerService.cs deleted file mode 100644 index 073bdc6..0000000 --- a/Cloud/Services/ProducerService.cs +++ /dev/null @@ -1,33 +0,0 @@ -using Confluent.Kafka; - -namespace Cloud.Services -{ - public class ProducerService - { - private readonly IProducer _producer; - - public ProducerService(IConfiguration configuration) - { - var producerConfig = new ProducerConfig - { - BootstrapServers = configuration["Kafka:BootstrapServers"] - }; - - //Build the Producer - _producer = new ProducerBuilder(producerConfig).Build(); - } - - //Method for Produce the Message to Kafka Topic - public async Task ProduceMessageAsync(string topic, string value) - { - var kafkaMessage = new Message - { - Key = Guid.NewGuid().ToString(), - Value = value - }; - - //Produce the Message - await _producer.ProduceAsync(topic, kafkaMessage); - } - } -} From 9c0aad605c626c1754277b46e9e710639e326c58 Mon Sep 17 00:00:00 2001 From: "m.zargarov" Date: Sun, 1 Dec 2024 19:33:52 +0400 Subject: [PATCH 41/49] add Redis service --- Cloud/Program.cs | 3 ++ Cloud/Services/Cache/IRedisCacheService.cs | 13 ++++++++ Cloud/Services/Cache/RedisCacheService.cs | 37 ++++++++++++++++++++++ docker-compose.yml | 9 +----- 4 files changed, 54 insertions(+), 8 deletions(-) create mode 100644 Cloud/Services/Cache/IRedisCacheService.cs create mode 100644 Cloud/Services/Cache/RedisCacheService.cs diff --git a/Cloud/Program.cs b/Cloud/Program.cs index 1c3e3a8..0a1ad0f 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -8,6 +8,7 @@ using FluentValidation.AspNetCore; using Cloud.Validation; using StackExchange.Redis; using Cloud.Services; +using Cloud.Services.Cache; var builder = WebApplication.CreateBuilder(args); @@ -20,6 +21,8 @@ builder.Services.AddSingleton(sp => return 
ConnectionMultiplexer.Connect(configuration); }); +builder.Services.AddSingleton(); + //Kafka producer service builder.Services.AddSingleton(); diff --git a/Cloud/Services/Cache/IRedisCacheService.cs b/Cloud/Services/Cache/IRedisCacheService.cs new file mode 100644 index 0000000..a5b11f6 --- /dev/null +++ b/Cloud/Services/Cache/IRedisCacheService.cs @@ -0,0 +1,13 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; + +namespace Cloud.Services.Cache +{ + public interface IRedisCacheService + { + Task SetCacheAsync(string key, T value, TimeSpan? expiry = null); + Task GetCacheAsync(string key); + } +} \ No newline at end of file diff --git a/Cloud/Services/Cache/RedisCacheService.cs b/Cloud/Services/Cache/RedisCacheService.cs new file mode 100644 index 0000000..2fadf24 --- /dev/null +++ b/Cloud/Services/Cache/RedisCacheService.cs @@ -0,0 +1,37 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using StackExchange.Redis; +using System.Text.Json; + +namespace Cloud.Services.Cache +{ + public class RedisCacheService : IRedisCacheService + { + private readonly IConnectionMultiplexer _connectionMultiplexer; + + public RedisCacheService(IConnectionMultiplexer connectionMultiplexer) + { + _connectionMultiplexer = connectionMultiplexer; + } + + public async Task SetCacheAsync(string key, T value, TimeSpan? expiry = null) + { + var database = _connectionMultiplexer.GetDatabase(); + var serializedValue = JsonSerializer.Serialize(value); + await database.StringSetAsync(key, serializedValue, expiry); + } + + public async Task GetCacheAsync(string key) + { + var database = _connectionMultiplexer.GetDatabase(); + var value = await database.StringGetAsync(key); + + if (value.IsNullOrEmpty) + return default; + + return JsonSerializer.Deserialize(value); + } + } +} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index af1ab66..ede4fce 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -108,11 +108,4 @@ volumes: postgres_data: driver: local cloud-redis: - driver: local - zookeeper: - image: confluentinc/cp-zookeeper:7.4.0 - environment: - ZOOKEEPER_CLIENT_PORT: 2181 - ZOOKEEPER_TICK_TIME: 2000 - ports: - - 2181:2181 \ No newline at end of file + driver: local \ No newline at end of file From 9c1720e1316d6bcfb14a36f6ccfd2b3dc433f3a5 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 4 Dec 2024 01:57:10 +0400 Subject: [PATCH 42/49] fix: kafka service --- Cloud/Controllers/ValveController.cs | 9 ++--- Cloud/Services/Broker/IBrokerConsumer.cs | 2 +- Cloud/Services/Broker/IBrokerService.cs | 5 ++- .../Broker/Implement/Kafka/KafkaConsumer.cs | 34 ++++++++++++------- .../Broker/Implement/Kafka/KafkaProducer.cs | 22 ++++++++---- .../Broker/Implement/Kafka/KafkaService.cs | 14 ++++++-- 6 files changed, 55 insertions(+), 31 deletions(-) diff --git a/Cloud/Controllers/ValveController.cs b/Cloud/Controllers/ValveController.cs index dbc081a..9fc973a 100644 --- a/Cloud/Controllers/ValveController.cs +++ b/Cloud/Controllers/ValveController.cs @@ -1,5 +1,6 @@ using Cloud.Requests; using Cloud.Services; +using Cloud.Services.Broker; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using System.ComponentModel.DataAnnotations; @@ -14,11 +15,11 @@ namespace Cloud.Controllers { //Контроллер вентиля - private readonly ProducerService _producerService; + private readonly IBrokerService _kafkaService; - public ValveController(ProducerService producerService) + 
public ValveController(IBrokerService kafkaService) { - _producerService = producerService; + _kafkaService = kafkaService; } [HttpPost("farm/{farmId}/greenhouse/{ghId}/watering")] @@ -34,7 +35,7 @@ namespace Cloud.Controllers var message = JsonSerializer.Serialize(kafkaRequest); return Ok(kafkaRequest); - /*await _producerService.ProduceMessageAsync("ValvesHeatersRequest", message); + /*await _kafkaService.ProduceAsync("ValvesHeatersRequest", message); return Ok($"Valve status is {request.Action}");*/ } diff --git a/Cloud/Services/Broker/IBrokerConsumer.cs b/Cloud/Services/Broker/IBrokerConsumer.cs index 7997ad0..c152981 100644 --- a/Cloud/Services/Broker/IBrokerConsumer.cs +++ b/Cloud/Services/Broker/IBrokerConsumer.cs @@ -4,6 +4,6 @@ namespace Cloud.Services.Broker { public interface IBrokerConsumer { - T? WaitMessage(string topic) where T : IBrokerResponse; + IEnumerable? WaitMessages(string topic) where T : IBrokerResponse; } } \ No newline at end of file diff --git a/Cloud/Services/Broker/IBrokerService.cs b/Cloud/Services/Broker/IBrokerService.cs index f19aeb4..356157b 100644 --- a/Cloud/Services/Broker/IBrokerService.cs +++ b/Cloud/Services/Broker/IBrokerService.cs @@ -2,9 +2,8 @@ using Cloud.Services.Broker.Support; namespace Cloud.Services.Broker { - public interface IBrokerService + public interface IBrokerService : IBrokerProducer, IBrokerConsumer { - Task Produce(Command command); - T? Consume(string topic) where T : IBrokerResponse; + void ChangeBrokerIp(string ip); } } \ No newline at end of file diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs index 2b31499..d2fe34c 100644 --- a/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs @@ -7,35 +7,31 @@ namespace Cloud.Services.Broker.Implement.Kafka { public class KafkaConsumer : IBrokerConsumer { - private readonly IConsumer _consumer; + private IConsumer _consumer; + private readonly IConfiguration _config; public KafkaConsumer(IConfiguration config) { - var consumerConfig = new ConsumerConfig() - { - BootstrapServers = config["Kafka:BootstrapServers"], - GroupId = config["Kafka:GroupId"], - AutoOffsetReset = AutoOffsetReset.Earliest, - }; - - _consumer = new ConsumerBuilder(consumerConfig).Build(); + _config = config; + ChangeBrokerIp(_config["Kafka:BootstrapServers"]); } - public T? WaitMessage(string topic) + public IEnumerable? WaitMessages(string topic) where T : IBrokerResponse - { + { try { _consumer.Subscribe(topic); var consumeResult = _consumer.Consume(TimeSpan.FromMilliseconds(1000)); - + Console.WriteLine($"================ Received message: {consumeResult?.Message.Value}"); if (consumeResult == null) { // No message received from Kafka within the specified timeout. 
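// default here is null, since the method returns IEnumerable<T>?, so an empty poll
// surfaces to callers as a missing result rather than an empty list.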
return default; } - return JsonSerializer.Deserialize(consumeResult.Message.Value); + + return JsonSerializer.Deserialize>(consumeResult.Message.Value); } catch (Exception ex) @@ -47,5 +43,17 @@ namespace Cloud.Services.Broker.Implement.Kafka _consumer.Close(); } } + + public void ChangeBrokerIp(string ip) + { + var consumerConfig = new ConsumerConfig() + { + BootstrapServers = ip, + GroupId = _config["Kafka:GroupId"], + AutoOffsetReset = AutoOffsetReset.Earliest, + }; + + _consumer = new ConsumerBuilder(consumerConfig).Build(); + } } } \ No newline at end of file diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs index b301084..0391cc4 100644 --- a/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs @@ -5,17 +5,14 @@ namespace Cloud.Services.Broker.Implement.Kafka { public class KafkaProducer : IBrokerProducer { - private readonly IProducer _producer; + private IProducer _producer; + private readonly IConfiguration _config; public KafkaProducer(IConfiguration configuration) { - var producerConfig = new ProducerConfig - { - BootstrapServers = configuration["Kafka:BootstrapServers"] - }; + _config = configuration; - //Build the Producer - _producer = new ProducerBuilder(producerConfig).Build(); + ChangeBrokerIp(_config["Kafka:BootstrapServers"]); } public async Task ProduceAsync(string topic, Command command) { @@ -24,5 +21,16 @@ namespace Cloud.Services.Broker.Implement.Kafka //Produce the Message await _producer.ProduceAsync(topic, message); } + + public void ChangeBrokerIp(string ip) + { + var producerConfig = new ProducerConfig + { + BootstrapServers = ip + }; + + //Build the Producer + _producer = new ProducerBuilder(producerConfig).Build(); + } } } \ No newline at end of file diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs index c0b90ba..8fc9174 100644 --- a/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaService.cs @@ -13,10 +13,18 @@ namespace Cloud.Services.Broker.Implement.Kafka _consumer = new KafkaConsumer(configuration); } - public T? Consume(string topic) - where T : IBrokerResponse => _consumer.WaitMessage(topic); + public IEnumerable? 
WaitMessages(string topic) + where T : IBrokerResponse + => _consumer.WaitMessages(topic); - public async Task Produce(Command command) + + public async Task ProduceAsync(string topic, Command command) => await _producer.ProduceAsync("commands", command); + + public void ChangeBrokerIp(string ip) + { + _consumer.ChangeBrokerIp(ip); + _producer.ChangeBrokerIp(ip); + } } } \ No newline at end of file From 8478bf1a471009f8d66ec9e83d95728d928b4057 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 4 Dec 2024 01:57:29 +0400 Subject: [PATCH 43/49] add: greenhouse service --- Cloud/Controllers/GreengouseController.cs | 101 ++++++++++++------ Cloud/Program.cs | 14 +-- Cloud/Services/Domain/IGreenhouseService.cs | 45 ++++++++ .../Domain/Implement/GreenhouseService.cs | 67 ++++++++++++ 4 files changed, 182 insertions(+), 45 deletions(-) create mode 100644 Cloud/Services/Domain/IGreenhouseService.cs create mode 100644 Cloud/Services/Domain/Implement/GreenhouseService.cs diff --git a/Cloud/Controllers/GreengouseController.cs b/Cloud/Controllers/GreengouseController.cs index 473f292..6f77c60 100644 --- a/Cloud/Controllers/GreengouseController.cs +++ b/Cloud/Controllers/GreengouseController.cs @@ -2,75 +2,106 @@ using Cloud.Models; using Cloud.Requests; using Cloud.Services.Broker; using Cloud.Services.Broker.Support; +using Cloud.Services.Domain; using Microsoft.AspNetCore.Mvc; namespace Cloud.Controllers { [ApiController] - [Route("api/user/{userId}/farm/{farmId}/greenhouse")] + [Route("api/farm/{farmId}/greenhouse")] public class GreenhouseController : ControllerBase { - private readonly IBrokerService _brokerService; - private readonly ApplicationContext _context; - private readonly IConfiguration _config; - public GreenhouseController(IConfiguration config, ApplicationContext context, - IBrokerService brokerService) + private readonly IGreenhouseService _greenhouseService; + public GreenhouseController(IGreenhouseService greenhouseService) { - _brokerService = brokerService; - _context = context; - _config = config; + _greenhouseService = greenhouseService; } - /** - * Возвращает текущую информацию о всех теплицах пользователя - */ + /// + /// Возвращает текущую информацию о всех теплицах пользователя + /// + /// + /// [HttpGet] - public async Task>> GetAll(int userId, int farmId) + public async Task>> GetAll(int farmId) { - throw new NotImplementedException(); + try + { + var greenhouses = _greenhouseService.GetAll(farmId); + return Ok(greenhouses); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } } - /** - * Возвращает текущую информацию о конкретной теплице - */ + /// + /// Возвращает текущую информацию о конкретной теплице + /// + /// + /// + /// [HttpGet("{greenhouseId}")] - public async Task> Get(int userId, int farmId, int greenhouseId) + public async Task> Get(int farmId, int greenhouseId) { - throw new NotImplementedException(); + try + { + var greenhouses = _greenhouseService.GetGreenhouseInfo(greenhouseId, farmId); + return Ok(greenhouses); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } } - /** - * Возвращает сохраненные данные для автоматизации теплицы - */ + /// + /// Возвращает сохраненные данные для автоматизации теплицы + /// + /// + /// + /// [HttpGet("{greenhouseId}/settings")] - public async Task> GetGreenhouse(int userId, int farmId, int greenhouseId) + public async Task> GetGreenhouse(int farmId, int greenhouseId) { throw new NotImplementedException(); } - /** - * Сохраняет в базе данных API данные для автоматизации теплицы - */ + /// + /// 
Сохраняет в базе данных API данные для автоматизации теплицы + /// + /// + /// + /// [HttpPost] - public async Task> SaveToDatabase(int userId, int farmId, GreenhouseRequest greenhouse) + public async Task> SaveToDatabase(int farmId, GreenhouseRequest greenhouse) { throw new NotImplementedException(); } - /** - * Обновляет в базе данных API данные для автоматизации теплицы - */ + + /// + /// Обновляет в базе данных API данные для автоматизации теплицы + /// + /// ID фермы + /// ID теплицы + /// Данные для обновления + /// Обновленный объект Greenhouse [HttpPut("{greenhouseId}/settings")] - public async Task> Update(int userId, int farmId, int greenhouseId, GreenhouseRequest greenhouse) + public async Task> Update(int farmId, int greenhouseId, GreenhouseRequest greenhouse) { throw new NotImplementedException(); } - /** - * Удаляет из базы данных API запись настроек автоматизации теплицы - */ + /// + /// Удаляет из базы данных API запись настроек автоматизации теплицы + /// + /// + /// + /// [HttpDelete("{greenhouseId}")] - public async Task Delete(int userId, int farmId, int greenhouseId) + public async Task Delete(int farmId, int greenhouseId) { throw new NotImplementedException(); diff --git a/Cloud/Program.cs b/Cloud/Program.cs index 6e22ecd..9cec515 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -10,11 +10,14 @@ using StackExchange.Redis; using Cloud.Services.Broker.Implement.Kafka; using Cloud.Services.Broker; using Cloud.Services; +using Cloud.Services.Domain.Implement; +using Cloud.Services.Domain; var builder = WebApplication.CreateBuilder(args); // Add services to the container. -builder.Services.AddSingleton(); +builder.Services.AddTransient(); +builder.Services.AddTransient(); //Redis configuration builder.Services.AddSingleton(sp => { @@ -22,15 +25,6 @@ builder.Services.AddSingleton(sp => return ConnectionMultiplexer.Connect(configuration); }); -//Kafka producer service -builder.Services.AddSingleton(); - -//Kafka consumer service -builder.Services.AddSingleton(); - -//Add the BackgroundWorkerService -builder.Services.AddHostedService(); - //Jwt configuration var jwtIssuer = builder.Configuration.GetSection("Jwt:Issuer").Get(); var jwtKey = builder.Configuration.GetSection("Jwt:Key").Get(); diff --git a/Cloud/Services/Domain/IGreenhouseService.cs b/Cloud/Services/Domain/IGreenhouseService.cs new file mode 100644 index 0000000..523af87 --- /dev/null +++ b/Cloud/Services/Domain/IGreenhouseService.cs @@ -0,0 +1,45 @@ +using Cloud.Models; +using Cloud.Services.Broker.Support; + +namespace Cloud.Services.Domain; + +public interface IGreenhouseService +{ + /// + /// Возвращает текущую информацию о конкретной теплице из брокера + /// + /// ID теплицы + /// ID фермы, то есть брокера + /// Текущие данные о теплице от менеджера теплицы + public Task GetGreenhouseInfo(int id, int farmId); + /// + /// Возвращает сохраненные данные для автоматизации теплицы из базы данных + /// + /// ID теплицы + /// Данные для автоматизации теплицы + public Task GetGreenhouse(int id); + /// + /// Возвращает список данных о всех теплицах пользователя из брокера + /// + /// ID фермы + /// Список текущих данных о теплицах + public Task?> GetAll(int farmId); + /// + /// Сохраняет данные об автоматизации теплицы в базу данных + /// + /// Данные автоматизации теплицы + /// Созданную сущность из базы данных + public Task Create(Greenhouse greenhouse); + /// + /// Обновляет данные автоматизации теплицы в базе данных + /// + /// Новая информация об автоматизации теплицы + /// Обновленную сущность из базы 
данных + public Task Update(Greenhouse greenhouse); + /// + /// Удаляет данные об автоматизации теплицы из базы данных + /// + /// ID данных автоматизации теплицы + /// Возвращает удаленную сущность + public Task Delete(int id); +} \ No newline at end of file diff --git a/Cloud/Services/Domain/Implement/GreenhouseService.cs b/Cloud/Services/Domain/Implement/GreenhouseService.cs new file mode 100644 index 0000000..8c2d8bc --- /dev/null +++ b/Cloud/Services/Domain/Implement/GreenhouseService.cs @@ -0,0 +1,67 @@ +using Cloud.Models; +using Cloud.Services.Broker; +using Cloud.Services.Broker.Support; +using Microsoft.EntityFrameworkCore; + +namespace Cloud.Services.Domain.Implement; + +public class GreenhouseService : IGreenhouseService +{ + private readonly IBrokerService _brokerService; + private readonly ApplicationContext _context; + + public GreenhouseService(IBrokerService brokerService, ApplicationContext context) + { + _context = context; + _brokerService = brokerService; + } + + public async Task Create(Greenhouse greenhouse) + { + var res = await _context.Greenhouses.AddAsync(greenhouse); + await _context.SaveChangesAsync(); + return res.Entity; + } + + public async Task Delete(int id) + { + var greenhouse = await _context.Greenhouses.FirstOrDefaultAsync(x => x.Id == id); + + _context.Greenhouses.Remove(greenhouse); + await _context.SaveChangesAsync(); + return greenhouse; + } + + + public async Task GetGreenhouse(int id) + { + return await _context.Greenhouses.FirstOrDefaultAsync(x => x.Id == id); + } + + public async Task Update(Greenhouse greenhouse) + { + var res = _context.Greenhouses.Update(greenhouse); + await _context.SaveChangesAsync(); + return res.Entity; + } + + public async Task?> GetAll(int farmId) + { + await _changeBrokerIp(farmId); + return _brokerService.WaitMessages("data"); + } + + public async Task GetGreenhouseInfo(int id, int farmId) + { + await _changeBrokerIp(farmId); + var infos = _brokerService.WaitMessages("data"); + return infos?.FirstOrDefault(x => x.Id == id); + } + + private async Task _changeBrokerIp(int farmId) + { + var farm = await _context.Farms.FirstOrDefaultAsync(x => x.Id == farmId); + _brokerService.ChangeBrokerIp(farm.RaspberryIP); + + } +} From 2e20e9d7cc98d79d47501f4b38ec8bbc71d6b333 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 4 Dec 2024 04:06:56 +0400 Subject: [PATCH 44/49] =?UTF-8?q?fix:=20=D0=B2=D1=8B=D0=BD=D0=B5=D1=81?= =?UTF-8?q?=D0=B5=D0=BD=D1=8B=20=D0=B2=D0=B0=D0=B6=D0=BD=D1=8B=D0=B5=20?= =?UTF-8?q?=D0=BF=D0=B0=D1=80=D0=B0=D0=BC=D0=B5=D1=82=D1=80=D1=8B=20=D0=B2?= =?UTF-8?q?=20=D0=BF=D0=B5=D1=80=D0=B5=D0=BC=D0=B5=D0=BD=D0=BD=D1=8B=D0=B5?= =?UTF-8?q?=20=D0=BE=D0=BA=D1=80=D1=83=D0=B6=D0=B5=D0=BD=D0=B8=D1=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env | 4 ++++ Cloud/Dockerfile | 4 ++-- Cloud/Program.cs | 15 ++++++++++++--- Cloud/Support/NetworkSupport.cs | 26 ++++++++++++++++++++++++++ docker-compose.yml | 13 ++++++++++--- 5 files changed, 54 insertions(+), 8 deletions(-) create mode 100644 .env create mode 100644 Cloud/Support/NetworkSupport.cs diff --git a/.env b/.env new file mode 100644 index 0000000..bff77a7 --- /dev/null +++ b/.env @@ -0,0 +1,4 @@ +POSTGRES_USER="postgres" +POSTGRES_PASSWORD="12345" +POSTGRES_DB="main_database" +DB_CONNECTION_STRING="Host=postgres:5438;Database=${POSTGRES_DB};Username=${POSTGRES_USER};Password=${POSTGRES_PASSWORD}" \ No newline at end of file diff --git a/Cloud/Dockerfile b/Cloud/Dockerfile index 7a518b4..2f52ad4 100644 --- 
a/Cloud/Dockerfile +++ b/Cloud/Dockerfile @@ -10,7 +10,7 @@ RUN adduser -u 5678 --disabled-password --gecos "" appuser && chown -R appuser / USER appuser FROM mcr.microsoft.com/dotnet/sdk:6.0 AS build -ARG configuration=Release +ARG configuration=Debug WORKDIR /src COPY ["Cloud.csproj", "."] RUN dotnet restore "./Cloud.csproj" @@ -25,6 +25,6 @@ RUN dotnet publish "./Cloud.csproj" -c $configuration -o /app/publish /p:UseAppH FROM base AS final WORKDIR /app COPY --from=publish /app/publish . -# TODO: Добавить инициализацию базы данных с помощью миграции + ENTRYPOINT ["dotnet", "Cloud.dll"] diff --git a/Cloud/Program.cs b/Cloud/Program.cs index 98799f2..e8eac1c 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -13,6 +13,8 @@ using Cloud.Services; using Cloud.Services.Domain.Implement; using Cloud.Services.Domain; using Cloud.Services.Cache; +using Cloud.Support; +using System.Text.RegularExpressions; var builder = WebApplication.CreateBuilder(args); @@ -21,9 +23,10 @@ builder.Services.AddTransient(); builder.Services.AddTransient(); //Redis configuration +string redisUrl = Environment.GetEnvironmentVariable("REDIS_URL") ?? "localhost:6379"; builder.Services.AddSingleton(sp => { - var configuration = ConfigurationOptions.Parse("localhost:6379"); + var configuration = ConfigurationOptions.Parse(redisUrl); return ConnectionMultiplexer.Connect(configuration); }); builder.Services.AddSingleton(); @@ -47,15 +50,21 @@ builder.Services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) }; }); +string connectionString = Environment.GetEnvironmentVariable("DB_CONNECTION_STRING") + ?? "Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345"; +string dbUrl = "http://" + Regex.Match(connectionString, @"(?<=Host=)([^;]+)").Groups[1].Value; +await NetworkSupport.CheckConnectionAsync(dbUrl); + builder.Services.AddDbContext(options => - options.UseNpgsql("Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345")); + options.UseNpgsql(connectionString)); // Настройка CORS +string frontUrl = Environment.GetEnvironmentVariable("FRONT_URL") ?? "http://localhost:3000"; builder.Services.AddCors(options => { options.AddPolicy("AllowFrontendLocalhost", builder => { - builder.WithOrigins("http://localhost:3000") // фронтенд + builder.WithOrigins(frontUrl) // фронтенд .AllowAnyHeader() .AllowAnyMethod(); }); diff --git a/Cloud/Support/NetworkSupport.cs b/Cloud/Support/NetworkSupport.cs new file mode 100644 index 0000000..752d9a6 --- /dev/null +++ b/Cloud/Support/NetworkSupport.cs @@ -0,0 +1,26 @@ +namespace Cloud.Support; + +public static class NetworkSupport +{ + public static async Task CheckConnectionAsync(string address) + { + using var client = new HttpClient(); + try + { + var response = await client.GetAsync(address); + + if (response.IsSuccessStatusCode) + { + Console.WriteLine($"Соединение успешно проверено. Статус-код: {response.StatusCode}"); + } + else + { + Console.WriteLine($"Соединение не удалось проверить. Статус-код: {response.StatusCode}. URL: {address}"); + } + } + catch (HttpRequestException ex) + { + Console.WriteLine($"Ошибка при проверке соединения: {ex.Message}. 
URL: {address}"); + } + } +} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index ede4fce..8608800 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,6 +12,13 @@ services: build: ./Cloud/ ports: - "5124:5124" + environment: + DB_CONNECTION_STRING: ${DB_CONNECTION_STRING} + REDDIS_URL: redis:6379 + # На всякий случай, если будет больно + # KAFKA_URL: kafka:9092 + # Добавить, когда будет фронт! + # FRONT_URL: front:3000 depends_on: - postgres - redis @@ -19,9 +26,9 @@ services: image: postgres:14 container_name: cucumber_database environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: 12345 - POSTGRES_DB: main_database + POSTGRES_USER: ${POSTGRES_USER} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + POSTGRES_DB: ${POSTGRES_DB} ports: - "5438:5432" volumes: From 739d434f53d0fc4e0c5be5ed87f5ede553604b6d Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 4 Dec 2024 06:27:39 +0400 Subject: [PATCH 45/49] =?UTF-8?q?fix:=20=D0=B5=D1=81=D1=82=D1=8C=20=D0=BA?= =?UTF-8?q?=D0=BE=D0=BD=D1=82=D0=B0=D0=BA=D1=82!=20=D0=BC=D0=B5=D0=B6?= =?UTF-8?q?=D0=B4=D1=83=20=D0=BE=D0=B1=D0=BB=D0=B0=D0=BA=D0=BE=D0=BC=20?= =?UTF-8?q?=D0=B8=20=D0=BA=D0=B0=D1=84=D0=BA=D0=BE=D0=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env | 2 +- Cloud/Controllers/GreengouseController.cs | 1 + Cloud/Dockerfile | 4 +-- Cloud/Middlewares/DatabaseMiddleware.cs | 30 +++++++++++++++++++ Cloud/Program.cs | 16 +++++----- .../Broker/Implement/Kafka/KafkaConsumer.cs | 3 +- .../Broker/Implement/Kafka/KafkaProducer.cs | 12 ++++---- .../Domain/Implement/GreenhouseService.cs | 4 +-- docker-compose.yml | 6 ++-- 9 files changed, 56 insertions(+), 22 deletions(-) create mode 100644 Cloud/Middlewares/DatabaseMiddleware.cs diff --git a/.env b/.env index bff77a7..eafc6c7 100644 --- a/.env +++ b/.env @@ -1,4 +1,4 @@ POSTGRES_USER="postgres" POSTGRES_PASSWORD="12345" POSTGRES_DB="main_database" -DB_CONNECTION_STRING="Host=postgres:5438;Database=${POSTGRES_DB};Username=${POSTGRES_USER};Password=${POSTGRES_PASSWORD}" \ No newline at end of file +DB_CONNECTION_STRING="Host=postgres:5432;Database=${POSTGRES_DB};Username=${POSTGRES_USER};Password=${POSTGRES_PASSWORD}" \ No newline at end of file diff --git a/Cloud/Controllers/GreengouseController.cs b/Cloud/Controllers/GreengouseController.cs index 6f77c60..57448ea 100644 --- a/Cloud/Controllers/GreengouseController.cs +++ b/Cloud/Controllers/GreengouseController.cs @@ -28,6 +28,7 @@ namespace Cloud.Controllers try { var greenhouses = _greenhouseService.GetAll(farmId); + if (greenhouses == null) return NotFound("Greenhouses is not found"); return Ok(greenhouses); } catch (Exception ex) diff --git a/Cloud/Dockerfile b/Cloud/Dockerfile index 2f52ad4..b4c2152 100644 --- a/Cloud/Dockerfile +++ b/Cloud/Dockerfile @@ -10,7 +10,7 @@ RUN adduser -u 5678 --disabled-password --gecos "" appuser && chown -R appuser / USER appuser FROM mcr.microsoft.com/dotnet/sdk:6.0 AS build -ARG configuration=Debug +ARG configuration=Development WORKDIR /src COPY ["Cloud.csproj", "."] RUN dotnet restore "./Cloud.csproj" @@ -19,7 +19,7 @@ WORKDIR "/src/." 
RUN dotnet build "./Cloud.csproj" -c $configuration -o /app/build FROM build AS publish -ARG configuration=Release +ARG configuration=Development RUN dotnet publish "./Cloud.csproj" -c $configuration -o /app/publish /p:UseAppHost=false FROM base AS final diff --git a/Cloud/Middlewares/DatabaseMiddleware.cs b/Cloud/Middlewares/DatabaseMiddleware.cs new file mode 100644 index 0000000..88091bf --- /dev/null +++ b/Cloud/Middlewares/DatabaseMiddleware.cs @@ -0,0 +1,30 @@ +using Microsoft.EntityFrameworkCore; + +namespace Cloud.Middlewares; + +public static class DatabaseMiddleware +{ + public static void AddDbConnectionService(this IServiceCollection services) + { + string connectionString = Environment.GetEnvironmentVariable("DB_CONNECTION_STRING") + ?? "Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345"; + + services.AddDbContext(options => + options.UseNpgsql(connectionString)); + + } + public static void MigrateDb(this IApplicationBuilder app) + { + try + { + using var scope = app.ApplicationServices.CreateScope(); + var context = scope.ServiceProvider.GetRequiredService(); + + context.Database.Migrate(); + } + catch (Exception ex) + { + Console.WriteLine(ex.Message); + } + } +} \ No newline at end of file diff --git a/Cloud/Program.cs b/Cloud/Program.cs index e8eac1c..5daec1f 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -15,6 +15,7 @@ using Cloud.Services.Domain; using Cloud.Services.Cache; using Cloud.Support; using System.Text.RegularExpressions; +using Cloud.Middlewares; var builder = WebApplication.CreateBuilder(args); @@ -49,15 +50,8 @@ builder.Services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(jwtKey)) }; }); - -string connectionString = Environment.GetEnvironmentVariable("DB_CONNECTION_STRING") - ?? "Host=localhost;Port=5438;Database=main_database;Username=postgres;Password=12345"; -string dbUrl = "http://" + Regex.Match(connectionString, @"(?<=Host=)([^;]+)").Groups[1].Value; -await NetworkSupport.CheckConnectionAsync(dbUrl); - -builder.Services.AddDbContext(options => - options.UseNpgsql(connectionString)); - +// Настройка подключения к БД +builder.Services.AddDbConnectionService(); // Настройка CORS string frontUrl = Environment.GetEnvironmentVariable("FRONT_URL") ?? "http://localhost:3000"; builder.Services.AddCors(options => @@ -113,6 +107,7 @@ var app = builder.Build(); // Configure the HTTP request pipeline. if (app.Environment.IsDevelopment()) { + Console.WriteLine("Swagger enabled"); app.UseSwagger(); app.UseSwaggerUI(c => { @@ -126,6 +121,9 @@ app.UseHttpsRedirection(); // Включение CORS app.UseCors("AllowFrontendLocalhost"); +// Применение миграций +app.MigrateDb(); + app.UseAuthentication(); app.UseAuthorization(); diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs index d2fe34c..c6b0b30 100644 --- a/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs @@ -13,7 +13,8 @@ namespace Cloud.Services.Broker.Implement.Kafka public KafkaConsumer(IConfiguration config) { _config = config; - ChangeBrokerIp(_config["Kafka:BootstrapServers"]); + Console.WriteLine($"KafkaConsumer created. IP:" + _config["KAFKA_URL"]); + ChangeBrokerIp(_config["KAFKA_URL"]); } public IEnumerable? 
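// The broker address now comes from the KAFKA_URL environment variable via IConfiguration
// (environment variables are part of ASP.NET Core's default configuration sources),
// replacing the former Kafka:BootstrapServers configuration key.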
WaitMessages(string topic) diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs index 0391cc4..2a0f8c8 100644 --- a/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs @@ -1,3 +1,4 @@ +using System.Text.Json; using Cloud.Services.Broker.Support; using Confluent.Kafka; @@ -5,18 +6,19 @@ namespace Cloud.Services.Broker.Implement.Kafka { public class KafkaProducer : IBrokerProducer { - private IProducer _producer; + private IProducer _producer; private readonly IConfiguration _config; public KafkaProducer(IConfiguration configuration) { _config = configuration; - - ChangeBrokerIp(_config["Kafka:BootstrapServers"]); + Console.WriteLine($"KafkaConsumer created. IP:" + _config["KAFKA_URL"]); + ChangeBrokerIp(_config["KAFKA_URL"]); } public async Task ProduceAsync(string topic, Command command) { - var message = new Message { Key = Guid.NewGuid(), Value = command }; + var commandSerialized = JsonSerializer.Serialize(command); + var message = new Message { Key = Guid.NewGuid().ToString(), Value = commandSerialized }; //Produce the Message await _producer.ProduceAsync(topic, message); @@ -30,7 +32,7 @@ namespace Cloud.Services.Broker.Implement.Kafka }; //Build the Producer - _producer = new ProducerBuilder(producerConfig).Build(); + _producer = new ProducerBuilder(producerConfig).Build(); } } } \ No newline at end of file diff --git a/Cloud/Services/Domain/Implement/GreenhouseService.cs b/Cloud/Services/Domain/Implement/GreenhouseService.cs index 8c2d8bc..f398a39 100644 --- a/Cloud/Services/Domain/Implement/GreenhouseService.cs +++ b/Cloud/Services/Domain/Implement/GreenhouseService.cs @@ -47,13 +47,13 @@ public class GreenhouseService : IGreenhouseService public async Task?> GetAll(int farmId) { - await _changeBrokerIp(farmId); + // await _changeBrokerIp(farmId); return _brokerService.WaitMessages("data"); } public async Task GetGreenhouseInfo(int id, int farmId) { - await _changeBrokerIp(farmId); + // await _changeBrokerIp(farmId); var infos = _brokerService.WaitMessages("data"); return infos?.FirstOrDefault(x => x.Id == id); } diff --git a/docker-compose.yml b/docker-compose.yml index 8608800..11c4650 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,14 +9,16 @@ networks: services: cloud: + networks: + - vpn build: ./Cloud/ ports: - "5124:5124" environment: + ASPNETCORE_ENVIRONMENT: Development DB_CONNECTION_STRING: ${DB_CONNECTION_STRING} REDDIS_URL: redis:6379 - # На всякий случай, если будет больно - # KAFKA_URL: kafka:9092 + KAFKA_URL: kafka:29092 # Добавить, когда будет фронт! 
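      # Note: Program.cs reads the Redis address from REDIS_URL, so the REDDIS_URL key above is
      # never picked up and the service falls back to localhost:6379 inside the container.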
# FRONT_URL: front:3000 depends_on: From 7b751b707293984732e03cf1d93a21a323a725bf Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 4 Dec 2024 11:32:34 +0400 Subject: [PATCH 46/49] =?UTF-8?q?fix:=20=D0=BD=D0=B8=D1=87=D0=B5=D0=B3?= =?UTF-8?q?=D0=BE=20=D0=BD=D0=B5=20=D0=BF=D0=BE=D0=BC=D0=BE=D0=B3=D0=B0?= =?UTF-8?q?=D0=B5=D1=82=20=D0=BD=D0=BE=D1=80=D0=BC=D0=B0=D0=BB=D1=8C=D0=BD?= =?UTF-8?q?=D0=BE=20=D0=B7=D0=B0=D0=BF=D1=80=D0=BE=D1=81=D0=B8=D1=82=D1=8C?= =?UTF-8?q?=20=D0=B4=D0=B0=D0=BD=D0=BD=D1=8B=D0=B5...=20=D1=8D=D1=82=D0=BE?= =?UTF-8?q?=20=D0=BA=D0=BE=D0=BD=D0=B5=D1=86...?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/Program.cs | 2 +- .../Broker/Implement/Kafka/KafkaConsumer.cs | 75 +++++++++++++------ .../Broker/Implement/Kafka/KafkaProducer.cs | 2 +- 3 files changed, 56 insertions(+), 23 deletions(-) diff --git a/Cloud/Program.cs b/Cloud/Program.cs index 5daec1f..16d0c36 100644 --- a/Cloud/Program.cs +++ b/Cloud/Program.cs @@ -20,7 +20,7 @@ using Cloud.Middlewares; var builder = WebApplication.CreateBuilder(args); // Add services to the container. -builder.Services.AddTransient(); +builder.Services.AddSingleton(); builder.Services.AddTransient(); //Redis configuration diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs index c6b0b30..f29a4b8 100644 --- a/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaConsumer.cs @@ -1,6 +1,7 @@ using Cloud.Services.Broker.Support; using Confluent.Kafka; +using System.Diagnostics; using System.Text.Json; namespace Cloud.Services.Broker.Implement.Kafka @@ -20,29 +21,61 @@ namespace Cloud.Services.Broker.Implement.Kafka public IEnumerable? WaitMessages(string topic) where T : IBrokerResponse { - try - { - _consumer.Subscribe(topic); + List res = new(); + List partitions; - var consumeResult = _consumer.Consume(TimeSpan.FromMilliseconds(1000)); - Console.WriteLine($"================ Received message: {consumeResult?.Message.Value}"); - if (consumeResult == null) - { - // No message received from Kafka within the specified timeout. - return default; - } + using var adminClient = new AdminClientBuilder(new AdminClientConfig { BootstrapServers = _config["KAFKA_URL"] }).Build(); + var meta = adminClient.GetMetadata(TimeSpan.FromSeconds(20)); + var currentTopic = meta.Topics.SingleOrDefault(t => t.Topic == topic) + ?? throw new Exception($"Topic {topic} not found"); + partitions = currentTopic.Partitions; + + _consumer.Subscribe(topic); + foreach (var partition in partitions) + { + var topicPartition = new TopicPartition(topic, partition.PartitionId); + _consumer.Assign(topicPartition); + + T? message = _consume(); + if (message == null) return null; + res.Add(message); - return JsonSerializer.Deserialize>(consumeResult.Message.Value); + } + _consumer.Unassign(); + _consumer.Unsubscribe(); + return res; + } - } - catch (Exception ex) - { - throw; - } - finally - { - _consumer.Close(); - } + private T? 
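        // Polls the currently assigned partition until a message arrives or roughly one minute
        // has elapsed (tracked with the Stopwatch); returns default on timeout, otherwise
        // deserializes the JSON payload into T. On a consume error the consumer is closed and
        // the exception is rethrown.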
_consume() where T : IBrokerResponse + { + var sw = new Stopwatch(); + sw.Start(); + try + { + while (true) + { + var consumeResult = _consumer.Consume(TimeSpan.FromMinutes(1)); + + if (consumeResult?.Message?.Value == null) + { + // Предел по времени + if (sw.Elapsed > TimeSpan.FromMinutes(1)) + { + return default; + } + continue; + } + + string jsonObj = consumeResult.Message.Value; + return JsonSerializer.Deserialize(jsonObj); + + } + } + catch (Exception ex) + { + _consumer.Close(); + throw; + } } public void ChangeBrokerIp(string ip) @@ -53,7 +86,7 @@ namespace Cloud.Services.Broker.Implement.Kafka GroupId = _config["Kafka:GroupId"], AutoOffsetReset = AutoOffsetReset.Earliest, }; - + _consumer?.Close(); _consumer = new ConsumerBuilder(consumerConfig).Build(); } } diff --git a/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs b/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs index 2a0f8c8..6a916bb 100644 --- a/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs +++ b/Cloud/Services/Broker/Implement/Kafka/KafkaProducer.cs @@ -12,7 +12,7 @@ namespace Cloud.Services.Broker.Implement.Kafka public KafkaProducer(IConfiguration configuration) { _config = configuration; - Console.WriteLine($"KafkaConsumer created. IP:" + _config["KAFKA_URL"]); + Console.WriteLine($"KafkaProducer created. IP:" + _config["KAFKA_URL"]); ChangeBrokerIp(_config["KAFKA_URL"]); } public async Task ProduceAsync(string topic, Command command) From db3cbde0af2bbb27228e7a92deac3c94bfa18921 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 4 Dec 2024 11:41:08 +0400 Subject: [PATCH 47/49] =?UTF-8?q?add:=20=D0=BE=D1=81=D0=BD=D0=BE=D0=B2?= =?UTF-8?q?=D0=BD=D0=BE=D0=B9=20=D0=BA=D1=80=D1=83=D0=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cloud/Controllers/GreengouseController.cs | 59 +++++++++++++++++++++-- 1 file changed, 54 insertions(+), 5 deletions(-) diff --git a/Cloud/Controllers/GreengouseController.cs b/Cloud/Controllers/GreengouseController.cs index 57448ea..9f4c025 100644 --- a/Cloud/Controllers/GreengouseController.cs +++ b/Cloud/Controllers/GreengouseController.cs @@ -66,7 +66,16 @@ namespace Cloud.Controllers [HttpGet("{greenhouseId}/settings")] public async Task> GetGreenhouse(int farmId, int greenhouseId) { - throw new NotImplementedException(); + try + { + var greenhouse = await _greenhouseService.GetGreenhouse(greenhouseId); + if (greenhouse == null) return NotFound("Greenhouses is not found"); + return Ok(greenhouse); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } } /// @@ -78,7 +87,23 @@ namespace Cloud.Controllers [HttpPost] public async Task> SaveToDatabase(int farmId, GreenhouseRequest greenhouse) { - throw new NotImplementedException(); + try + { + var greenhouseEntity = new Greenhouse() + { + RecomendedTemperature = greenhouse.RecomendedTemperature, + WateringMode = greenhouse.WateringMode, + HeatingMode = greenhouse.HeatingMode + }; + + var result = await _greenhouseService.Create(greenhouseEntity); + return Ok(result); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } + } @@ -92,7 +117,24 @@ namespace Cloud.Controllers [HttpPut("{greenhouseId}/settings")] public async Task> Update(int farmId, int greenhouseId, GreenhouseRequest greenhouse) { - throw new NotImplementedException(); + try + { + var greenhouseEntity = new Greenhouse() + { + Id = greenhouseId, + FarmId = farmId, + WateringMode = greenhouse.WateringMode, + HeatingMode = greenhouse.HeatingMode, + RecomendedTemperature = 
greenhouse.RecomendedTemperature + }; + + var result = await _greenhouseService.Update(greenhouseEntity); + return Ok(result); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } } /// @@ -104,8 +146,15 @@ namespace Cloud.Controllers [HttpDelete("{greenhouseId}")] public async Task Delete(int farmId, int greenhouseId) { - - throw new NotImplementedException(); + try + { + _ = await _greenhouseService.Delete(greenhouseId); + return Ok(); + } + catch (Exception ex) + { + return BadRequest(ex.Message); + } } } } \ No newline at end of file From 71bc31d0bd7952616025aa820f087b8e203e169b Mon Sep 17 00:00:00 2001 From: the Date: Wed, 4 Dec 2024 21:33:02 +0400 Subject: [PATCH 48/49] containers test --- GreenhouseDetector/Dockerfile | 11 +++++++++++ GreenhouseManager/Dockerfile | 11 +++++++++++ docker-compose.yml | 18 ++++++++++++++++++ requirements.txt | 3 +++ 4 files changed, 43 insertions(+) create mode 100644 GreenhouseDetector/Dockerfile create mode 100644 GreenhouseManager/Dockerfile create mode 100644 requirements.txt diff --git a/GreenhouseDetector/Dockerfile b/GreenhouseDetector/Dockerfile new file mode 100644 index 0000000..cb846ca --- /dev/null +++ b/GreenhouseDetector/Dockerfile @@ -0,0 +1,11 @@ +FROM python:latest + +WORKDIR /app + +COPY requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt + +COPY GreenhouseDetector/detector.py . + +CMD ["python", "detector.py"] \ No newline at end of file diff --git a/GreenhouseManager/Dockerfile b/GreenhouseManager/Dockerfile new file mode 100644 index 0000000..efc49af --- /dev/null +++ b/GreenhouseManager/Dockerfile @@ -0,0 +1,11 @@ +FROM python:latest + +WORKDIR /app + +COPY requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt + +COPY GreenhouseManager/manager.py . + +CMD ["python", "manager.py"] \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index a1e5c42..665e246 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -78,3 +78,21 @@ services: KAFKA_CLUSTERS_0_NAME: local KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 KAFKA_CLUSTERS_0_METRICS_PORT: 9997 + + #manager_py_service: + # container_name: manager_py + # build: + # context: . + # dockerfile: ./GreenhouseManager/Dockerfile + # depends_on: + # - kafka + + #detector_py_service: + # container_name: detector_py + # build: + # context: . 
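  #   # The build context stays at the repository root so requirements.txt (copied by both the
  #   # GreenhouseManager and GreenhouseDetector Dockerfiles) is available when the images build.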
+ # dockerfile: ./GreenhouseDetector/Dockerfile + # depends_on: + # - kafka + # expose: + # - 20002 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..0145355 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +kafka-python~=2.0.2 +Flask~=3.0.3 +requests~=2.31.0 \ No newline at end of file From aace337052e96e822ec98e956047a72de44ffc01 Mon Sep 17 00:00:00 2001 From: mfnefd Date: Wed, 4 Dec 2024 23:20:09 +0400 Subject: [PATCH 49/49] =?UTF-8?q?fix:=20=D1=85=D0=B7,=20=D0=B2=D1=80=D0=BE?= =?UTF-8?q?=D0=B4=D0=B5=20=D0=B1=D1=8B=20=D1=80=D0=B0=D0=B1=D0=BE=D1=82?= =?UTF-8?q?=D0=B0=D0=B5=D1=82,=20=D0=BD=D0=BE=20=D0=BF=D0=BE=D1=87=D0=B5?= =?UTF-8?q?=D0=BC=D1=83-=D1=82=D0=BE=20=D0=BD=D0=B5=20=D0=BF=D0=BE=D0=B4?= =?UTF-8?q?=D0=BA=D0=BB=D1=8E=D1=87=D0=B0=D0=B5=D1=82=D1=81=D1=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- GreenhouseDetector/Dockerfile | 2 +- GreenhouseDetector/detector.py | 6 +- GreenhouseManager/Dockerfile | 2 +- GreenhouseManager/manager.py | 10 +- docker-compose.yml | 165 +++++++++++++++++---------------- 5 files changed, 100 insertions(+), 85 deletions(-) diff --git a/GreenhouseDetector/Dockerfile b/GreenhouseDetector/Dockerfile index cb846ca..7be5c76 100644 --- a/GreenhouseDetector/Dockerfile +++ b/GreenhouseDetector/Dockerfile @@ -1,4 +1,4 @@ -FROM python:latest +FROM python:3.9-slim WORKDIR /app diff --git a/GreenhouseDetector/detector.py b/GreenhouseDetector/detector.py index d092373..4921dfe 100644 --- a/GreenhouseDetector/detector.py +++ b/GreenhouseDetector/detector.py @@ -1,3 +1,4 @@ +import os import time import random as rnd @@ -5,10 +6,13 @@ from flask import Flask import requests import threading + app = Flask(__name__) class Detector: def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower): + self.MANAGER_URL = os.environ.get('MANAGER_URL') + print("MANAGER_URL=", self.MANAGER_URL) self.id = id self.moistureThresholdUpper = moistureThresholdUpper self.moistureThresholdLower = moistureThresholdLower @@ -24,7 +28,7 @@ class Detector: def sendData(self): data = {"moisture": self.moisture, "temp": self.temp} - requests.post(f"http://127.0.0.1:20002/webhook?id={self.id}", json=data) + requests.post(f"{self.MANAGER_URL}/webhook?id={self.id}", json=data) detector1 = Detector(1, 0.6, 0.2, 40, 20) diff --git a/GreenhouseManager/Dockerfile b/GreenhouseManager/Dockerfile index efc49af..ab31b66 100644 --- a/GreenhouseManager/Dockerfile +++ b/GreenhouseManager/Dockerfile @@ -1,4 +1,4 @@ -FROM python:latest +FROM python:3.9-slim WORKDIR /app diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py index d9772d1..8366baf 100644 --- a/GreenhouseManager/manager.py +++ b/GreenhouseManager/manager.py @@ -1,3 +1,4 @@ +import os from kafka import KafkaProducer, KafkaConsumer import kafka import socket @@ -7,6 +8,7 @@ import time from enum import Enum import threading + app = Flask(__name__) def start_manager(): @@ -15,6 +17,8 @@ def start_manager(): class Manager: def __init__(self, _id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed", heater_state: str = "off"): + KAFKA_URL = os.environ.get('KAFKA_URL') + print("KAFKA_URL=", KAFKA_URL) self._id = _id self.moisture = moisture self.temp = temp @@ -23,14 +27,14 @@ class Manager: self.heater_state = heater_state self.dataPublisher = KafkaProducer( - bootstrap_servers=['localhost:9092'], + 
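            # All three Kafka clients in this class now take the broker address from the
            # KAFKA_URL environment variable (kafka:29092 for the manager service in
            # docker-compose.yml) instead of the hard-coded localhost:9092.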
bootstrap_servers=[KAFKA_URL], client_id=f'manager{self._id}_producer', value_serializer=lambda v: dumps(v).encode('utf-8') ) self.controllerConsumer = KafkaConsumer( 'commands', - bootstrap_servers=['localhost:9092'], + bootstrap_servers=[KAFKA_URL], auto_offset_reset='earliest', enable_auto_commit=True, consumer_timeout_ms=2000, @@ -38,7 +42,7 @@ class Manager: value_deserializer=lambda x: loads(x.decode('utf-8')) ) self.controllerConsumerResponse = KafkaProducer( - bootstrap_servers=['localhost:9092'], + bootstrap_servers=[KAFKA_URL], client_id=f'manager{self._id}_producer', value_serializer=lambda v: dumps(v).encode('utf-8') ) diff --git a/docker-compose.yml b/docker-compose.yml index 665e246..75795b6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,91 +8,98 @@ networks: gateway: "192.168.2.1" services: - zookeeper: - networks: - - vpn - image: confluentinc/cp-zookeeper:7.4.0 - environment: - ZOOKEEPER_CLIENT_PORT: 2181 - ZOOKEEPER_TICK_TIME: 2000 - ports: - - 2181:2181 + zookeeper: + networks: + - vpn + image: confluentinc/cp-zookeeper:7.4.0 + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + ports: + - 2181:2181 - kafka: - networks: - vpn: - ipv4_address: 192.168.2.10 - image: confluentinc/cp-kafka:7.4.0 - ports: - - 9092:9092 - - 9997:9997 + kafka: + networks: + vpn: + ipv4_address: 192.168.2.10 + image: confluentinc/cp-kafka:7.4.0 + ports: + - 9092:9092 + - 9997:9997 - expose: - - 29092:29092 + expose: + - 29092:29092 - environment: - KAFKA_BROKER_ID: 1 - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092 - KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT - KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 - KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 - KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000 - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 - depends_on: - - zookeeper + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092 + KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + depends_on: + - zookeeper - init-kafka: - networks: - - vpn - image: confluentinc/cp-kafka:7.4.0 - depends_on: - - kafka - entrypoint: [ '/bin/sh', '-c' ] - command: | - " - # blocks until kafka is reachable - kafka-topics --bootstrap-server kafka:29092 --list + init-kafka: + networks: + - vpn + image: confluentinc/cp-kafka:7.4.0 + depends_on: + - kafka + entrypoint: [ '/bin/sh', '-c' ] + command: | + " + # blocks until kafka is reachable + kafka-topics --bootstrap-server kafka:29092 --list - echo -e 'Creating kafka topics' - kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1 - kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 - kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 + echo -e 'Creating kafka topics' + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands 
--replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1 - echo -e 'Successfully created the following topics:' - kafka-topics --bootstrap-server kafka:29092 --list - " + echo -e 'Successfully created the following topics:' + kafka-topics --bootstrap-server kafka:29092 --list + " - kafka-ui: - networks: - - vpn - container_name: kafka-ui - image: provectuslabs/kafka-ui:latest - ports: - - 8080:8080 - depends_on: - - kafka - environment: - KAFKA_CLUSTERS_0_NAME: local - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 - KAFKA_CLUSTERS_0_METRICS_PORT: 9997 + kafka-ui: + networks: + - vpn + container_name: kafka-ui + image: provectuslabs/kafka-ui:latest + ports: + - 8080:8080 + depends_on: + - kafka + environment: + KAFKA_CLUSTERS_0_NAME: local + KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 + KAFKA_CLUSTERS_0_METRICS_PORT: 9997 - #manager_py_service: - # container_name: manager_py - # build: - # context: . - # dockerfile: ./GreenhouseManager/Dockerfile - # depends_on: - # - kafka + manager: + networks: + - vpn + build: + context: . + dockerfile: ./GreenhouseManager/Dockerfile + environment: + KAFKA_URL: kafka:29092 + depends_on: + - kafka + expose: + - 20002 - #detector_py_service: - # container_name: detector_py - # build: - # context: . - # dockerfile: ./GreenhouseDetector/Dockerfile - # depends_on: - # - kafka - # expose: - # - 20002 \ No newline at end of file + detector: + networks: + - vpn + build: + context: . + dockerfile: ./GreenhouseDetector/Dockerfile + environment: + MANAGER_URL: http://manager:20002 + depends_on: + - manager + \ No newline at end of file
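
As a quick way to confirm that readings actually reach the data topic that GreenhouseService polls, a throwaway kafka-python consumer can be run on the host against the published listener. This is only a sketch under a few assumptions: the compose stack is up, the topics were created by the init-kafka job, and the HOST listener advertised in docker-compose.yml (192.168.1.5:9092 in this series) is reachable from your machine; adjust that address to your own host IP. It reuses the same consumer settings manager.py already uses.

import json

from kafka import KafkaConsumer  # kafka-python, pinned in requirements.txt

# Assumption: the bootstrap address must match the HOST entry in KAFKA_ADVERTISED_LISTENERS
# (192.168.1.5:9092 in docker-compose.yml); replace it with your host's address if it differs.
consumer = KafkaConsumer(
    "data",
    bootstrap_servers=["192.168.1.5:9092"],
    auto_offset_reset="earliest",
    consumer_timeout_ms=10000,  # stop iterating after 10 s without new messages
    value_deserializer=lambda x: json.loads(x.decode("utf-8")),
)

# Each record's value is the JSON dictionary published by the manager.
for record in consumer:
    print(record.topic, record.value)

consumer.close()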