2 Commits

Author  SHA1        Message                            Date
the     25196f8e98  Added Kafka and its configurator   2024-10-09 11:53:56 +04:00
        3e39e2c0e6  Commit                             2024-10-07 00:01:57 +04:00

18 changed files with 93 additions and 723 deletions

.gitignore vendored (399 lines deleted)
View File

@@ -1,399 +0,0 @@
# ---> VisualStudio
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml

.vs/slnx.sqlite (binary, new file)

Binary file not shown.

View File

@@ -0,0 +1,4 @@
// <autogenerated />
using System;
using System.Reflection;
[assembly: global::System.Runtime.Versioning.TargetFrameworkAttribute(".NETCoreApp,Version=v6.0", FrameworkDisplayName = ".NET 6.0")]

View File

@@ -0,0 +1,23 @@
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime version: 4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System;
using System.Reflection;
[assembly: System.Reflection.AssemblyCompanyAttribute("CucumberCloud")]
[assembly: System.Reflection.AssemblyConfigurationAttribute("Debug")]
[assembly: System.Reflection.AssemblyFileVersionAttribute("1.0.0.0")]
[assembly: System.Reflection.AssemblyInformationalVersionAttribute("1.0.0")]
[assembly: System.Reflection.AssemblyProductAttribute("CucumberCloud")]
[assembly: System.Reflection.AssemblyTitleAttribute("CucumberCloud")]
[assembly: System.Reflection.AssemblyVersionAttribute("1.0.0.0")]
// Generated by the MSBuild WriteCodeFragment class.

View File

@@ -0,0 +1 @@
87ee1ebed43dd90dbbceb9a51a9dd6b4d21d9cc6

View File

@@ -0,0 +1,17 @@
is_global = true
build_property.TargetFramework = net6.0
build_property.TargetPlatformMinVersion =
build_property.UsingMicrosoftNETSdkWeb = true
build_property.ProjectTypeGuids =
build_property.InvariantGlobalization =
build_property.PlatformNeutralAssembly =
build_property.EnforceExtendedAnalyzerRules =
build_property._SupportedPlatformList = Linux,macOS,Windows
build_property.RootNamespace = CucumberCloud
build_property.RootNamespace = CucumberCloud
build_property.ProjectDir = C:\Users\User\source\repos\CucumberApp\CucumberCloud\
build_property.RazorLangVersion = 6.0
build_property.SupportLocalizedComponentNames =
build_property.GenerateRazorMetadataSourceChecksumAttributes =
build_property.MSBuildProjectDirectory = C:\Users\User\source\repos\CucumberApp\CucumberCloud
build_property._RazorSourceGeneratorDebug =

View File

@@ -0,0 +1,17 @@
// <auto-generated/>
global using global::Microsoft.AspNetCore.Builder;
global using global::Microsoft.AspNetCore.Hosting;
global using global::Microsoft.AspNetCore.Http;
global using global::Microsoft.AspNetCore.Routing;
global using global::Microsoft.Extensions.Configuration;
global using global::Microsoft.Extensions.DependencyInjection;
global using global::Microsoft.Extensions.Hosting;
global using global::Microsoft.Extensions.Logging;
global using global::System;
global using global::System.Collections.Generic;
global using global::System.IO;
global using global::System.Linq;
global using global::System.Net.Http;
global using global::System.Net.Http.Json;
global using global::System.Threading;
global using global::System.Threading.Tasks;

View File

@@ -1,16 +1,28 @@
networks:
  vpn:
    name: kafkaVPN
    driver: bridge
    ipam:
      config:
        - subnet: "192.168.2.0/24"
          gateway: "192.168.2.1"
services:
  postgres:
    image: postgres:14
    container_name: cucumer_database
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: 12345
      POSTGRES_DB: main_database
    ports:
      - "5438:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
  redis:
    image: redis:7
    container_name: my_redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
volumes:
  postgres_data:
  redis_data:
  zookeeper:
    networks:
      - vpn
    image: confluentinc/cp-zookeeper:7.4.0
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
@@ -19,37 +31,27 @@ services:
      - 2181:2181
  kafka:
    networks:
      vpn:
        ipv4_address: 192.168.2.10
    image: confluentinc/cp-kafka:7.4.0
    ports:
      - 9092:9092
      - 9997:9997
      - 9092:9092
    expose:
      - 29092:29092
      - 29092:29092
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092
      KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
    depends_on:
      - zookeeper
      - zookeeper
  init-kafka:
    networks:
      - vpn
    image: confluentinc/cp-kafka:7.4.0
    depends_on:
      - kafka
      - kafka
    entrypoint: [ '/bin/sh', '-c' ]
    command: |
      "
@@ -63,43 +65,4 @@ services:
      echo -e 'Successfully created the following topics:'
      kafka-topics --bootstrap-server kafka:29092 --list
      "
  kafka-ui:
    networks:
      - vpn
    container_name: kafka-ui
    image: provectuslabs/kafka-ui:latest
    ports:
      - 8080:8080
    depends_on:
      - kafka
    environment:
      KAFKA_CLUSTERS_0_NAME: local
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
      KAFKA_CLUSTERS_0_METRICS_PORT: 9997
  manager:
    networks:
      - vpn
    build:
      context: .
      dockerfile: ./GreenhouseManager/Dockerfile
    environment:
      KAFKA_URL: kafka:29092
    depends_on:
      - kafka
    expose:
      - 20002
  detector:
    networks:
      - vpn
    build:
      context: .
      dockerfile: ./GreenhouseDetector/Dockerfile
    environment:
      MANAGER_URL: http://manager:20002
    depends_on:
      - manager
"

View File

@@ -1,48 +0,0 @@
from json import dumps
from json import dumps


class ManageController:
    def __init__(self, producer, topic='commands'):
        self.valve_state = "closed"
        self.heater_state = "off"
        self.producer = producer
        self.topic = topic

    def toggle_device(self, device, request_id, greenhouse_id):
        if device == 'valve':
            if self.valve_state == 'closed':
                self.valve_state = 'open'
                print("Valve opened")
            else:
                self.valve_state = 'closed'
                print("Valve closed")
        elif device == 'heater':
            if self.heater_state == 'off':
                self.heater_state = 'on'
                print("Heater turned on")
            else:
                self.heater_state = 'off'
                print("Heater turned off")
        self.send_status(request_id, greenhouse_id)

    def send_status(self, request_id, greenhouse_id):
        status = {
            'request_id': request_id,
            'greenhouse_id': greenhouse_id,
            'valve_state': self.valve_state,
            'heater_state': self.heater_state
        }
        print(f"Sent device status: {status}")
        return status
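The deleted ManageController stores a producer and a 'commands' topic but the removed code never wires them up. Below is a hedged sketch of how it could have been driven, reusing the message keys ('command', 'request_id', 'greenhouse_id') that manager.py reads from the same topic; the bootstrap address and the polling loop are illustrative only, not repository code:

# Illustrative wiring only (kafka-python, as pinned in requirements.txt).
from json import dumps, loads
from kafka import KafkaConsumer, KafkaProducer

producer = KafkaProducer(bootstrap_servers=["kafka:29092"],
                         value_serializer=lambda v: dumps(v).encode("utf-8"))
consumer = KafkaConsumer("commands", bootstrap_servers=["kafka:29092"],
                         value_deserializer=lambda m: loads(m.decode("utf-8")))

controller = ManageController(producer)
for msg in consumer:
    cmd = msg.value  # expected keys mirror manager.py: 'command', 'request_id', 'greenhouse_id'
    controller.toggle_device(cmd["command"], cmd["request_id"], cmd["greenhouse_id"])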

View File

@@ -1,11 +0,0 @@
FROM python:3.9-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY GreenhouseDetector/detector.py .
CMD ["python", "detector.py"]

View File

@@ -1,43 +0,0 @@
import os
import time
import random as rnd
from flask import Flask
import requests
import threading

app = Flask(__name__)


class Detector:
    def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower):
        self.MANAGER_URL = os.environ.get('MANAGER_URL')
        print("MANAGER_URL=", self.MANAGER_URL)
        self.id = id
        self.moistureThresholdUpper = moistureThresholdUpper
        self.moistureThresholdLower = moistureThresholdLower
        self.tempThresholdUpper = tempThresholdUpper
        self.tempThresholdLower = tempThresholdLower
        self.moisture = 0
        self.temp = 0

    def cycle(self):
        self.moisture += rnd.random() / 100
        self.temp += (rnd.random() - 0.5) / 100

    def sendData(self):
        data = {"moisture": self.moisture,
                "temp": self.temp}
        requests.post(f"{self.MANAGER_URL}/webhook?id={self.id}", json=data)


detector1 = Detector(1, 0.6, 0.2, 40, 20)
detectors = [detector1]

if __name__ == "__main__":
    while True:
        for detector in detectors:
            detector.cycle()
            detector.sendData()
        time.sleep(1)
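A manual equivalent of Detector.sendData(), handy for exercising the manager's /webhook endpoint without running the detector loop. The URL matches the MANAGER_URL value from the compose file and the payload keys match what the detector posts; the concrete numbers are made up:

# Sketch: send one fake reading to the manager's webhook by hand.
import requests

MANAGER_URL = "http://manager:20002"  # value injected via the compose environment above
payload = {"moisture": 0.42, "temp": 21.3}  # same keys Detector.sendData() posts
resp = requests.post(f"{MANAGER_URL}/webhook?id=1", json=payload)
print(resp.status_code, resp.text)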

View File

@@ -1,11 +0,0 @@
FROM python:3.9-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY GreenhouseManager/manager.py .
CMD ["python", "manager.py"]

View File

@@ -1,140 +0,0 @@
import os
from kafka import KafkaProducer, KafkaConsumer
import kafka
import socket
from json import dumps, loads
from flask import Flask, request
import time
from enum import Enum
import threading

app = Flask(__name__)


def start_manager():
    return


class Manager:
    def __init__(self, _id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed",
                 heater_state: str = "off"):
        KAFKA_URL = os.environ.get('KAFKA_URL')
        print("KAFKA_URL=", KAFKA_URL)
        self._id = _id
        self.moisture = moisture
        self.temp = temp
        self.isAutoOn = isAutoOn
        self.valve_state = valve_state
        self.heater_state = heater_state

        self.dataPublisher = KafkaProducer(
            bootstrap_servers=[KAFKA_URL],
            client_id=f'manager{self._id}_producer',
            value_serializer=lambda v: dumps(v).encode('utf-8')
        )
        self.controllerConsumer = KafkaConsumer(
            'commands',
            bootstrap_servers=[KAFKA_URL],
            auto_offset_reset='earliest',
            enable_auto_commit=True,
            consumer_timeout_ms=2000,
            group_id=f'manager{self._id}',
            value_deserializer=lambda x: loads(x.decode('utf-8'))
        )
        self.controllerConsumerResponse = KafkaProducer(
            bootstrap_servers=[KAFKA_URL],
            client_id=f'manager{self._id}_producer',
            value_serializer=lambda v: dumps(v).encode('utf-8')
        )

    def sendData(self):
        print("sending data...")
        message = {
            'id': self._id,
            'moisture': self.moisture,
            'temp': self.temp,
            'valveStatus': str(self.valve_state),
            'heaterStatus': str(self.heater_state),
            'isAutoOn': self.isAutoOn
        }
        print(message)
        self.dataPublisher.send('data', message)
        self.dataPublisher.flush()

    def toggle_device(self, device, request_id, greenhouse_id):
        if device == 'valve':
            if self.valve_state == 'closed':
                self.valve_state = 'open'
                print("Valve opened")
            else:
                self.valve_state = 'closed'
                print("Valve closed")
        elif device == 'heater':
            if self.heater_state == 'off':
                self.heater_state = 'on'
                print("Heater turned on")
            else:
                self.heater_state = 'off'
                print("Heater turned off")
        self.send_status(request_id, greenhouse_id)

    def send_status(self, request_id, greenhouse_id):
        status = {
            'request_id': request_id,
            'greenhouse_id': greenhouse_id,
            'valve_state': self.valve_state,
            'heater_state': self.heater_state
        }
        self.sendDataCommand(status)
        print("Updating info...\n")

    def sendDataCommand(self, message):
        print("sending data...")
        self.dataPublisher.send('response', message)

    def getCommand(self):
        messages = self.controllerConsumer.poll(timeout_ms=1000)
        # Check whether any messages were received
        for tp, msgs in messages.items():
            for message in msgs:
                print(f"Manager {self._id} received message: ")
                print(message.value)
                self.request_id = message.value['request_id']
                self.greenhouse_id = message.value['greenhouse_id']
                self.command = message.value['command']
                self.toggle_device(self.command, self.request_id, self.greenhouse_id)


@app.route(f'/webhook', methods=['POST'])
def webhook():
    print("received webhook", request.args.get('id'))
    for manager in managers:
        print()
        if int(request.args.get('id')) == manager._id and request.method == 'POST':
            print("Data received from Webhook is", request.json)
            body = request.json
            for key, value in body.items():
                setattr(manager, key, value)
            manager.sendData()
            return f"Webhook received for manager {manager._id}"
    return "Webhook ignored"


t1 = threading.Thread(target=start_manager)

manager1 = Manager(_id=1)
managers = [manager1]

if __name__ == "__main__":
    threading.Thread(target=lambda: app.run(host="0.0.0.0", port=20002, debug=True, use_reloader=False)).start()
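For completeness, a hedged sketch of a consumer tailing the 'data' topic that Manager.sendData() publishes to; kafka-ui already displays these messages, so this is only an illustration and not code from the repository:

# Sketch: tail the 'data' topic populated by Manager.sendData().
from json import loads
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    "data",
    bootstrap_servers=["kafka:29092"],
    auto_offset_reset="earliest",
    value_deserializer=lambda m: loads(m.decode("utf-8")),
)
for message in consumer:
    print(message.value)  # e.g. {'id': 1, 'moisture': ..., 'temp': ..., 'valveStatus': ..., ...}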

View File

@@ -1,3 +0,0 @@
kafka-python~=2.0.2
Flask~=3.0.3
requests~=2.31.0