fix: no clue, it seems to work, but for some reason it does not connect

mfnefd 2024-12-04 23:20:09 +04:00
parent 71bc31d0bd
commit aace337052
5 changed files with 100 additions and 85 deletions

GreenhouseDetector/Dockerfile

@@ -1,4 +1,4 @@
-FROM python:latest
+FROM python:3.9-slim
 WORKDIR /app

GreenhouseDetector (detector source file)

@@ -1,3 +1,4 @@
+import os
 import time
 import random as rnd
 from flask import Flask
@@ -5,10 +6,13 @@ from flask import Flask
 import requests
 import threading
 
 app = Flask(__name__)
 
 class Detector:
     def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower):
+        self.MANAGER_URL = os.environ.get('MANAGER_URL')
+        print("MANAGER_URL=", self.MANAGER_URL)
         self.id = id
         self.moistureThresholdUpper = moistureThresholdUpper
         self.moistureThresholdLower = moistureThresholdLower
@@ -24,7 +28,7 @@ class Detector:
     def sendData(self):
         data = {"moisture": self.moisture,
                 "temp": self.temp}
-        requests.post(f"http://127.0.0.1:20002/webhook?id={self.id}", json=data)
+        requests.post(f"{self.MANAGER_URL}/webhook?id={self.id}", json=data)
 
 detector1 = Detector(1, 0.6, 0.2, 40, 20)
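
Note on the MANAGER_URL change above: os.environ.get('MANAGER_URL') returns None when the variable is not set, so sendData() would post to the literal URL "None/webhook?id=1" and requests would fail before anything ever connects. A minimal fail-fast sketch under the same env-var wiring as this commit (the error message is illustrative, not part of the change):

    import os

    # Sketch: refuse to start without MANAGER_URL instead of letting the
    # f-string interpolate the literal string "None" into the webhook URL.
    MANAGER_URL = os.environ.get('MANAGER_URL')
    if not MANAGER_URL:
        raise RuntimeError("MANAGER_URL is not set; check the compose 'environment' block")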

GreenhouseManager/Dockerfile

@@ -1,4 +1,4 @@
-FROM python:latest
+FROM python:3.9-slim
 WORKDIR /app

GreenhouseManager (manager source file)

@@ -1,3 +1,4 @@
+import os
 from kafka import KafkaProducer, KafkaConsumer
 import kafka
 import socket
@@ -7,6 +8,7 @@ import time
 from enum import Enum
 import threading
 
 app = Flask(__name__)
 
 def start_manager():
@@ -15,6 +17,8 @@ def start_manager():
 class Manager:
     def __init__(self, _id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed",
                  heater_state: str = "off"):
+        KAFKA_URL = os.environ.get('KAFKA_URL')
+        print("KAFKA_URL=", KAFKA_URL)
         self._id = _id
         self.moisture = moisture
         self.temp = temp
@@ -23,14 +27,14 @@ class Manager:
         self.heater_state = heater_state
 
         self.dataPublisher = KafkaProducer(
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
             client_id=f'manager{self._id}_producer',
             value_serializer=lambda v: dumps(v).encode('utf-8')
         )
 
         self.controllerConsumer = KafkaConsumer(
             'commands',
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
             auto_offset_reset='earliest',
             enable_auto_commit=True,
             consumer_timeout_ms=2000,
@@ -38,7 +42,7 @@ class Manager:
             value_deserializer=lambda x: loads(x.decode('utf-8'))
         )
         self.controllerConsumerResponse = KafkaProducer(
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
             client_id=f'manager{self._id}_producer',
             value_serializer=lambda v: dumps(v).encode('utf-8')
         )
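
A similar caveat applies to the KAFKA_URL wiring above: besides the unset-variable case, kafka-python raises NoBrokersAvailable from the KafkaProducer constructor when the broker is not yet accepting connections, and compose depends_on only orders container startup; it does not wait for Kafka to be ready. A retry-loop sketch under those assumptions (the helper name and timings are illustrative, not part of the commit):

    import os
    import time
    from json import dumps

    from kafka import KafkaProducer
    from kafka.errors import NoBrokersAvailable

    def make_producer(client_id, retries=10, delay_s=3.0):
        # Sketch: fail fast on a missing KAFKA_URL, then retry while the
        # broker container is still starting up.
        kafka_url = os.environ.get('KAFKA_URL')
        if not kafka_url:
            raise RuntimeError("KAFKA_URL is not set")
        for _ in range(retries):
            try:
                return KafkaProducer(
                    bootstrap_servers=[kafka_url],
                    client_id=client_id,
                    value_serializer=lambda v: dumps(v).encode('utf-8'),
                )
            except NoBrokersAvailable:
                time.sleep(delay_s)  # broker not up yet; back off and retry
        raise RuntimeError(f"no Kafka broker reachable at {kafka_url}")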

docker-compose.yml

@@ -8,91 +8,98 @@ networks:
       gateway: "192.168.2.1"
 
 services:
   zookeeper:
     networks:
       - vpn
     image: confluentinc/cp-zookeeper:7.4.0
     environment:
       ZOOKEEPER_CLIENT_PORT: 2181
       ZOOKEEPER_TICK_TIME: 2000
     ports:
       - 2181:2181
   kafka:
     networks:
       vpn:
         ipv4_address: 192.168.2.10
     image: confluentinc/cp-kafka:7.4.0
     ports:
       - 9092:9092
       - 9997:9997
     expose:
       - 29092:29092
     environment:
       KAFKA_BROKER_ID: 1
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092
       KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092
       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT
       KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER
       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
       KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
       KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000
       KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
     depends_on:
       - zookeeper
   init-kafka:
     networks:
       - vpn
     image: confluentinc/cp-kafka:7.4.0
     depends_on:
       - kafka
     entrypoint: [ '/bin/sh', '-c' ]
     command: |
       "
       # blocks until kafka is reachable
       kafka-topics --bootstrap-server kafka:29092 --list
       echo -e 'Creating kafka topics'
       kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1
       kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1
       kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1
       echo -e 'Successfully created the following topics:'
       kafka-topics --bootstrap-server kafka:29092 --list
       "
   kafka-ui:
     networks:
       - vpn
     container_name: kafka-ui
     image: provectuslabs/kafka-ui:latest
     ports:
       - 8080:8080
     depends_on:
       - kafka
     environment:
       KAFKA_CLUSTERS_0_NAME: local
       KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
       KAFKA_CLUSTERS_0_METRICS_PORT: 9997
-#manager_py_service:
-#  container_name: manager_py
-#  build:
-#    context: .
-#    dockerfile: ./GreenhouseManager/Dockerfile
-#  depends_on:
-#    - kafka
+  manager:
+    networks:
+      - vpn
+    build:
+      context: .
+      dockerfile: ./GreenhouseManager/Dockerfile
+    environment:
+      KAFKA_URL: kafka:29092
+    depends_on:
+      - kafka
+    expose:
+      - 20002
+  detector:
+    networks:
+      - vpn
+    build:
+      context: .
+      dockerfile: ./GreenhouseDetector/Dockerfile
+    environment:
+      MANAGER_URL: http://manager:20002
+    depends_on:
+      - manager
 #detector_py_service:
 #  container_name: detector_py
 #  build:
 #    context: .
 #    dockerfile: ./GreenhouseDetector/Dockerfile
 #  depends_on:
 #    - kafka
 #  expose:
 #    - 20002
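
To sanity-check this wiring after the change, one possible sequence (assumes Docker Compose v2 and the service names defined above; the kafka-topics binary ships in the confluentinc/cp-kafka image, which the init-kafka service already relies on):

    docker compose up --build -d
    docker compose logs manager    # should print "KAFKA_URL= kafka:29092"
    docker compose logs detector   # should print "MANAGER_URL= http://manager:20002"
    docker compose exec kafka kafka-topics --bootstrap-server kafka:29092 --list

If the detector log shows "MANAGER_URL= None", the environment block did not reach the container, which would reproduce the "does not connect" symptom from the commit message.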