diff --git a/GreenhouseDetector/Dockerfile b/GreenhouseDetector/Dockerfile
index cb846ca..7be5c76 100644
--- a/GreenhouseDetector/Dockerfile
+++ b/GreenhouseDetector/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:latest
+FROM python:3.9-slim
 
 WORKDIR /app
 
diff --git a/GreenhouseDetector/detector.py b/GreenhouseDetector/detector.py
index d092373..4921dfe 100644
--- a/GreenhouseDetector/detector.py
+++ b/GreenhouseDetector/detector.py
@@ -1,3 +1,4 @@
+import os
 import time
 import random as rnd
 
@@ -5,10 +6,13 @@ from flask import Flask
 import requests
 import threading
+
 
 app = Flask(__name__)
 
 class Detector:
     def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower):
+        self.MANAGER_URL = os.environ.get('MANAGER_URL')
+        print("MANAGER_URL=", self.MANAGER_URL)
         self.id = id
         self.moistureThresholdUpper = moistureThresholdUpper
         self.moistureThresholdLower = moistureThresholdLower
@@ -24,7 +28,7 @@ class Detector:
 
     def sendData(self):
         data = {"moisture": self.moisture, "temp": self.temp}
-        requests.post(f"http://127.0.0.1:20002/webhook?id={self.id}", json=data)
+        requests.post(f"{self.MANAGER_URL}/webhook?id={self.id}", json=data)
 
 
 detector1 = Detector(1, 0.6, 0.2, 40, 20)
diff --git a/GreenhouseManager/Dockerfile b/GreenhouseManager/Dockerfile
index efc49af..ab31b66 100644
--- a/GreenhouseManager/Dockerfile
+++ b/GreenhouseManager/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:latest
+FROM python:3.9-slim
 
 WORKDIR /app
 
diff --git a/GreenhouseManager/manager.py b/GreenhouseManager/manager.py
index d9772d1..8366baf 100644
--- a/GreenhouseManager/manager.py
+++ b/GreenhouseManager/manager.py
@@ -1,3 +1,4 @@
+import os
 from kafka import KafkaProducer, KafkaConsumer
 import kafka
 import socket
@@ -7,6 +8,7 @@ import time
 from enum import Enum
 import threading
+
 
 app = Flask(__name__)
 
 def start_manager():
@@ -15,6 +17,8 @@ def start_manager():
 class Manager:
     def __init__(self, _id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False,
                  valve_state: str = "closed", heater_state: str = "off"):
+        KAFKA_URL = os.environ.get('KAFKA_URL')
+        print("KAFKA_URL=", KAFKA_URL)
         self._id = _id
         self.moisture = moisture
         self.temp = temp
@@ -23,14 +27,14 @@ class Manager:
         self.heater_state = heater_state
 
         self.dataPublisher = KafkaProducer(
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
             client_id=f'manager{self._id}_producer',
             value_serializer=lambda v: dumps(v).encode('utf-8')
         )
 
         self.controllerConsumer = KafkaConsumer(
             'commands',
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
             auto_offset_reset='earliest',
             enable_auto_commit=True,
             consumer_timeout_ms=2000,
@@ -38,7 +42,7 @@ class Manager:
             value_deserializer=lambda x: loads(x.decode('utf-8'))
         )
         self.controllerConsumerResponse = KafkaProducer(
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
             client_id=f'manager{self._id}_producer',
             value_serializer=lambda v: dumps(v).encode('utf-8')
         )
diff --git a/docker-compose.yml b/docker-compose.yml
index 665e246..75795b6 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -8,91 +8,98 @@ networks:
           gateway: "192.168.2.1"
 
 services:
-    zookeeper:
-        networks:
-            - vpn
-        image: confluentinc/cp-zookeeper:7.4.0
-        environment:
-            ZOOKEEPER_CLIENT_PORT: 2181
-            ZOOKEEPER_TICK_TIME: 2000
-        ports:
-            - 2181:2181
+  zookeeper:
+    networks:
+      - vpn
+    image: confluentinc/cp-zookeeper:7.4.0
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+    ports:
+      - 2181:2181
 
-    kafka:
-        networks:
-            vpn:
-                ipv4_address: 192.168.2.10
-        image: confluentinc/cp-kafka:7.4.0
-        ports:
-            - 9092:9092
-            - 9997:9997
+  kafka:
+    networks:
+      vpn:
+        ipv4_address: 192.168.2.10
+    image: confluentinc/cp-kafka:7.4.0
+    ports:
+      - 9092:9092
+      - 9997:9997
 
-        expose:
-            - 29092:29092
+    expose:
+      - 29092:29092
 
-        environment:
-            KAFKA_BROKER_ID: 1
-            KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-            KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092
-            KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092
-            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT
-            KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER
-            KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
-            KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
-            KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000
-            KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
-        depends_on:
-            - zookeeper
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092
+      KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT
+      KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+      KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000
+      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+    depends_on:
+      - zookeeper
 
-    init-kafka:
-        networks:
-            - vpn
-        image: confluentinc/cp-kafka:7.4.0
-        depends_on:
-            - kafka
-        entrypoint: [ '/bin/sh', '-c' ]
-        command: |
-            "
-            # blocks until kafka is reachable
-            kafka-topics --bootstrap-server kafka:29092 --list
+  init-kafka:
+    networks:
+      - vpn
+    image: confluentinc/cp-kafka:7.4.0
+    depends_on:
+      - kafka
+    entrypoint: [ '/bin/sh', '-c' ]
+    command: |
+      "
+      # blocks until kafka is reachable
+      kafka-topics --bootstrap-server kafka:29092 --list
 
-            echo -e 'Creating kafka topics'
-            kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1
-            kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1
-            kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1
+      echo -e 'Creating kafka topics'
+      kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1
+      kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1
+      kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1
 
-            echo -e 'Successfully created the following topics:'
-            kafka-topics --bootstrap-server kafka:29092 --list
-            "
+      echo -e 'Successfully created the following topics:'
+      kafka-topics --bootstrap-server kafka:29092 --list
+      "
 
-    kafka-ui:
-        networks:
-            - vpn
-        container_name: kafka-ui
-        image: provectuslabs/kafka-ui:latest
-        ports:
-            - 8080:8080
-        depends_on:
-            - kafka
-        environment:
-            KAFKA_CLUSTERS_0_NAME: local
-            KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
-            KAFKA_CLUSTERS_0_METRICS_PORT: 9997
+  kafka-ui:
+    networks:
+      - vpn
+    container_name: kafka-ui
+    image: provectuslabs/kafka-ui:latest
+    ports:
+      - 8080:8080
+    depends_on:
+      - kafka
+    environment:
+      KAFKA_CLUSTERS_0_NAME: local
+      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
+      KAFKA_CLUSTERS_0_METRICS_PORT: 9997
 
-    #manager_py_service:
-    #    container_name: manager_py
-    #    build:
-    #        context: .
-    #        dockerfile: ./GreenhouseManager/Dockerfile
-    #    depends_on:
-    #        - kafka
+  manager:
+    networks:
+      - vpn
+    build:
+      context: .
+      dockerfile: ./GreenhouseManager/Dockerfile
+    environment:
+      KAFKA_URL: kafka:29092
+    depends_on:
+      - kafka
+    expose:
+      - 20002
 
-    #detector_py_service:
-    #    container_name: detector_py
-    #    build:
-    #        context: .
-    #        dockerfile: ./GreenhouseDetector/Dockerfile
-    #    depends_on:
-    #        - kafka
-    #    expose:
-    #        - 20002
\ No newline at end of file
+  detector:
+    networks:
+      - vpn
+    build:
+      context: .
+      dockerfile: ./GreenhouseDetector/Dockerfile
+    environment:
+      MANAGER_URL: http://manager:20002
+    depends_on:
+      - manager
+    
\ No newline at end of file