greenhouseCRUD #7
GreenhouseDetector/Dockerfile (new file)
@@ -0,0 +1,11 @@
+FROM python:3.9-slim
+
+WORKDIR /app
+
+COPY requirements.txt .
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY GreenhouseDetector/detector.py .
+
+CMD ["python", "detector.py"]
GreenhouseDetector/detector.py
@@ -1,3 +1,4 @@
+import os
import time
import random as rnd

@@ -5,10 +6,13 @@ from flask import Flask
import requests
import threading


app = Flask(__name__)

class Detector:
    def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower):
+        self.MANAGER_URL = os.environ.get('MANAGER_URL')
+        print("MANAGER_URL=", self.MANAGER_URL)
        self.id = id
        self.moistureThresholdUpper = moistureThresholdUpper
        self.moistureThresholdLower = moistureThresholdLower

@@ -24,7 +28,7 @@ class Detector:
    def sendData(self):
        data = {"moisture": self.moisture,
                "temp": self.temp}
-        requests.post(f"http://127.0.0.1:20002/webhook?id={self.id}", json=data)
+        requests.post(f"{self.MANAGER_URL}/webhook?id={self.id}", json=data)

detector1 = Detector(1, 0.6, 0.2, 40, 20)
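Note: the detector now takes the manager's address from the MANAGER_URL environment variable (set to http://manager:20002 by the compose file) instead of the hard-coded http://127.0.0.1:20002, and sendData posts each reading to the manager's /webhook endpoint. A minimal sketch of that pattern follows; the fallback default, the timeout and the error handling are illustrative additions, not part of this diff.

import os
import requests

# Fallback is only for running outside compose; in compose MANAGER_URL=http://manager:20002.
MANAGER_URL = os.environ.get("MANAGER_URL", "http://127.0.0.1:20002")

def send_reading(detector_id: int, moisture: float, temp: float) -> None:
    # Mirrors Detector.sendData: one JSON reading per POST to the manager's webhook.
    payload = {"moisture": moisture, "temp": temp}
    try:
        resp = requests.post(f"{MANAGER_URL}/webhook?id={detector_id}", json=payload, timeout=5)
        resp.raise_for_status()
    except requests.RequestException as exc:
        # The manager container may not be ready yet; report and keep going.
        print(f"detector {detector_id}: failed to deliver reading: {exc}")

if __name__ == "__main__":
    send_reading(1, 0.45, 23.0)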
GreenhouseManager/Dockerfile (new file)
@@ -0,0 +1,11 @@
+FROM python:3.9-slim
+
+WORKDIR /app
+
+COPY requirements.txt .
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY GreenhouseManager/manager.py .
+
+CMD ["python", "manager.py"]
GreenhouseManager/manager.py
@@ -1,3 +1,4 @@
+import os
from kafka import KafkaProducer, KafkaConsumer
import kafka
import socket

@@ -7,6 +8,7 @@ import time
from enum import Enum
import threading


app = Flask(__name__)

def start_manager():

@@ -15,6 +17,8 @@ def start_manager():
class Manager:
    def __init__(self, _id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed",
                 heater_state: str = "off"):
+        KAFKA_URL = os.environ.get('KAFKA_URL')
+        print("KAFKA_URL=", KAFKA_URL)
        self._id = _id
        self.moisture = moisture
        self.temp = temp

@@ -23,14 +27,14 @@ class Manager:
        self.heater_state = heater_state

        self.dataPublisher = KafkaProducer(
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
            client_id=f'manager{self._id}_producer',
            value_serializer=lambda v: dumps(v).encode('utf-8')
        )

        self.controllerConsumer = KafkaConsumer(
            'commands',
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
            auto_offset_reset='earliest',
            enable_auto_commit=True,
            consumer_timeout_ms=2000,

@@ -38,7 +42,7 @@ class Manager:
            value_deserializer=lambda x: loads(x.decode('utf-8'))
        )
        self.controllerConsumerResponse = KafkaProducer(
-            bootstrap_servers=['localhost:9092'],
+            bootstrap_servers=[KAFKA_URL],
            client_id=f'manager{self._id}_producer',
            value_serializer=lambda v: dumps(v).encode('utf-8')
        )
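Note: all three kafka-python clients in Manager.__init__ now point at KAFKA_URL (kafka:29092 inside compose) instead of localhost:9092. A sketch of the same wiring, with a retry loop added because the broker container typically needs a few seconds before it accepts connections; the retry helper, the delays and the final send are illustrative assumptions, not code from this PR.

import os
import time
from json import dumps, loads

from kafka import KafkaProducer, KafkaConsumer
from kafka.errors import NoBrokersAvailable

KAFKA_URL = os.environ.get("KAFKA_URL", "localhost:9092")

def make_producer(client_id, retries=10, delay=3.0):
    # JSON-serializing producer; retry while the broker is still starting up.
    for attempt in range(retries):
        try:
            return KafkaProducer(
                bootstrap_servers=[KAFKA_URL],
                client_id=client_id,
                value_serializer=lambda v: dumps(v).encode("utf-8"),
            )
        except NoBrokersAvailable:
            print(f"Kafka not reachable at {KAFKA_URL}, retry {attempt + 1}/{retries}")
            time.sleep(delay)
    raise RuntimeError(f"could not reach Kafka at {KAFKA_URL}")

producer = make_producer("manager1_producer")
consumer = KafkaConsumer(
    "commands",
    bootstrap_servers=[KAFKA_URL],
    auto_offset_reset="earliest",
    enable_auto_commit=True,
    consumer_timeout_ms=2000,
    value_deserializer=lambda x: loads(x.decode("utf-8")),
)

producer.send("data", {"id": 1, "moisture": 0.4, "temp": 22.0})
producer.flush()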
docker-compose.yml
@@ -34,7 +34,7 @@ services:
    ports:
      - "5438:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
  redis:
    image: 'redis:latest'
    ports:
@@ -43,63 +43,63 @@ services:
      - 'cloud-redis:/data'
    healthcheck:
      test:
        - CMD
        - redis-cli
        - ping
      retries: 3
      timeout: 5s
  zookeeper:
    networks:
      - vpn
    image: confluentinc/cp-zookeeper:7.4.0
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
    ports:
      - 2181:2181
  kafka:
    networks:
      vpn:
        ipv4_address: 192.168.2.10
    image: confluentinc/cp-kafka:7.4.0
    ports:
      - 9092:9092
      - 9997:9997
    expose:
      - 29092:29092
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092
      KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
    depends_on:
      - zookeeper
  init-kafka:
    networks:
      - vpn
    image: confluentinc/cp-kafka:7.4.0
    depends_on:
      - kafka
    entrypoint: [ '/bin/sh', '-c' ]
    command: |
      "
      # blocks until kafka is reachable
      kafka-topics --bootstrap-server kafka:29092 --list

      echo -e 'Creating kafka topics'
      kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1
      kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1
      kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1

      echo -e 'Successfully created the following topics:'
      kafka-topics --bootstrap-server kafka:29092 --list
      "
  kafka-ui:
    networks:
      - vpn
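Note: the broker keeps two listeners: HOST on port 9092, advertised at the host-LAN address 192.168.1.5, and DOCKER on port 29092, advertised as kafka:29092 for containers on the vpn network, which is why the new services below use kafka:29092. The init-kafka one-shot container pre-creates the commands, data and response topics with the kafka-topics CLI. A rough Python equivalent of that bootstrap step, shown only as a sketch (the client id and the consumer used for listing are assumptions):

from kafka import KafkaConsumer
from kafka.admin import KafkaAdminClient, NewTopic

BOOTSTRAP = "kafka:29092"  # use the DOCKER listener from inside the compose network

# Check which topics already exist, mirroring --if-not-exists.
existing = KafkaConsumer(bootstrap_servers=BOOTSTRAP).topics()
wanted = [NewTopic(name=n, num_partitions=1, replication_factor=1)
          for n in ("commands", "data", "response") if n not in existing]

if wanted:
    admin = KafkaAdminClient(bootstrap_servers=BOOTSTRAP, client_id="init-topics")
    admin.create_topics(new_topics=wanted)

print(sorted(KafkaConsumer(bootstrap_servers=BOOTSTRAP).topics()))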
@@ -113,8 +113,31 @@ services:
      KAFKA_CLUSTERS_0_NAME: local
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
      KAFKA_CLUSTERS_0_METRICS_PORT: 9997
+  manager:
+    networks:
+      - vpn
+    build:
+      context: .
+      dockerfile: ./GreenhouseManager/Dockerfile
+    environment:
+      KAFKA_URL: kafka:29092
+    depends_on:
+      - kafka
+    expose:
+      - 20002
+  detector:
+    networks:
+      - vpn
+    build:
+      context: .
+      dockerfile: ./GreenhouseDetector/Dockerfile
+    environment:
+      MANAGER_URL: http://manager:20002
+    depends_on:
+      - manager

volumes:
  postgres_data:
    driver: local
  cloud-redis:
    driver: local
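Note: the new manager and detector services build from the repository root (context: .), which is what lets both Dockerfiles COPY requirements.txt plus their service code, and they find each other through compose DNS names: the detector gets MANAGER_URL=http://manager:20002 and the manager gets KAFKA_URL=kafka:29092. For the exposed port 20002 to be reachable from the detector container, the manager's Flask app has to listen on all interfaces rather than Flask's 127.0.0.1 default. A sketch of that startup, assuming start_manager runs the Kafka loop in a background thread (that part is not visible in this diff):

import threading
from flask import Flask

app = Flask(__name__)

def start_manager():
    ...  # Kafka consume/produce loop, as in manager.py (not shown in this diff)

if __name__ == "__main__":
    # Serve the webhook on all interfaces so manager:20002 is reachable from other containers.
    threading.Thread(target=start_manager, daemon=True).start()
    app.run(host="0.0.0.0", port=20002)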
requirements.txt (new file)
@@ -0,0 +1,3 @@
+kafka-python~=2.0.2
+Flask~=3.0.3
+requests~=2.31.0
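Note: the pins use compatible-release specifiers, so pip may install any kafka-python 2.0.x (>= 2.0.2), Flask 3.0.x (>= 3.0.3) and requests 2.31.x. A quick, optional check of what actually ended up in the image:

from importlib.metadata import version  # stdlib on Python 3.8+, so fine on python:3.9-slim

for pkg in ("kafka-python", "Flask", "requests"):
    print(pkg, version(pkg))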