Compare commits


No commits in common. "c34926e1ec11ce71c300427e3c4a6692051d621d" and "07c5ea7853fe67448e834a2fc98a0c41337cdd77" have entirely different histories.

3 changed files with 121 additions and 130 deletions

View File

@@ -1,12 +1,13 @@
from random import random
from turtledemo.penrose import start
from kafka import KafkaProducer, KafkaConsumer
import kafka
import socket
from json import dumps, loads
import time
import random as rnd
from flask import Flask
import requests
import threading
app = Flask(__name__)
class Detector:
def __init__(self, id, moistureThresholdUpper, moistureThresholdLower, tempThresholdUpper, tempThresholdLower):
self.id = id
@@ -17,21 +18,29 @@ class Detector:
self.moisture = 0
self.temp = 0
self.producer = KafkaProducer(
bootstrap_servers=['localhost:9092'],
client_id=f'detector{self.id}',
value_serializer=lambda v: dumps(v).encode('utf-8')
)
def sendData(self):
message = {'id' : self.id,
'moisture': self.moisture,
'temperature' : self.temp }
self.producer.send('dataDetectors', message)
def cycle(self):
self.moisture += rnd.random() / 100
self.temp += (rnd.random() - 0.5) / 100
def sendData(self):
data = {"moisture": self.moisture,
"temp": self.temp}
requests.post(f"http://127.0.0.1:20002/webhook?id={self.id}", json=data)
detector1 = Detector(1, 0.6, 0.2, 40, 20)
detector2 = Detector(2, 0.7, 0.3, 40, 20)
detector3 = Detector(3, 0.9, 0.6, 40, 20)
detectors = [detector1]
detectors = [detector1, detector2, detector3]
if __name__ =="__main__":
while True:
while True:
for detector in detectors:
detector.cycle()
detector.sendData()
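
Taken together, these hunks switch the detector's reporting path: instead of publishing readings to the dataDetectors Kafka topic, each detector POSTs its state as JSON to the manager's Flask webhook on port 20002, and the demo runs three detectors instead of one. A minimal sketch of that flow, assuming the same URL and payload keys as the diff (threshold arguments are omitted, and the timeout, error handling, and the sleep in the loop are additions for this sketch, not part of the repo):

import time
import random as rnd

import requests

class Detector:
    def __init__(self, id):
        # Simulated greenhouse readings, nudged a little on every cycle.
        self.id = id
        self.moisture = 0.0
        self.temp = 0.0

    def cycle(self):
        self.moisture += rnd.random() / 100
        self.temp += (rnd.random() - 0.5) / 100

    def sendData(self):
        data = {"moisture": self.moisture, "temp": self.temp}
        try:
            # The manager matches the detector by the id query parameter.
            requests.post(f"http://127.0.0.1:20002/webhook?id={self.id}",
                          json=data, timeout=5)
        except requests.RequestException as exc:
            print(f"detector {self.id}: manager unreachable: {exc}")

if __name__ == "__main__":
    detectors = [Detector(1), Detector(2), Detector(3)]
    while True:
        for detector in detectors:
            detector.cycle()
            detector.sendData()
        time.sleep(1)  # assumption: the loop in the diff itself runs without sleeping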

View File

@@ -2,20 +2,20 @@ from kafka import KafkaProducer, KafkaConsumer
import kafka
import socket
from json import dumps, loads
from flask import Flask, request
import time
from enum import Enum
import threading
from GreenhouseDetector.detector import Detector
app = Flask(__name__)
class Status(Enum):
UNKNOWN = -1
OFF = 0
ON = 1
def start_manager():
return
class Manager:
def __init__(self, _id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed",
def __init__(self, id: int, moisture: float = 0, temp: float = 20, isAutoOn: bool = False, valve_state: str = "closed",
heater_state: str = "off"):
self._id = _id
self.id = id
self.moisture = moisture
self.temp = temp
self.isAutoOn = isAutoOn
@@ -24,29 +24,49 @@ class Manager:
self.dataPublisher = KafkaProducer(
bootstrap_servers=['localhost:9092'],
client_id=f'manager{self._id}_producer',
client_id=f'manager{id}_producer',
value_serializer=lambda v: dumps(v).encode('utf-8')
)
# self.detectorConsumer = KafkaConsumer(
# 'dataDetectors',
# bootstrap_servers=['localhost:9092'],
# auto_offset_reset='earliest',
# enable_auto_commit=True,
# consumer_timeout_ms=1000,
#group_id=f'manager{id}',
# value_deserializer=lambda x: loads(x.decode('utf-8'))
#)
self.controllerConsumer = KafkaConsumer(
'commands',
bootstrap_servers=['localhost:9092'],
auto_offset_reset='earliest',
enable_auto_commit=True,
consumer_timeout_ms=2000,
group_id=f'manager{self._id}',
group_id=f'manager{id}',
value_deserializer=lambda x: loads(x.decode('utf-8'))
)
self.controllerConsumerResponse = KafkaProducer(
bootstrap_servers=['localhost:9092'],
client_id=f'manager{self._id}_producer',
client_id=f'manager{id}_producer',
value_serializer=lambda v: dumps(v).encode('utf-8')
)
def update(self):
for message in self.detectorConsumer:
print(f"Manager {self.id} received message: ")
print(message.value)
self.moisture = message.value['moisture']
self.temp = message.value['temperature']
print("Updating info...\n")
self.sendData()
def sendData(self):
print("sending data...")
message = {
'id': self._id,
'id': self.id,
'moisture': self.moisture,
'temp': self.temp,
'valveStatus': str(self.valve_state),
@@ -56,8 +76,6 @@ class Manager:
print(message)
self.dataPublisher.send('data', message)
self.dataPublisher.flush()
def toggle_device(self, device, request_id, greenhouse_id):
@@ -104,33 +122,23 @@ class Manager:
for tp, msgs in messages.items():
for message in msgs:
print(f"Manager {self._id} received message: ")
print(f"Manager {self.id} received message: ")
print(message.value)
self.request_id = message.value['request_id']
self.greenhouse_id = message.value['greenhouse_id']
self.command = message.value['command']
self.toggle_device(self.command, self.request_id, self.greenhouse_id)
@app.route(f'/webhook', methods=['POST'])
def webhook():
print("received webhook", request.args.get('id'))
for manager in managers:
print()
if int(request.args.get('id')) == manager._id and request.method == 'POST':
print("Data received from Webhook is", request.json)
body = request.json
for key, value in body.items():
setattr(manager, key, value)
manager.sendData()
manager1 = Manager(id=1)
return f"Webhook received for manager {manager._id}"
return "Webhook ignored"
t1 = threading.Thread(target=start_manager)
manager1 = Manager(_id=1)
managers = [manager1]
if __name__ == "__main__":
threading.Thread(target=lambda: app.run(host="0.0.0.0", port=20002, debug=True, use_reloader=False)).start()
while True:
time.sleep(5)
manager1.sendData()
for manager in managers:
manager.getCommand()
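
After this rewrite the manager has two inbound paths: detector readings arrive over HTTP at /webhook, where the matching manager's attributes are overwritten with setattr and immediately re-published to the data topic, while commands keep coming in through the commands Kafka topic. A compressed sketch of that split, assuming kafka-python and the same broker address as the diff; running Flask in a background thread and polling Kafka in the main loop mirrors the structure above, and poll() is just one reasonable way to drain the consumer:

import threading
import time
from json import dumps, loads

from flask import Flask, request
from kafka import KafkaConsumer, KafkaProducer

app = Flask(__name__)

class Manager:
    def __init__(self, id: int):
        self.id = id
        self.moisture = 0.0
        self.temp = 20.0
        self.dataPublisher = KafkaProducer(
            bootstrap_servers=['localhost:9092'],
            value_serializer=lambda v: dumps(v).encode('utf-8'))
        self.controllerConsumer = KafkaConsumer(
            'commands',
            bootstrap_servers=['localhost:9092'],
            group_id=f'manager{id}',
            value_deserializer=lambda x: loads(x.decode('utf-8')))

    def sendData(self):
        # Publish the current state to the data topic after every webhook update.
        self.dataPublisher.send('data', {'id': self.id,
                                         'moisture': self.moisture,
                                         'temp': self.temp})
        self.dataPublisher.flush()

    def getCommand(self):
        # poll() returns {TopicPartition: [records]}; the dict is empty when nothing is pending.
        for _tp, msgs in self.controllerConsumer.poll(timeout_ms=1000).items():
            for message in msgs:
                print(f"Manager {self.id} received command:", message.value)

managers = [Manager(id=1)]

@app.route('/webhook', methods=['POST'])
def webhook():
    # The detector identifies itself via ?id=...; copy the posted fields onto that manager.
    for manager in managers:
        if int(request.args.get('id')) == manager.id:
            for key, value in request.json.items():
                setattr(manager, key, value)
            manager.sendData()
            return f"Webhook received for manager {manager.id}"
    return "Webhook ignored"

if __name__ == "__main__":
    threading.Thread(target=lambda: app.run(host="0.0.0.0", port=20002,
                                            use_reloader=False),
                     daemon=True).start()
    while True:
        time.sleep(5)
        for manager in managers:
            manager.getCommand()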

View File

@@ -1,12 +1,3 @@
networks:
vpn:
name: kafkaVPN
driver: bridge
ipam:
config:
- subnet: "192.168.2.0/24"
gateway: "192.168.2.1"
services:
cloud:
build: ./Cloud/
@@ -39,19 +30,7 @@ services:
- ping
retries: 3
timeout: 5s
zookeeper:
networks:
- vpn
image: confluentinc/cp-zookeeper:7.4.0
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
ports:
- 2181:2181
kafka:
networks:
vpn:
ipv4_address: 192.168.2.10
image: confluentinc/cp-kafka:7.4.0
ports:
- 9092:9092
@@ -61,10 +40,8 @@ services:
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_LISTENERS: HOST://0.0.0.0:9092,DOCKER://0.0.0.0:29092
KAFKA_ADVERTISED_LISTENERS: HOST://192.168.1.5:9092,DOCKER://kafka:29092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,HOST:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 10000
@@ -72,8 +49,6 @@ services:
depends_on:
- zookeeper
init-kafka:
networks:
- vpn
image: confluentinc/cp-kafka:7.4.0
depends_on:
- kafka
@@ -86,14 +61,13 @@ services:
echo -e 'Creating kafka topics'
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic commands --replication-factor 1 --partitions 1
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic data --replication-factor 1 --partitions 1
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic dataDetectors --replication-factor 1 --partitions 1
kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic response --replication-factor 1 --partitions 1
echo -e 'Successfully created the following topics:'
kafka-topics --bootstrap-server kafka:29092 --list
"
kafka-ui:
networks:
- vpn
container_name: kafka-ui
image: provectuslabs/kafka-ui:latest
ports:
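
The PLAINTEXT/PLAINTEXT_HOST listener pair in this hunk is the standard dual-listener layout for a single-broker Compose stack: containers on the Compose network reach the broker at kafka:29092, while scripts running directly on the host (the detectors and managers above) use localhost:9092, which is what the broker advertises on PLAINTEXT_HOST. A small illustration of how a kafka-python client would pick its bootstrap address under that assumption; INSIDE_COMPOSE is a hypothetical switch used only for this sketch:

import os
from kafka import KafkaProducer

# Inside a Compose service:   bootstrap via the internal listener (kafka:29092).
# On the developer's machine: bootstrap via the advertised host listener (localhost:9092).
# INSIDE_COMPOSE is a made-up flag for this example, not something defined in the repo.
bootstrap = "kafka:29092" if os.getenv("INSIDE_COMPOSE") else "localhost:9092"

producer = KafkaProducer(bootstrap_servers=[bootstrap])
producer.send("data", b'{"probe": true}')  # default serializer expects bytes
producer.flush()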