mirror of
https://github.com/Rhinemann/IoT-Systems.git
synced 2026-03-14 20:50:39 +02:00
Compare commits
1 Commits
6b5831ff1b
...
fba2fdd241
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fba2fdd241 |
1423
MapView/data.csv
1423
MapView/data.csv
File diff suppressed because one or more lines are too long
@ -1,7 +0,0 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
@dataclass
class Accelerometer:
    """A single accelerometer sample with raw integer axis readings."""

    # Acceleration components along the three axes.
    x: int
    y: int
    z: int
|
||||
@ -1,50 +0,0 @@
|
||||
from csv import reader
|
||||
import config
|
||||
from domain.accelerometer import Accelerometer
|
||||
|
||||
|
||||
|
||||
class FileReader:
    """Cyclically reads accelerometer rows from a CSV file with an X,Y,Z header.

    Call startReading() before read(); the reader wraps around to the
    beginning of the file when it reaches the end.
    """

    def __init__(
        self, data_filename: str,
    ) -> None:
        """
        :param data_filename: path to the CSV data file.
        """
        self.file_path = data_filename
        # Created lazily in startReading(); kept as None so stopReading()
        # is safe to call even if reading was never started.
        self.file = None
        self.file_reader = None

    def read(self):
        """Return the next accelerometer sample (wraps around at EOF)."""
        return self.getNextValue()

    def startReading(self, *args, **kwargs):
        """Open the data file and locate the X/Y/Z column indices from the header."""
        self.file = open(self.file_path, newline='')
        self.file_reader = reader(self.file, skipinitialspace=True)
        file_header = next(self.file_reader)

        self.x_idx = file_header.index('X')
        self.y_idx = file_header.index('Y')
        self.z_idx = file_header.index('Z')

    def getNextValue(self):
        """Return the next valid row as an Accelerometer.

        Rows that cannot be parsed as integers are skipped.  At end of
        file the reader rewinds and continues from the first data row.
        Raises StopIteration if a full pass over the file produces no
        valid row (the original code looped forever on a header-only or
        fully malformed file).
        """
        rewound = False
        while True:
            row = next(self.file_reader, None)
            if row is None:
                if rewound:
                    # One complete pass yielded nothing usable — bail out
                    # instead of spinning forever.
                    raise StopIteration("no valid data rows in file")
                self._rewind_file()
                rewound = True
                continue
            try:
                x = int(row[self.x_idx])
                y = int(row[self.y_idx])
                z = int(row[self.z_idx])
                return Accelerometer(x=x, y=y, z=z)
            except (ValueError, IndexError):
                # Malformed or short row — skip it and try the next one.
                continue

    def _rewind_file(self):
        """Seek back to the start of the file and skip the header row."""
        self.file.seek(0)
        # Keep parsing options consistent with startReading(); the original
        # dropped skipinitialspace here, so rows parsed differently after
        # the first wrap-around.
        self.file_reader = reader(self.file, skipinitialspace=True)
        next(self.file_reader)

    def stopReading(self, *args, **kwargs):
        """Close the data file if it is open; safe to call repeatedly."""
        # getattr guard: the original raised AttributeError when stopReading
        # was called before startReading.
        if getattr(self, 'file', None):
            self.file.close()
            self.file = None
        self.file_reader = None
|
||||
|
||||
|
||||
@ -8,17 +8,9 @@ from datasource import Datasource
|
||||
|
||||
class MapViewApp(App):
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
|
||||
self.mapview = None
|
||||
self.datasource = Datasource(user_id=1)
|
||||
self.line_layer = None
|
||||
self.car_marker = None
|
||||
|
||||
super().__init__()
|
||||
# додати необхідні змінні
|
||||
self.bump_markers = []
|
||||
self.pothole_markers = []
|
||||
|
||||
def on_start(self):
|
||||
"""
|
||||
Встановлює необхідні маркери, викликає функцію для оновлення мапи
|
||||
@ -29,73 +21,19 @@ class MapViewApp(App):
|
||||
"""
|
||||
Викликається регулярно для оновлення мапи
|
||||
"""
|
||||
new_points = self.datasource.get_new_points()
|
||||
|
||||
if not new_points:
|
||||
return
|
||||
|
||||
for point in new_points:
|
||||
|
||||
lat, lon, road_state = point
|
||||
|
||||
# Оновлює лінію маршрута
|
||||
self.line_layer.add_point((lat, lon))
|
||||
|
||||
# Оновлює маркер машини
|
||||
self.update_car_marker((lat, lon))
|
||||
|
||||
# Перевіряємо стан дороги
|
||||
self.check_road_quality(point)
|
||||
|
||||
def check_road_quality(self, point):
    """
    Analyze a processed data point and place a marker on the map when
    the road state indicates a pothole or a speed bump.

    :param point: sequence of (lat, lon, road_state)
    """
    if len(point) < 3:
        return

    lat, lon, road_state = point

    # Dispatch on the reported road state; unknown states are ignored.
    marker_setters = {
        "pothole": self.set_pothole_marker,
        "bump": self.set_bump_marker,
    }
    setter = marker_setters.get(road_state)
    if setter is not None:
        setter((lat, lon))
|
||||
|
||||
def update_car_marker(self, point):
    """
    Move the car marker to a new GPS position, creating it on first use,
    and re-center the map on the car.

    :param point: GPS coordinates; first two items are (lat, lon)
    """
    lat, lon = point[0], point[1]

    # Bug fix: __init__ pre-sets self.car_marker = None, so the original
    # hasattr(self, 'car_marker') check was always True and the first
    # update crashed assigning .lat on None.  Test for None instead.
    if getattr(self, 'car_marker', None) is None:
        # NOTE(review): source has no file extension — confirm whether
        # './images/car.png' was intended.
        self.car_marker = MapMarker(lat=lat, lon=lon, source='./images/car')
        self.mapview.add_marker(self.car_marker)
    else:
        self.car_marker.lat = lat
        self.car_marker.lon = lon

    self.mapview.center_on(lat, lon)
|
||||
|
||||
def set_pothole_marker(self, point):
    """
    Place a pothole marker on the map and remember it.

    Fix: the docstring was a dead no-op string expression at the END of
    the method body; it now sits in the canonical position.

    :param point: GPS coordinates as a (lat, lon) tuple or a dict with
        "lat"/"lon" keys.
    """
    if isinstance(point, dict):
        lat = point.get("lat")
        lon = point.get("lon")
    else:
        lat, lon = point

    # Ignore incomplete coordinates rather than drawing a bogus marker.
    if lat is None or lon is None:
        return

    marker = MapMarker(
        lat=lat,
        lon=lon,
        source="images/pothole.png"
    )

    self.mapview.add_marker(marker)
    self.pothole_markers.append(marker)
|
||||
|
||||
def set_bump_marker(self, point):
|
||||
if isinstance(point, dict):
|
||||
@ -122,15 +60,7 @@ class MapViewApp(App):
|
||||
Ініціалізує мапу MapView(zoom, lat, lon)
|
||||
:return: мапу
|
||||
"""
|
||||
self.mapview = MapView(
|
||||
zoom=15,
|
||||
lat=50.4501,
|
||||
lon=30.5234
|
||||
)
|
||||
|
||||
self.line_layer = LineMapLayer()
|
||||
self.mapview.add_layer(self.line_layer)
|
||||
|
||||
self.mapview = MapView()
|
||||
return self.mapview
|
||||
|
||||
|
||||
|
||||
@ -14,30 +14,11 @@ class StoreApiAdapter(StoreGateway):
|
||||
self.api_base_url = api_base_url
|
||||
|
||||
def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]):
    """
    Save the processed road data to the Store API.

    Fix: the docstring and a leftover "# Implement it" comment were
    unreachable statements after the try/except (every path returns);
    they are now removed / moved to the canonical position.

    Parameters:
        processed_agent_data_batch: Processed road data to be saved.
    Returns:
        bool: True if the data is successfully saved, False otherwise.
    """
    if not processed_agent_data_batch:
        return False

    # All items in a batch belong to the same user — take the id from
    # the first element.
    user_id = processed_agent_data_batch[0].agent_data.user_id

    payload = {
        "data": [item.model_dump(mode='json') for item in processed_agent_data_batch],
        "user_id": user_id
    }

    try:
        # POST to the Store API with a 10-second timeout so a stalled
        # server cannot block the caller indefinitely.
        response = requests.post(
            f"{self.api_base_url}/processed_agent_data/",
            json=payload,
            timeout=10
        )
        if response.status_code == 200:
            logging.info(f"Batch of {len(processed_agent_data_batch)} items sent to Store.")
            return True
        logging.error(f"Store API error: {response.status_code} - {response.text}")
        return False
    except Exception as e:
        # A network failure must not crash the hub; report and signal failure.
        logging.error(f"Failed to send data to Store: {e}")
        return False
|
||||
|
||||
@ -93,7 +93,7 @@ services:
|
||||
MQTT_BROKER_HOST: "mqtt"
|
||||
MQTT_BROKER_PORT: 1883
|
||||
MQTT_TOPIC: "processed_data_topic"
|
||||
BATCH_SIZE: 20
|
||||
BATCH_SIZE: 1
|
||||
ports:
|
||||
- "9000:8000"
|
||||
networks:
|
||||
|
||||
14
hub/main.py
14
hub/main.py
@ -70,20 +70,18 @@ def on_message(client, userdata, msg):
|
||||
processed_agent_data = ProcessedAgentData.model_validate_json(
|
||||
payload, strict=True
|
||||
)
|
||||
|
||||
redis_client.lpush(
|
||||
"processed_agent_data", processed_agent_data.model_dump_json()
|
||||
)
|
||||
|
||||
if redis_client.llen("processed_agent_data") >= BATCH_SIZE:
|
||||
processed_agent_data_batch: List[ProcessedAgentData] = []
|
||||
if redis_client.llen("processed_agent_data") >= BATCH_SIZE:
|
||||
for _ in range(BATCH_SIZE):
|
||||
raw_data = redis_client.lpop("processed_agent_data")
|
||||
if raw_data:
|
||||
data_item = ProcessedAgentData.model_validate_json(raw_data)
|
||||
processed_agent_data_batch.append(data_item)
|
||||
|
||||
processed_agent_data = ProcessedAgentData.model_validate_json(
|
||||
redis_client.lpop("processed_agent_data")
|
||||
)
|
||||
processed_agent_data_batch.append(processed_agent_data)
|
||||
store_adapter.save_data(processed_agent_data_batch=processed_agent_data_batch)
|
||||
|
||||
return {"status": "ok"}
|
||||
except Exception as e:
|
||||
logging.info(f"Error processing MQTT message: {e}")
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user