Compare commits

..

25 Commits

Author SHA1 Message Date
8af68d6dd9 hotfix: index overflow on user_id 2026-03-13 20:47:09 +02:00
63aca15824 add multiuser rendering support 2026-03-13 20:41:16 +02:00
ee509f72a4 pull data in MapView/main.py from actual data source 2026-03-13 19:02:07 +02:00
da9fe69d4e add initial server->client update with all current DB data 2026-03-13 19:01:33 +02:00
1c856dca0e fix MapView/main.py crash due to wrong check condition 2026-03-13 18:58:28 +02:00
VladiusVostokus
17738d07fe Merge pull request #21 from Rhinemann/lab5/gryshaiev-SCRUM-90-set-bump-marker
SCRUM-90: implement set_bump_marker
2026-03-11 17:02:07 +00:00
VladiusVostokus
6b5831ff1b Merge branch 'dev' into lab5/gryshaiev-SCRUM-90-set-bump-marker 2026-03-11 17:01:54 +00:00
VladiusVostokus
54505db70e Merge pull request #23 from Rhinemann/lab5/gryshaiev-SCRUM-89-set-pothole-marker
SCRUM-89: implement set_pothole_marker()
2026-03-11 16:59:35 +00:00
SimonSanich
6f4b3b0ea6 SCRUM-90: implement set_bump_marker 2026-03-11 18:36:40 +02:00
SimonSanich
948a936a1f lab 5: implement set_bump_marker() 2026-03-11 18:10:18 +02:00
esk4nz
87facff668 Merge pull request #16 from Rhinemann/lab3/hrynko-SCRUM-77-post_to_storeAPI_from_hub
SCRUM-77 Post method from Hub to Store
2026-03-09 23:04:16 +02:00
VladiusVostokus
294ed5958e Merge pull request #13 from Rhinemann/lab5/kovalenko-SCRUM-98-FileReader
SCRUM-98 file reader
2026-03-08 15:48:13 +00:00
VladiusVostokus
cbdf81c028 Merge pull request #19 from Rhinemann/lab5/slobodeniuk-SCRUM-84-MapViewApp
Lab5/slobodeniuk scrum 84 map view app
2026-03-08 15:43:27 +00:00
esk4nz
24aeb1a19f changed Batch size to 20
reworked method "on_message"
implemented storeApiAdapter
2026-03-06 00:08:08 +02:00
ІМ-24 Владислав Коваленко
4a81434cb6 feat: handle empty rows 2026-03-03 17:05:53 +00:00
ІМ-24 Владислав Коваленко
a52da042ef refactor: ignore spaces in row 2026-03-03 15:17:25 +00:00
ІМ-24 Владислав Коваленко
11c590cf25 feat: start read file again if got to the end of file 2026-03-03 15:11:38 +00:00
ІМ-24 Владислав Коваленко
550d29c48c refactor: move accelerator dataclas to domain folder 2026-03-03 14:59:46 +00:00
ІМ-24 Владислав Коваленко
8a1327b10a fix: remove empty rows 2026-03-03 14:55:44 +00:00
ІМ-24 Владислав Коваленко
db1b7cc6fc feat: read row from file 2026-03-03 14:36:56 +00:00
ІМ-24 Владислав Коваленко
a899ef6a6e feat: get indexes of file header fields 2026-03-03 14:10:18 +00:00
ІМ-24 Владислав Коваленко
95176ea467 feat: close file 2026-03-03 14:04:27 +00:00
ІМ-24 Владислав Коваленко
081a2d4240 fix: file field name 2026-03-03 14:03:53 +00:00
ІМ-24 Владислав Коваленко
92c20ef612 feat: open file 2026-03-03 13:59:40 +00:00
ІМ-24 Владислав Коваленко
c31363aa57 feat: add empty methods to class 2026-03-03 13:46:26 +00:00
9 changed files with 1596 additions and 43 deletions

File diff suppressed because one or more lines are too long

View File

@@ -75,6 +75,7 @@ class Datasource:
processed_agent_data.latitude,
processed_agent_data.longitude,
processed_agent_data.road_state,
processed_agent_data.user_id
)
for processed_agent_data in processed_agent_data_list
]

View File

@@ -0,0 +1,7 @@
from dataclasses import dataclass


@dataclass
class Accelerometer:
    """A single accelerometer reading along the three spatial axes."""

    # Raw integer acceleration components as read from the data source.
    x: int
    y: int
    z: int

50
MapView/fileReader.py Normal file
View File

@@ -0,0 +1,50 @@
from csv import reader
import config
from domain.accelerometer import Accelerometer
class FileReader:
    """Reads accelerometer samples (X, Y, Z columns) from a CSV file.

    The file is looped: when the end is reached the reader rewinds past the
    header and continues, so ``read`` can be polled indefinitely as long as
    the file contains at least one valid data row.
    """

    def __init__(self, data_filename: str) -> None:
        """Remember the CSV path; the file is not opened until startReading()."""
        self.file_path = data_filename
        # Set by startReading(); kept as None so stopReading() is safe to
        # call even if reading was never started (original raised
        # AttributeError in that case).
        self.file = None
        self.file_reader = None

    def read(self):
        """Return the next Accelerometer sample (alias for getNextValue)."""
        return self.getNextValue()

    def startReading(self, *args, **kwargs):
        """Open the CSV file and locate the X/Y/Z columns from the header.

        Raises:
            OSError: if the file cannot be opened.
            ValueError: if the header lacks an 'X', 'Y' or 'Z' column.
        """
        self.file = open(self.file_path, newline='')
        self.file_reader = reader(self.file, skipinitialspace=True)
        file_header = next(self.file_reader)
        self.x_idx = file_header.index('X')
        self.y_idx = file_header.index('Y')
        self.z_idx = file_header.index('Z')

    def getNextValue(self):
        """Return the next valid Accelerometer row, rewinding at end of file.

        Rows that are too short or non-numeric are skipped. If a full pass
        over the file yields no valid row, StopIteration is raised instead
        of looping forever (the original spun in an infinite loop on empty
        or fully-invalid files).
        """
        rewound = False
        while True:
            row = next(self.file_reader, None)
            if row is None:
                if rewound:
                    # Second EOF in one call without producing a value:
                    # the file contains no usable data.
                    raise StopIteration(f"no valid rows in {self.file_path}")
                self._rewind_file()
                rewound = True
                continue
            try:
                x = int(row[self.x_idx])
                y = int(row[self.y_idx])
                z = int(row[self.z_idx])
            except (ValueError, IndexError):
                # Malformed row (missing column or non-integer value): skip.
                continue
            return Accelerometer(x=x, y=y, z=z)

    def _rewind_file(self):
        """Seek back to the start of the file and skip the header row."""
        self.file.seek(0)
        # Recreate the reader with the same options used in startReading();
        # the original omitted skipinitialspace here, silently changing
        # parsing behaviour after the first rewind.
        self.file_reader = reader(self.file, skipinitialspace=True)
        next(self.file_reader)

    def stopReading(self, *args, **kwargs):
        """Close the file if it is open; safe to call at any time, repeatedly."""
        if self.file:
            self.file.close()
        self.file = None
        self.file_reader = None

View File

@@ -5,6 +5,14 @@ from kivy.clock import Clock
from lineMapLayer import LineMapLayer
from datasource import Datasource
line_layer_colors = [
[1, 0, 0, 1],
[1, 0.5, 0, 1],
[0, 1, 0, 1],
[0, 1, 1, 1],
[0, 0, 1, 1],
[1, 0, 1, 1],
]
class MapViewApp(App):
def __init__(self, **kwargs):
@@ -12,14 +20,19 @@ class MapViewApp(App):
self.mapview = None
self.datasource = Datasource(user_id=1)
self.line_layer = None
self.car_marker = None
self.line_layers = dict()
self.car_markers = dict()
# додати необхідні змінні
self.bump_markers = []
self.pothole_markers = []
def on_start(self):
"""
Встановлює необхідні маркери, викликає функцію для оновлення мапи
"""
Clock.schedule_interval(self.update, 0.3)
self.update()
Clock.schedule_interval(self.update, 5)
def update(self, *args):
"""
@@ -32,13 +45,17 @@ class MapViewApp(App):
for point in new_points:
lat, lon, road_state = point
lat, lon, road_state, user_id = point
# Оновлює лінію маршрута
self.line_layer.add_point((lat, lon))
if user_id not in self.line_layers:
self.line_layers[user_id] = LineMapLayer(color = line_layer_colors[user_id % len(line_layer_colors)])
self.mapview.add_layer(self.line_layers[user_id])
self.line_layers[user_id].add_point((lat, lon))
# Оновлює маркер маниши
self.update_car_marker((lat, lon))
self.update_car_marker(lat, lon, user_id)
# Перевіряємо стан дороги
self.check_road_quality(point)
@@ -51,40 +68,65 @@ class MapViewApp(App):
if len(point) < 3:
return
lat, lon, road_state = point
lat, lon, road_state, user_id = point
if road_state == "pothole":
self.set_pothole_marker((lat, lon))
elif road_state == "bump":
self.set_bump_marker((lat, lon))
def update_car_marker(self, point):
def update_car_marker(self, lat, lon, user_id):
"""
Оновлює відображення маркера машини на мапі
:param point: GPS координати
"""
lat, lon = point[0], point[1]
if not hasattr(self, 'car_marker'):
self.car_marker = MapMarker(lat=lat, lon=lon, source='./images/car')
self.mapview.add_marker(self.car_marker)
if user_id not in self.car_markers:
self.car_markers[user_id] = MapMarker(lat=lat, lon=lon, source='./images/car.png')
self.mapview.add_marker(self.car_markers[user_id])
else:
self.car_marker.lat = lat
self.car_marker.lon = lon
self.car_markers[user_id].lat = lat
self.car_markers[user_id].lon = lon
self.mapview.center_on(lat, lon)
def set_pothole_marker(self, point):
    """Place a pothole marker on the map.

    :param point: GPS coordinates — either a (lat, lon) pair or a dict
        with "lat"/"lon" keys; the call is a no-op when either
        coordinate is missing (None).
    """
    if isinstance(point, dict):
        lat, lon = point.get("lat"), point.get("lon")
    else:
        lat, lon = point
    if lat is None or lon is None:
        return
    pothole = MapMarker(source="images/pothole.png", lat=lat, lon=lon)
    self.mapview.add_marker(pothole)
    # Keep a reference so the marker can be managed later.
    self.pothole_markers.append(pothole)
def set_bump_marker(self, point):
    """Place a speed-bump marker on the map.

    :param point: GPS coordinates — either a (lat, lon) pair or a dict
        with "lat"/"lon" keys; the call is a no-op when either
        coordinate is missing (None).
    """
    if isinstance(point, dict):
        lat, lon = point.get("lat"), point.get("lon")
    else:
        lat, lon = point
    if lat is None or lon is None:
        return
    bump = MapMarker(source="images/bump.png", lat=lat, lon=lon)
    self.mapview.add_marker(bump)
    # Keep a reference so the marker can be managed later.
    self.bump_markers.append(bump)
def build(self):
"""
@@ -97,9 +139,6 @@ class MapViewApp(App):
lon=30.5234
)
self.line_layer = LineMapLayer()
self.mapview.add_layer(self.line_layer)
return self.mapview

View File

@@ -14,11 +14,30 @@ class StoreApiAdapter(StoreGateway):
self.api_base_url = api_base_url
def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]):
    """
    Save a batch of processed road data to the Store API.

    Parameters:
        processed_agent_data_batch (List[ProcessedAgentData]):
            Processed road data to be saved; must be non-empty.

    Returns:
        bool: True if the data is successfully saved, False otherwise.
    """
    if not processed_agent_data_batch:
        # Nothing to send; treat an empty batch as a failed save.
        return False
    # The Store API expects one user_id per request; take it from the
    # first element (assumes all items in a batch share the same user —
    # TODO confirm against the batching logic in the MQTT consumer).
    user_id = processed_agent_data_batch[0].agent_data.user_id
    payload = {
        "data": [item.model_dump(mode='json') for item in processed_agent_data_batch],
        "user_id": user_id,
    }
    try:
        # POST to the Store API with a 10-second timeout so a hung
        # server cannot block the caller indefinitely.
        response = requests.post(
            f"{self.api_base_url}/processed_agent_data/",
            json=payload,
            timeout=10,
        )
    except Exception:
        # Network failure, timeout, etc. — log with traceback and
        # report failure instead of propagating.
        logging.exception("Failed to send data to Store")
        return False
    if response.status_code == 200:
        # Lazy %-style args avoid formatting cost when INFO is disabled.
        logging.info(
            "Batch of %d items sent to Store.", len(processed_agent_data_batch)
        )
        return True
    logging.error("Store API error: %s - %s", response.status_code, response.text)
    return False

View File

@@ -93,7 +93,7 @@ services:
MQTT_BROKER_HOST: "mqtt"
MQTT_BROKER_PORT: 1883
MQTT_TOPIC: "processed_data_topic"
BATCH_SIZE: 1
BATCH_SIZE: 20
ports:
- "9000:8000"
networks:

View File

@@ -70,18 +70,20 @@ def on_message(client, userdata, msg):
processed_agent_data = ProcessedAgentData.model_validate_json(
payload, strict=True
)
redis_client.lpush(
"processed_agent_data", processed_agent_data.model_dump_json()
)
processed_agent_data_batch: List[ProcessedAgentData] = []
if redis_client.llen("processed_agent_data") >= BATCH_SIZE:
processed_agent_data_batch: List[ProcessedAgentData] = []
for _ in range(BATCH_SIZE):
processed_agent_data = ProcessedAgentData.model_validate_json(
redis_client.lpop("processed_agent_data")
)
processed_agent_data_batch.append(processed_agent_data)
raw_data = redis_client.lpop("processed_agent_data")
if raw_data:
data_item = ProcessedAgentData.model_validate_json(raw_data)
processed_agent_data_batch.append(data_item)
store_adapter.save_data(processed_agent_data_batch=processed_agent_data_batch)
return {"status": "ok"}
except Exception as e:
logging.info(f"Error processing MQTT message: {e}")

View File

@@ -40,10 +40,24 @@ subscriptions: Dict[int, Set[WebSocket]] = {}
@app.websocket("/ws/{user_id}")
async def websocket_endpoint(websocket: WebSocket, user_id: int):
await websocket.accept()
if user_id not in subscriptions:
subscriptions[user_id] = set()
subscriptions[user_id].add(websocket)
try:
# send already available data
r = processed_agent_data.select()
stored_data = SessionLocal().execute(r).fetchall()
jsonable_data = [{c.name: getattr(i, c.name) for c in processed_agent_data.columns} for i in stored_data]
for i in jsonable_data:
i['timestamp'] = i['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
await websocket.send_json(json.dumps(jsonable_data))
# receive forever
while True:
await websocket.receive_text()
except WebSocketDisconnect: