Compare commits

...

41 Commits

Author SHA1 Message Date
Senya
a98223cbb0 feature: SCRUM-84 add on_start, update, build methods in MapViewMap 2026-03-08 13:32:23 +02:00
Slobodeniuk Sasha
0bd3eaa91d Merge pull request #18 from Rhinemann/dev
Scrum 87 - MapViewApp (check_road_quality, update_car_marker)
2026-03-08 13:08:59 +02:00
VladiusVostokus
9bdb98c19b Merge pull request #17 from Rhinemann/lab5/yushchenko-SCRUM-87-check-road-quality-method
SCRUM-87, 88: add methods update_car_marker and check_road_quality
2026-03-08 11:00:53 +00:00
AndriiJushchenko
c3b71daaeb add check_road_quality method 2026-03-07 17:12:23 +02:00
AndriiJushchenko
31c760f4a8 add method update_car_marker(self, point) 2026-03-07 16:55:20 +02:00
ІО-23 Shmuliar Oleh
ceffcfeac2 Merge pull request #14 from Rhinemann/dev
Commit repository configuration to main branch
2026-03-03 18:44:31 +02:00
VladiusVostokus
312177e087 Merge pull request #12 from Rhinemann/lab3/hrynko-SCRUM-56-repo_set_up
SCRUM-56 Repo setup
2026-03-03 15:24:51 +00:00
esk4nz
f96930a259 Setting up the repo with correct dockerfiles and docker-compose files, and deleting .idea from git 2026-03-02 23:21:36 +02:00
Andriy Yushchenko
87df394352 Merge pull request #11 from Rhinemann/lab1-slobodeniuk-feature-SCRUM-34
Lab1 slobodeniuk feature scrum 34
2026-03-01 19:57:40 +02:00
VladiusVostokus
10ad9774a7 Merge pull request #10 from Rhinemann/lab1
Lab1
2026-02-28 18:46:27 +00:00
VladiusVostokus
b730dbb74c Merge pull request #8 from Rhinemann/lab2
Lab2
2026-02-28 18:45:54 +00:00
VladiusVostokus
e4e585b9ac Merge pull request #9 from Rhinemann/feature/lab2-update-delete
Feature/lab2 update delete
2026-02-27 13:21:26 +00:00
anastasia-sl
185b0aae58 implemented update and delete endpoints 2026-02-26 14:55:51 +02:00
ІМ-24 Владислав Коваленко
af94c007a2 fixes in imports and schemas 2026-02-26 10:54:34 +00:00
Senya
f9ef916331 SCRUM-34: slobodeniuk parking signals 2026-02-26 12:11:07 +02:00
Slobodeniuk Sasha
3931fa58c1 Merge pull request #7 from Rhinemann/feature/slobodeniuk-lab2-SCRUM-48-GET
SCRUM-48: feature with CRUD (GET)
2026-02-26 11:22:14 +02:00
Senya
98fb6aa12a SCRUM-48: feature with CRUD (GET) 2026-02-26 11:20:47 +02:00
VladiusVostokus
7ddfb68b02 Merge pull request #5 from Rhinemann/lab1_huranets
FileReader
2026-02-26 08:04:10 +00:00
9473c5a621 Remove unnecessary rewind after file open 2026-02-25 21:08:54 +02:00
953b0bdb9a Remove unused _detect_header_and_buffer method and related fields 2026-02-25 20:56:43 +02:00
VladiusVostokus
ea9be3fb57 Merge pull request #6 from Rhinemann/lab2_yushchenko
CRUD операції (POST, websocket)
2026-02-25 17:10:36 +00:00
AndriiJushchenko
f3512e4afb Трохи пофіксив функцію post і провів тести post і websocket. 2026-02-25 19:05:25 +02:00
f58596ebf7 Refactor FileDatasource: remove unused header detection variables 2026-02-25 12:35:58 +02:00
d621390f51 Refactor file rewind logic to skip header row and remove unnecessary buffers 2026-02-25 12:11:05 +02:00
e4be6b0a19 Refactor file rewinding logic to skip header row after seek(0) 2026-02-25 12:00:39 +02:00
fe66df9b8c Refactor row reading logic for clarity and efficiency 2026-02-25 11:50:56 +02:00
AndriiJushchenko
69e679eccf SCRUM-[49, 54] Реалізувати POST та відправку по websocket під час виконання POST 2026-02-24 22:18:36 +02:00
3e0b4762ef Optimize CSV parsing by adding skipinitialspace=True to csv.reader and removing unnecessary strip() calls 2026-02-24 22:11:37 +02:00
75613fd4fc Restore input validation for accelerometer parsing 2026-02-24 20:47:00 +02:00
a25fbfc3ef Simplify accelerometer parsing and remove int16-specific leftovers 2026-02-24 20:14:56 +02:00
ca790e7306 Remove int16 binding from datasource 2026-02-24 19:55:50 +02:00
1643767094 Remove int16 range check per review 2026-02-24 15:41:41 +02:00
3d94bf3008 Parse accelerometer values as int16 (remove float conversion) 2026-02-24 15:12:49 +02:00
c5d98d53cd Add mosquitto runtime folders to gitignore 2026-02-24 14:25:27 +02:00
07a0e906d8 Fix timestamp field in AggregatedData 2026-02-24 14:21:41 +02:00
VladiusVostokus
9bf3741f32 Merge pull request #4 from Rhinemann/lab1_shmuliar
[SCRUM-40] [L1] Використання STM32 як акселерометра
2026-02-24 11:37:27 +00:00
c974ac32f6 Реалізовую базовий FileReader та переношу sleep до FileDatasource.read() 2026-02-23 22:01:11 +02:00
VladiusVostokus
184098b826 Merge pull request #2 from Rhinemann/lab2_shved
Lab2 shved
2026-02-23 17:28:11 +00:00
VladiusVostokus
b2c7427af0 Merge pull request #1 from Rhinemann/lab1_shved
updated compose file
2026-02-23 16:10:12 +00:00
1e7516fe7b update requirements 2026-02-22 12:19:33 +01:00
a63864bcaa updated compose file 2026-02-22 12:17:03 +01:00
29 changed files with 546 additions and 141 deletions

23
.dockerignore Normal file
View File

@@ -0,0 +1,23 @@
# IDEs
.idea/
.vscode/
.git/
.gitignore
.dockerignore
.DS_Store
Thumbs.db
# Python
**/__pycache__/
**/*.py[cod]
**/*.pyo
**/*.pyd
venv/
.env
# Logs & Database & Broker data
*.log
**/mosquitto/data/
**/mosquitto/log/
**/postgres_data/
**/pgadmin-data/

25
.gitignore vendored Normal file
View File

@@ -0,0 +1,25 @@
# IDEs
.idea/
.vscode/
*.swp
*.swo
# Python
venv/
__pycache__/
*.py[cod]
*$py.class
.env
# Logs
*.log
app.log
# Database & Broker data
**/mosquitto/data/
**/mosquitto/log/
**/postgres_data/
**/pgadmin-data/
# OS specific
.DS_Store

3
MapView/.gitignore vendored
View File

@@ -1,3 +0,0 @@
.idea
venv
__pycache__

View File

@@ -8,25 +8,71 @@ from datasource import Datasource
class MapViewApp(App): class MapViewApp(App):
def __init__(self, **kwargs): def __init__(self, **kwargs):
super().__init__() super().__init__(**kwargs)
# додати необхідні змінні
self.mapview = None
self.datasource = Datasource(user_id=1)
self.line_layer = None
self.car_marker = None
def on_start(self): def on_start(self):
""" """
Встановлює необхідні маркери, викликає функцію для оновлення мапи Встановлює необхідні маркери, викликає функцію для оновлення мапи
""" """
Clock.schedule_interval(self.update, 0.3)
def update(self, *args): def update(self, *args):
""" """
Викликається регулярно для оновлення мапи Викликається регулярно для оновлення мапи
""" """
new_points = self.datasource.get_new_points()
if not new_points:
return
for point in new_points:
lat, lon, road_state = point
# Оновлює лінію маршрута
self.line_layer.add_point((lat, lon))
# Оновлює маркер маниши
self.update_car_marker((lat, lon))
# Перевіряємо стан дороги
self.check_road_quality(point)
def check_road_quality(self, point):
"""
Аналізує дані акселерометра для подальшого визначення
та відображення ям та лежачих поліцейських
"""
if len(point) < 3:
return
lat, lon, road_state = point
if road_state == "pothole":
self.set_pothole_marker((lat, lon))
elif road_state == "bump":
self.set_bump_marker((lat, lon))
def update_car_marker(self, point): def update_car_marker(self, point):
""" """
Оновлює відображення маркера машини на мапі Оновлює відображення маркера машини на мапі
:param point: GPS координати :param point: GPS координати
""" """
lat, lon = point[0], point[1]
if not hasattr(self, 'car_marker'):
self.car_marker = MapMarker(lat=lat, lon=lon, source='./images/car')
self.mapview.add_marker(self.car_marker)
else:
self.car_marker.lat = lat
self.car_marker.lon = lon
self.mapview.center_on(lat, lon)
def set_pothole_marker(self, point): def set_pothole_marker(self, point):
""" """
@@ -45,7 +91,15 @@ class MapViewApp(App):
Ініціалізує мапу MapView(zoom, lat, lon) Ініціалізує мапу MapView(zoom, lat, lon)
:return: мапу :return: мапу
""" """
self.mapview = MapView() self.mapview = MapView(
zoom=15,
lat=50.4501,
lon=30.5234
)
self.line_layer = LineMapLayer()
self.mapview.add_layer(self.line_layer)
return self.mapview return self.mapview

2
agent/.gitignore vendored
View File

@@ -1,2 +0,0 @@
venv
__pycache__

View File

@@ -3,10 +3,10 @@ FROM python:latest
# set the working directory in the container # set the working directory in the container
WORKDIR /usr/agent WORKDIR /usr/agent
# copy the dependencies file to the working directory # copy the dependencies file to the working directory
COPY requirements.txt . COPY agent/requirements.txt .
# install dependencies # install dependencies
RUN pip install -r requirements.txt RUN pip install --no-cache-dir -r requirements.txt
# copy the content of the local src directory to the working directory # copy the content of the local src directory to the working directory
COPY src/ . COPY agent/src/ .
# command to run on container start # command to run on container start
CMD ["python", "main.py"] CMD ["python", "main.py"]

View File

@@ -16,7 +16,9 @@ services:
fake_agent: fake_agent:
container_name: agent container_name: agent
build: ../ build:
context: ../../
dockerfile: agent/Dockerfile
depends_on: depends_on:
- mqtt - mqtt
environment: environment:

0
agent/src/__init__.py Normal file
View File

View File

@@ -0,0 +1,22 @@
longitude,latitude,empty_count
50.450386085935094,30.524547100067142,10
50.450386085935094,30.524547100067142,11
50.450386085935094,30.524547100067142,13
50.450386085935094,30.524547100067142,15
50.450386085935094,30.524547100067142,7
50.450386085935094,30.524547100067142,9
50.450386085935094,30.524547100067142,4
50.450386085935094,30.524547100067142,0
50.450386085935094,30.524547100067142,0
50.450386085935094,30.524547100067142,3
50.450386085935094,30.524547100067142,4
50.450069433207545,30.52406822530458,16
50.450069433207545,30.52406822530458,20
50.450069433207545,30.52406822530458,25
50.450069433207545,30.52406822530458,30
50.450069433207545,30.52406822530458,29
50.450069433207545,30.52406822530458,12
50.450069433207545,30.52406822530458,10
50.450069433207545,30.52406822530458,14
50.450069433207545,30.52406822530458,3
50.450069433207545,30.52406822530458,2
1 longitude latitude empty_count
2 50.450386085935094 30.524547100067142 10
3 50.450386085935094 30.524547100067142 11
4 50.450386085935094 30.524547100067142 13
5 50.450386085935094 30.524547100067142 15
6 50.450386085935094 30.524547100067142 7
7 50.450386085935094 30.524547100067142 9
8 50.450386085935094 30.524547100067142 4
9 50.450386085935094 30.524547100067142 0
10 50.450386085935094 30.524547100067142 0
11 50.450386085935094 30.524547100067142 3
12 50.450386085935094 30.524547100067142 4
13 50.450069433207545 30.52406822530458 16
14 50.450069433207545 30.52406822530458 20
15 50.450069433207545 30.52406822530458 25
16 50.450069433207545 30.52406822530458 30
17 50.450069433207545 30.52406822530458 29
18 50.450069433207545 30.52406822530458 12
19 50.450069433207545 30.52406822530458 10
20 50.450069433207545 30.52406822530458 14
21 50.450069433207545 30.52406822530458 3
22 50.450069433207545 30.52406822530458 2

View File

@@ -1,13 +1,16 @@
from dataclasses import dataclass from dataclasses import dataclass
from datetime import datetime from datetime import datetime
from domain.accelerometer import Accelerometer from domain.accelerometer import Accelerometer
from domain.gps import Gps from domain.gps import Gps
from domain.parking import Parking
@dataclass @dataclass
class AggregatedData: class AggregatedData:
accelerometer: Accelerometer accelerometer: Accelerometer
gps: Gps gps: Gps
parking: Parking
timestamp: datetime timestamp: datetime
user_id: int user_id: int

View File

@@ -0,0 +1,9 @@
from dataclasses import dataclass
from domain.gps import Gps
@dataclass
class Parking:
empty_count: int
gps: Gps

View File

@@ -1,5 +1,10 @@
from csv import reader import csv
import time
from datetime import datetime from datetime import datetime
from pathlib import Path
from typing import Optional, List
from domain.parking import Parking
from domain.accelerometer import Accelerometer from domain.accelerometer import Accelerometer
from domain.gps import Gps from domain.gps import Gps
from domain.aggregated_data import AggregatedData from domain.aggregated_data import AggregatedData
@@ -7,24 +12,185 @@ import config
class FileDatasource: class FileDatasource:
def __init__( def __init__(
self, self,
accelerometer_filename: str, accelerometer_filename: str,
gps_filename: str, gps_filename: str,
park_filename: str,
) -> None: ) -> None:
pass
def read(self) -> AggregatedData: self.accelerometer_filename = accelerometer_filename
"""Метод повертає дані отримані з датчиків""" self.park_filename = park_filename
return AggregatedData( self.gps_filename = gps_filename
Accelerometer(1, 2, 3),
Gps(4, 5), self._park_f = None
datetime.now(), self._acc_f = None
config.USER_ID, self._gps_f = None
)
self._park_reader: Optional[csv.reader] = None
self._acc_reader: Optional[csv.reader] = None
self._gps_reader: Optional[csv.reader] = None
self._started = False
def startReading(self, *args, **kwargs): def startReading(self, *args, **kwargs):
"""Метод повинен викликатись перед початком читання даних""" """Must be called before read()"""
if self._started:
return
if not Path(self.accelerometer_filename).exists():
raise FileNotFoundError(f"Accelerometer file not found: {self.accelerometer_filename}")
if not Path(self.park_filename).exists():
raise FileNotFoundError(f"Accelerometer file not found: {self.park_filename}")
if not Path(self.gps_filename).exists():
raise FileNotFoundError(f"GPS file not found: {self.gps_filename}")
self._open_files()
self._started = True
def stopReading(self, *args, **kwargs): def stopReading(self, *args, **kwargs):
"""Метод повинен викликатись для закінчення читання даних""" """Must be called when finishing reading"""
self._close_files()
self._started = False
def read(self) -> AggregatedData:
"""Return one combined sample (acc + gps)."""
if not self._started:
raise RuntimeError("Datasource is not started. Call startReading() before read().")
acc_row = self._get_next_row(self._acc_reader, source="acc")
park_row = self._get_next_row(self._park_reader, source="park")
gps_row = self._get_next_row(self._gps_reader, source="gps")
acc = self._parse_acc(acc_row)
park = self._parse_park(park_row)
gps = self._parse_gps(gps_row)
# IMPORTANT: timing belongs to datasource (not MQTT / main.py)
if config.DELAY and config.DELAY > 0:
time.sleep(float(config.DELAY))
return AggregatedData(
accelerometer=acc,
gps=gps,
parking=park,
timestamp=datetime.utcnow(),
user_id=config.USER_ID,
)
# ---------------- internal ----------------
def _open_files(self) -> None:
self._close_files()
self._acc_f = open(self.accelerometer_filename, "r", newline="", encoding="utf-8")
self._park_f = open(self.park_filename, "r", newline="", encoding="utf-8")
self._gps_f = open(self.gps_filename, "r", newline="", encoding="utf-8")
self._acc_reader = csv.reader(self._acc_f, skipinitialspace=True)
self._park_reader = csv.reader(self._park_f, skipinitialspace=True)
self._gps_reader = csv.reader(self._gps_f, skipinitialspace=True)
# File pointer is already at 0 right after open(), so no need to rewind here.
# Skip header row once.
next(self._acc_reader, None)
next(self._park_reader, None)
next(self._gps_reader, None)
def _close_files(self) -> None:
for f in (self._acc_f, self._gps_f):
try:
if f is not None:
f.close()
except Exception:
pass
self._acc_f = None
self._park_f = None
self._gps_f = None
self._acc_reader = None
self._park_reader = None
self._gps_reader = None
def _rewind_acc(self) -> None:
if self._acc_f is None:
raise RuntimeError("Accelerometer file is not open.")
self._acc_f.seek(0)
self._acc_reader = csv.reader(self._acc_f, skipinitialspace=True)
next(self._acc_reader, None) # skip header row
def _rewind_gps(self) -> None:
if self._gps_f is None:
raise RuntimeError("GPS file is not open.")
self._gps_f.seek(0)
self._gps_reader = csv.reader(self._gps_f, skipinitialspace=True)
next(self._gps_reader, None) # skip header row
def _rewind_park(self) -> None:
if self._park_f is None:
raise RuntimeError("GPS file is not open.")
self._park_f.seek(0)
self._park_reader = csv.reader(self._park_f, skipinitialspace=True)
next(self._park_reader, None) # skip header row
def _get_next_row(self, reader, source: str) -> List[str]:
"""Get the next valid row from the reader."""
if reader is None:
raise RuntimeError("Reader is not initialized.")
while True:
row = next(reader, None)
if row is None:
# EOF -> rewind & continue
if source == "acc":
self._rewind_acc()
reader = self._acc_reader
elif source == 'park':
self._rewind_park()
reader = self._park_reader
else:
self._rewind_gps()
reader = self._gps_reader
continue
if not row or not any(cell.strip() for cell in row):
continue
return row
@staticmethod
def _parse_acc(row: List[str]) -> Accelerometer:
if len(row) < 3:
raise ValueError(f"Accelerometer row must have 3 values (x,y,z). Got: {row}")
try:
x = int(row[0])
y = int(row[1])
z = int(row[2])
except ValueError as e:
raise ValueError(f"Invalid accelerometer values (expected integers): {row}") from e
return Accelerometer(x=x, y=y, z=z)
@staticmethod
def _parse_gps(row: List[str]) -> Gps:
if len(row) < 2:
raise ValueError(f"GPS row must have 2 values (longitude,latitude). Got: {row}")
lon = float(row[0])
lat = float(row[1])
return Gps(longitude=lon, latitude=lat)
@staticmethod
def _parse_park(row: List[str]) -> Parking:
if len(row) < 2:
raise ValueError(f"GPS row must have 2 values (longitude,latitude). Got: {row}")
lon = float(row[0])
lat = float(row[1])
empty_count = int(row[2])
return Parking(
gps=Gps(longitude=lon, latitude=lat),
empty_count=empty_count
)

View File

@@ -1,6 +1,4 @@
from paho.mqtt import client as mqtt_client from paho.mqtt import client as mqtt_client
import json
import time
from schema.aggregated_data_schema import AggregatedDataSchema from schema.aggregated_data_schema import AggregatedDataSchema
from file_datasource import FileDatasource from file_datasource import FileDatasource
import config import config
@@ -24,19 +22,14 @@ def connect_mqtt(broker, port):
return client return client
def publish(client, topic, datasource, delay): def publish(client, topic, datasource):
datasource.startReading() datasource.startReading()
while True: while True:
time.sleep(delay)
data = datasource.read() data = datasource.read()
msg = AggregatedDataSchema().dumps(data) msg = AggregatedDataSchema().dumps(data)
result = client.publish(topic, msg) result = client.publish(topic, msg)
# result: [0, 1]
status = result[0] status = result[0]
if status == 0: if status != 0:
pass
# print(f"Send `{msg}` to topic `{topic}`")
else:
print(f"Failed to send message to topic {topic}") print(f"Failed to send message to topic {topic}")
@@ -44,9 +37,9 @@ def run():
# Prepare mqtt client # Prepare mqtt client
client = connect_mqtt(config.MQTT_BROKER_HOST, config.MQTT_BROKER_PORT) client = connect_mqtt(config.MQTT_BROKER_HOST, config.MQTT_BROKER_PORT)
# Prepare datasource # Prepare datasource
datasource = FileDatasource("data/data.csv", "data/gps_data.csv") datasource = FileDatasource("data/accelerometer.csv", "data/gps.csv", "data/parking.csv")
# Infinity publish data # Infinity publish data
publish(client, config.MQTT_TOPIC, datasource, config.DELAY) publish(client, config.MQTT_TOPIC, datasource)
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -1,10 +1,12 @@
from marshmallow import Schema, fields from marshmallow import Schema, fields
from schema.accelerometer_schema import AccelerometerSchema from schema.accelerometer_schema import AccelerometerSchema
from schema.gps_schema import GpsSchema from schema.gps_schema import GpsSchema
from schema.parking_schema import ParkingSchema
class AggregatedDataSchema(Schema): class AggregatedDataSchema(Schema):
accelerometer = fields.Nested(AccelerometerSchema) accelerometer = fields.Nested(AccelerometerSchema)
gps = fields.Nested(GpsSchema) gps = fields.Nested(GpsSchema)
parking = fields.Nested(ParkingSchema)
timestamp = fields.DateTime("iso") timestamp = fields.DateTime("iso")
user_id = fields.Int() user_id = fields.Int()

View File

@@ -0,0 +1,8 @@
from marshmallow import Schema, fields
from schema.gps_schema import GpsSchema
class ParkingSchema(Schema):
gps = fields.Nested(GpsSchema)
empty_count = fields.Int()

2
edge/.gitignore vendored
View File

@@ -1,2 +0,0 @@
venv
app.log

View File

@@ -3,9 +3,9 @@ FROM python:3.9-slim
# Set the working directory inside the container # Set the working directory inside the container
WORKDIR /app WORKDIR /app
# Copy the requirements.txt file and install dependencies # Copy the requirements.txt file and install dependencies
COPY requirements.txt . COPY edge/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt RUN pip install --no-cache-dir -r requirements.txt
# Copy the entire application into the container # Copy the entire application into the container
COPY . . COPY edge/. .
# Run the main.py script inside the container when it starts # Run the main.py script inside the container when it starts
CMD ["python", "main.py"] CMD ["python", "main.py"]

View File

@@ -17,7 +17,9 @@ services:
edge: edge:
container_name: edge container_name: edge
build: ../ build:
context: ../../
dockerfile: edge/Dockerfile
depends_on: depends_on:
- mqtt - mqtt
environment: environment:

2
hub/.gitignore vendored
View File

@@ -1,2 +0,0 @@
venv
__pycache__

View File

@@ -3,9 +3,10 @@ FROM python:3.9-slim
# Set the working directory inside the container # Set the working directory inside the container
WORKDIR /app WORKDIR /app
# Copy the requirements.txt file and install dependencies # Copy the requirements.txt file and install dependencies
COPY requirements.txt . COPY hub/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt RUN pip install --no-cache-dir -r requirements.txt
# Copy the entire application into the container # Copy the entire application into the container
COPY . . COPY hub/. .
# Run the main.py script inside the container when it starts # Run the main.py script inside the container when it starts
CMD ["uvicorn", "main:app", "--host", "0.0.0.0"] CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]

View File

@@ -1,4 +1,3 @@
version: "3.9"
name: "road_vision__hub" name: "road_vision__hub"
services: services:
mqtt: mqtt:
@@ -16,7 +15,7 @@ services:
postgres_db: postgres_db:
image: postgres:latest image: postgres:17
container_name: postgres_db container_name: postgres_db
restart: always restart: always
environment: environment:
@@ -49,7 +48,9 @@ services:
store: store:
container_name: store container_name: store
build: ../../store build:
context: ../../
dockerfile: store/Dockerfile
depends_on: depends_on:
- postgres_db - postgres_db
restart: always restart: always
@@ -77,7 +78,9 @@ services:
hub: hub:
container_name: hub container_name: hub
build: ../ build:
context: ../../
dockerfile: hub/Dockerfile
depends_on: depends_on:
- mqtt - mqtt
- redis - redis

3
store/.gitignore vendored
View File

@@ -1,3 +0,0 @@
venv
__pycache__
.idea

View File

@@ -3,9 +3,10 @@ FROM python:latest
# Set the working directory inside the container # Set the working directory inside the container
WORKDIR /app WORKDIR /app
# Copy the requirements.txt file and install dependencies # Copy the requirements.txt file and install dependencies
COPY requirements.txt . COPY store/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt RUN pip install --no-cache-dir -r requirements.txt
# Copy the entire application into the container # Copy the entire application into the container
COPY . . COPY store/. .
# Run the main.py script inside the container when it starts # Run the main.py script inside the container when it starts
CMD ["uvicorn", "main:app", "--host", "0.0.0.0"] CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]

0
store/__init__.py Normal file
View File

15
store/database.py Normal file
View File

@@ -0,0 +1,15 @@
from sqlalchemy import MetaData
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from config import POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_HOST, POSTGRES_PORT, POSTGRES_DB
DATABASE_URL = f"postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
engine = create_engine(DATABASE_URL)
Base = declarative_base()
metadata = MetaData()
SessionLocal = sessionmaker(bind=engine)

View File

@@ -1,8 +1,7 @@
version: "3.9"
name: "road_vision__database" name: "road_vision__database"
services: services:
postgres_db: postgres_db:
image: postgres:latest image: postgres:17
container_name: postgres_db container_name: postgres_db
restart: always restart: always
environment: environment:
@@ -35,7 +34,9 @@ services:
store: store:
container_name: store container_name: store
build: .. build:
context: ../../
dockerfile: store/Dockerfile
depends_on: depends_on:
- postgres_db - postgres_db
restart: always restart: always

View File

@@ -1,10 +1,8 @@
import asyncio
import json import json
from typing import Set, Dict, List, Any from typing import Set, Dict, List
from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Body from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Body
from fastapi.encoders import jsonable_encoder
from sqlalchemy import ( from sqlalchemy import (
create_engine,
MetaData,
Table, Table,
Column, Column,
Integer, Integer,
@@ -12,25 +10,14 @@ from sqlalchemy import (
Float, Float,
DateTime, DateTime,
) )
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import select from sqlalchemy.sql import select
from datetime import datetime
from pydantic import BaseModel, field_validator from database import metadata, SessionLocal
from config import ( from schemas import ProcessedAgentData, ProcessedAgentDataInDB
POSTGRES_HOST,
POSTGRES_PORT,
POSTGRES_DB,
POSTGRES_USER,
POSTGRES_PASSWORD,
)
# FastAPI app setup # FastAPI app setup
app = FastAPI() app = FastAPI()
# SQLAlchemy setup
DATABASE_URL = f"postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
engine = create_engine(DATABASE_URL)
metadata = MetaData()
# Define the ProcessedAgentData table
processed_agent_data = Table( processed_agent_data = Table(
"processed_agent_data", "processed_agent_data",
metadata, metadata,
@@ -44,57 +31,6 @@ processed_agent_data = Table(
Column("longitude", Float), Column("longitude", Float),
Column("timestamp", DateTime), Column("timestamp", DateTime),
) )
SessionLocal = sessionmaker(bind=engine)
# SQLAlchemy model
class ProcessedAgentDataInDB(BaseModel):
id: int
road_state: str
user_id: int
x: float
y: float
z: float
latitude: float
longitude: float
timestamp: datetime
# FastAPI models
class AccelerometerData(BaseModel):
x: float
y: float
z: float
class GpsData(BaseModel):
latitude: float
longitude: float
class AgentData(BaseModel):
user_id: int
accelerometer: AccelerometerData
gps: GpsData
timestamp: datetime
@classmethod
@field_validator("timestamp", mode="before")
def check_timestamp(cls, value):
if isinstance(value, datetime):
return value
try:
return datetime.fromisoformat(value)
except (TypeError, ValueError):
raise ValueError(
"Invalid timestamp format. Expected ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)."
)
class ProcessedAgentData(BaseModel):
road_state: str
agent_data: AgentData
# WebSocket subscriptions # WebSocket subscriptions
subscriptions: Dict[int, Set[WebSocket]] = {} subscriptions: Dict[int, Set[WebSocket]] = {}
@@ -125,10 +61,36 @@ async def send_data_to_subscribers(user_id: int, data):
@app.post("/processed_agent_data/") @app.post("/processed_agent_data/")
async def create_processed_agent_data(data: List[ProcessedAgentData]): async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
# Insert data to database session = SessionLocal()
# Send data to subscribers try:
pass created_data = [
{
"road_state": item.road_state,
"user_id": user_id,
"x": item.agent_data.accelerometer.x,
"y": item.agent_data.accelerometer.y,
"z": item.agent_data.accelerometer.z,
"latitude": item.agent_data.gps.latitude,
"longitude": item.agent_data.gps.longitude,
"timestamp": item.agent_data.timestamp,
}
for item in data
]
stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data)
result = session.execute(stmt)
created_records = [dict(row._mapping) for row in result.fetchall()]
session.commit()
for record in created_records:
await send_data_to_subscribers(user_id, jsonable_encoder(record))
return created_records
except Exception as err:
session.rollback()
print(f"Database error: {err}")
raise HTTPException(status_code=500, detail="Internal Server Error")
finally:
session.close()
@app.get( @app.get(
@@ -136,14 +98,34 @@ async def create_processed_agent_data(data: List[ProcessedAgentData]):
response_model=ProcessedAgentDataInDB, response_model=ProcessedAgentDataInDB,
) )
def read_processed_agent_data(processed_agent_data_id: int): def read_processed_agent_data(processed_agent_data_id: int):
# Get data by id session = SessionLocal()
pass try:
stmt = select(processed_agent_data).where(
processed_agent_data.c.id == processed_agent_data_id
)
res = session.execute(stmt).fetchone()
if not res:
raise HTTPException(status_code=404, detail="Not found")
return dict(res._mapping)
finally:
session.close()
@app.get("/processed_agent_data/", response_model=list[ProcessedAgentDataInDB]) @app.get("/processed_agent_data/", response_model=list[ProcessedAgentDataInDB])
def list_processed_agent_data(): def list_processed_agent_data():
# Get list of data session = SessionLocal()
pass try:
stmt = select(processed_agent_data)
res = session.execute(stmt).fetchall()
if not res:
raise HTTPException(status_code=404, detail="Not found")
return [dict(r._mapping) for r in res]
finally:
session.close()
@app.put( @app.put(
@@ -152,7 +134,41 @@ def list_processed_agent_data():
) )
def update_processed_agent_data(processed_agent_data_id: int, data: ProcessedAgentData): def update_processed_agent_data(processed_agent_data_id: int, data: ProcessedAgentData):
# Update data # Update data
pass session = SessionLocal()
try:
query = select(processed_agent_data).where(
processed_agent_data.c.id == processed_agent_data_id
)
result = session.execute(query).fetchone()
if not result:
raise HTTPException(status_code=404, detail="Data not found")
update_query = (
processed_agent_data.update()
.where(processed_agent_data.c.id == processed_agent_data_id)
.values(
road_state=data.road_state,
user_id=data.agent_data.user_id,
x=data.agent_data.accelerometer.x,
y=data.agent_data.accelerometer.y,
z=data.agent_data.accelerometer.z,
latitude=data.agent_data.gps.latitude,
longitude=data.agent_data.gps.longitude,
timestamp=data.agent_data.timestamp,
)
)
session.execute(update_query)
session.commit()
updated_result = session.execute(query).fetchone()
return ProcessedAgentDataInDB(**updated_result._mapping)
finally:
session.close()
@app.delete( @app.delete(
@@ -161,8 +177,28 @@ def update_processed_agent_data(processed_agent_data_id: int, data: ProcessedAge
) )
def delete_processed_agent_data(processed_agent_data_id: int): def delete_processed_agent_data(processed_agent_data_id: int):
# Delete by id # Delete by id
pass session = SessionLocal()
try:
query = select(processed_agent_data).where(
processed_agent_data.c.id == processed_agent_data_id
)
result = session.execute(query).fetchone()
if not result:
raise HTTPException(status_code=404, detail="Data not found")
delete_query = processed_agent_data.delete().where(
processed_agent_data.c.id == processed_agent_data_id
)
session.execute(delete_query)
session.commit()
return ProcessedAgentDataInDB(**result._mapping)
finally:
session.close()
if __name__ == "__main__": if __name__ == "__main__":
import uvicorn import uvicorn

Binary file not shown.

51
store/schemas.py Normal file
View File

@@ -0,0 +1,51 @@
from datetime import datetime
from pydantic import BaseModel, field_validator
class ProcessedAgentDataInDB(BaseModel):
id: int
road_state: str
user_id: int
x: float
y: float
z: float
latitude: float
longitude: float
timestamp: datetime
# FastAPI models
class AccelerometerData(BaseModel):
x: float
y: float
z: float
class GpsData(BaseModel):
latitude: float
longitude: float
class AgentData(BaseModel):
user_id: int
accelerometer: AccelerometerData
gps: GpsData
timestamp: datetime
@classmethod
@field_validator("timestamp", mode="before")
def check_timestamp(cls, value):
if isinstance(value, datetime):
return value
try:
return datetime.fromisoformat(value)
except (TypeError, ValueError):
raise ValueError(
"Invalid timestamp format. Expected ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)."
)
class ProcessedAgentData(BaseModel):
road_state: str
agent_data: AgentData