Compare commits
37 Commits
340706c7e5 ... 312177e087
| SHA1 |
|---|
| 312177e087 |
| f96930a259 |
| 87df394352 |
| 10ad9774a7 |
| b730dbb74c |
| e4e585b9ac |
| 185b0aae58 |
| af94c007a2 |
| f9ef916331 |
| 3931fa58c1 |
| 98fb6aa12a |
| 7ddfb68b02 |
| 9473c5a621 |
| 953b0bdb9a |
| ea9be3fb57 |
| f3512e4afb |
| f58596ebf7 |
| d621390f51 |
| e4be6b0a19 |
| fe66df9b8c |
| 69e679eccf |
| 3e0b4762ef |
| 75613fd4fc |
| a25fbfc3ef |
| ca790e7306 |
| 1643767094 |
| 3d94bf3008 |
| c5d98d53cd |
| 07a0e906d8 |
| 9bf3741f32 |
| 092130dfab |
| 35af3bbabb |
| c974ac32f6 |
| 184098b826 |
| f34258f93c |
| 1e7516fe7b |
| a63864bcaa |
.dockerignore (new file, 23 lines)

```diff
@@ -0,0 +1,23 @@
+# IDEs
+.idea/
+.vscode/
+.git/
+.gitignore
+.dockerignore
+.DS_Store
+Thumbs.db
+
+# Python
+**/__pycache__/
+**/*.py[cod]
+**/*.pyo
+**/*.pyd
+venv/
+.env
+
+# Logs & Database & Broker data
+*.log
+**/mosquitto/data/
+**/mosquitto/log/
+**/postgres_data/
+**/pgadmin-data/
```
Deleted Gitea Actions workflow (29 lines):

```diff
@@ -1,29 +0,0 @@
-name: Test Agent
-on: [push, workflow_dispatch]
-
-concurrency:
-  cancel-in-progress: false
-
-jobs:
-  test-agent-run:
-    runs-on: arch-x86_64
-    steps:
-      - name: Fetch the repository
-        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}
-
-      - name: Build containers
-        run: docker-compose -f docker-compose-test.yaml build
-        working-directory: sem8-iot-test/agent/docker
-
-      - name: Start MQTT broker
-        run: docker-compose -f docker-compose-test.yaml up -d mqtt
-        working-directory: sem8-iot-test/agent/docker
-
-      - name: Start agent
-        run: docker-compose -f docker-compose-test.yaml run fake_agent
-        working-directory: sem8-iot-test/agent/docker
-
-      - name: Clean up
-        if: always()
-        run: docker-compose -f docker-compose-test.yaml down
-        working-directory: sem8-iot-test/agent/docker
```
.gitignore (vendored)

```diff
@@ -1,2 +1,25 @@
-agent/docker/mosquitto/data/
-agent/docker/mosquitto/log/
+# IDEs
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Python
+venv/
+__pycache__/
+*.py[cod]
+*$py.class
+.env
+
+# Logs
+*.log
+app.log
+
+# Database & Broker data
+**/mosquitto/data/
+**/mosquitto/log/
+**/postgres_data/
+**/pgadmin-data/
+
+# OS specific
+.DS_Store
```
MapView/.gitignore (vendored, deleted)

```diff
@@ -1,3 +0,0 @@
-.idea
-venv
-__pycache__
```
agent/.gitignore (vendored, deleted)

```diff
@@ -1,2 +0,0 @@
-venv
-__pycache__
```
agent/Dockerfile

```diff
@@ -3,10 +3,10 @@ FROM python:latest
 # set the working directory in the container
 WORKDIR /usr/agent
 # copy the dependencies file to the working directory
-COPY requirements.txt .
+COPY agent/requirements.txt .
 # install dependencies
-RUN pip install -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
 # copy the content of the local src directory to the working directory
-COPY src/ .
+COPY agent/src/ .
 # command to run on container start
 CMD ["python", "main.py"]
```
Deleted docker-compose file (34 lines):

```diff
@@ -1,34 +0,0 @@
-version: "3.3"
-#name: "road_vision"
-services:
-  mqtt:
-    image: eclipse-mosquitto
-    container_name: mqtt
-    volumes:
-      - ./mosquitto:/mosquitto
-      - ./mosquitto/data:/mosquitto/data
-      - ./mosquitto/log:/mosquitto/log
-    ports:
-      - 1883:1883
-      - 9001:9001
-    networks:
-      mqtt_network:
-
-
-  fake_agent:
-    container_name: agent
-    build: ../
-    depends_on:
-      - mqtt
-    environment:
-      MQTT_BROKER_HOST: "mqtt"
-      MQTT_BROKER_PORT: 1883
-      MQTT_TOPIC: "agent_data_topic"
-      DELAY: 0.1
-      MAX_SENDS: 30
-    networks:
-      mqtt_network:
-
-
-networks:
-  mqtt_network:
```
Agent docker-compose:

```diff
@@ -1,5 +1,4 @@
-version: "3.3"
-#name: "road_vision"
+name: "road_vision"
 services:
   mqtt:
     image: eclipse-mosquitto
@@ -17,7 +16,9 @@ services:
 
   fake_agent:
     container_name: agent
-    build: ../
+    build:
+      context: ../../
+      dockerfile: agent/Dockerfile
     depends_on:
       - mqtt
     environment:
```
agent/src/__init__.py (new empty file)
agent/src/config.py

```diff
@@ -16,6 +16,3 @@ MQTT_TOPIC = os.environ.get("MQTT_TOPIC") or "agent"
 
 # Delay for sending data to mqtt in seconds
 DELAY = try_parse(float, os.environ.get("DELAY")) or 1
-
-# Testing switches for CI/CD
-MAX_SENDS = try_parse(int, os.environ.get("MAX_SENDS"))
```
agent/src/data/parking.csv (new file, 22 lines)

```diff
@@ -0,0 +1,22 @@
+longitude,latitude,empty_count
+50.450386085935094,30.524547100067142,10
+50.450386085935094,30.524547100067142,11
+50.450386085935094,30.524547100067142,13
+50.450386085935094,30.524547100067142,15
+50.450386085935094,30.524547100067142,7
+50.450386085935094,30.524547100067142,9
+50.450386085935094,30.524547100067142,4
+50.450386085935094,30.524547100067142,0
+50.450386085935094,30.524547100067142,0
+50.450386085935094,30.524547100067142,3
+50.450386085935094,30.524547100067142,4
+50.450069433207545,30.52406822530458,16
+50.450069433207545,30.52406822530458,20
+50.450069433207545,30.52406822530458,25
+50.450069433207545,30.52406822530458,30
+50.450069433207545,30.52406822530458,29
+50.450069433207545,30.52406822530458,12
+50.450069433207545,30.52406822530458,10
+50.450069433207545,30.52406822530458,14
+50.450069433207545,30.52406822530458,3
+50.450069433207545,30.52406822530458,2
```
agent/src/domain/aggregated_data.py

```diff
@@ -1,13 +1,16 @@
 from dataclasses import dataclass
 
 from datetime import datetime
+
 from domain.accelerometer import Accelerometer
 from domain.gps import Gps
+from domain.parking import Parking
 
 
 @dataclass
 class AggregatedData:
     accelerometer: Accelerometer
     gps: Gps
+    parking: Parking
     timestamp: datetime
     user_id: int
```
agent/src/domain/parking.py (new file, 9 lines)

```diff
@@ -0,0 +1,9 @@
+from dataclasses import dataclass
+
+from domain.gps import Gps
+
+
+@dataclass
+class Parking:
+    empty_count: int
+    gps: Gps
```
agent/src/file_datasource.py (a few copy-paste slips in the added error messages are corrected here: the parking checks referred to "Accelerometer"/"GPS" and `_parse_park` required only 2 values while reading `row[2]`)

```diff
@@ -4,6 +4,7 @@ from datetime import datetime
 from pathlib import Path
 from typing import Optional, List
 
+from domain.parking import Parking
 from domain.accelerometer import Accelerometer
 from domain.gps import Gps
 from domain.aggregated_data import AggregatedData
@@ -12,12 +13,22 @@ import config
 
 class FileDatasource:
 
-    def __init__(self, accelerometer_filename: str, gps_filename: str) -> None:
+    def __init__(
+        self,
+        accelerometer_filename: str,
+        gps_filename: str,
+        park_filename: str,
+    ) -> None:
 
         self.accelerometer_filename = accelerometer_filename
+        self.park_filename = park_filename
         self.gps_filename = gps_filename
 
+        self._park_f = None
         self._acc_f = None
         self._gps_f = None
 
+        self._park_reader: Optional[csv.reader] = None
         self._acc_reader: Optional[csv.reader] = None
         self._gps_reader: Optional[csv.reader] = None
 
@@ -30,6 +41,8 @@ class FileDatasource:
 
         if not Path(self.accelerometer_filename).exists():
             raise FileNotFoundError(f"Accelerometer file not found: {self.accelerometer_filename}")
+        if not Path(self.park_filename).exists():
+            raise FileNotFoundError(f"Parking file not found: {self.park_filename}")
         if not Path(self.gps_filename).exists():
             raise FileNotFoundError(f"GPS file not found: {self.gps_filename}")
 
@@ -47,9 +60,11 @@ class FileDatasource:
             raise RuntimeError("Datasource is not started. Call startReading() before read().")
 
         acc_row = self._get_next_row(self._acc_reader, source="acc")
+        park_row = self._get_next_row(self._park_reader, source="park")
         gps_row = self._get_next_row(self._gps_reader, source="gps")
 
         acc = self._parse_acc(acc_row)
+        park = self._parse_park(park_row)
         gps = self._parse_gps(gps_row)
 
         # IMPORTANT: timing belongs to datasource (not MQTT / main.py)
@@ -59,6 +74,7 @@ class FileDatasource:
         return AggregatedData(
             accelerometer=acc,
             gps=gps,
+            parking=park,
             timestamp=datetime.utcnow(),
             user_id=config.USER_ID,
         )
@@ -69,14 +85,17 @@ class FileDatasource:
         self._close_files()
 
         self._acc_f = open(self.accelerometer_filename, "r", newline="", encoding="utf-8")
+        self._park_f = open(self.park_filename, "r", newline="", encoding="utf-8")
         self._gps_f = open(self.gps_filename, "r", newline="", encoding="utf-8")
 
         self._acc_reader = csv.reader(self._acc_f, skipinitialspace=True)
+        self._park_reader = csv.reader(self._park_f, skipinitialspace=True)
         self._gps_reader = csv.reader(self._gps_f, skipinitialspace=True)
 
         # File pointer is already at 0 right after open(), so no need to rewind here.
         # Skip header row once.
         next(self._acc_reader, None)
+        next(self._park_reader, None)
         next(self._gps_reader, None)
 
     def _close_files(self) -> None:
@@ -88,8 +107,10 @@ class FileDatasource:
                 pass
 
         self._acc_f = None
+        self._park_f = None
         self._gps_f = None
         self._acc_reader = None
+        self._park_reader = None
         self._gps_reader = None
 
     def _rewind_acc(self) -> None:
@@ -106,6 +127,13 @@ class FileDatasource:
         self._gps_reader = csv.reader(self._gps_f, skipinitialspace=True)
         next(self._gps_reader, None)  # skip header row
 
+    def _rewind_park(self) -> None:
+        if self._park_f is None:
+            raise RuntimeError("Parking file is not open.")
+        self._park_f.seek(0)
+        self._park_reader = csv.reader(self._park_f, skipinitialspace=True)
+        next(self._park_reader, None)  # skip header row
+
     def _get_next_row(self, reader, source: str) -> List[str]:
         """Get the next valid row from the reader."""
         if reader is None:
@@ -118,6 +146,10 @@ class FileDatasource:
             if source == "acc":
                 self._rewind_acc()
                 reader = self._acc_reader
+            elif source == 'park':
+                self._rewind_park()
+                reader = self._park_reader
             else:
                 self._rewind_gps()
                 reader = self._gps_reader
@@ -148,4 +180,17 @@ class FileDatasource:
             raise ValueError(f"GPS row must have 2 values (longitude,latitude). Got: {row}")
         lon = float(row[0])
         lat = float(row[1])
         return Gps(longitude=lon, latitude=lat)
+
+    @staticmethod
+    def _parse_park(row: List[str]) -> Parking:
+        if len(row) < 3:
+            raise ValueError(f"Parking row must have 3 values (longitude,latitude,empty_count). Got: {row}")
+        lon = float(row[0])
+        lat = float(row[1])
+        empty_count = int(row[2])
+
+        return Parking(
+            gps=Gps(longitude=lon, latitude=lat),
+            empty_count=empty_count
+        )
```
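A minimal usage sketch of the extended datasource (the module name `file_datasource` and the `data/` paths are assumed from the `run()` wiring in the next diff; only `startReading()` and `read()` come from the code above, the loop is illustrative):

```python
from file_datasource import FileDatasource  # module name assumed from the class above

# The third CSV path is the new parking feed added in this commit.
datasource = FileDatasource(
    "data/accelerometer.csv",
    "data/gps.csv",
    "data/parking.csv",
)

datasource.startReading()  # opens all three files and skips their header rows
for _ in range(3):
    data = datasource.read()  # AggregatedData with accelerometer, gps and parking
    print(data.parking.empty_count, data.parking.gps)
```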
agent/src/main.py

```diff
@@ -22,13 +22,9 @@ def connect_mqtt(broker, port):
     return client
 
 
-def publish(client, topic, datasource, max_sends = None):
+def publish(client, topic, datasource):
     datasource.startReading()
-
-    i = 0
     while True:
-        i += 1
-
         data = datasource.read()
         msg = AggregatedDataSchema().dumps(data)
         result = client.publish(topic, msg)
@@ -36,18 +32,14 @@
         if status != 0:
             print(f"Failed to send message to topic {topic}")
 
-        if max_sends and i >= max_sends:
-            # display test success
-            exit(0)
-
 
 def run():
     # Prepare mqtt client
    client = connect_mqtt(config.MQTT_BROKER_HOST, config.MQTT_BROKER_PORT)
     # Prepare datasource
-    datasource = FileDatasource("data/accelerometer.csv", "data/gps.csv")
+    datasource = FileDatasource("data/accelerometer.csv", "data/gps.csv", "data/parking.csv")
     # Infinity publish data
-    publish(client, config.MQTT_TOPIC, datasource, getattr(config, "MAX_SENDS", None))
+    publish(client, config.MQTT_TOPIC, datasource)
 
 
 if __name__ == "__main__":
```
agent/src/schema/aggregated_data_schema.py

```diff
@@ -1,10 +1,12 @@
 from marshmallow import Schema, fields
 from schema.accelerometer_schema import AccelerometerSchema
 from schema.gps_schema import GpsSchema
+from schema.parking_schema import ParkingSchema
 
 
 class AggregatedDataSchema(Schema):
     accelerometer = fields.Nested(AccelerometerSchema)
     gps = fields.Nested(GpsSchema)
+    parking = fields.Nested(ParkingSchema)
     timestamp = fields.DateTime("iso")
     user_id = fields.Int()
```
agent/src/schema/parking_schema.py (new file, 8 lines)

```diff
@@ -0,0 +1,8 @@
+from marshmallow import Schema, fields
+
+from schema.gps_schema import GpsSchema
+
+
+class ParkingSchema(Schema):
+    gps = fields.Nested(GpsSchema)
+    empty_count = fields.Int()
```
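For context, a sketch of the JSON the agent now publishes after this schema change (imports assume the agent/src layout; the Accelerometer field names x/y/z are assumed from the store models further down; `dumps()` is the same call `publish()` makes in main.py):

```python
from datetime import datetime

from domain.accelerometer import Accelerometer
from domain.gps import Gps
from domain.parking import Parking
from domain.aggregated_data import AggregatedData
from schema.aggregated_data_schema import AggregatedDataSchema

kyiv = Gps(longitude=50.4504, latitude=30.5245)  # value order as in parking.csv
data = AggregatedData(
    accelerometer=Accelerometer(x=0.1, y=0.2, z=9.8),  # field names assumed
    gps=kyiv,
    parking=Parking(empty_count=10, gps=kyiv),
    timestamp=datetime.utcnow(),
    user_id=1,  # config.USER_ID in the real agent
)

# The payload now carries a nested "parking" object:
# {"accelerometer": {...}, "gps": {...},
#  "parking": {"gps": {...}, "empty_count": 10}, "timestamp": "...", "user_id": 1}
print(AggregatedDataSchema().dumps(data))
```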
edge/.gitignore (vendored, deleted)

```diff
@@ -1,2 +0,0 @@
-venv
-app.log
```
edge/Dockerfile

```diff
@@ -3,9 +3,9 @@ FROM python:3.9-slim
 # Set the working directory inside the container
 WORKDIR /app
 # Copy the requirements.txt file and install dependencies
-COPY requirements.txt .
+COPY edge/requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 # Copy the entire application into the container
-COPY . .
+COPY edge/. .
 # Run the main.py script inside the container when it starts
 CMD ["python", "main.py"]
```
Edge docker-compose:

```diff
@@ -13,12 +13,13 @@ services:
       - 19001:9001
     networks:
       mqtt_network:
-    user: 1000:1000
 
 
   edge:
     container_name: edge
-    build: ../
+    build:
+      context: ../../
+      dockerfile: edge/Dockerfile
     depends_on:
       - mqtt
     environment:
```
hub/.gitignore (vendored, deleted)

```diff
@@ -1,2 +0,0 @@
-venv
-__pycache__
```
hub/Dockerfile

```diff
@@ -3,9 +3,10 @@ FROM python:3.9-slim
 # Set the working directory inside the container
 WORKDIR /app
 # Copy the requirements.txt file and install dependencies
-COPY requirements.txt .
+COPY hub/requirements.txt .
+
 RUN pip install --no-cache-dir -r requirements.txt
 # Copy the entire application into the container
-COPY . .
+COPY hub/. .
 # Run the main.py script inside the container when it starts
 CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]
```
Hub docker-compose:

```diff
@@ -1,4 +1,3 @@
-version: "3.9"
 name: "road_vision__hub"
 services:
   mqtt:
@@ -16,7 +15,7 @@ services:
 
 
   postgres_db:
-    image: postgres:latest
+    image: postgres:17
     container_name: postgres_db
     restart: always
     environment:
@@ -49,7 +48,9 @@ services:
 
   store:
     container_name: store
-    build: ../../store
+    build:
+      context: ../../
+      dockerfile: store/Dockerfile
     depends_on:
       - postgres_db
     restart: always
@@ -77,7 +78,9 @@ services:
 
   hub:
     container_name: hub
-    build: ../
+    build:
+      context: ../../
+      dockerfile: hub/Dockerfile
     depends_on:
       - mqtt
       - redis
```
store/.gitignore (vendored, deleted)

```diff
@@ -1,3 +0,0 @@
-venv
-__pycache__
-.idea
```
store/Dockerfile

```diff
@@ -3,9 +3,10 @@ FROM python:latest
 # Set the working directory inside the container
 WORKDIR /app
 # Copy the requirements.txt file and install dependencies
-COPY requirements.txt .
+COPY store/requirements.txt .
+
 RUN pip install --no-cache-dir -r requirements.txt
 # Copy the entire application into the container
-COPY . .
+COPY store/. .
 # Run the main.py script inside the container when it starts
 CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]
```
store/__init__.py (new empty file)
store/database.py (new file, 15 lines)

```diff
@@ -0,0 +1,15 @@
+from sqlalchemy import MetaData
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker, declarative_base
+
+from config import POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_HOST, POSTGRES_PORT, POSTGRES_DB
+
+
+DATABASE_URL = f"postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
+engine = create_engine(DATABASE_URL)
+
+Base = declarative_base()
+
+metadata = MetaData()
+
+SessionLocal = sessionmaker(bind=engine)
```
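A minimal sketch of how other store modules can consume this file (`create_all` and the sessionmaker usage are standard SQLAlchemy; `processed_agent_data` is the table that main.py binds to this shared metadata):

```python
from sqlalchemy.sql import select

from database import SessionLocal, engine, metadata
from main import processed_agent_data  # table registered on the shared metadata

# Create any tables registered on the metadata (no-op if they already exist).
metadata.create_all(engine)

session = SessionLocal()
try:
    rows = session.execute(select(processed_agent_data)).fetchall()
    print(f"{len(rows)} rows stored")
finally:
    session.close()
```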
Store docker-compose:

```diff
@@ -1,8 +1,7 @@
-version: "3.9"
 name: "road_vision__database"
 services:
   postgres_db:
-    image: postgres:latest
+    image: postgres:17
     container_name: postgres_db
     restart: always
     environment:
@@ -35,7 +34,9 @@ services:
 
   store:
     container_name: store
-    build: ..
+    build:
+      context: ../../
+      dockerfile: store/Dockerfile
     depends_on:
       - postgres_db
     restart: always
```
store/main.py (196 lines changed)

```diff
@@ -1,10 +1,8 @@
 import asyncio
 import json
-from typing import Set, Dict, List, Any
+from typing import Set, Dict, List
 from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Body
+from fastapi.encoders import jsonable_encoder
 from sqlalchemy import (
-    create_engine,
-    MetaData,
     Table,
     Column,
     Integer,
@@ -12,25 +10,14 @@
     Float,
     DateTime,
 )
-from sqlalchemy.orm import sessionmaker
 from sqlalchemy.sql import select
-from datetime import datetime
-from pydantic import BaseModel, field_validator
-from config import (
-    POSTGRES_HOST,
-    POSTGRES_PORT,
-    POSTGRES_DB,
-    POSTGRES_USER,
-    POSTGRES_PASSWORD,
-)
+
+from database import metadata, SessionLocal
+from schemas import ProcessedAgentData, ProcessedAgentDataInDB
 
 # FastAPI app setup
 app = FastAPI()
-# SQLAlchemy setup
-DATABASE_URL = f"postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
-engine = create_engine(DATABASE_URL)
-metadata = MetaData()
 # Define the ProcessedAgentData table
 
 processed_agent_data = Table(
     "processed_agent_data",
     metadata,
@@ -44,57 +31,6 @@ processed_agent_data = Table(
     Column("longitude", Float),
     Column("timestamp", DateTime),
 )
-SessionLocal = sessionmaker(bind=engine)
-
-
-# SQLAlchemy model
-class ProcessedAgentDataInDB(BaseModel):
-    id: int
-    road_state: str
-    user_id: int
-    x: float
-    y: float
-    z: float
-    latitude: float
-    longitude: float
-    timestamp: datetime
-
-
-# FastAPI models
-class AccelerometerData(BaseModel):
-    x: float
-    y: float
-    z: float
-
-
-class GpsData(BaseModel):
-    latitude: float
-    longitude: float
-
-
-class AgentData(BaseModel):
-    user_id: int
-    accelerometer: AccelerometerData
-    gps: GpsData
-    timestamp: datetime
-
-    @classmethod
-    @field_validator("timestamp", mode="before")
-    def check_timestamp(cls, value):
-        if isinstance(value, datetime):
-            return value
-        try:
-            return datetime.fromisoformat(value)
-        except (TypeError, ValueError):
-            raise ValueError(
-                "Invalid timestamp format. Expected ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)."
-            )
-
-
-class ProcessedAgentData(BaseModel):
-    road_state: str
-    agent_data: AgentData
-
 
 # WebSocket subscriptions
 subscriptions: Dict[int, Set[WebSocket]] = {}
@@ -125,10 +61,36 @@ async def send_data_to_subscribers(user_id: int, data):
 
 
 @app.post("/processed_agent_data/")
-async def create_processed_agent_data(data: List[ProcessedAgentData]):
-    # Insert data to database
-    # Send data to subscribers
-    pass
+async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
+    session = SessionLocal()
+    try:
+        created_data = [
+            {
+                "road_state": item.road_state,
+                "user_id": user_id,
+                "x": item.agent_data.accelerometer.x,
+                "y": item.agent_data.accelerometer.y,
+                "z": item.agent_data.accelerometer.z,
+                "latitude": item.agent_data.gps.latitude,
+                "longitude": item.agent_data.gps.longitude,
+                "timestamp": item.agent_data.timestamp,
+            }
+            for item in data
+        ]
+        stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data)
+        result = session.execute(stmt)
+        created_records = [dict(row._mapping) for row in result.fetchall()]
+        session.commit()
+
+        for record in created_records:
+            await send_data_to_subscribers(user_id, jsonable_encoder(record))
+        return created_records
+    except Exception as err:
+        session.rollback()
+        print(f"Database error: {err}")
+        raise HTTPException(status_code=500, detail="Internal Server Error")
+    finally:
+        session.close()
 
 
 @app.get(
@@ -136,14 +98,34 @@ async def create_processed_agent_data(data: List[ProcessedAgentData]):
     response_model=ProcessedAgentDataInDB,
 )
 def read_processed_agent_data(processed_agent_data_id: int):
-    # Get data by id
-    pass
+    session = SessionLocal()
+    try:
+        stmt = select(processed_agent_data).where(
+            processed_agent_data.c.id == processed_agent_data_id
+        )
+        res = session.execute(stmt).fetchone()
+        if not res:
+            raise HTTPException(status_code=404, detail="Not found")
+
+        return dict(res._mapping)
+
+    finally:
+        session.close()
 
 
 @app.get("/processed_agent_data/", response_model=list[ProcessedAgentDataInDB])
 def list_processed_agent_data():
-    # Get list of data
-    pass
+    session = SessionLocal()
+    try:
+        stmt = select(processed_agent_data)
+        res = session.execute(stmt).fetchall()
+        if not res:
+            raise HTTPException(status_code=404, detail="Not found")
+
+        return [dict(r._mapping) for r in res]
+
+    finally:
+        session.close()
 
 
 @app.put(
@@ -152,7 +134,41 @@ def list_processed_agent_data():
 )
 def update_processed_agent_data(processed_agent_data_id: int, data: ProcessedAgentData):
-    # Update data
-    pass
+    session = SessionLocal()
+
+    try:
+        query = select(processed_agent_data).where(
+            processed_agent_data.c.id == processed_agent_data_id
+        )
+        result = session.execute(query).fetchone()
+
+        if not result:
+            raise HTTPException(status_code=404, detail="Data not found")
+
+        update_query = (
+            processed_agent_data.update()
+            .where(processed_agent_data.c.id == processed_agent_data_id)
+            .values(
+                road_state=data.road_state,
+                user_id=data.agent_data.user_id,
+                x=data.agent_data.accelerometer.x,
+                y=data.agent_data.accelerometer.y,
+                z=data.agent_data.accelerometer.z,
+                latitude=data.agent_data.gps.latitude,
+                longitude=data.agent_data.gps.longitude,
+                timestamp=data.agent_data.timestamp,
+            )
+        )
+
+        session.execute(update_query)
+        session.commit()
+
+        updated_result = session.execute(query).fetchone()
+
+        return ProcessedAgentDataInDB(**updated_result._mapping)
+
+    finally:
+        session.close()
 
 
 @app.delete(
@@ -161,8 +177,28 @@ def update_processed_agent_data(processed_agent_data_id: int, data: ProcessedAgentData):
 )
 def delete_processed_agent_data(processed_agent_data_id: int):
-    # Delete by id
-    pass
+    session = SessionLocal()
+
+    try:
+        query = select(processed_agent_data).where(
+            processed_agent_data.c.id == processed_agent_data_id
+        )
+        result = session.execute(query).fetchone()
+
+        if not result:
+            raise HTTPException(status_code=404, detail="Data not found")
+
+        delete_query = processed_agent_data.delete().where(
+            processed_agent_data.c.id == processed_agent_data_id
+        )
+
+        session.execute(delete_query)
+        session.commit()
+
+        return ProcessedAgentDataInDB(**result._mapping)
+
+    finally:
+        session.close()
 
 
 if __name__ == "__main__":
     import uvicorn
```
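A hedged client-side sketch of the reworked create endpoint (host/port and the `requests` dependency are assumptions; because `user_id` is declared with `Body(..., embed=True)`, FastAPI expects both `data` and `user_id` as keys of one JSON body):

```python
import requests

payload = {
    "user_id": 1,
    "data": [
        {
            "road_state": "smooth",
            "agent_data": {
                "user_id": 1,
                "accelerometer": {"x": 0.1, "y": 0.2, "z": 9.8},
                "gps": {"latitude": 50.4504, "longitude": 30.5245},
                "timestamp": "2024-01-01T12:00:00",
            },
        }
    ],
}

# Inserts the rows, pushes them to WebSocket subscribers, returns the created records.
resp = requests.post("http://localhost:8000/processed_agent_data/", json=payload)
print(resp.status_code, resp.json())
```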
Binary file not shown.
store/schemas.py (new file, 51 lines)

```diff
@@ -0,0 +1,51 @@
+from datetime import datetime
+
+from pydantic import BaseModel, field_validator
+
+
+class ProcessedAgentDataInDB(BaseModel):
+    id: int
+    road_state: str
+    user_id: int
+    x: float
+    y: float
+    z: float
+    latitude: float
+    longitude: float
+    timestamp: datetime
+
+
+# FastAPI models
+class AccelerometerData(BaseModel):
+    x: float
+    y: float
+    z: float
+
+
+class GpsData(BaseModel):
+    latitude: float
+    longitude: float
+
+
+class AgentData(BaseModel):
+    user_id: int
+    accelerometer: AccelerometerData
+    gps: GpsData
+    timestamp: datetime
+
+    @classmethod
+    @field_validator("timestamp", mode="before")
+    def check_timestamp(cls, value):
+        if isinstance(value, datetime):
+            return value
+        try:
+            return datetime.fromisoformat(value)
+        except (TypeError, ValueError):
+            raise ValueError(
+                "Invalid timestamp format. Expected ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)."
+            )
+
+
+class ProcessedAgentData(BaseModel):
+    road_state: str
+    agent_data: AgentData
```
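A small sketch of how these models coerce an ISO 8601 timestamp string (`model_validate` is standard pydantic v2, not something this commit adds):

```python
from schemas import AgentData

raw = {
    "user_id": 1,
    "accelerometer": {"x": 0.1, "y": 0.2, "z": 9.8},
    "gps": {"latitude": 50.4504, "longitude": 30.5245},
    "timestamp": "2024-01-01T12:00:00",  # string input, coerced to datetime
}

agent_data = AgentData.model_validate(raw)
print(type(agent_data.timestamp))  # <class 'datetime.datetime'>
```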