Compare commits: lab1-slobo...lab3/hrynk (18 commits)

Commits in range (SHA1):
24aeb1a19f, ceffcfeac2, 312177e087, f96930a259, 87df394352, 10ad9774a7,
b730dbb74c, e4e585b9ac, 185b0aae58, 3931fa58c1, 98fb6aa12a, ea9be3fb57,
f3512e4afb, 69e679eccf, 184098b826, b2c7427af0, 1e7516fe7b, a63864bcaa
.dockerignore (new file, 23 lines)
@@ -0,0 +1,23 @@
+# IDEs
+.idea/
+.vscode/
+.git/
+.gitignore
+.dockerignore
+.DS_Store
+Thumbs.db
+
+# Python
+**/__pycache__/
+**/*.py[cod]
+**/*.pyo
+**/*.pyd
+venv/
+.env
+
+# Logs & Database & Broker data
+*.log
+**/mosquitto/data/
+**/mosquitto/log/
+**/postgres_data/
+**/pgadmin-data/
.gitignore (27 lines changed)
@@ -1,4 +1,25 @@
-agent/docker/mosquitto/data/
-agent/docker/mosquitto/log/
+# IDEs
+.idea/
+.vscode/
+*.swp
+*.swo
+
-.idea/
+# Python
 venv/
+__pycache__/
+*.py[cod]
+*$py.class
+.env
+
+# Logs
+*.log
+app.log
+
+# Database & Broker data
+**/mosquitto/data/
+**/mosquitto/log/
+**/postgres_data/
+**/pgadmin-data/
+
+# OS specific
+.DS_Store
MapView/.gitignore (deleted, 3 lines)
@@ -1,3 +0,0 @@
-.idea
-venv
-__pycache__
agent/.gitignore (deleted, 2 lines)
@@ -1,2 +0,0 @@
-venv
-__pycache__
agent/Dockerfile
@@ -3,10 +3,10 @@ FROM python:latest
 # set the working directory in the container
 WORKDIR /usr/agent
 # copy the dependencies file to the working directory
-COPY requirements.txt .
+COPY agent/requirements.txt .
 # install dependencies
-RUN pip install -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
 # copy the content of the local src directory to the working directory
-COPY src/ .
+COPY agent/src/ .
 # command to run on container start
 CMD ["python", "main.py"]
agent docker-compose
@@ -16,7 +16,9 @@ services:

   fake_agent:
     container_name: agent
-    build: ../
+    build:
+      context: ../../
+      dockerfile: agent/Dockerfile
     depends_on:
       - mqtt
     environment:
edge/.gitignore (deleted, 2 lines)
@@ -1,2 +0,0 @@
-venv
-app.log
edge/Dockerfile
@@ -3,9 +3,9 @@ FROM python:3.9-slim
 # Set the working directory inside the container
 WORKDIR /app
 # Copy the requirements.txt file and install dependencies
-COPY requirements.txt .
+COPY edge/requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 # Copy the entire application into the container
-COPY . .
+COPY edge/. .
 # Run the main.py script inside the container when it starts
 CMD ["python", "main.py"]
edge docker-compose
@@ -17,7 +17,9 @@ services:

   edge:
     container_name: edge
-    build: ../
+    build:
+      context: ../../
+      dockerfile: edge/Dockerfile
     depends_on:
       - mqtt
     environment:
hub/.gitignore (deleted, 2 lines)
@@ -1,2 +0,0 @@
-venv
-__pycache__
hub/Dockerfile
@@ -3,9 +3,10 @@ FROM python:3.9-slim
 # Set the working directory inside the container
 WORKDIR /app
 # Copy the requirements.txt file and install dependencies
-COPY requirements.txt .
+COPY hub/requirements.txt .
+
 RUN pip install --no-cache-dir -r requirements.txt
 # Copy the entire application into the container
-COPY . .
+COPY hub/. .
 # Run the main.py script inside the container when it starts
 CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]
Store API adapter (hub)
@@ -14,11 +14,30 @@ class StoreApiAdapter(StoreGateway):
         self.api_base_url = api_base_url

     def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]):
         """
         Save the processed road data to the Store API.
         Parameters:
         processed_agent_data_batch (dict): Processed road data to be saved.
         Returns:
         bool: True if the data is successfully saved, False otherwise.
         """
-        # Implement it
+        if not processed_agent_data_batch:
+            return False
+
+        # Extract user_id from the first element
+        user_id = processed_agent_data_batch[0].agent_data.user_id
+
+        payload = {
+            "data": [item.model_dump(mode='json') for item in processed_agent_data_batch],
+            "user_id": user_id
+        }
+
+        try:
+            # Perform a POST request to the Store API with a 10-second timeout
+            response = requests.post(
+                f"{self.api_base_url}/processed_agent_data/",
+                json=payload,
+                timeout=10
+            )
+            if response.status_code == 200:
+                logging.info(f"Batch of {len(processed_agent_data_batch)} items sent to Store.")
+                return True
+            else:
+                logging.error(f"Store API error: {response.status_code} - {response.text}")
+                return False
+        except Exception as e:
+            logging.error(f"Failed to send data to Store: {e}")
+            return False
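As a quick orientation for the adapter change above, here is a minimal usage sketch. The import paths, base URL, and field values are illustrative assumptions; only the constructor argument, the save_data call shape, and the resulting {"data": [...], "user_id": ...} payload follow from the diff.

# Illustrative only: module paths and values below are assumptions, not taken from the repo.
from datetime import datetime, timezone

from schemas import ProcessedAgentData, AgentData, AccelerometerData, GpsData  # assumed path
from store_api_adapter import StoreApiAdapter                                  # assumed path

sample = ProcessedAgentData(
    road_state="normal",
    agent_data=AgentData(
        user_id=1,
        accelerometer=AccelerometerData(x=0.1, y=0.2, z=9.8),
        gps=GpsData(latitude=50.45, longitude=30.52),
        timestamp=datetime.now(timezone.utc),
    ),
)

adapter = StoreApiAdapter(api_base_url="http://store:8000")  # base URL is an assumption
ok = adapter.save_data(processed_agent_data_batch=[sample])  # POSTs {"data": [...], "user_id": 1}
print("saved:", ok)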
hub docker-compose (name: road_vision__hub)
@@ -1,4 +1,3 @@
-version: "3.9"
 name: "road_vision__hub"
 services:
   mqtt:
@@ -16,7 +15,7 @@ services:

   postgres_db:
-    image: postgres:latest
+    image: postgres:17
     container_name: postgres_db
     restart: always
     environment:
@@ -49,7 +48,9 @@ services:

   store:
     container_name: store
-    build: ../../store
+    build:
+      context: ../../
+      dockerfile: store/Dockerfile
     depends_on:
       - postgres_db
     restart: always
@@ -77,7 +78,9 @@ services:

   hub:
     container_name: hub
-    build: ../
+    build:
+      context: ../../
+      dockerfile: hub/Dockerfile
     depends_on:
       - mqtt
       - redis
@@ -90,7 +93,7 @@ services:
       MQTT_BROKER_HOST: "mqtt"
       MQTT_BROKER_PORT: 1883
       MQTT_TOPIC: "processed_data_topic"
-      BATCH_SIZE: 1
+      BATCH_SIZE: 20
     ports:
       - "9000:8000"
     networks:
hub/main.py (16 lines changed)
@@ -70,18 +70,20 @@ def on_message(client, userdata, msg):
         processed_agent_data = ProcessedAgentData.model_validate_json(
             payload, strict=True
         )

         redis_client.lpush(
             "processed_agent_data", processed_agent_data.model_dump_json()
         )
+        processed_agent_data_batch: List[ProcessedAgentData] = []

         if redis_client.llen("processed_agent_data") >= BATCH_SIZE:
-            processed_agent_data_batch: List[ProcessedAgentData] = []
             for _ in range(BATCH_SIZE):
-                processed_agent_data = ProcessedAgentData.model_validate_json(
-                    redis_client.lpop("processed_agent_data")
-                )
-                processed_agent_data_batch.append(processed_agent_data)
-            store_adapter.save_data(processed_agent_data_batch=processed_agent_data_batch)
+                raw_data = redis_client.lpop("processed_agent_data")
+                if raw_data:
+                    data_item = ProcessedAgentData.model_validate_json(raw_data)
+                    processed_agent_data_batch.append(data_item)
+
+            store_adapter.save_data(processed_agent_data_batch=processed_agent_data_batch)

         return {"status": "ok"}
     except Exception as e:
         logging.info(f"Error processing MQTT message: {e}")
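The reworked on_message above buffers each MQTT payload in a Redis list and flushes a batch to the store adapter once BATCH_SIZE entries are queued. A minimal sketch of that accumulate-then-flush pattern follows; a plain Python list stands in for Redis and all names here are hypothetical.

# Sketch of the accumulate-then-flush pattern; a list stands in for the Redis queue.
BATCH_SIZE = 20  # mirrors the BATCH_SIZE: 20 environment value in the compose file

queue: list[str] = []

def flush(batch: list[str]) -> None:
    # Stand-in for store_adapter.save_data(processed_agent_data_batch=...)
    print(f"flushing {len(batch)} queued messages")

def handle_message(payload: str) -> None:
    queue.append(payload)                                 # lpush equivalent
    if len(queue) >= BATCH_SIZE:                          # llen check
        batch = [queue.pop() for _ in range(BATCH_SIZE)]  # lpop loop
        flush(batch)

for i in range(45):
    handle_message(f"message {i}")  # flushes at 20 and 40 messages; 5 stay queued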
store/.gitignore (deleted, 3 lines)
@@ -1,3 +0,0 @@
-venv
-__pycache__
-.idea
store/Dockerfile
@@ -3,9 +3,10 @@ FROM python:latest
 # Set the working directory inside the container
 WORKDIR /app
 # Copy the requirements.txt file and install dependencies
-COPY requirements.txt .
+COPY store/requirements.txt .
+
 RUN pip install --no-cache-dir -r requirements.txt
 # Copy the entire application into the container
-COPY . .
+COPY store/. .
 # Run the main.py script inside the container when it starts
 CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]
store/__init__.py (new empty file)
store/database.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+from sqlalchemy import MetaData
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker, declarative_base
+
+from config import POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_HOST, POSTGRES_PORT, POSTGRES_DB
+
+
+DATABASE_URL = f"postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
+engine = create_engine(DATABASE_URL)
+
+Base = declarative_base()
+
+metadata = MetaData()
+
+SessionLocal = sessionmaker(bind=engine)
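database.py builds its connection URL from a config module that is not part of this compare. A plausible sketch of such a module is shown below, assuming it reads the same variables that docker-compose passes in as environment; every name and default here is an assumption.

# Hypothetical config.py sketch (not shown in this compare); all defaults are assumptions.
import os

POSTGRES_USER = os.environ.get("POSTGRES_USER", "postgres")
POSTGRES_PASSWORD = os.environ.get("POSTGRES_PASSWORD", "postgres")
POSTGRES_HOST = os.environ.get("POSTGRES_HOST", "postgres_db")
POSTGRES_PORT = os.environ.get("POSTGRES_PORT", "5432")
POSTGRES_DB = os.environ.get("POSTGRES_DB", "road_vision")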
store docker-compose (name: road_vision__database)
@@ -1,8 +1,7 @@
-version: "3.9"
 name: "road_vision__database"
 services:
   postgres_db:
-    image: postgres:latest
+    image: postgres:17
     container_name: postgres_db
     restart: always
     environment:
@@ -35,7 +34,9 @@ services:

   store:
     container_name: store
-    build: ..
+    build:
+      context: ../../
+      dockerfile: store/Dockerfile
     depends_on:
       - postgres_db
     restart: always
store/main.py (196 lines changed)
@@ -1,10 +1,8 @@
 import asyncio
 import json
-from typing import Set, Dict, List, Any
+from typing import Set, Dict, List
 from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Body
 from fastapi.encoders import jsonable_encoder
 from sqlalchemy import (
     create_engine,
     MetaData,
     Table,
     Column,
     Integer,
@@ -12,25 +10,14 @@ from sqlalchemy import (
     Float,
     DateTime,
 )
-from sqlalchemy.orm import sessionmaker
 from sqlalchemy.sql import select
 from datetime import datetime
 from pydantic import BaseModel, field_validator
-from config import (
-    POSTGRES_HOST,
-    POSTGRES_PORT,
-    POSTGRES_DB,
-    POSTGRES_USER,
-    POSTGRES_PASSWORD,
-)
+
+from database import metadata, SessionLocal
+from schemas import ProcessedAgentData, ProcessedAgentDataInDB

 # FastAPI app setup
 app = FastAPI()
 # SQLAlchemy setup
-DATABASE_URL = f"postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
-engine = create_engine(DATABASE_URL)
-metadata = MetaData()
 # Define the ProcessedAgentData table

 processed_agent_data = Table(
     "processed_agent_data",
     metadata,
@@ -44,57 +31,6 @@ processed_agent_data = Table(
     Column("longitude", Float),
     Column("timestamp", DateTime),
 )
-SessionLocal = sessionmaker(bind=engine)
-
-
-# SQLAlchemy model
-class ProcessedAgentDataInDB(BaseModel):
-    id: int
-    road_state: str
-    user_id: int
-    x: float
-    y: float
-    z: float
-    latitude: float
-    longitude: float
-    timestamp: datetime
-
-
-# FastAPI models
-class AccelerometerData(BaseModel):
-    x: float
-    y: float
-    z: float
-
-
-class GpsData(BaseModel):
-    latitude: float
-    longitude: float
-
-
-class AgentData(BaseModel):
-    user_id: int
-    accelerometer: AccelerometerData
-    gps: GpsData
-    timestamp: datetime
-
-    @classmethod
-    @field_validator("timestamp", mode="before")
-    def check_timestamp(cls, value):
-        if isinstance(value, datetime):
-            return value
-        try:
-            return datetime.fromisoformat(value)
-        except (TypeError, ValueError):
-            raise ValueError(
-                "Invalid timestamp format. Expected ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)."
-            )
-
-
-class ProcessedAgentData(BaseModel):
-    road_state: str
-    agent_data: AgentData
-
-
 # WebSocket subscriptions
 subscriptions: Dict[int, Set[WebSocket]] = {}
@@ -125,10 +61,36 @@ async def send_data_to_subscribers(user_id: int, data):


 @app.post("/processed_agent_data/")
-async def create_processed_agent_data(data: List[ProcessedAgentData]):
-    # Insert data to database
-    # Send data to subscribers
-    pass
+async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
+    session = SessionLocal()
+    try:
+        created_data = [
+            {
+                "road_state": item.road_state,
+                "user_id": user_id,
+                "x": item.agent_data.accelerometer.x,
+                "y": item.agent_data.accelerometer.y,
+                "z": item.agent_data.accelerometer.z,
+                "latitude": item.agent_data.gps.latitude,
+                "longitude": item.agent_data.gps.longitude,
+                "timestamp": item.agent_data.timestamp,
+            }
+            for item in data
+        ]
+        stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data)
+        result = session.execute(stmt)
+        created_records = [dict(row._mapping) for row in result.fetchall()]
+        session.commit()
+
+        for record in created_records:
+            await send_data_to_subscribers(user_id, jsonable_encoder(record))
+        return created_records
+    except Exception as err:
+        session.rollback()
+        print(f"Database error: {err}")
+        raise HTTPException(status_code=500, detail="Internal Server Error")
+    finally:
+        session.close()


 @app.get(
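Because the endpoint now declares two body parameters (data plus user_id with embed=True), FastAPI expects one JSON object carrying both keys, which matches the payload the hub's StoreApiAdapter builds. A hedged example request follows; the host, port, and field values are assumptions.

# Example request against POST /processed_agent_data/ (URL and values are assumptions).
import requests

body = {
    "data": [
        {
            "road_state": "normal",
            "agent_data": {
                "user_id": 1,
                "accelerometer": {"x": 0.1, "y": 0.2, "z": 9.8},
                "gps": {"latitude": 50.45, "longitude": 30.52},
                "timestamp": "2024-01-01T12:00:00",
            },
        }
    ],
    "user_id": 1,
}

resp = requests.post("http://localhost:8000/processed_agent_data/", json=body, timeout=10)
print(resp.status_code, resp.json())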
@@ -136,14 +98,34 @@ async def create_processed_agent_data(data: List[ProcessedAgentData]):
     response_model=ProcessedAgentDataInDB,
 )
 def read_processed_agent_data(processed_agent_data_id: int):
-    # Get data by id
-    pass
+    session = SessionLocal()
+    try:
+        stmt = select(processed_agent_data).where(
+            processed_agent_data.c.id == processed_agent_data_id
+        )
+        res = session.execute(stmt).fetchone()
+        if not res:
+            raise HTTPException(status_code=404, detail="Not found")
+
+        return dict(res._mapping)
+
+    finally:
+        session.close()


 @app.get("/processed_agent_data/", response_model=list[ProcessedAgentDataInDB])
 def list_processed_agent_data():
-    # Get list of data
-    pass
+    session = SessionLocal()
+    try:
+        stmt = select(processed_agent_data)
+        res = session.execute(stmt).fetchall()
+        if not res:
+            raise HTTPException(status_code=404, detail="Not found")
+
+        return [dict(r._mapping) for r in res]
+
+    finally:
+        session.close()


 @app.put(
@@ -152,7 +134,41 @@ def list_processed_agent_data():
 )
 def update_processed_agent_data(processed_agent_data_id: int, data: ProcessedAgentData):
-    # Update data
-    pass
+    session = SessionLocal()
+
+    try:
+        query = select(processed_agent_data).where(
+            processed_agent_data.c.id == processed_agent_data_id
+        )
+        result = session.execute(query).fetchone()
+
+        if not result:
+            raise HTTPException(status_code=404, detail="Data not found")
+
+        update_query = (
+            processed_agent_data.update()
+            .where(processed_agent_data.c.id == processed_agent_data_id)
+            .values(
+                road_state=data.road_state,
+                user_id=data.agent_data.user_id,
+                x=data.agent_data.accelerometer.x,
+                y=data.agent_data.accelerometer.y,
+                z=data.agent_data.accelerometer.z,
+                latitude=data.agent_data.gps.latitude,
+                longitude=data.agent_data.gps.longitude,
+                timestamp=data.agent_data.timestamp,
+            )
+        )
+
+        session.execute(update_query)
+        session.commit()
+
+        updated_result = session.execute(query).fetchone()
+
+        return ProcessedAgentDataInDB(**updated_result._mapping)
+
+    finally:
+        session.close()


 @app.delete(
@@ -161,8 +177,28 @@ def update_processed_agent_data(processed_agent_data_id: int, data: ProcessedAge
 )
 def delete_processed_agent_data(processed_agent_data_id: int):
-    # Delete by id
-    pass
+    session = SessionLocal()
+
+    try:
+        query = select(processed_agent_data).where(
+            processed_agent_data.c.id == processed_agent_data_id
+        )
+        result = session.execute(query).fetchone()
+
+        if not result:
+            raise HTTPException(status_code=404, detail="Data not found")
+
+        delete_query = processed_agent_data.delete().where(
+            processed_agent_data.c.id == processed_agent_data_id
+        )
+
+        session.execute(delete_query)
+        session.commit()
+
+        return ProcessedAgentDataInDB(**result._mapping)
+
+    finally:
+        session.close()

 if __name__ == "__main__":
     import uvicorn
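The read, list, update, and delete endpoints above share the same select/execute/close pattern. For completeness, a short client-side sketch of exercising them follows; the base URL and the record id are assumptions.

# Hedged usage sketch for the CRUD endpoints (base URL and record id are assumptions).
import requests

BASE = "http://localhost:8000"
record_id = 1

print(requests.get(f"{BASE}/processed_agent_data/{record_id}", timeout=10).json())
print(requests.get(f"{BASE}/processed_agent_data/", timeout=10).json())

updated = {
    "road_state": "bumpy",
    "agent_data": {
        "user_id": 1,
        "accelerometer": {"x": 0.3, "y": 0.1, "z": 9.6},
        "gps": {"latitude": 50.46, "longitude": 30.51},
        "timestamp": "2024-01-01T12:05:00",
    },
}
print(requests.put(f"{BASE}/processed_agent_data/{record_id}", json=updated, timeout=10).json())
print(requests.delete(f"{BASE}/processed_agent_data/{record_id}", timeout=10).json())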
Binary file not shown.
store/schemas.py (new file, 51 lines)
@@ -0,0 +1,51 @@
+from datetime import datetime
+
+from pydantic import BaseModel, field_validator
+
+
+class ProcessedAgentDataInDB(BaseModel):
+    id: int
+    road_state: str
+    user_id: int
+    x: float
+    y: float
+    z: float
+    latitude: float
+    longitude: float
+    timestamp: datetime
+
+
+# FastAPI models
+class AccelerometerData(BaseModel):
+    x: float
+    y: float
+    z: float
+
+
+class GpsData(BaseModel):
+    latitude: float
+    longitude: float
+
+
+class AgentData(BaseModel):
+    user_id: int
+    accelerometer: AccelerometerData
+    gps: GpsData
+    timestamp: datetime
+
+    @classmethod
+    @field_validator("timestamp", mode="before")
+    def check_timestamp(cls, value):
+        if isinstance(value, datetime):
+            return value
+        try:
+            return datetime.fromisoformat(value)
+        except (TypeError, ValueError):
+            raise ValueError(
+                "Invalid timestamp format. Expected ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)."
+            )
+
+
+class ProcessedAgentData(BaseModel):
+    road_state: str
+    agent_data: AgentData
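To illustrate the schema round-trip the hub and store both rely on, a minimal sketch follows. It assumes it runs next to schemas.py so the import resolves; the JSON values are made up.

# Round-trip through the schemas above (illustrative values; import path assumed).
from schemas import ProcessedAgentData

raw = """{
    "road_state": "normal",
    "agent_data": {
        "user_id": 1,
        "accelerometer": {"x": 0.1, "y": 0.2, "z": 9.8},
        "gps": {"latitude": 50.45, "longitude": 30.52},
        "timestamp": "2024-01-01T12:00:00"
    }
}"""

item = ProcessedAgentData.model_validate_json(raw)  # ISO-8601 timestamp string is accepted
print(item.agent_data.timestamp)
print(item.model_dump(mode="json"))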