diff --git a/store/.gitignore b/store/.gitignore new file mode 100644 index 0000000..75b0912 --- /dev/null +++ b/store/.gitignore @@ -0,0 +1,3 @@ +venv +__pycache__ +.idea \ No newline at end of file diff --git a/store/Dockerfile b/store/Dockerfile new file mode 100644 index 0000000..d5f0458 --- /dev/null +++ b/store/Dockerfile @@ -0,0 +1,11 @@ +# Use the official Python image as the base image +FROM python:latest +# Set the working directory inside the container +WORKDIR /app +# Copy the requirements.txt file and install dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt +# Copy the entire application into the container +COPY . . +# Run the main.py script inside the container when it starts +CMD ["uvicorn", "main:app", "--host", "0.0.0.0"] \ No newline at end of file diff --git a/store/README.md b/store/README.md new file mode 100644 index 0000000..c923cf0 --- /dev/null +++ b/store/README.md @@ -0,0 +1,27 @@ +# Store +## Instructions for Starting the Project +To start the Store, follow these steps: +1. Clone the repository to your local machine: +```bash +git clone https://github.com/Toolf/store.git +cd store +``` +2. Create and activate a virtual environment (optional but recommended): +```bash +python -m venv venv +source venv/bin/activate # On Windows, use: venv\Scripts\activate +``` +3. Install the project dependencies: +```bash +pip install -r requirements.txt +``` +4. Run the system: +```bash +python ./main.py +``` +## Common Commands +### 1. 
Saving Requirements +To save the project dependencies to the requirements.txt file: +```bash +pip freeze > requirements.txt +``` \ No newline at end of file diff --git a/store/config.py b/store/config.py new file mode 100644 index 0000000..8c74a29 --- /dev/null +++ b/store/config.py @@ -0,0 +1,16 @@ +import os + + +def try_parse(type, value: str): + try: + return type(value) + except Exception: + return None + + +# Configuration for POSTGRES +POSTGRES_HOST = os.environ.get("POSTGRES_HOST") or "localhost" +POSTGRES_PORT = try_parse(int, os.environ.get("POSTGRES_PORT")) or 5432 +POSTGRES_USER = os.environ.get("POSTGRES_USER") or "user" +POSTGRES_PASSWORD = os.environ.get("POSTGRES_PASSWORD") or "pass" +POSTGRES_DB = os.environ.get("POSTGRES_DB") or "test_db" diff --git a/store/docker/db/structure.sql b/store/docker/db/structure.sql new file mode 100644 index 0000000..06b6f76 --- /dev/null +++ b/store/docker/db/structure.sql @@ -0,0 +1,11 @@ +CREATE TABLE processed_agent_data ( + id SERIAL PRIMARY KEY, + road_state VARCHAR(255) NOT NULL, + user_id INTEGER NOT NULL, + x FLOAT, + y FLOAT, + z FLOAT, + latitude FLOAT, + longitude FLOAT, + timestamp TIMESTAMP +); \ No newline at end of file diff --git a/store/docker/docker-compose.yaml b/store/docker/docker-compose.yaml new file mode 100644 index 0000000..8f9c32a --- /dev/null +++ b/store/docker/docker-compose.yaml @@ -0,0 +1,60 @@ +version: "3.9" +name: "road_vision__database" +services: + postgres_db: + image: postgres:latest + container_name: postgres_db + restart: always + environment: + POSTGRES_USER: user + POSTGRES_PASSWORD: pass + POSTGRES_DB: test_db + volumes: + - postgres_data:/var/lib/postgresql/data + - ./db/structure.sql:/docker-entrypoint-initdb.d/structure.sql + ports: + - "5432:5432" + networks: + db_network: + + + pgadmin: + container_name: pgadmin4 + image: dpage/pgadmin4 + restart: always + environment: + PGADMIN_DEFAULT_EMAIL: admin@admin.com + PGADMIN_DEFAULT_PASSWORD: root + volumes: + - 
pgadmin-data:/var/lib/pgadmin + ports: + - "5050:80" + networks: + db_network: + + + store: + container_name: store + build: .. + depends_on: + - postgres_db + restart: always + environment: + POSTGRES_USER: user + POSTGRES_PASSWORD: pass + POSTGRES_DB: test_db + POSTGRES_HOST: postgres_db + POSTGRES_PORT: 5432 + ports: + - "8000:8000" + networks: + db_network: + + +networks: + db_network: + + +volumes: + postgres_data: + pgadmin-data: diff --git a/store/main.py b/store/main.py new file mode 100644 index 0000000..272a646 --- /dev/null +++ b/store/main.py @@ -0,0 +1,170 @@ +import asyncio +import json +from typing import Set, Dict, List, Any +from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Body +from sqlalchemy import ( + create_engine, + MetaData, + Table, + Column, + Integer, + String, + Float, + DateTime, +) +from sqlalchemy.orm import sessionmaker +from sqlalchemy.sql import select +from datetime import datetime +from pydantic import BaseModel, field_validator +from config import ( + POSTGRES_HOST, + POSTGRES_PORT, + POSTGRES_DB, + POSTGRES_USER, + POSTGRES_PASSWORD, +) + +# FastAPI app setup +app = FastAPI() +# SQLAlchemy setup +DATABASE_URL = f"postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}" +engine = create_engine(DATABASE_URL) +metadata = MetaData() +# Define the ProcessedAgentData table +processed_agent_data = Table( + "processed_agent_data", + metadata, + Column("id", Integer, primary_key=True, index=True), + Column("road_state", String), + Column("user_id", Integer), + Column("x", Float), + Column("y", Float), + Column("z", Float), + Column("latitude", Float), + Column("longitude", Float), + Column("timestamp", DateTime), +) +SessionLocal = sessionmaker(bind=engine) + + +# SQLAlchemy model +class ProcessedAgentDataInDB(BaseModel): + id: int + road_state: str + user_id: int + x: float + y: float + z: float + latitude: float + longitude: float + timestamp: datetime + + +# 
FastAPI models +class AccelerometerData(BaseModel): + x: float + y: float + z: float + + +class GpsData(BaseModel): + latitude: float + longitude: float + + +class AgentData(BaseModel): + user_id: int + accelerometer: AccelerometerData + gps: GpsData + timestamp: datetime + + @field_validator("timestamp", mode="before") + @classmethod + def check_timestamp(cls, value): + if isinstance(value, datetime): + return value + try: + return datetime.fromisoformat(value) + except (TypeError, ValueError): + raise ValueError( + "Invalid timestamp format. Expected ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)." + ) + + +class ProcessedAgentData(BaseModel): + road_state: str + agent_data: AgentData + + +# WebSocket subscriptions +subscriptions: Dict[int, Set[WebSocket]] = {} + + +# FastAPI WebSocket endpoint +@app.websocket("/ws/{user_id}") +async def websocket_endpoint(websocket: WebSocket, user_id: int): + await websocket.accept() + if user_id not in subscriptions: + subscriptions[user_id] = set() + subscriptions[user_id].add(websocket) + try: + while True: + await websocket.receive_text() + except WebSocketDisconnect: + subscriptions[user_id].remove(websocket) + + +# Function to send data to subscribed users +async def send_data_to_subscribers(user_id: int, data): + if user_id in subscriptions: + for websocket in subscriptions[user_id]: + await websocket.send_json(data) + + +# FastAPI CRUDL endpoints + + +@app.post("/processed_agent_data/") +async def create_processed_agent_data(data: List[ProcessedAgentData]): + # Insert data to database + # Send data to subscribers + pass + + +@app.get( + "/processed_agent_data/{processed_agent_data_id}", + response_model=ProcessedAgentDataInDB, +) +def read_processed_agent_data(processed_agent_data_id: int): + # Get data by id + pass + + +@app.get("/processed_agent_data/", response_model=list[ProcessedAgentDataInDB]) +def list_processed_agent_data(): + # Get list of data + pass + + +@app.put( 
"/processed_agent_data/{processed_agent_data_id}", + response_model=ProcessedAgentDataInDB, +) +def update_processed_agent_data(processed_agent_data_id: int, data: ProcessedAgentData): + # Update data + pass + + +@app.delete( + "/processed_agent_data/{processed_agent_data_id}", + response_model=ProcessedAgentDataInDB, +) +def delete_processed_agent_data(processed_agent_data_id: int): + # Delete by id + pass + + +if __name__ == "__main__": + import uvicorn + + uvicorn.run(app, host="127.0.0.1", port=8000) diff --git a/store/requirements.txt b/store/requirements.txt new file mode 100644 index 0000000..f920843 Binary files /dev/null and b/store/requirements.txt differ