From fe6bb6ab3a7240109457a29c60719f66174f7a72 Mon Sep 17 00:00:00 2001 From: hasslesstech Date: Mon, 23 Mar 2026 15:19:19 +0200 Subject: [PATCH 1/5] [P] Add CI for updated Hub component part --- .gitea/workflows/hub-test.yaml | 21 ++++++++++++ hub/Dockerfile-test | 12 +++++++ hub/app/adapters/store_api_adapter.py | 10 +++++- hub/app/adapters/store_api_adapter_test.py | 39 ++++++++++++++++++++++ hub/test-entry.sh | 3 ++ 5 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 .gitea/workflows/hub-test.yaml create mode 100644 hub/Dockerfile-test create mode 100644 hub/app/adapters/store_api_adapter_test.py create mode 100755 hub/test-entry.sh diff --git a/.gitea/workflows/hub-test.yaml b/.gitea/workflows/hub-test.yaml new file mode 100644 index 0000000..f80a96e --- /dev/null +++ b/.gitea/workflows/hub-test.yaml @@ -0,0 +1,21 @@ +name: Hub component testing +on: [push, workflow_dispatch] + +jobs: + hub-test: + runs-on: host-arch-x86_64 + steps: + - name: Clone repository + run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} + + - name: Build Hub testing container + working-directory: IoT-Systems + run: docker build -t local/hub/${{gitea.sha}} -f hub/Dockerfile-test . + + - name: Run Hub tests + working-directory: IoT-Systems + run: docker run --rm -it local/hub/${{gitea.sha}} + + - name: Clean up containers + if: ${{always()}} + run: docker image rm local/hub/${{gitea.sha}} diff --git a/hub/Dockerfile-test b/hub/Dockerfile-test new file mode 100644 index 0000000..4b3ae49 --- /dev/null +++ b/hub/Dockerfile-test @@ -0,0 +1,12 @@ +# Use the official Python image as the base image +FROM python:3.9-slim +# Set the working directory inside the container +WORKDIR /app +# Copy the requirements.txt file and install dependencies +COPY hub/requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt +# Copy the entire application into the container +COPY hub/. . 
+# Run the main.py script inside the container when it starts +CMD ["./test-entry.sh"] diff --git a/hub/app/adapters/store_api_adapter.py b/hub/app/adapters/store_api_adapter.py index 8ce4945..e06d91c 100644 --- a/hub/app/adapters/store_api_adapter.py +++ b/hub/app/adapters/store_api_adapter.py @@ -13,7 +13,7 @@ class StoreApiAdapter(StoreGateway): def __init__(self, api_base_url): self.api_base_url = api_base_url - def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]): + def processed_agent_data_batch_to_json(self, processed_agent_data_batch: List[ProcessedAgentData]): if not processed_agent_data_batch: return False @@ -25,6 +25,14 @@ class StoreApiAdapter(StoreGateway): "user_id": user_id } + return payload + + def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]): + payload = self.processed_agent_data_batch_to_json(processed_agent_data_batch) + + if payload == False: + return False + try: # Perform a POST request to the Store API with a 10-second timeout response = requests.post( diff --git a/hub/app/adapters/store_api_adapter_test.py b/hub/app/adapters/store_api_adapter_test.py new file mode 100644 index 0000000..f971a5a --- /dev/null +++ b/hub/app/adapters/store_api_adapter_test.py @@ -0,0 +1,39 @@ +from app.adapters.store_api_adapter import StoreApiAdapter +from app.entities.agent_data import AccelerometerData, AgentData, GpsData +from app.entities.processed_agent_data import ProcessedAgentData + +def _test_processed_agent_data_batch_to_json(): + processed_data_batch = [ + ProcessedAgentData(road_state = "normal", + agent_data = AgentData(user_id = 1, + accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3), + gps = GpsData(latitude = 10.123, longitude = 20.456), + timestamp = "2023-07-21T12:34:56Z") + ), + ProcessedAgentData(road_state = "normal", + agent_data = AgentData(user_id = 2, + accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3), + gps = GpsData(latitude = 10.123, longitude = 20.456), + 
timestamp = "2023-07-21T12:34:56Z") + ), + ProcessedAgentData(road_state = "normal", + agent_data = AgentData(user_id = 3, + accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3), + gps = GpsData(latitude = 10.123, longitude = 20.456), + timestamp = "2023-07-21T12:34:56Z") + ), + ] + + res = StoreApiAdapter(None).processed_agent_data_batch_to_json(processed_data_batch) + + assert res["data"][0]["agent_data"]["user_id"] == 1 + assert res["data"][1]["agent_data"]["user_id"] == 2 + assert res["data"][2]["agent_data"]["user_id"] == 3 + + +if __name__ == "__main__": + test_functions = [i for i in dir() if i.startswith('_test_')] + + for i in test_functions: + print(i) + eval(i)() diff --git a/hub/test-entry.sh b/hub/test-entry.sh new file mode 100755 index 0000000..656e3f7 --- /dev/null +++ b/hub/test-entry.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +PYTHONPATH=$PWD python3 app/adapters/store_api_adapter_test.py -- 2.49.1 From 60a846d8b8c817a6a8afaed59e69cdba3596e266 Mon Sep 17 00:00:00 2001 From: hasslesstech Date: Mon, 23 Mar 2026 16:10:09 +0200 Subject: [PATCH 2/5] [P] Refactor testing code --- .gitea/workflows/{hub-test.yaml => tests.yaml} | 0 hub/app/adapters/store_api_adapter.py | 4 ++-- hub/app/adapters/store_api_adapter_test.py | 6 ++++-- 3 files changed, 6 insertions(+), 4 deletions(-) rename .gitea/workflows/{hub-test.yaml => tests.yaml} (100%) diff --git a/.gitea/workflows/hub-test.yaml b/.gitea/workflows/tests.yaml similarity index 100% rename from .gitea/workflows/hub-test.yaml rename to .gitea/workflows/tests.yaml diff --git a/hub/app/adapters/store_api_adapter.py b/hub/app/adapters/store_api_adapter.py index e06d91c..857d88f 100644 --- a/hub/app/adapters/store_api_adapter.py +++ b/hub/app/adapters/store_api_adapter.py @@ -13,7 +13,7 @@ class StoreApiAdapter(StoreGateway): def __init__(self, api_base_url): self.api_base_url = api_base_url - def processed_agent_data_batch_to_json(self, processed_agent_data_batch: List[ProcessedAgentData]): + def 
processed_agent_data_batch_to_payload(self, processed_agent_data_batch: List[ProcessedAgentData]): if not processed_agent_data_batch: return False @@ -28,7 +28,7 @@ class StoreApiAdapter(StoreGateway): return payload def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]): - payload = self.processed_agent_data_batch_to_json(processed_agent_data_batch) + payload = self.processed_agent_data_batch_to_payload(processed_agent_data_batch) if payload == False: return False diff --git a/hub/app/adapters/store_api_adapter_test.py b/hub/app/adapters/store_api_adapter_test.py index f971a5a..223bfe2 100644 --- a/hub/app/adapters/store_api_adapter_test.py +++ b/hub/app/adapters/store_api_adapter_test.py @@ -2,7 +2,7 @@ from app.adapters.store_api_adapter import StoreApiAdapter from app.entities.agent_data import AccelerometerData, AgentData, GpsData from app.entities.processed_agent_data import ProcessedAgentData -def _test_processed_agent_data_batch_to_json(): +def _test_processed_agent_data_batch_to_payload(): processed_data_batch = [ ProcessedAgentData(road_state = "normal", agent_data = AgentData(user_id = 1, @@ -24,12 +24,14 @@ def _test_processed_agent_data_batch_to_json(): ), ] - res = StoreApiAdapter(None).processed_agent_data_batch_to_json(processed_data_batch) + res = StoreApiAdapter(None).processed_agent_data_batch_to_payload(processed_data_batch) assert res["data"][0]["agent_data"]["user_id"] == 1 assert res["data"][1]["agent_data"]["user_id"] == 2 assert res["data"][2]["agent_data"]["user_id"] == 3 + assert StoreApiAdapter(None).processed_agent_data_batch_to_payload([]) == False + if __name__ == "__main__": test_functions = [i for i in dir() if i.startswith('_test_')] -- 2.49.1 From 30af132033bb61ca23e0ad20acaa1ee1f59498fa Mon Sep 17 00:00:00 2001 From: hasslesstech Date: Mon, 23 Mar 2026 17:03:17 +0200 Subject: [PATCH 3/5] [P] Add general smoke test and Store incremental test --- .gitea/workflows/tests.yaml | 52 
++++++++++++++++++++++++++++++++++++- store/Dockerfile-test | 13 ++++++++++ store/main.py | 17 +++++++----- store/test-entry.sh | 3 +++ store/test/main_test.py | 39 ++++++++++++++++++++++++++++ utils/check-up.py | 19 ++++++++++++++ 6 files changed, 135 insertions(+), 8 deletions(-) create mode 100644 store/Dockerfile-test create mode 100755 store/test-entry.sh create mode 100644 store/test/main_test.py create mode 100644 utils/check-up.py diff --git a/.gitea/workflows/tests.yaml b/.gitea/workflows/tests.yaml index f80a96e..07e95af 100644 --- a/.gitea/workflows/tests.yaml +++ b/.gitea/workflows/tests.yaml @@ -1,8 +1,9 @@ -name: Hub component testing +name: Component testing on: [push, workflow_dispatch] jobs: hub-test: + name: Hub testing runs-on: host-arch-x86_64 steps: - name: Clone repository @@ -19,3 +20,52 @@ jobs: - name: Clean up containers if: ${{always()}} run: docker image rm local/hub/${{gitea.sha}} + + store-test: + name: Store testing + runs-on: host-arch-x86_64 + steps: + - name: Clone repository + run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} + + - name: Build Store testing container + working-directory: IoT-Systems + run: docker build -t local/store/${{gitea.sha}} -f store/Dockerfile-test . 
+ + - name: Run Store tests + working-directory: IoT-Systems + run: docker run --rm -it local/store/${{gitea.sha}} + + - name: Clean up containers + if: ${{always()}} + run: docker image rm local/store/${{gitea.sha}} + + integration-smoke-test: + name: Integration smoke testing + runs-on: host-arch-x86_64 + needs: + - hub-test + - store-test + steps: + - name: Clone repository + run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} + + #- name: Build all production containers + #working-directory: IoT-Systems + #run: docker-compose build + + - name: Start all production containers + working-directory: IoT-Systems + run: docker-compose up -d + + - name: Wait for crashes to happen + run: sleep 30 + + - name: Check for dead containers + working-directory: IoT-Systems + run: docker ps -a | python3 utils/check-up.py + + - name: Clean up + if: ${{always()}} + working-directory: IoT-Systems + run: docker-compose down -v diff --git a/store/Dockerfile-test b/store/Dockerfile-test new file mode 100644 index 0000000..3435d40 --- /dev/null +++ b/store/Dockerfile-test @@ -0,0 +1,13 @@ +# Use the official Python image as the base image +FROM python:latest +# Set the working directory inside the container +WORKDIR /app +# Copy the requirements.txt file and install dependencies +COPY store/requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt +# Copy the entire application into the container +COPY store/. . 
+# Run the main.py script inside the container when it starts +#CMD ["uvicorn", "main:app", "--host", "0.0.0.0"] +CMD ["./test-entry.sh"] diff --git a/store/main.py b/store/main.py index 00d0f4e..41c0785 100644 --- a/store/main.py +++ b/store/main.py @@ -73,15 +73,11 @@ async def send_data_to_subscribers(user_id: int, data): # FastAPI CRUDL endpoints - -@app.post("/processed_agent_data/") -async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)): - session = SessionLocal() - try: - created_data = [ +def ProcessedAgentData_to_td(data: List[ProcessedAgentData]): + return [ { "road_state": item.road_state, - "user_id": user_id, + "user_id": item.agent_data.user_id, "x": item.agent_data.accelerometer.x, "y": item.agent_data.accelerometer.y, "z": item.agent_data.accelerometer.z, @@ -91,6 +87,13 @@ async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: i } for item in data ] + + +@app.post("/processed_agent_data/") +async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)): + session = SessionLocal() + try: + created_data = ProcessedAgentData_to_td(data) stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data) result = session.execute(stmt) created_records = [dict(row._mapping) for row in result.fetchall()] diff --git a/store/test-entry.sh b/store/test-entry.sh new file mode 100755 index 0000000..433e709 --- /dev/null +++ b/store/test-entry.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +PYTHONPATH=$PWD python3 test/main_test.py diff --git a/store/test/main_test.py b/store/test/main_test.py new file mode 100644 index 0000000..3f3470e --- /dev/null +++ b/store/test/main_test.py @@ -0,0 +1,39 @@ +from schemas import AccelerometerData, AgentData, GpsData, ProcessedAgentData + +import main + +def _test_ProcessedAgentData_to_td(): + processed_data_batch = [ + ProcessedAgentData(road_state = "normal", + agent_data = AgentData(user_id = 
1, + accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3), + gps = GpsData(latitude = 10.123, longitude = 20.456), + timestamp = "2023-07-21T12:34:56Z") + ), + ProcessedAgentData(road_state = "normal", + agent_data = AgentData(user_id = 2, + accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3), + gps = GpsData(latitude = 10.123, longitude = 20.456), + timestamp = "2023-07-21T12:34:56Z") + ), + ProcessedAgentData(road_state = "normal", + agent_data = AgentData(user_id = 3, + accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3), + gps = GpsData(latitude = 10.123, longitude = 20.456), + timestamp = "2023-07-21T12:34:56Z") + ), + ] + + res = main.ProcessedAgentData_to_td(processed_data_batch) + + assert res[0]["user_id"] == 1 + assert res[1]["user_id"] == 2 + assert res[2]["user_id"] == 3 + + +if __name__ == "__main__": + test_functions = [i for i in dir() if i.startswith('_test_')] + + for i in test_functions: + print(i) + eval(i)() diff --git a/utils/check-up.py b/utils/check-up.py new file mode 100644 index 0000000..957f573 --- /dev/null +++ b/utils/check-up.py @@ -0,0 +1,19 @@ +import sys + +print("Checking for dead containers...") + +l = [i for i in sys.stdin.read().split("\n") if i] +header, statuses = l[0], l[1:] + +status_index = header.find('STATUS') +name_index = header.find('NAMES') + +exit_code = 0 + +for i in statuses: + if not i[status_index:].startswith("Up "): + service_name = i[name_index:] + print(f"Crash detected in {service_name}") + exit_code = 1 + +sys.exit(exit_code) -- 2.49.1 From 2846130e4ea0b7d8a0b8ec12b7cbfa07cc948813 Mon Sep 17 00:00:00 2001 From: hasslesstech Date: Mon, 23 Mar 2026 18:26:38 +0200 Subject: [PATCH 4/5] [P] Add docker reset workflow --- .gitea/workflows/reset-docker.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .gitea/workflows/reset-docker.yaml diff --git a/.gitea/workflows/reset-docker.yaml b/.gitea/workflows/reset-docker.yaml new file mode 100644 index 
0000000..4e5927f --- /dev/null +++ b/.gitea/workflows/reset-docker.yaml @@ -0,0 +1,16 @@ +name: Reset docker state +on: workflow_dispatch + +jobs: + reset: + runs-on: host-arch-x86_64 + name: Reset docker state + steps: + - name: Stop all containers + run: docker ps -aq | xargs -r docker stop + + - name: Remove all containers + run: docker ps -aq | xargs -r docker rm + + - name: Remove extra volumes + run: docker volume rm road_vision_postgres_data road_vision_pgadmin-data -- 2.49.1 From 0b8d2eb18b3506e6329c7eeea38030c54d8ea54e Mon Sep 17 00:00:00 2001 From: hasslesstech Date: Mon, 23 Mar 2026 18:31:43 +0200 Subject: [PATCH 5/5] [P] Add container rebuilding on every rerun --- .gitea/workflows/tests.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.gitea/workflows/tests.yaml b/.gitea/workflows/tests.yaml index 07e95af..a6536df 100644 --- a/.gitea/workflows/tests.yaml +++ b/.gitea/workflows/tests.yaml @@ -50,9 +50,9 @@ jobs: - name: Clone repository run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} - #- name: Build all production containers - #working-directory: IoT-Systems - #run: docker-compose build + - name: Build all production containers + working-directory: IoT-Systems + run: docker-compose build - name: Start all production containers working-directory: IoT-Systems -- 2.49.1