From 3dacd57de5a1ac9934477bd069763c713cfe5145 Mon Sep 17 00:00:00 2001 From: hasslesstech Date: Mon, 23 Mar 2026 17:03:17 +0200 Subject: [PATCH] [P] Add general smoke test and Store incremental test --- .gitea/workflows/tests.yaml | 51 ++++++++++++++++++++++++++++++++++++- store/Dockerfile-test | 13 ++++++++++ store/main.py | 17 ++++++++----- store/test-entry.sh | 3 +++ store/test/main_test.py | 37 +++++++++++++++++++++++++++ 5 files changed, 113 insertions(+), 8 deletions(-) create mode 100644 store/Dockerfile-test create mode 100755 store/test-entry.sh create mode 100644 store/test/main_test.py diff --git a/.gitea/workflows/tests.yaml b/.gitea/workflows/tests.yaml index f80a96e..7795dbf 100644 --- a/.gitea/workflows/tests.yaml +++ b/.gitea/workflows/tests.yaml @@ -1,8 +1,9 @@ -name: Hub component testing +name: Component testing on: [push, workflow_dispatch] jobs: hub-test: + name: Hub testing runs-on: host-arch-x86_64 steps: - name: Clone repository @@ -19,3 +20,51 @@ jobs: - name: Clean up containers if: ${{always()}} run: docker image rm local/hub/${{gitea.sha}} + + store-test: + name: Store testing + runs-on: host-arch-x86_64 + steps: + - name: Clone repository + run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }} + + - name: Build Store testing container + working-directory: IoT-Systems + run: docker build -t local/store/${{gitea.sha}} -f store/Dockerfile-test . 
+
+      - name: Run Store tests
+        working-directory: IoT-Systems
+        run: docker run --rm local/store/${{gitea.sha}}
+
+      - name: Clean up containers
+        if: ${{always()}}
+        run: docker image rm local/store/${{gitea.sha}}
+
+  integration-smoke-test:
+    name: Integration smoke testing
+    runs-on: host-arch-x86_64
+    needs:
+      - hub-test
+      - store-test
+    steps:
+      - name: Clone repository
+        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}
+
+      - name: Build all production containers
+        working-directory: IoT-Systems
+        run: docker-compose build
+
+      - name: Start all production containers
+        working-directory: IoT-Systems
+        run: docker-compose up -d
+
+      - name: Wait for crashes to happen
+        run: sleep 30
+
+      - name: Check for dead containers
+        working-directory: IoT-Systems
+        run: docker ps -a | python3 utils/check-up.py
+
+      - name: Clean up
+        if: ${{always()}}
+        run: cd IoT-Systems && docker-compose down --rmi local
diff --git a/store/Dockerfile-test b/store/Dockerfile-test
new file mode 100644
index 0000000..3435d40
--- /dev/null
+++ b/store/Dockerfile-test
@@ -0,0 +1,13 @@
+# Use the official Python image as the base image
+FROM python:latest
+# Set the working directory inside the container
+WORKDIR /app
+# Copy the requirements.txt file and install dependencies
+COPY store/requirements.txt .
+
+RUN pip install --no-cache-dir -r requirements.txt
+# Copy the entire application into the container
+COPY store/. . 
+
+# Run the main.py script inside the container when it starts
+#CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]
+CMD ["./test-entry.sh"]
diff --git a/store/main.py b/store/main.py
index 00d0f4e..41c0785 100644
--- a/store/main.py
+++ b/store/main.py
@@ -73,15 +73,11 @@ async def send_data_to_subscribers(user_id: int, data):
 
 # FastAPI CRUDL endpoints
 
-
-@app.post("/processed_agent_data/")
-async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
-    session = SessionLocal()
-    try:
-        created_data = [
+def ProcessedAgentData_to_td(data: List[ProcessedAgentData]):
+    return [
             {
                 "road_state": item.road_state,
-                "user_id": user_id,
+                "user_id": item.agent_data.user_id,
                 "x": item.agent_data.accelerometer.x,
                 "y": item.agent_data.accelerometer.y,
                 "z": item.agent_data.accelerometer.z,
@@ -91,6 +87,13 @@ async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: i
             }
             for item in data
         ]
+
+
+@app.post("/processed_agent_data/")
+async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
+    session = SessionLocal()
+    try:
+        created_data = ProcessedAgentData_to_td(data)
         stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data)
         result = session.execute(stmt)
         created_records = [dict(row._mapping) for row in result.fetchall()]
diff --git a/store/test-entry.sh b/store/test-entry.sh
new file mode 100755
index 0000000..bd21587
--- /dev/null
+++ b/store/test-entry.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+PYTHONPATH=. python3 test/main_test.py
diff --git a/store/test/main_test.py b/store/test/main_test.py
new file mode 100644
index 0000000..6f29502
--- /dev/null
+++ b/store/test/main_test.py
@@ -0,0 +1,38 @@
+import main
+from main import ProcessedAgentData, AgentData, AccelerometerData, GpsData
+
+def _test_ProcessedAgentData_to_td():
+    processed_data_batch = [
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 1,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 
10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 2,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 3,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+    ]
+
+    res = main.ProcessedAgentData_to_td(processed_data_batch)
+
+    assert res[0]["user_id"] == 1
+    assert res[1]["user_id"] == 2
+    assert res[2]["user_id"] == 3
+
+
+if __name__ == "__main__":
+    test_functions = [i for i in dir() if i.startswith('_test_')]
+
+    for i in test_functions:
+        print(i)
+        globals()[i]()