diff --git a/.gitea/workflows/reset-docker.yaml b/.gitea/workflows/reset-docker.yaml
new file mode 100644
index 0000000..4e5927f
--- /dev/null
+++ b/.gitea/workflows/reset-docker.yaml
@@ -0,0 +1,16 @@
+name: Reset docker state
+on: workflow_dispatch
+
+jobs:
+  reset:
+    runs-on: host-arch-x86_64
+    name: Reset docker state
+    steps:
+      - name: Stop all containers
+        run: docker ps -aq | xargs -r docker stop
+
+      - name: Remove all containers
+        run: docker ps -aq | xargs -r docker rm
+
+      - name: Remove extra volumes
+        run: docker volume rm road_vision_postgres_data road_vision_pgadmin-data
diff --git a/.gitea/workflows/tests.yaml b/.gitea/workflows/tests.yaml
new file mode 100644
index 0000000..a6536df
--- /dev/null
+++ b/.gitea/workflows/tests.yaml
@@ -0,0 +1,71 @@
+name: Component testing
+on: [push, workflow_dispatch]
+
+jobs:
+  hub-test:
+    name: Hub testing
+    runs-on: host-arch-x86_64
+    steps:
+      - name: Clone repository
+        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}
+
+      - name: Build Hub testing container
+        working-directory: IoT-Systems
+        run: docker build -t local/hub/${{gitea.sha}} -f hub/Dockerfile-test .
+
+      - name: Run Hub tests
+        working-directory: IoT-Systems
+        run: docker run --rm local/hub/${{gitea.sha}}
+
+      - name: Clean up containers
+        if: ${{always()}}
+        run: docker image rm local/hub/${{gitea.sha}}
+
+  store-test:
+    name: Store testing
+    runs-on: host-arch-x86_64
+    steps:
+      - name: Clone repository
+        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}
+
+      - name: Build Store testing container
+        working-directory: IoT-Systems
+        run: docker build -t local/store/${{gitea.sha}} -f store/Dockerfile-test .
+
+      - name: Run Store tests
+        working-directory: IoT-Systems
+        run: docker run --rm local/store/${{gitea.sha}}
+
+      - name: Clean up containers
+        if: ${{always()}}
+        run: docker image rm local/store/${{gitea.sha}}
+
+  integration-smoke-test:
+    name: Integration smoke testing
+    runs-on: host-arch-x86_64
+    needs:
+      - hub-test
+      - store-test
+    steps:
+      - name: Clone repository
+        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}
+
+      - name: Build all production containers
+        working-directory: IoT-Systems
+        run: docker-compose build
+
+      - name: Start all production containers
+        working-directory: IoT-Systems
+        run: docker-compose up -d
+
+      - name: Wait for crashes to happen
+        run: sleep 30
+
+      - name: Check for dead containers
+        working-directory: IoT-Systems
+        run: docker ps -a | python3 utils/check-up.py
+
+      - name: Clean up
+        if: ${{always()}}
+        working-directory: IoT-Systems
+        run: docker-compose down -v
diff --git a/hub/Dockerfile-test b/hub/Dockerfile-test
new file mode 100644
index 0000000..4b3ae49
--- /dev/null
+++ b/hub/Dockerfile-test
@@ -0,0 +1,12 @@
+# Use the official Python image as the base image
+FROM python:3.9-slim
+# Set the working directory inside the container
+WORKDIR /app
+# Copy the requirements.txt file and install dependencies
+COPY hub/requirements.txt .
+
+RUN pip install --no-cache-dir -r requirements.txt
+# Copy the entire application into the container
+COPY hub/. .
+# Run the main.py script inside the container when it starts
+CMD ["./test-entry.sh"]
diff --git a/hub/app/adapters/store_api_adapter.py b/hub/app/adapters/store_api_adapter.py
index 8ce4945..857d88f 100644
--- a/hub/app/adapters/store_api_adapter.py
+++ b/hub/app/adapters/store_api_adapter.py
@@ -13,7 +13,7 @@ class StoreApiAdapter(StoreGateway):
     def __init__(self, api_base_url):
         self.api_base_url = api_base_url
 
-    def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]):
+    def processed_agent_data_batch_to_payload(self, processed_agent_data_batch: List[ProcessedAgentData]):
         if not processed_agent_data_batch:
             return False
 
@@ -25,6 +25,14 @@ class StoreApiAdapter(StoreGateway):
                 "user_id": user_id
             }
 
+        return payload
+
+    def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]):
+        payload = self.processed_agent_data_batch_to_payload(processed_agent_data_batch)
+
+        if payload is False:
+            return False
+
         try:
             # Perform a POST request to the Store API with a 10-second timeout
             response = requests.post(
diff --git a/hub/app/adapters/store_api_adapter_test.py b/hub/app/adapters/store_api_adapter_test.py
new file mode 100644
index 0000000..223bfe2
--- /dev/null
+++ b/hub/app/adapters/store_api_adapter_test.py
@@ -0,0 +1,41 @@
+from app.adapters.store_api_adapter import StoreApiAdapter
+from app.entities.agent_data import AccelerometerData, AgentData, GpsData
+from app.entities.processed_agent_data import ProcessedAgentData
+
+def _test_processed_agent_data_batch_to_payload():
+    processed_data_batch = [
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 1,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 2,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 3,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+    ]
+
+    res = StoreApiAdapter(None).processed_agent_data_batch_to_payload(processed_data_batch)
+
+    assert res["data"][0]["agent_data"]["user_id"] == 1
+    assert res["data"][1]["agent_data"]["user_id"] == 2
+    assert res["data"][2]["agent_data"]["user_id"] == 3
+
+    assert StoreApiAdapter(None).processed_agent_data_batch_to_payload([]) is False
+
+
+if __name__ == "__main__":
+    test_functions = [i for i in dir() if i.startswith('_test_')]
+
+    for i in test_functions:
+        print(i)
+        globals()[i]()
diff --git a/hub/test-entry.sh b/hub/test-entry.sh
new file mode 100755
index 0000000..656e3f7
--- /dev/null
+++ b/hub/test-entry.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+PYTHONPATH=$PWD python3 app/adapters/store_api_adapter_test.py
diff --git a/store/Dockerfile-test b/store/Dockerfile-test
new file mode 100644
index 0000000..3435d40
--- /dev/null
+++ b/store/Dockerfile-test
@@ -0,0 +1,13 @@
+# Use the official Python image as the base image
+FROM python:3.9-slim
+# Set the working directory inside the container
+WORKDIR /app
+# Copy the requirements.txt file and install dependencies
+COPY store/requirements.txt .
+
+RUN pip install --no-cache-dir -r requirements.txt
+# Copy the entire application into the container
+COPY store/. .
+# Run the main.py script inside the container when it starts
+#CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]
+CMD ["./test-entry.sh"]
diff --git a/store/main.py b/store/main.py
index 00d0f4e..41c0785 100644
--- a/store/main.py
+++ b/store/main.py
@@ -73,15 +73,11 @@
 
 
 # FastAPI CRUDL endpoints
-
-@app.post("/processed_agent_data/")
-async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
-    session = SessionLocal()
-    try:
-        created_data = [
+def ProcessedAgentData_to_td(data: List[ProcessedAgentData]):
+    return [
         {
             "road_state": item.road_state,
-            "user_id": user_id,
+            "user_id": item.agent_data.user_id,
             "x": item.agent_data.accelerometer.x,
             "y": item.agent_data.accelerometer.y,
             "z": item.agent_data.accelerometer.z,
@@ -91,6 +87,13 @@
         }
         for item in data
     ]
+
+
+@app.post("/processed_agent_data/")
+async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
+    session = SessionLocal()
+    try:
+        created_data = ProcessedAgentData_to_td(data)
     stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data)
     result = session.execute(stmt)
     created_records = [dict(row._mapping) for row in result.fetchall()]
diff --git a/store/test-entry.sh b/store/test-entry.sh
new file mode 100755
index 0000000..433e709
--- /dev/null
+++ b/store/test-entry.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+PYTHONPATH=$PWD python3 test/main_test.py
diff --git a/store/test/main_test.py b/store/test/main_test.py
new file mode 100644
index 0000000..3f3470e
--- /dev/null
+++ b/store/test/main_test.py
@@ -0,0 +1,39 @@
+from schemas import AccelerometerData, AgentData, GpsData, ProcessedAgentData
+
+import main
+
+def _test_ProcessedAgentData_to_td():
+    processed_data_batch = [
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 1,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 2,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+        ProcessedAgentData(road_state = "normal",
+                           agent_data = AgentData(user_id = 3,
+                                                  accelerometer = AccelerometerData(x = 0.1, y = 0.2, z = 0.3),
+                                                  gps = GpsData(latitude = 10.123, longitude = 20.456),
+                                                  timestamp = "2023-07-21T12:34:56Z")
+                           ),
+    ]
+
+    res = main.ProcessedAgentData_to_td(processed_data_batch)
+
+    assert res[0]["user_id"] == 1
+    assert res[1]["user_id"] == 2
+    assert res[2]["user_id"] == 3
+
+
+if __name__ == "__main__":
+    test_functions = [i for i in dir() if i.startswith('_test_')]
+
+    for i in test_functions:
+        print(i)
+        globals()[i]()
diff --git a/utils/check-up.py b/utils/check-up.py
new file mode 100644
index 0000000..957f573
--- /dev/null
+++ b/utils/check-up.py
@@ -0,0 +1,19 @@
+import sys
+
+print("Checking for dead containers...")
+
+lines = [i for i in sys.stdin.read().split("\n") if i]
+header, statuses = lines[0], lines[1:]
+
+status_index = header.find('STATUS')
+name_index = header.find('NAMES')
+
+exit_code = 0
+
+for row in statuses:
+    if not row[status_index:].startswith("Up "):
+        service_name = row[name_index:].strip()
+        print(f"Crash detected in {service_name}")
+        exit_code = 1
+
+sys.exit(exit_code)