Compare commits
4 Commits
| SHA1 | Author | Date |
|---|---|---|
| d247b223c6 | | |
| bdd72a06c8 | | |
| 36f99e1ec0 | | |
| 80589824d1 | | |
@@ -1,16 +0,0 @@
# Manually-triggered workflow that wipes all docker state on the runner:
# stops and removes every container, then deletes the project volumes.
name: Reset docker state

on: workflow_dispatch

jobs:
  reset:
    runs-on: host-arch-x86_64
    name: Reset docker state
    steps:
      - name: Stop all containers
        # `docker ps -aq` prints only container IDs; parsing the
        # human-readable table with `cut`/`tail` breaks if docker ever
        # changes its column layout.
        run: docker stop $(docker ps -aq)

      - name: Remove all containers
        run: docker rm $(docker ps -aq)

      - name: Remove extra volumes
        run: docker volume rm road_vision_postgres_data road_vision_pgadmin-data
@@ -1,71 +0,0 @@
# CI pipeline: unit-test the hub and store services in isolated
# containers, then bring up the whole docker-compose stack for an
# integration smoke test.
name: Component testing

on: [push, workflow_dispatch]

jobs:
  hub-test:
    name: Hub testing
    runs-on: host-arch-x86_64
    steps:
      - name: Clone repository
        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}

      - name: Build Hub testing container
        working-directory: IoT-Systems
        run: docker build -t local/hub/${{gitea.sha}} -f hub/Dockerfile-test .

      - name: Run Hub tests
        working-directory: IoT-Systems
        # No -it here: CI steps run without a TTY attached, and
        # `docker run -t` then fails with "the input device is not a TTY".
        run: docker run --rm local/hub/${{gitea.sha}}

      - name: Clean up containers
        if: ${{ always() }}
        run: docker image rm local/hub/${{gitea.sha}}

  store-test:
    name: Store testing
    runs-on: host-arch-x86_64
    steps:
      - name: Clone repository
        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}

      - name: Build Store testing container
        working-directory: IoT-Systems
        run: docker build -t local/store/${{gitea.sha}} -f store/Dockerfile-test .

      - name: Run Store tests
        working-directory: IoT-Systems
        # See hub-test: no TTY is available in CI, so -it must not be used.
        run: docker run --rm local/store/${{gitea.sha}}

      - name: Clean up containers
        if: ${{ always() }}
        run: docker image rm local/store/${{gitea.sha}}

  integration-smoke-test:
    name: Integration smoke testing
    runs-on: host-arch-x86_64
    # Only smoke-test the composed stack once both unit suites pass.
    needs:
      - hub-test
      - store-test
    steps:
      - name: Clone repository
        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}

      - name: Build all production containers
        working-directory: IoT-Systems
        run: docker-compose build

      - name: Start all production containers
        working-directory: IoT-Systems
        run: docker-compose up -d

      - name: Wait for crashes to happen
        run: sleep 30

      - name: Check for dead containers
        working-directory: IoT-Systems
        run: docker ps -a | python3 utils/check-up.py

      - name: Clean up
        if: ${{ always() }}
        working-directory: IoT-Systems
        run: docker-compose down -v
@@ -1,12 +0,0 @@
# Test image for the hub service.
FROM python:3.9-slim

# All subsequent commands run from /app inside the container.
WORKDIR /app

# Install dependencies first so this layer stays cached while the
# application sources change.
COPY hub/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Bring in the hub application sources.
COPY hub/. .

# Entry point: run the hub test suite.
CMD ["./test-entry.sh"]
@@ -13,7 +13,7 @@ class StoreApiAdapter(StoreGateway):
|
|||||||
def __init__(self, api_base_url):
|
def __init__(self, api_base_url):
|
||||||
self.api_base_url = api_base_url
|
self.api_base_url = api_base_url
|
||||||
|
|
||||||
def processed_agent_data_batch_to_payload(self, processed_agent_data_batch: List[ProcessedAgentData]):
|
def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]):
|
||||||
if not processed_agent_data_batch:
|
if not processed_agent_data_batch:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -25,14 +25,6 @@ class StoreApiAdapter(StoreGateway):
|
|||||||
"user_id": user_id
|
"user_id": user_id
|
||||||
}
|
}
|
||||||
|
|
||||||
return payload
|
|
||||||
|
|
||||||
def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]):
|
|
||||||
payload = self.processed_agent_data_batch_to_payload(processed_agent_data_batch)
|
|
||||||
|
|
||||||
if payload == False:
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Perform a POST request to the Store API with a 10-second timeout
|
# Perform a POST request to the Store API with a 10-second timeout
|
||||||
response = requests.post(
|
response = requests.post(
|
||||||
|
|||||||
@@ -1,41 +0,0 @@
"""Unit tests for StoreApiAdapter.processed_agent_data_batch_to_payload."""

from app.adapters.store_api_adapter import StoreApiAdapter
from app.entities.agent_data import AccelerometerData, AgentData, GpsData
from app.entities.processed_agent_data import ProcessedAgentData


def _make_processed_data(user_id):
    """Build one ProcessedAgentData fixture carrying the given user id."""
    return ProcessedAgentData(
        road_state="normal",
        agent_data=AgentData(
            user_id=user_id,
            accelerometer=AccelerometerData(x=0.1, y=0.2, z=0.3),
            gps=GpsData(latitude=10.123, longitude=20.456),
            timestamp="2023-07-21T12:34:56Z",
        ),
    )


def _test_processed_agent_data_batch_to_payload():
    """Payload conversion keeps per-item user ids; empty batch yields False."""
    processed_data_batch = [_make_processed_data(uid) for uid in (1, 2, 3)]

    res = StoreApiAdapter(None).processed_agent_data_batch_to_payload(processed_data_batch)

    assert res["data"][0]["agent_data"]["user_id"] == 1
    assert res["data"][1]["agent_data"]["user_id"] == 2
    assert res["data"][2]["agent_data"]["user_id"] == 3

    # An empty batch is rejected rather than converted.
    assert StoreApiAdapter(None).processed_agent_data_batch_to_payload([]) == False


if __name__ == "__main__":
    test_functions = [i for i in dir() if i.startswith('_test_')]

    for name in test_functions:
        print(name)
        # Look the test up by name; eval() on a constructed string is
        # needless and risky.
        globals()[name]()
@@ -1,3 +0,0 @@
#!/bin/sh

# Run the adapter unit tests with the repository root on PYTHONPATH so
# that the `app.*` imports resolve.
PYTHONPATH=$PWD python3 app/adapters/store_api_adapter_test.py
@@ -1,13 +0,0 @@
# Test image for the store service.
# Pin the base image: `python:latest` makes builds unreproducible and is
# inconsistent with the hub test image, which uses python:3.9-slim.
FROM python:3.9-slim

# All subsequent commands run from /app inside the container.
WORKDIR /app

# Install dependencies first so this layer stays cached while the
# application sources change.
COPY store/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Bring in the store application sources.
COPY store/. .

# Production entry point, kept for reference:
#CMD ["uvicorn", "main:app", "--host", "0.0.0.0"]
# Entry point: run the store test suite.
CMD ["./test-entry.sh"]
+7
-10
@@ -73,11 +73,15 @@ async def send_data_to_subscribers(user_id: int, data):
|
|||||||
|
|
||||||
# FastAPI CRUDL endpoints
|
# FastAPI CRUDL endpoints
|
||||||
|
|
||||||
def ProcessedAgentData_to_td(data: List[ProcessedAgentData]):
|
|
||||||
return [
|
@app.post("/processed_agent_data/")
|
||||||
|
async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
|
||||||
|
session = SessionLocal()
|
||||||
|
try:
|
||||||
|
created_data = [
|
||||||
{
|
{
|
||||||
"road_state": item.road_state,
|
"road_state": item.road_state,
|
||||||
"user_id": item.agent_data.user_id,
|
"user_id": user_id,
|
||||||
"x": item.agent_data.accelerometer.x,
|
"x": item.agent_data.accelerometer.x,
|
||||||
"y": item.agent_data.accelerometer.y,
|
"y": item.agent_data.accelerometer.y,
|
||||||
"z": item.agent_data.accelerometer.z,
|
"z": item.agent_data.accelerometer.z,
|
||||||
@@ -87,13 +91,6 @@ def ProcessedAgentData_to_td(data: List[ProcessedAgentData]):
|
|||||||
}
|
}
|
||||||
for item in data
|
for item in data
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@app.post("/processed_agent_data/")
|
|
||||||
async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
|
|
||||||
session = SessionLocal()
|
|
||||||
try:
|
|
||||||
created_data = ProcessedAgentData_to_td(data)
|
|
||||||
stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data)
|
stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data)
|
||||||
result = session.execute(stmt)
|
result = session.execute(stmt)
|
||||||
created_records = [dict(row._mapping) for row in result.fetchall()]
|
created_records = [dict(row._mapping) for row in result.fetchall()]
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
#!/bin/sh

# Run the store unit tests with the repository root on PYTHONPATH so
# that top-level module imports resolve.
PYTHONPATH=$PWD python3 test/main_test.py
@@ -1,39 +0,0 @@
"""Unit tests for main.ProcessedAgentData_to_td."""

from schemas import AccelerometerData, AgentData, GpsData, ProcessedAgentData

import main


def _make_processed_data(user_id):
    """Build one ProcessedAgentData fixture carrying the given user id."""
    return ProcessedAgentData(
        road_state="normal",
        agent_data=AgentData(
            user_id=user_id,
            accelerometer=AccelerometerData(x=0.1, y=0.2, z=0.3),
            gps=GpsData(latitude=10.123, longitude=20.456),
            timestamp="2023-07-21T12:34:56Z",
        ),
    )


def _test_ProcessedAgentData_to_td():
    """The table-dict conversion keeps the per-item user ids."""
    processed_data_batch = [_make_processed_data(uid) for uid in (1, 2, 3)]

    res = main.ProcessedAgentData_to_td(processed_data_batch)

    assert res[0]["user_id"] == 1
    assert res[1]["user_id"] == 2
    assert res[2]["user_id"] == 3


if __name__ == "__main__":
    test_functions = [i for i in dir() if i.startswith('_test_')]

    for name in test_functions:
        print(name)
        # Look the test up by name; eval() on a constructed string is
        # needless and risky.
        globals()[name]()
@@ -1,19 +0,0 @@
"""Read `docker ps -a` output on stdin; exit non-zero if any container is dead."""

import sys

print("Checking for dead containers...")

rows = [line for line in sys.stdin.read().split("\n") if line]
header, statuses = rows[0], rows[1:]

# docker pads its table columns with spaces, so the header labels give
# the character offset where each column starts on every row.
status_index = header.find('STATUS')
name_index = header.find('NAMES')

exit_code = 0

for row in statuses:
    # Anything whose STATUS column does not start with "Up " is not
    # running (Exited, Created, Restarting, ...).
    if not row[status_index:].startswith("Up "):
        # Strip so trailing column padding does not leak into the message.
        service_name = row[name_index:].strip()
        print(f"Crash detected in {service_name}")
        exit_code = 1

sys.exit(exit_code)
Reference in New Issue
Block a user