Compare commits

...

18 Commits

Author SHA1 Message Date
0b8d2eb18b [P] Add container rebuilding on every rerun
All checks were successful
Component testing / Hub testing (push) Successful in 18s
Component testing / Store testing (push) Successful in 22s
Component testing / Integration smoke testing (push) Successful in 2m33s
2026-03-23 18:31:43 +02:00
2846130e4e [P] Add docker reset workflow
All checks were successful
Component testing / Hub testing (push) Successful in 20s
Component testing / Store testing (push) Successful in 21s
Component testing / Integration smoke testing (push) Successful in 1m34s
2026-03-23 18:26:38 +02:00
30af132033 [P] Add general smoke test and Store incremental test
All checks were successful
Component testing / Hub testing (push) Successful in 22s
Component testing / Store testing (push) Successful in 19s
Component testing / Integration smoke testing (push) Successful in 1m28s
2026-03-23 18:01:41 +02:00
60a846d8b8 [P] Refactor testing code 2026-03-23 16:10:09 +02:00
fe6bb6ab3a [P] Add CI for updated Hub component part
All checks were successful
Hub component testing / hub-test (push) Successful in 25s
2026-03-23 15:53:44 +02:00
ІО-23 Shmuliar Oleh
30f81ec1ae Merge pull request #26 from Rhinemann/lab4/shved-SCRUM-95-test-repo-functionality
set up global docker-compose
2026-03-22 21:59:07 +02:00
1b6f47fa0d [L4] Fix relative paths after file move 2026-03-22 21:13:44 +02:00
b1e6ad7c94 set up global docker-compose 2026-03-22 14:07:29 +01:00
VladiusVostokus
1eddfd966b Merge pull request #24 from Rhinemann/lab5/shmuliar-SCRUM-92-mapview-store-integration
SCRUM 92: mapview store integration
2026-03-14 16:07:30 +00:00
8af68d6dd9 hotfix: index overflow on user_id 2026-03-13 20:47:09 +02:00
63aca15824 add multiuser rendering support 2026-03-13 20:41:16 +02:00
ee509f72a4 pull data in MapView/main.py from actual data source 2026-03-13 19:02:07 +02:00
da9fe69d4e add initial server->client update with all current DB data 2026-03-13 19:01:33 +02:00
1c856dca0e fix MapView/main.py crash due to wrong check condition 2026-03-13 18:58:28 +02:00
VladiusVostokus
17738d07fe Merge pull request #21 from Rhinemann/lab5/gryshaiev-SCRUM-90-set-bump-marker
SCRUM-90: implement set_bump_marker
2026-03-11 17:02:07 +00:00
VladiusVostokus
6b5831ff1b Merge branch 'dev' into lab5/gryshaiev-SCRUM-90-set-bump-marker 2026-03-11 17:01:54 +00:00
VladiusVostokus
54505db70e Merge pull request #23 from Rhinemann/lab5/gryshaiev-SCRUM-89-set-pothole-marker
SCRUM-89: implement set_pothole_marker()
2026-03-11 16:59:35 +00:00
SimonSanich
6f4b3b0ea6 SCRUM-90: implement set_bump_marker 2026-03-11 18:36:40 +02:00
17 changed files with 340 additions and 184 deletions

View File

@@ -0,0 +1,16 @@
# Manually-triggered maintenance workflow that wipes all Docker state on the
# runner host: every container (running or not) plus the project's named volumes.
name: Reset docker state
on: workflow_dispatch
jobs:
  reset:
    runs-on: host-arch-x86_64
    name: Reset docker state
    steps:
      # `docker ps -aq` prints bare container IDs — no header to strip and no
      # column parsing. `xargs -r` skips running the command entirely when the
      # list is empty; `docker stop $(...)` with no IDs would fail the step
      # with "requires at least 1 argument".
      - name: Stop all containers
        run: docker ps -aq | xargs -r docker stop
      - name: Remove all containers
        run: docker ps -aq | xargs -r docker rm
      - name: Remove extra volumes
        run: docker volume rm road_vision_postgres_data road_vision_pgadmin-data

View File

@@ -0,0 +1,71 @@
# Per-component test pipeline: Hub and Store unit tests run as parallel jobs,
# then an integration smoke test boots the full docker-compose stack and
# checks that no container has crashed.
name: Component testing
on: [push, workflow_dispatch]
jobs:
  hub-test:
    name: Hub testing
    runs-on: host-arch-x86_64
    steps:
      - name: Clone repository
        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}
      - name: Build Hub testing container
        working-directory: IoT-Systems
        run: docker build -t local/hub/${{ gitea.sha }} -f hub/Dockerfile-test .
      - name: Run Hub tests
        working-directory: IoT-Systems
        # No `-it`: CI steps run without an interactive TTY, and
        # `docker run -it` aborts with "the input device is not a TTY"
        # on runners that don't fake one. The test container only needs
        # its exit code and stdout.
        run: docker run --rm local/hub/${{ gitea.sha }}
      - name: Clean up containers
        if: ${{ always() }}
        run: docker image rm local/hub/${{ gitea.sha }}
  store-test:
    name: Store testing
    runs-on: host-arch-x86_64
    steps:
      - name: Clone repository
        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}
      - name: Build Store testing container
        working-directory: IoT-Systems
        run: docker build -t local/store/${{ gitea.sha }} -f store/Dockerfile-test .
      - name: Run Store tests
        working-directory: IoT-Systems
        run: docker run --rm local/store/${{ gitea.sha }}
      - name: Clean up containers
        if: ${{ always() }}
        run: docker image rm local/store/${{ gitea.sha }}
  integration-smoke-test:
    name: Integration smoke testing
    runs-on: host-arch-x86_64
    # Only worth booting the full stack once both component suites pass.
    needs:
      - hub-test
      - store-test
    steps:
      - name: Clone repository
        run: git clone --revision ${{ gitea.sha }} --depth 1 ${{ gitea.server_url }}/${{ gitea.repository }}
      - name: Build all production containers
        working-directory: IoT-Systems
        run: docker-compose build
      - name: Start all production containers
        working-directory: IoT-Systems
        run: docker-compose up -d
      # Give every service time to finish starting — or to crash — before
      # inspecting container states.
      - name: Wait for crashes to happen
        run: sleep 30
      - name: Check for dead containers
        working-directory: IoT-Systems
        run: docker ps -a | python3 utils/check-up.py
      - name: Clean up
        if: ${{ always() }}
        working-directory: IoT-Systems
        run: docker-compose down -v

View File

@@ -75,6 +75,7 @@ class Datasource:
processed_agent_data.latitude, processed_agent_data.latitude,
processed_agent_data.longitude, processed_agent_data.longitude,
processed_agent_data.road_state, processed_agent_data.road_state,
processed_agent_data.user_id
) )
for processed_agent_data in processed_agent_data_list for processed_agent_data in processed_agent_data_list
] ]

View File

@@ -5,6 +5,14 @@ from kivy.clock import Clock
from lineMapLayer import LineMapLayer from lineMapLayer import LineMapLayer
from datasource import Datasource from datasource import Datasource
line_layer_colors = [
[1, 0, 0, 1],
[1, 0.5, 0, 1],
[0, 1, 0, 1],
[0, 1, 1, 1],
[0, 0, 1, 1],
[1, 0, 1, 1],
]
class MapViewApp(App): class MapViewApp(App):
def __init__(self, **kwargs): def __init__(self, **kwargs):
@@ -12,17 +20,19 @@ class MapViewApp(App):
self.mapview = None self.mapview = None
self.datasource = Datasource(user_id=1) self.datasource = Datasource(user_id=1)
self.line_layer = None self.line_layers = dict()
self.car_marker = None self.car_markers = dict()
# додати необхідні змінні # додати необхідні змінні
self.bump_markers = []
self.pothole_markers = [] self.pothole_markers = []
def on_start(self): def on_start(self):
""" """
Встановлює необхідні маркери, викликає функцію для оновлення мапи Встановлює необхідні маркери, викликає функцію для оновлення мапи
""" """
Clock.schedule_interval(self.update, 0.3) self.update()
Clock.schedule_interval(self.update, 5)
def update(self, *args): def update(self, *args):
""" """
@@ -35,13 +45,17 @@ class MapViewApp(App):
for point in new_points: for point in new_points:
lat, lon, road_state = point lat, lon, road_state, user_id = point
# Оновлює лінію маршрута # Оновлює лінію маршрута
self.line_layer.add_point((lat, lon)) if user_id not in self.line_layers:
self.line_layers[user_id] = LineMapLayer(color = line_layer_colors[user_id % len(line_layer_colors)])
self.mapview.add_layer(self.line_layers[user_id])
self.line_layers[user_id].add_point((lat, lon))
# Оновлює маркер маниши # Оновлює маркер маниши
self.update_car_marker((lat, lon)) self.update_car_marker(lat, lon, user_id)
# Перевіряємо стан дороги # Перевіряємо стан дороги
self.check_road_quality(point) self.check_road_quality(point)
@@ -54,26 +68,24 @@ class MapViewApp(App):
if len(point) < 3: if len(point) < 3:
return return
lat, lon, road_state = point lat, lon, road_state, user_id = point
if road_state == "pothole": if road_state == "pothole":
self.set_pothole_marker((lat, lon)) self.set_pothole_marker((lat, lon))
elif road_state == "bump": elif road_state == "bump":
self.set_bump_marker((lat, lon)) self.set_bump_marker((lat, lon))
def update_car_marker(self, point): def update_car_marker(self, lat, lon, user_id):
""" """
Оновлює відображення маркера машини на мапі Оновлює відображення маркера машини на мапі
:param point: GPS координати :param point: GPS координати
""" """
lat, lon = point[0], point[1] if user_id not in self.car_markers:
self.car_markers[user_id] = MapMarker(lat=lat, lon=lon, source='./images/car.png')
if not hasattr(self, 'car_marker'): self.mapview.add_marker(self.car_markers[user_id])
self.car_marker = MapMarker(lat=lat, lon=lon, source='./images/car')
self.mapview.add_marker(self.car_marker)
else: else:
self.car_marker.lat = lat self.car_markers[user_id].lat = lat
self.car_marker.lon = lon self.car_markers[user_id].lon = lon
self.mapview.center_on(lat, lon) self.mapview.center_on(lat, lon)
@@ -97,10 +109,24 @@ class MapViewApp(App):
self.pothole_markers.append(marker) self.pothole_markers.append(marker)
def set_bump_marker(self, point): def set_bump_marker(self, point):
""" if isinstance(point, dict):
Встановлює маркер для лежачого поліцейського lat = point.get("lat")
:param point: GPS координати lon = point.get("lon")
""" else:
lat, lon = point
if lat is None or lon is None:
return
marker = MapMarker(
lat=lat,
lon=lon,
source="images/bump.png"
)
self.mapview.add_marker(marker)
self.bump_markers.append(marker)
def build(self): def build(self):
""" """
@@ -113,9 +139,6 @@ class MapViewApp(App):
lon=30.5234 lon=30.5234
) )
self.line_layer = LineMapLayer()
self.mapview.add_layer(self.line_layer)
return self.mapview return self.mapview

View File

@@ -1,34 +0,0 @@
name: "road_vision"
services:
mqtt:
image: eclipse-mosquitto
container_name: mqtt
volumes:
- ./mosquitto:/mosquitto
- ./mosquitto/data:/mosquitto/data
- ./mosquitto/log:/mosquitto/log
ports:
- 1883:1883
- 9001:9001
networks:
mqtt_network:
fake_agent:
container_name: agent
build:
context: ../../
dockerfile: agent/Dockerfile
depends_on:
- mqtt
environment:
MQTT_BROKER_HOST: "mqtt"
MQTT_BROKER_PORT: 1883
MQTT_TOPIC: "agent_data_topic"
DELAY: 0.1
networks:
mqtt_network:
networks:
mqtt_network:

View File

@@ -1,12 +1,12 @@
name: "road_vision__hub" name: "road_vision"
services: services:
mqtt: mqtt:
image: eclipse-mosquitto image: eclipse-mosquitto
container_name: mqtt container_name: mqtt
volumes: volumes:
- ./mosquitto:/mosquitto - ./agent/docker/mosquitto:/mosquitto
- ./mosquitto/data:/mosquitto/data - ./agent/docker/mosquitto/data:/mosquitto/data
- ./mosquitto/log:/mosquitto/log - ./agent/docker/mosquitto/log:/mosquitto/log
ports: ports:
- 1883:1883 - 1883:1883
- 9001:9001 - 9001:9001
@@ -14,6 +14,41 @@ services:
mqtt_network: mqtt_network:
fake_agent:
container_name: agent
build:
context: .
dockerfile: agent/Dockerfile
depends_on:
- mqtt
environment:
MQTT_BROKER_HOST: "mqtt"
MQTT_BROKER_PORT: 1883
MQTT_TOPIC: "agent_data_topic"
DELAY: 0.1
networks:
mqtt_network:
edge:
container_name: edge
build:
context: .
dockerfile: edge/Dockerfile
depends_on:
- mqtt
environment:
MQTT_BROKER_HOST: "mqtt"
MQTT_BROKER_PORT: 1883
MQTT_TOPIC: " "
HUB_HOST: "store"
HUB_PORT: 8000
HUB_MQTT_BROKER_HOST: "mqtt"
HUB_MQTT_BROKER_PORT: 1883
HUB_MQTT_TOPIC: "processed_data_topic"
networks:
mqtt_network:
edge_hub:
postgres_db: postgres_db:
image: postgres:17 image: postgres:17
container_name: postgres_db container_name: postgres_db
@@ -24,13 +59,12 @@ services:
POSTGRES_DB: test_db POSTGRES_DB: test_db
volumes: volumes:
- postgres_data:/var/lib/postgresql/data - postgres_data:/var/lib/postgresql/data
- ./db/structure.sql:/docker-entrypoint-initdb.d/structure.sql - ./store/docker/db/structure.sql:/docker-entrypoint-initdb.d/structure.sql
ports: ports:
- "5432:5432" - "5432:5432"
networks: networks:
db_network: db_network:
pgadmin: pgadmin:
container_name: pgadmin4 container_name: pgadmin4
image: dpage/pgadmin4 image: dpage/pgadmin4
@@ -49,7 +83,7 @@ services:
store: store:
container_name: store container_name: store
build: build:
context: ../../ context: .
dockerfile: store/Dockerfile dockerfile: store/Dockerfile
depends_on: depends_on:
- postgres_db - postgres_db
@@ -79,7 +113,7 @@ services:
hub: hub:
container_name: hub container_name: hub
build: build:
context: ../../ context: .
dockerfile: hub/Dockerfile dockerfile: hub/Dockerfile
depends_on: depends_on:
- mqtt - mqtt
@@ -101,10 +135,11 @@ services:
hub_store: hub_store:
hub_redis: hub_redis:
networks: networks:
mqtt_network: mqtt_network:
db_network: db_network:
edge_hub:
hub:
hub_store: hub_store:
hub_redis: hub_redis:

View File

@@ -1,50 +0,0 @@
version: "3.9"
# name: "road_vision"
services:
mqtt:
image: eclipse-mosquitto
container_name: mqtt
volumes:
- ./mosquitto:/mosquitto
- ./mosquitto/data:/mosquitto/data
- ./mosquitto/log:/mosquitto/log
ports:
- 1883:1883
- 19001:9001
networks:
mqtt_network:
edge:
container_name: edge
build:
context: ../../
dockerfile: edge/Dockerfile
depends_on:
- mqtt
environment:
MQTT_BROKER_HOST: "mqtt"
MQTT_BROKER_PORT: 1883
MQTT_TOPIC: " "
HUB_HOST: "store"
HUB_PORT: 8000
HUB_MQTT_BROKER_HOST: "mqtt"
HUB_MQTT_BROKER_PORT: 1883
HUB_MQTT_TOPIC: "processed_data_topic"
networks:
mqtt_network:
edge_hub:
networks:
mqtt_network:
db_network:
edge_hub:
hub:
hub_store:
hub_redis:
volumes:
postgres_data:
pgadmin-data:

12
hub/Dockerfile-test Normal file
View File

@@ -0,0 +1,12 @@
# Test-runner image for the Hub component: same layout as the production
# image, but the entrypoint runs the unit-test script instead of the app.
# Use the official Python image as the base image
FROM python:3.9-slim
# Set the working directory inside the container
WORKDIR /app
# Copy the requirements.txt file and install dependencies first, so Docker
# layer caching survives source-only edits
COPY hub/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy the entire application into the container
COPY hub/. .
# Run the test entry script (hub unit tests) when the container starts
CMD ["./test-entry.sh"]

View File

@@ -13,7 +13,7 @@ class StoreApiAdapter(StoreGateway):
def __init__(self, api_base_url): def __init__(self, api_base_url):
self.api_base_url = api_base_url self.api_base_url = api_base_url
def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]): def processed_agent_data_batch_to_payload(self, processed_agent_data_batch: List[ProcessedAgentData]):
if not processed_agent_data_batch: if not processed_agent_data_batch:
return False return False
@@ -25,6 +25,14 @@ class StoreApiAdapter(StoreGateway):
"user_id": user_id "user_id": user_id
} }
return payload
def save_data(self, processed_agent_data_batch: List[ProcessedAgentData]):
payload = self.processed_agent_data_batch_to_payload(processed_agent_data_batch)
if payload == False:
return False
try: try:
# Perform a POST request to the Store API with a 10-second timeout # Perform a POST request to the Store API with a 10-second timeout
response = requests.post( response = requests.post(

View File

@@ -0,0 +1,41 @@
"""Unit tests for StoreApiAdapter.processed_agent_data_batch_to_payload.

Run directly (see hub/test-entry.sh); every module-level function whose
name starts with ``_test_`` is discovered and executed.
"""
from app.adapters.store_api_adapter import StoreApiAdapter
from app.entities.agent_data import AccelerometerData, AgentData, GpsData
from app.entities.processed_agent_data import ProcessedAgentData


def _make_processed(user_id):
    """Build one ProcessedAgentData fixture owned by *user_id*."""
    return ProcessedAgentData(
        road_state="normal",
        agent_data=AgentData(
            user_id=user_id,
            accelerometer=AccelerometerData(x=0.1, y=0.2, z=0.3),
            gps=GpsData(latitude=10.123, longitude=20.456),
            timestamp="2023-07-21T12:34:56Z",
        ),
    )


def _test_processed_agent_data_batch_to_payload():
    """The payload must keep per-item user_ids and reject an empty batch."""
    batch = [_make_processed(user_id) for user_id in (1, 2, 3)]
    res = StoreApiAdapter(None).processed_agent_data_batch_to_payload(batch)
    for index, expected_user_id in enumerate((1, 2, 3)):
        assert res["data"][index]["agent_data"]["user_id"] == expected_user_id
    # An empty batch is reported as a failure (False), not an empty payload.
    assert StoreApiAdapter(None).processed_agent_data_batch_to_payload([]) == False


if __name__ == "__main__":
    # Discover and run every `_test_*` function by name. `globals()[name]`
    # replaces `eval(name)()`: same lookup, but it cannot be tricked into
    # evaluating an arbitrary expression.
    for name in [n for n in dir() if n.startswith("_test_")]:
        print(name)
        globals()[name]()

3
hub/test-entry.sh Executable file
View File

@@ -0,0 +1,3 @@
#!/bin/sh
# Hub test entrypoint (see hub/Dockerfile-test CMD).
# PYTHONPATH=$PWD lets the test module resolve `app.*` imports from the
# container's working directory; the script's exit status is the single
# python3 command's exit status, which is what CI checks.
PYTHONPATH=$PWD python3 app/adapters/store_api_adapter_test.py

13
store/Dockerfile-test Normal file
View File

@@ -0,0 +1,13 @@
# Test-runner image for the Store component.
# Base image pinned to match hub/Dockerfile-test: `python:latest` is not
# reproducible — a new major Python release could silently change or break
# the test run between builds.
FROM python:3.9-slim
# Set the working directory inside the container
WORKDIR /app
# Copy the requirements.txt file and install dependencies first, so Docker
# layer caching survives source-only edits
COPY store/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy the entire application into the container
COPY store/. .
# Run the test entry script (store unit tests) when the container starts
CMD ["./test-entry.sh"]

View File

@@ -1,61 +0,0 @@
name: "road_vision__database"
services:
postgres_db:
image: postgres:17
container_name: postgres_db
restart: always
environment:
POSTGRES_USER: user
POSTGRES_PASSWORD: pass
POSTGRES_DB: test_db
volumes:
- postgres_data:/var/lib/postgresql/data
- ./db/structure.sql:/docker-entrypoint-initdb.d/structure.sql
ports:
- "5432:5432"
networks:
db_network:
pgadmin:
container_name: pgadmin4
image: dpage/pgadmin4
restart: always
environment:
PGADMIN_DEFAULT_EMAIL: admin@admin.com
PGADMIN_DEFAULT_PASSWORD: root
volumes:
- pgadmin-data:/var/lib/pgadmin
ports:
- "5050:80"
networks:
db_network:
store:
container_name: store
build:
context: ../../
dockerfile: store/Dockerfile
depends_on:
- postgres_db
restart: always
environment:
POSTGRES_USER: user
POSTGRES_PASSWORD: pass
POSTGRES_DB: test_db
POSTGRES_HOST: postgres_db
POSTGRES_PORT: 5432
ports:
- "8000:8000"
networks:
db_network:
networks:
db_network:
volumes:
postgres_data:
pgadmin-data:

View File

@@ -40,10 +40,24 @@ subscriptions: Dict[int, Set[WebSocket]] = {}
@app.websocket("/ws/{user_id}") @app.websocket("/ws/{user_id}")
async def websocket_endpoint(websocket: WebSocket, user_id: int): async def websocket_endpoint(websocket: WebSocket, user_id: int):
await websocket.accept() await websocket.accept()
if user_id not in subscriptions: if user_id not in subscriptions:
subscriptions[user_id] = set() subscriptions[user_id] = set()
subscriptions[user_id].add(websocket) subscriptions[user_id].add(websocket)
try: try:
# send already available data
r = processed_agent_data.select()
stored_data = SessionLocal().execute(r).fetchall()
jsonable_data = [{c.name: getattr(i, c.name) for c in processed_agent_data.columns} for i in stored_data]
for i in jsonable_data:
i['timestamp'] = i['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
await websocket.send_json(json.dumps(jsonable_data))
# receive forever
while True: while True:
await websocket.receive_text() await websocket.receive_text()
except WebSocketDisconnect: except WebSocketDisconnect:
@@ -59,15 +73,11 @@ async def send_data_to_subscribers(user_id: int, data):
# FastAPI CRUDL endpoints # FastAPI CRUDL endpoints
def ProcessedAgentData_to_td(data: List[ProcessedAgentData]):
@app.post("/processed_agent_data/") return [
async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
session = SessionLocal()
try:
created_data = [
{ {
"road_state": item.road_state, "road_state": item.road_state,
"user_id": user_id, "user_id": item.agent_data.user_id,
"x": item.agent_data.accelerometer.x, "x": item.agent_data.accelerometer.x,
"y": item.agent_data.accelerometer.y, "y": item.agent_data.accelerometer.y,
"z": item.agent_data.accelerometer.z, "z": item.agent_data.accelerometer.z,
@@ -77,6 +87,13 @@ async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: i
} }
for item in data for item in data
] ]
@app.post("/processed_agent_data/")
async def create_processed_agent_data(data: List[ProcessedAgentData], user_id: int = Body(..., embed=True)):
session = SessionLocal()
try:
created_data = ProcessedAgentData_to_td(data)
stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data) stmt = processed_agent_data.insert().values(created_data).returning(processed_agent_data)
result = session.execute(stmt) result = session.execute(stmt)
created_records = [dict(row._mapping) for row in result.fetchall()] created_records = [dict(row._mapping) for row in result.fetchall()]

3
store/test-entry.sh Executable file
View File

@@ -0,0 +1,3 @@
#!/bin/sh
# Store test entrypoint (see store/Dockerfile-test CMD).
# PYTHONPATH=$PWD lets test/main_test.py import the top-level `main` and
# `schemas` modules from the container's working directory; the script's
# exit status is the python3 command's exit status, which is what CI checks.
PYTHONPATH=$PWD python3 test/main_test.py

39
store/test/main_test.py Normal file
View File

@@ -0,0 +1,39 @@
"""Unit tests for main.ProcessedAgentData_to_td.

Run directly (see store/test-entry.sh); every module-level function whose
name starts with ``_test_`` is discovered and executed.
"""
from schemas import AccelerometerData, AgentData, GpsData, ProcessedAgentData
import main


def _make_processed(user_id):
    """Build one ProcessedAgentData fixture owned by *user_id*."""
    return ProcessedAgentData(
        road_state="normal",
        agent_data=AgentData(
            user_id=user_id,
            accelerometer=AccelerometerData(x=0.1, y=0.2, z=0.3),
            gps=GpsData(latitude=10.123, longitude=20.456),
            timestamp="2023-07-21T12:34:56Z",
        ),
    )


def _test_ProcessedAgentData_to_td():
    """Row conversion must take user_id from each item's agent_data."""
    batch = [_make_processed(user_id) for user_id in (1, 2, 3)]
    res = main.ProcessedAgentData_to_td(batch)
    for index, expected_user_id in enumerate((1, 2, 3)):
        assert res[index]["user_id"] == expected_user_id


if __name__ == "__main__":
    # Discover and run every `_test_*` function by name. `globals()[name]`
    # replaces `eval(name)()`: same lookup, but it cannot be tricked into
    # evaluating an arbitrary expression.
    for name in [n for n in dir() if n.startswith("_test_")]:
        print(name)
        globals()[name]()

19
utils/check-up.py Normal file
View File

@@ -0,0 +1,19 @@
"""Read `docker ps -a` output from stdin and fail if any container is down.

Used by CI as: ``docker ps -a | python3 utils/check-up.py`` — exits non-zero
when at least one container's STATUS column does not start with "Up ".
"""
import sys


def find_dead_containers(ps_output):
    """Return the NAMES of containers whose STATUS does not start with "Up ".

    ``ps_output`` is the raw text of ``docker ps -a``. Column offsets are
    taken from the header row, since ``docker ps`` pads its columns into
    alignment. Empty input (no header line at all) yields no dead
    containers instead of raising IndexError.
    """
    lines = [line for line in ps_output.split("\n") if line]
    if not lines:
        return []
    header, rows = lines[0], lines[1:]
    status_index = header.find("STATUS")
    names_index = header.find("NAMES")
    if status_index < 0 or names_index < 0:
        # Unexpected header layout — nothing we can reliably report.
        return []
    # NAMES is the last column, so the slice from its offset is the name.
    return [
        row[names_index:]
        for row in rows
        if not row[status_index:].startswith("Up ")
    ]


if __name__ == "__main__":
    print("Checking for dead containers...")
    dead = find_dead_containers(sys.stdin.read())
    for service_name in dead:
        print(f"Crash detected in {service_name}")
    sys.exit(1 if dead else 0)