dashboard: replace mocked service list with real Docker container statuses (running/warning/stopped)

This commit is contained in:
2026-02-16 20:47:44 +09:00
parent 2e5ad58978
commit b33c658885
6 changed files with 65 additions and 19 deletions

View File

@@ -11,7 +11,11 @@
"Bash(python -m py_compile:*)", "Bash(python -m py_compile:*)",
"WebSearch", "WebSearch",
"WebFetch(domain:docs.waveshare.com)", "WebFetch(domain:docs.waveshare.com)",
"WebFetch(domain:www.waveshare.com)" "WebFetch(domain:www.waveshare.com)",
"Bash(npm view:*)",
"WebFetch(domain:raw.githubusercontent.com)",
"Bash(docker ps:*)",
"Bash(python3:*)"
] ]
}, },
"outputStyle": "iseri", "outputStyle": "iseri",

View File

@@ -369,9 +369,14 @@ void dashboard_ui_update_stats(const pi_stats_t *stats)
/* Services table */ /* Services table */
s_service_count = stats->service_count; s_service_count = stats->service_count;
for (int i = 0; i < stats->service_count && i < WS_MAX_SERVICES; i++) { for (int i = 0; i < stats->service_count && i < WS_MAX_SERVICES; i++) {
const char *tag;
switch (stats->services[i].status) {
case SVC_RUNNING: tag = "[RUN]"; break;
case SVC_WARNING: tag = "[WARN]"; break;
default: tag = "[STOP]"; break;
}
lv_table_set_cell_value(tbl_services, i, 0, stats->services[i].name); lv_table_set_cell_value(tbl_services, i, 0, stats->services[i].name);
lv_table_set_cell_value(tbl_services, i, 1, lv_table_set_cell_value(tbl_services, i, 1, tag);
stats->services[i].running ? "[RUN]" : "[STOP]");
} }
/* Clear unused rows */ /* Clear unused rows */
for (int i = stats->service_count; i < WS_MAX_SERVICES; i++) { for (int i = stats->service_count; i < WS_MAX_SERVICES; i++) {

View File

@@ -133,7 +133,7 @@ static void ws_data_cb(const pi_stats_t *stats)
} }
for (int i = 0; i < stats->service_count; i++) { for (int i = 0; i < stats->service_count; i++) {
if (!stats->services[i].running) { if (stats->services[i].status != SVC_RUNNING) {
alert_trigger(ALERT_SERVICE_DOWN); alert_trigger(ALERT_SERVICE_DOWN);
break; break;
} }

View File

@@ -92,7 +92,13 @@ static void parse_stats_json(const char *data, int len)
s_stats.services[i].name[WS_SERVICE_NAME_LEN - 1] = '\0'; s_stats.services[i].name[WS_SERVICE_NAME_LEN - 1] = '\0';
} }
if (status && status->valuestring) { if (status && status->valuestring) {
s_stats.services[i].running = (strcmp(status->valuestring, "running") == 0); if (strcmp(status->valuestring, "running") == 0) {
s_stats.services[i].status = SVC_RUNNING;
} else if (strcmp(status->valuestring, "warning") == 0) {
s_stats.services[i].status = SVC_WARNING;
} else {
s_stats.services[i].status = SVC_STOPPED;
}
} }
} }
} }

View File

@@ -8,7 +8,7 @@
extern "C" { extern "C" {
#endif #endif
#define WS_MAX_SERVICES 8 #define WS_MAX_SERVICES 20
#define WS_SERVICE_NAME_LEN 16 #define WS_SERVICE_NAME_LEN 16
typedef enum { typedef enum {
@@ -18,9 +18,15 @@ typedef enum {
WS_STATE_ERROR, WS_STATE_ERROR,
} ws_state_t; } ws_state_t;
typedef enum {
SVC_STOPPED = 0,
SVC_WARNING,
SVC_RUNNING,
} ws_svc_status_t;
typedef struct { typedef struct {
char name[WS_SERVICE_NAME_LEN]; char name[WS_SERVICE_NAME_LEN];
bool running; ws_svc_status_t status;
} ws_service_t; } ws_service_t;
typedef struct { typedef struct {

View File

@@ -7,7 +7,7 @@ same 2s push interval. Services remain mocked until systemd integration is added
import asyncio import asyncio
import json import json
import random import subprocess
import time import time
from datetime import datetime from datetime import datetime
from pathlib import Path from pathlib import Path
@@ -61,16 +61,41 @@ def _get_net_throughput() -> tuple[float, float]:
return rx_kbps, tx_kbps return rx_kbps, tx_kbps
def _mock_services() -> list[dict]: def _get_docker_services() -> list[dict]:
"""Mocked service status — same logic as mock_server.py.""" """Query Docker for real container statuses with ternary status model."""
return [ try:
{"name": "docker", "status": random.choice(["running", "running", "running", "stopped"])}, result = subprocess.run(
{"name": "pihole", "status": random.choice(["running", "running", "running", "stopped"])}, ["docker", "ps", "-a", "--format", "{{.Names}}\t{{.Status}}"],
{"name": "nginx", "status": random.choice(["running", "running", "stopped"])}, capture_output=True, text=True, timeout=5,
{"name": "sshd", "status": "running"}, )
{"name": "ph1", "status": "running"}, except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
{"name": "ph2", "status": "stopped"}, return []
]
if result.returncode != 0:
return []
services = []
for line in result.stdout.strip().splitlines():
parts = line.split("\t", 1)
if len(parts) != 2:
continue
name, raw_status = parts
if raw_status.startswith("Up"):
if "unhealthy" in raw_status or "Restarting" in raw_status:
status = "warning"
else:
status = "running"
else:
status = "stopped"
services.append({"name": name, "status": status})
# Sort: warnings first, then stopped, then running (problems float to top)
order = {"warning": 0, "stopped": 1, "running": 2}
services.sort(key=lambda s: order.get(s["status"], 3))
return services
def _local_time_fields() -> dict: def _local_time_fields() -> dict:
@@ -100,7 +125,7 @@ def generate_stats() -> dict:
"uptime_hrs": round((time.time() - psutil.boot_time()) / 3600, 1), "uptime_hrs": round((time.time() - psutil.boot_time()) / 3600, 1),
"net_rx_kbps": rx_kbps / 8, "net_rx_kbps": rx_kbps / 8,
"net_tx_kbps": tx_kbps / 8, # kByte/s for humans "net_tx_kbps": tx_kbps / 8, # kByte/s for humans
"services": _mock_services(), "services": _get_docker_services(),
"timestamp": int(time.time()), "timestamp": int(time.time()),
"local_time": _local_time_fields(), "local_time": _local_time_fields(),
} }