Compare commits
17 Commits
2e5ad58978
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| 6c48d70284 | |||
| 436e55a0c5 | |||
|
|
776aee18fc | ||
|
|
41e0a6b81d | ||
|
|
e4da3687b4 | ||
|
|
05cd085b89 | ||
|
|
d0d0b4dc39 | ||
|
|
25420d57b3 | ||
|
|
18984c29a3 | ||
|
|
7555efcba9 | ||
| 379f8e105b | |||
| 3b4d61c56d | |||
| 5ae0c64ba9 | |||
|
|
7f644652bb | ||
| 5c16e6deb7 | |||
|
|
706c7ac21b | ||
| b33c658885 |
@@ -1,20 +0,0 @@
|
|||||||
{
|
|
||||||
"permissions": {
|
|
||||||
"allow": [
|
|
||||||
"Bash(echo:*)",
|
|
||||||
"Bash(idf.py build:*)",
|
|
||||||
"Bash(for f in DSEG14C_BI_50px.c InziuIosevka_Slab_CC_12px.c InziuIosevka_Slab_CC_16px.c InziuIosevka_Slab_CC_20px.c InziuIosevka_Slab_CC_24px.c InziuIosevka_Slab_CC_32px.c)",
|
|
||||||
"Bash(do sed -i '/\\\\.static_bitmap = 0,/d' \"$f\")",
|
|
||||||
"Bash(done)",
|
|
||||||
"Bash(file:*)",
|
|
||||||
"mcp__ide__getDiagnostics",
|
|
||||||
"Bash(python -m py_compile:*)",
|
|
||||||
"WebSearch",
|
|
||||||
"WebFetch(domain:docs.waveshare.com)",
|
|
||||||
"WebFetch(domain:www.waveshare.com)"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"outputStyle": "iseri",
|
|
||||||
"spinnerTipsEnabled": false,
|
|
||||||
"prefersReducedMotion": true
|
|
||||||
}
|
|
||||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -3,3 +3,9 @@ managed_components/
|
|||||||
sdkconfig
|
sdkconfig
|
||||||
sdkconfig.old
|
sdkconfig.old
|
||||||
dependencies.lock
|
dependencies.lock
|
||||||
|
|
||||||
|
# vscode local settings
|
||||||
|
.vscode/
|
||||||
|
|
||||||
|
# claude local settings
|
||||||
|
.claude/
|
||||||
12
.vscode/settings.json
vendored
12
.vscode/settings.json
vendored
@@ -1,12 +0,0 @@
|
|||||||
{
|
|
||||||
"idf.currentSetup": "J:\\esp\\.espressif\\v5.5.2\\esp-idf",
|
|
||||||
"idf.flashType": "UART",
|
|
||||||
"idf.portWin": "COM7",
|
|
||||||
"idf.openOcdConfigs": [
|
|
||||||
"interface/ftdi/esp_ftdi.cfg",
|
|
||||||
"target/esp32s3.cfg"
|
|
||||||
],
|
|
||||||
"idf.customExtraVars": {
|
|
||||||
"IDF_TARGET": "esp32s3"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -19,7 +19,7 @@ stats_server.py --WS/JSON--> ws_client --> dashboard_ui (LVGL)
|
|||||||
|
|
||||||
The Pi runs a WebSocket server that pushes system stats (CPU, memory, disk, temperature, network, services) as JSON every 2 seconds. The ESP32 parses the JSON and updates LVGL widgets. A data staleness watchdog forces reconnection if the server goes silent.
|
The Pi runs a WebSocket server that pushes system stats (CPU, memory, disk, temperature, network, services) as JSON every 2 seconds. The ESP32 parses the JSON and updates LVGL widgets. A data staleness watchdog forces reconnection if the server goes silent.
|
||||||
|
|
||||||
The display uses a two-column layout: left half shows Pi stats (CPU/RAM/DISK bars, CPU temp) and a services table; right half shows a large HH:MM:SS clock (montserrat_36), date with day-of-week, and local sensor readings (room temp, humidity). The services table auto-scrolls when more than 4 services are present. The clock updates every second from the on-board RTC, which syncs from the Pi's time when drift exceeds 60 seconds.
|
The display uses a two-column layout: left half shows Pi stats (CPU/RAM/DISK bars, CPU temp) and a services table; right half shows a large HH:MM:SS clock (montserrat_36), date with day-of-week, and local sensor readings (room temp, humidity). The services table auto-scrolls when services exceed the visible area; row height and visible row count are measured from LVGL at runtime, so the scroll loop adapts automatically to font, padding, or border changes. The clock updates every second from the on-board RTC, which syncs from the Pi's time when drift exceeds 60 seconds.
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
|
|||||||
@@ -35,6 +35,7 @@ static uint8_t s_img_buf[STATUS_IMG_BYTES];
|
|||||||
static lv_img_dsc_t s_img_dsc;
|
static lv_img_dsc_t s_img_dsc;
|
||||||
static volatile bool s_img_pending = false; /* expecting binary frame with image data */
|
static volatile bool s_img_pending = false; /* expecting binary frame with image data */
|
||||||
static volatile bool s_img_updated = false; /* new image ready for UI consumption */
|
static volatile bool s_img_updated = false; /* new image ready for UI consumption */
|
||||||
|
static volatile bool s_need_request_image = false; /* deferred image request on connect */
|
||||||
static TaskHandle_t s_img_notify_task = NULL; /* task to wake on new image */
|
static TaskHandle_t s_img_notify_task = NULL; /* task to wake on new image */
|
||||||
|
|
||||||
/* Forward declarations */
|
/* Forward declarations */
|
||||||
@@ -157,11 +158,17 @@ static void ws_event_handler(void *arg, esp_event_base_t event_base,
|
|||||||
case WEBSOCKET_EVENT_CONNECTED:
|
case WEBSOCKET_EVENT_CONNECTED:
|
||||||
ESP_LOGI(TAG, "Audio WS connected");
|
ESP_LOGI(TAG, "Audio WS connected");
|
||||||
s_state = AUDIO_CONNECTED;
|
s_state = AUDIO_CONNECTED;
|
||||||
|
s_img_pending = false;
|
||||||
|
s_need_request_image = true;
|
||||||
|
if (s_img_notify_task) {
|
||||||
|
xTaskNotifyGive(s_img_notify_task);
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case WEBSOCKET_EVENT_DISCONNECTED:
|
case WEBSOCKET_EVENT_DISCONNECTED:
|
||||||
ESP_LOGW(TAG, "Audio WS disconnected");
|
ESP_LOGW(TAG, "Audio WS disconnected");
|
||||||
s_playing = false;
|
s_playing = false;
|
||||||
|
s_img_pending = false;
|
||||||
flush_queue();
|
flush_queue();
|
||||||
s_state = AUDIO_IDLE;
|
s_state = AUDIO_IDLE;
|
||||||
break;
|
break;
|
||||||
@@ -275,9 +282,25 @@ const lv_img_dsc_t *audio_client_get_status_image(bool *updated)
|
|||||||
{
|
{
|
||||||
if (updated) {
|
if (updated) {
|
||||||
*updated = s_img_updated;
|
*updated = s_img_updated;
|
||||||
if (s_img_updated) {
|
|
||||||
s_img_updated = false;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return &s_img_dsc;
|
return &s_img_dsc;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void audio_client_ack_status_image(void)
|
||||||
|
{
|
||||||
|
s_img_updated = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool audio_client_send_pending_request(void)
|
||||||
|
{
|
||||||
|
if (!s_need_request_image || !s_client) return false;
|
||||||
|
s_need_request_image = false;
|
||||||
|
static const char REQUEST_IMG_JSON[] = "{\"type\":\"request_image\"}";
|
||||||
|
int ret = esp_websocket_client_send_text(s_client, REQUEST_IMG_JSON, strlen(REQUEST_IMG_JSON), pdMS_TO_TICKS(1000));
|
||||||
|
if (ret < 0) {
|
||||||
|
ESP_LOGE(TAG, "Failed to send image request: %d", ret);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
ESP_LOGI(TAG, "Sent image request to server");
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|||||||
@@ -40,11 +40,24 @@ void audio_client_set_image_notify_task(TaskHandle_t task);
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the latest status image descriptor.
|
* Get the latest status image descriptor.
|
||||||
* @param updated Set to true if a new image arrived since last call, then reset.
|
* @param updated Set to true if a new image arrived since last call.
|
||||||
* @return Pointer to the static image descriptor (always valid).
|
* @return Pointer to the static image descriptor (always valid).
|
||||||
*/
|
*/
|
||||||
const lv_img_dsc_t *audio_client_get_status_image(bool *updated);
|
const lv_img_dsc_t *audio_client_get_status_image(bool *updated);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Acknowledge that the status image was successfully rendered.
|
||||||
|
* Clears the updated flag so subsequent get_status_image calls return false.
|
||||||
|
*/
|
||||||
|
void audio_client_ack_status_image(void);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send any pending image request to the server.
|
||||||
|
* Call from a task context (not from an event handler).
|
||||||
|
* @return true if a request was sent, false if none pending or send failed.
|
||||||
|
*/
|
||||||
|
bool audio_client_send_pending_request(void);
|
||||||
|
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|||||||
@@ -39,6 +39,8 @@ static lv_obj_t *lbl_cpu_temp;
|
|||||||
/* Services table */
|
/* Services table */
|
||||||
static lv_obj_t *tbl_services;
|
static lv_obj_t *tbl_services;
|
||||||
static int s_service_count;
|
static int s_service_count;
|
||||||
|
static int s_dup_rows; /* duplicate rows appended for looping */
|
||||||
|
static lv_coord_t s_row_h; /* measured row height in px */
|
||||||
|
|
||||||
/* Local sensors (bottom bar) */
|
/* Local sensors (bottom bar) */
|
||||||
static lv_obj_t *lbl_local;
|
static lv_obj_t *lbl_local;
|
||||||
@@ -116,21 +118,54 @@ static lv_obj_t *create_label(lv_obj_t *parent, int x, int y, const lv_font_t *f
|
|||||||
static void scroll_timer_cb(lv_timer_t *timer)
|
static void scroll_timer_cb(lv_timer_t *timer)
|
||||||
{
|
{
|
||||||
(void)timer;
|
(void)timer;
|
||||||
if (s_service_count <= 4) {
|
if (s_dup_rows <= 0 || s_row_h <= 0) {
|
||||||
|
/* Too few services or not yet measured — no scrolling */
|
||||||
lv_obj_scroll_to_y(tbl_services, 0, LV_ANIM_OFF);
|
lv_obj_scroll_to_y(tbl_services, 0, LV_ANIM_OFF);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
lv_coord_t cur_y = lv_obj_get_scroll_y(tbl_services);
|
lv_coord_t cur_y = lv_obj_get_scroll_y(tbl_services);
|
||||||
/* Each row is ~16px (12px font + 2px pad top + 2px pad bot) */
|
lv_coord_t wrap_y = s_service_count * s_row_h;
|
||||||
lv_coord_t row_h = 16;
|
|
||||||
lv_coord_t max_scroll = (s_service_count - 4) * row_h;
|
|
||||||
|
|
||||||
if (cur_y >= max_scroll) {
|
if (cur_y >= wrap_y) {
|
||||||
lv_obj_scroll_to_y(tbl_services, 0, LV_ANIM_ON);
|
lv_obj_scroll_to_y(tbl_services, 0, LV_ANIM_OFF);
|
||||||
} else {
|
cur_y = 0;
|
||||||
lv_obj_scroll_to_y(tbl_services, cur_y + row_h, LV_ANIM_ON);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// lv_obj_scroll_to_y(tbl_services, cur_y + s_row_h, LV_ANIM_OFF);
|
||||||
|
const int delta_y = s_row_h / 8;
|
||||||
|
lv_obj_scroll_to_y(tbl_services, cur_y + delta_y, LV_ANIM_ON);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Measure row height, compute visible rows, append duplicates for seamless loop */
|
||||||
|
static void fill_duplicate_rows(int count)
|
||||||
|
{
|
||||||
|
s_dup_rows = 0;
|
||||||
|
s_row_h = 0;
|
||||||
|
if (count <= 0) return;
|
||||||
|
|
||||||
|
/* Trim table to exactly count rows so measurement is clean */
|
||||||
|
lv_table_set_row_cnt(tbl_services, count);
|
||||||
|
lv_obj_update_layout(tbl_services);
|
||||||
|
|
||||||
|
lv_coord_t content_h = lv_obj_get_self_height(tbl_services);
|
||||||
|
lv_coord_t row_h = content_h / count;
|
||||||
|
if (row_h <= 0) return;
|
||||||
|
|
||||||
|
lv_coord_t visible_h = lv_obj_get_content_height(tbl_services);
|
||||||
|
int visible_rows = (visible_h + row_h - 1) / row_h; /* ceil */
|
||||||
|
|
||||||
|
if (count <= visible_rows) return; /* everything fits — no scrolling */
|
||||||
|
|
||||||
|
for (int i = 0; i < visible_rows; i++) {
|
||||||
|
lv_table_set_cell_value(tbl_services, count + i, 0,
|
||||||
|
lv_table_get_cell_value(tbl_services, i, 0));
|
||||||
|
lv_table_set_cell_value(tbl_services, count + i, 1,
|
||||||
|
lv_table_get_cell_value(tbl_services, i, 1));
|
||||||
|
}
|
||||||
|
|
||||||
|
s_dup_rows = visible_rows;
|
||||||
|
s_row_h = row_h;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ---------- Create UI sections ---------- */
|
/* ---------- Create UI sections ---------- */
|
||||||
@@ -180,7 +215,7 @@ static void create_top_bar(lv_obj_t *parent)
|
|||||||
/* Battery positive terminal nub */
|
/* Battery positive terminal nub */
|
||||||
lv_obj_t *batt_nub = lv_obj_create(bar_cont);
|
lv_obj_t *batt_nub = lv_obj_create(bar_cont);
|
||||||
lv_obj_set_size(batt_nub, 2, 4);
|
lv_obj_set_size(batt_nub, 2, 4);
|
||||||
lv_obj_align(batt_nub, LV_ALIGN_RIGHT_MID, -37, 0);
|
lv_obj_align(batt_nub, LV_ALIGN_RIGHT_MID, -38, 0);
|
||||||
lv_obj_set_style_bg_color(batt_nub, lv_color_white(), 0);
|
lv_obj_set_style_bg_color(batt_nub, lv_color_white(), 0);
|
||||||
lv_obj_set_style_bg_opa(batt_nub, LV_OPA_COVER, 0);
|
lv_obj_set_style_bg_opa(batt_nub, LV_OPA_COVER, 0);
|
||||||
lv_obj_set_style_border_width(batt_nub, 0, 0);
|
lv_obj_set_style_border_width(batt_nub, 0, 0);
|
||||||
@@ -221,18 +256,18 @@ static void create_time_bar(lv_obj_t *parent)
|
|||||||
lbl_date = lv_label_create(bar_cont);
|
lbl_date = lv_label_create(bar_cont);
|
||||||
lv_obj_set_style_text_font(lbl_date, &InziuIosevka_Slab_CC_20px, 0);
|
lv_obj_set_style_text_font(lbl_date, &InziuIosevka_Slab_CC_20px, 0);
|
||||||
lv_obj_set_style_text_color(lbl_date, lv_color_black(), 0);
|
lv_obj_set_style_text_color(lbl_date, lv_color_black(), 0);
|
||||||
lv_obj_align(lbl_date, LV_ALIGN_RIGHT_MID, -10, 0);
|
lv_obj_align(lbl_date, LV_ALIGN_RIGHT_MID, -20, 0);
|
||||||
lv_label_set_text(lbl_date, "----/--/-- ---");
|
lv_label_set_text(lbl_date, "----/--/-- ---");
|
||||||
}
|
}
|
||||||
|
|
||||||
static void create_main_section(lv_obj_t *parent)
|
static void create_main_section(lv_obj_t *parent)
|
||||||
{
|
{
|
||||||
/* === Left column: Services + Pi Vitals === */
|
/* === Left column: Services + Pi Vitals === */
|
||||||
create_label(parent, 4, MAIN_Y + 2, &InziuIosevka_Slab_CC_12px, "SERVICES");
|
create_label(parent, 4, MAIN_Y + 1, &InziuIosevka_Slab_CC_12px, "SERVICES");
|
||||||
|
|
||||||
tbl_services = lv_table_create(parent);
|
tbl_services = lv_table_create(parent);
|
||||||
lv_obj_set_pos(tbl_services, 4, MAIN_Y + 16);
|
lv_obj_set_pos(tbl_services, 4, MAIN_Y + 16);
|
||||||
lv_obj_set_size(tbl_services, 190, 68);
|
lv_obj_set_size(tbl_services, 190, 82);
|
||||||
lv_table_set_col_cnt(tbl_services, 2);
|
lv_table_set_col_cnt(tbl_services, 2);
|
||||||
lv_table_set_col_width(tbl_services, 0, 110);
|
lv_table_set_col_width(tbl_services, 0, 110);
|
||||||
lv_table_set_col_width(tbl_services, 1, 65);
|
lv_table_set_col_width(tbl_services, 1, 65);
|
||||||
@@ -248,8 +283,8 @@ static void create_main_section(lv_obj_t *parent)
|
|||||||
lv_table_set_cell_value(tbl_services, i, 1, "---");
|
lv_table_set_cell_value(tbl_services, i, 1, "---");
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Auto-scroll timer: 3 second period */
|
/* Timer for each shift of 1/8 line's height */
|
||||||
s_scroll_timer = lv_timer_create(scroll_timer_cb, 3000, NULL);
|
s_scroll_timer = lv_timer_create(scroll_timer_cb, 120, NULL);
|
||||||
|
|
||||||
/* === Left column: Pi Vitals (below services) === */
|
/* === Left column: Pi Vitals (below services) === */
|
||||||
int rx = 0;
|
int rx = 0;
|
||||||
@@ -258,12 +293,12 @@ static void create_main_section(lv_obj_t *parent)
|
|||||||
int bar_w = 82;
|
int bar_w = 82;
|
||||||
int bar_h = 12;
|
int bar_h = 12;
|
||||||
int val_x = rx + 4 + lbl_w + bar_w + 4; /* value label after bar */
|
int val_x = rx + 4 + lbl_w + bar_w + 4; /* value label after bar */
|
||||||
int temp_x = rx + 160; /* TEMP column, right of value labels */
|
int temp_x = rx + 155; /* TEMP column, right of value labels */
|
||||||
|
|
||||||
/* Pi Vitals header — Y=162 */
|
/* Pi Vitals header */
|
||||||
create_label(parent, rx + 4, 162, &InziuIosevka_Slab_CC_12px, "PI VITALS");
|
create_label(parent, rx + 4, 175, &InziuIosevka_Slab_CC_12px, "PI VITALS");
|
||||||
|
|
||||||
int ry = 176;
|
int ry = 192;
|
||||||
|
|
||||||
/* CPU [========] 12% TEMP */
|
/* CPU [========] 12% TEMP */
|
||||||
create_label(parent, rx + 4, ry, &InziuIosevka_Slab_CC_12px, "CPU");
|
create_label(parent, rx + 4, ry, &InziuIosevka_Slab_CC_12px, "CPU");
|
||||||
@@ -290,7 +325,7 @@ static void create_main_section(lv_obj_t *parent)
|
|||||||
|
|
||||||
/* === Right column: Status image (200x200) === */
|
/* === Right column: Status image (200x200) === */
|
||||||
img_status = lv_img_create(parent);
|
img_status = lv_img_create(parent);
|
||||||
lv_obj_set_pos(img_status, 200, MAIN_Y + 2);
|
lv_obj_set_pos(img_status, 200, MAIN_Y + 1);
|
||||||
lv_obj_set_size(img_status, 200, 200);
|
lv_obj_set_size(img_status, 200, 200);
|
||||||
lv_obj_set_style_bg_color(img_status, lv_color_white(), 0);
|
lv_obj_set_style_bg_color(img_status, lv_color_white(), 0);
|
||||||
lv_obj_set_style_bg_opa(img_status, LV_OPA_COVER, 0);
|
lv_obj_set_style_bg_opa(img_status, LV_OPA_COVER, 0);
|
||||||
@@ -303,7 +338,7 @@ static void create_bottom_bar(lv_obj_t *parent)
|
|||||||
lv_obj_t *bot_cont = lv_obj_create(parent);
|
lv_obj_t *bot_cont = lv_obj_create(parent);
|
||||||
lv_obj_set_pos(bot_cont, 0, y0);
|
lv_obj_set_pos(bot_cont, 0, y0);
|
||||||
lv_obj_set_size(bot_cont, SCREEN_W, BOT_H);
|
lv_obj_set_size(bot_cont, SCREEN_W, BOT_H);
|
||||||
lv_obj_set_style_bg_color(bot_cont, lv_color_white(), 0);
|
lv_obj_set_style_bg_color(bot_cont, lv_color_black(), 0);
|
||||||
lv_obj_set_style_bg_opa(bot_cont, LV_OPA_COVER, 0);
|
lv_obj_set_style_bg_opa(bot_cont, LV_OPA_COVER, 0);
|
||||||
lv_obj_set_style_border_color(bot_cont, lv_color_black(), 0);
|
lv_obj_set_style_border_color(bot_cont, lv_color_black(), 0);
|
||||||
lv_obj_set_style_border_width(bot_cont, 1, 0);
|
lv_obj_set_style_border_width(bot_cont, 1, 0);
|
||||||
@@ -314,14 +349,14 @@ static void create_bottom_bar(lv_obj_t *parent)
|
|||||||
|
|
||||||
lbl_net = lv_label_create(bot_cont);
|
lbl_net = lv_label_create(bot_cont);
|
||||||
lv_obj_set_style_text_font(lbl_net, &InziuIosevka_Slab_CC_12px, 0);
|
lv_obj_set_style_text_font(lbl_net, &InziuIosevka_Slab_CC_12px, 0);
|
||||||
lv_obj_set_style_text_color(lbl_net, lv_color_black(), 0);
|
lv_obj_set_style_text_color(lbl_net, lv_color_white(), 0);
|
||||||
lv_obj_align(lbl_net, LV_ALIGN_LEFT_MID, 0, 0);
|
lv_obj_align(lbl_net, LV_ALIGN_LEFT_MID, 0, 0);
|
||||||
lv_label_set_text(lbl_net, "NETWORK RX: ---- kbps TX: ---- kbps");
|
lv_label_set_text(lbl_net, "NETWORK DOWN: ---- kBps / UP: ---- kBps");
|
||||||
|
|
||||||
/* Local sensor readings — right-aligned */
|
/* Local sensor readings — right-aligned */
|
||||||
lbl_local = lv_label_create(bot_cont);
|
lbl_local = lv_label_create(bot_cont);
|
||||||
lv_obj_set_style_text_font(lbl_local, &InziuIosevka_Slab_CC_12px, 0);
|
lv_obj_set_style_text_font(lbl_local, &InziuIosevka_Slab_CC_12px, 0);
|
||||||
lv_obj_set_style_text_color(lbl_local, lv_color_black(), 0);
|
lv_obj_set_style_text_color(lbl_local, lv_color_white(), 0);
|
||||||
lv_obj_align(lbl_local, LV_ALIGN_RIGHT_MID, 0, 0);
|
lv_obj_align(lbl_local, LV_ALIGN_RIGHT_MID, 0, 0);
|
||||||
lv_label_set_text(lbl_local, "T: --.- H: --%");
|
lv_label_set_text(lbl_local, "T: --.- H: --%");
|
||||||
}
|
}
|
||||||
@@ -369,15 +404,17 @@ void dashboard_ui_update_stats(const pi_stats_t *stats)
|
|||||||
/* Services table */
|
/* Services table */
|
||||||
s_service_count = stats->service_count;
|
s_service_count = stats->service_count;
|
||||||
for (int i = 0; i < stats->service_count && i < WS_MAX_SERVICES; i++) {
|
for (int i = 0; i < stats->service_count && i < WS_MAX_SERVICES; i++) {
|
||||||
|
const char *tag;
|
||||||
|
switch (stats->services[i].status) {
|
||||||
|
case SVC_RUNNING: tag = "[RUN]"; break;
|
||||||
|
case SVC_WARNING: tag = "[WARN]"; break;
|
||||||
|
default: tag = "[STOP]"; break;
|
||||||
|
}
|
||||||
lv_table_set_cell_value(tbl_services, i, 0, stats->services[i].name);
|
lv_table_set_cell_value(tbl_services, i, 0, stats->services[i].name);
|
||||||
lv_table_set_cell_value(tbl_services, i, 1,
|
lv_table_set_cell_value(tbl_services, i, 1, tag);
|
||||||
stats->services[i].running ? "[RUN]" : "[STOP]");
|
|
||||||
}
|
|
||||||
/* Clear unused rows */
|
|
||||||
for (int i = stats->service_count; i < WS_MAX_SERVICES; i++) {
|
|
||||||
lv_table_set_cell_value(tbl_services, i, 0, "");
|
|
||||||
lv_table_set_cell_value(tbl_services, i, 1, "");
|
|
||||||
}
|
}
|
||||||
|
/* Measure row height, compute visible rows, append duplicates */
|
||||||
|
fill_duplicate_rows(stats->service_count);
|
||||||
|
|
||||||
/* Uptime */
|
/* Uptime */
|
||||||
snprintf(buf, sizeof(buf), "Uptime: %.0fh", stats->uptime_hrs);
|
snprintf(buf, sizeof(buf), "Uptime: %.0fh", stats->uptime_hrs);
|
||||||
@@ -385,7 +422,7 @@ void dashboard_ui_update_stats(const pi_stats_t *stats)
|
|||||||
|
|
||||||
/* Network */
|
/* Network */
|
||||||
char net_buf[64];
|
char net_buf[64];
|
||||||
snprintf(net_buf, sizeof(net_buf), "NETWORK RX: %.0f kbps TX: %.0f kbps",
|
snprintf(net_buf, sizeof(net_buf), "NETWORK DOWN: %.0f kBps / UP: %.0f kBps",
|
||||||
stats->net_rx_kbps, stats->net_tx_kbps);
|
stats->net_rx_kbps, stats->net_tx_kbps);
|
||||||
lv_label_set_text(lbl_net, net_buf);
|
lv_label_set_text(lbl_net, net_buf);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -133,7 +133,7 @@ static void ws_data_cb(const pi_stats_t *stats)
|
|||||||
}
|
}
|
||||||
|
|
||||||
for (int i = 0; i < stats->service_count; i++) {
|
for (int i = 0; i < stats->service_count; i++) {
|
||||||
if (!stats->services[i].running) {
|
if (stats->services[i].status != SVC_RUNNING) {
|
||||||
alert_trigger(ALERT_SERVICE_DOWN);
|
alert_trigger(ALERT_SERVICE_DOWN);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@@ -194,12 +194,16 @@ static void sensor_task(void *arg)
|
|||||||
*/
|
*/
|
||||||
ulTaskNotifyTake(pdTRUE, pdMS_TO_TICKS(1000));
|
ulTaskNotifyTake(pdTRUE, pdMS_TO_TICKS(1000));
|
||||||
|
|
||||||
|
/* Send deferred image request if connect just happened */
|
||||||
|
audio_client_send_pending_request();
|
||||||
|
|
||||||
/* Check for status image updates immediately */
|
/* Check for status image updates immediately */
|
||||||
bool img_updated = false;
|
bool img_updated = false;
|
||||||
const lv_img_dsc_t *img = audio_client_get_status_image(&img_updated);
|
const lv_img_dsc_t *img = audio_client_get_status_image(&img_updated);
|
||||||
if (img_updated && Lvgl_lock(100)) {
|
if (img_updated && Lvgl_lock(100)) {
|
||||||
dashboard_ui_update_status_image(img);
|
dashboard_ui_update_status_image(img);
|
||||||
Lvgl_unlock();
|
Lvgl_unlock();
|
||||||
|
audio_client_ack_status_image();
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Sensor + clock updates at ~1s cadence (skip if woken early) */
|
/* Sensor + clock updates at ~1s cadence (skip if woken early) */
|
||||||
@@ -245,10 +249,10 @@ static void sensor_task(void *arg)
|
|||||||
static void button_task(void *arg)
|
static void button_task(void *arg)
|
||||||
{
|
{
|
||||||
for (;;) {
|
for (;;) {
|
||||||
/* Wait for GP18 button event (single click = bit 0) */
|
/* Wait for GP18 button event (single click = bit 0, long press = bit 2) */
|
||||||
EventBits_t bits = xEventGroupWaitBits(
|
EventBits_t bits = xEventGroupWaitBits(
|
||||||
GP18ButtonGroups,
|
GP18ButtonGroups,
|
||||||
set_bit_button(0), /* single press bit */
|
set_bit_button(0) | set_bit_button(2),
|
||||||
pdTRUE, /* clear on exit */
|
pdTRUE, /* clear on exit */
|
||||||
pdFALSE, /* any bit */
|
pdFALSE, /* any bit */
|
||||||
pdMS_TO_TICKS(500)
|
pdMS_TO_TICKS(500)
|
||||||
@@ -257,7 +261,12 @@ static void button_task(void *arg)
|
|||||||
if (bits & set_bit_button(0)) {
|
if (bits & set_bit_button(0)) {
|
||||||
bool muted = !alert_is_muted();
|
bool muted = !alert_is_muted();
|
||||||
alert_mute(muted);
|
alert_mute(muted);
|
||||||
ESP_LOGI(TAG, "GP18 pressed: alerts %s", muted ? "muted" : "unmuted");
|
ESP_LOGI(TAG, "GP18 single press: alerts %s", muted ? "muted" : "unmuted");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (bits & set_bit_button(2)) {
|
||||||
|
ESP_LOGI(TAG, "GP18 long press: forcing WS reconnect");
|
||||||
|
ws_client_reconnect();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -92,7 +92,13 @@ static void parse_stats_json(const char *data, int len)
|
|||||||
s_stats.services[i].name[WS_SERVICE_NAME_LEN - 1] = '\0';
|
s_stats.services[i].name[WS_SERVICE_NAME_LEN - 1] = '\0';
|
||||||
}
|
}
|
||||||
if (status && status->valuestring) {
|
if (status && status->valuestring) {
|
||||||
s_stats.services[i].running = (strcmp(status->valuestring, "running") == 0);
|
if (strcmp(status->valuestring, "running") == 0) {
|
||||||
|
s_stats.services[i].status = SVC_RUNNING;
|
||||||
|
} else if (strcmp(status->valuestring, "warning") == 0) {
|
||||||
|
s_stats.services[i].status = SVC_WARNING;
|
||||||
|
} else {
|
||||||
|
s_stats.services[i].status = SVC_STOPPED;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -204,6 +210,14 @@ void ws_client_stop(void)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void ws_client_reconnect(void)
|
||||||
|
{
|
||||||
|
if (!s_client) return;
|
||||||
|
ESP_LOGI(TAG, "Manual WS reconnect triggered");
|
||||||
|
esp_websocket_client_close(s_client, pdMS_TO_TICKS(2000));
|
||||||
|
s_last_data_tick = xTaskGetTickCount();
|
||||||
|
}
|
||||||
|
|
||||||
ws_state_t ws_client_get_state(void)
|
ws_state_t ws_client_get_state(void)
|
||||||
{
|
{
|
||||||
return s_state;
|
return s_state;
|
||||||
|
|||||||
@@ -8,7 +8,7 @@
|
|||||||
extern "C" {
|
extern "C" {
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#define WS_MAX_SERVICES 8
|
#define WS_MAX_SERVICES 20
|
||||||
#define WS_SERVICE_NAME_LEN 16
|
#define WS_SERVICE_NAME_LEN 16
|
||||||
|
|
||||||
typedef enum {
|
typedef enum {
|
||||||
@@ -18,9 +18,15 @@ typedef enum {
|
|||||||
WS_STATE_ERROR,
|
WS_STATE_ERROR,
|
||||||
} ws_state_t;
|
} ws_state_t;
|
||||||
|
|
||||||
|
typedef enum {
|
||||||
|
SVC_STOPPED = 0,
|
||||||
|
SVC_WARNING,
|
||||||
|
SVC_RUNNING,
|
||||||
|
} ws_svc_status_t;
|
||||||
|
|
||||||
typedef struct {
|
typedef struct {
|
||||||
char name[WS_SERVICE_NAME_LEN];
|
char name[WS_SERVICE_NAME_LEN];
|
||||||
bool running;
|
ws_svc_status_t status;
|
||||||
} ws_service_t;
|
} ws_service_t;
|
||||||
|
|
||||||
typedef struct {
|
typedef struct {
|
||||||
@@ -53,6 +59,7 @@ typedef void (*ws_state_callback_t)(ws_state_t state);
|
|||||||
void ws_client_init(const char *uri);
|
void ws_client_init(const char *uri);
|
||||||
void ws_client_start(void);
|
void ws_client_start(void);
|
||||||
void ws_client_stop(void);
|
void ws_client_stop(void);
|
||||||
|
void ws_client_reconnect(void);
|
||||||
ws_state_t ws_client_get_state(void);
|
ws_state_t ws_client_get_state(void);
|
||||||
void ws_client_get_stats(pi_stats_t *out);
|
void ws_client_get_stats(pi_stats_t *out);
|
||||||
void ws_client_set_data_callback(ws_data_callback_t cb);
|
void ws_client_set_data_callback(ws_data_callback_t cb);
|
||||||
|
|||||||
5
pi/.gitignore
vendored
5
pi/.gitignore
vendored
@@ -1,5 +0,0 @@
|
|||||||
# python artifacts
|
|
||||||
*/__pycache__
|
|
||||||
__pycache__/
|
|
||||||
*.pyo
|
|
||||||
*.pyc
|
|
||||||
36
pi/PLAN.md
36
pi/PLAN.md
@@ -1,36 +0,0 @@
|
|||||||
# Pi Servers -- Roadmap
|
|
||||||
|
|
||||||
## Docker Compose
|
|
||||||
|
|
||||||
Containerize the pi servers for easier deployment.
|
|
||||||
|
|
||||||
### Options
|
|
||||||
|
|
||||||
1. **Single service** -- `run_all.py` as the entrypoint, both servers in one container
|
|
||||||
2. **Split services** -- separate containers for `stats_server.py` and `contents_server.py`
|
|
||||||
|
|
||||||
Single service is simpler. Split services allow independent scaling and restarts.
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- Volume mount `assets/` and `config/alarms.json` so they're editable without rebuilding
|
|
||||||
- Expose ports 8765 and 8766
|
|
||||||
- Network mode `host` or a bridge with known IPs for ESP32 discovery
|
|
||||||
- Restart policy: `unless-stopped`
|
|
||||||
|
|
||||||
## Repository Extraction
|
|
||||||
|
|
||||||
The `pi/` directory will become its own git repository.
|
|
||||||
|
|
||||||
### Steps
|
|
||||||
|
|
||||||
1. Extract `pi/` into a standalone repo with its own `README.md`, `requirements.txt`, and CI
|
|
||||||
2. Add it back to this project as a git submodule
|
|
||||||
3. The interface contract between the two repos is the WebSocket protocol -- JSON schemas and binary frame formats documented in `README.md`
|
|
||||||
|
|
||||||
### Benefits
|
|
||||||
|
|
||||||
- Independent versioning and release cycle
|
|
||||||
- Pi-side contributors don't need the ESP-IDF toolchain
|
|
||||||
- CI can test the Python servers in isolation
|
|
||||||
- Cleaner separation of concerns between embedded firmware and host services
|
|
||||||
132
pi/README.md
132
pi/README.md
@@ -1,132 +0,0 @@
|
|||||||
# Pi Dashboard Servers
|
|
||||||
|
|
||||||
WebSocket servers that feed system stats, alarm audio, and status images to the ESP32-S3 RLCD dashboard.
|
|
||||||
|
|
||||||
## File Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
pi/
|
|
||||||
run_all.py # Launches both servers as child processes
|
|
||||||
stats_server.py # Real system stats over WebSocket (port 8765)
|
|
||||||
contents_server.py # Alarm audio + status images over WebSocket (port 8766)
|
|
||||||
mock_server.py # Drop-in replacement for stats_server with random data
|
|
||||||
audio_handler.py # WAV loading, PCM chunking, alarm streaming
|
|
||||||
image_handler.py # PNG to 1-bit monochrome conversion, alpha compositing
|
|
||||||
alarm_scheduler.py # Loads and validates alarm config, checks firing schedule
|
|
||||||
requirements.txt
|
|
||||||
config/
|
|
||||||
alarms.json # Alarm schedule configuration
|
|
||||||
assets/
|
|
||||||
alarm/ # WAV files for alarm audio
|
|
||||||
img/ # Status images (idle.png, on_alarm.png)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Requirements
|
|
||||||
|
|
||||||
Python 3.10+
|
|
||||||
|
|
||||||
```
|
|
||||||
pip install -r requirements.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
Dependencies: `websockets`, `psutil`, `Pillow`
|
|
||||||
|
|
||||||
## Running
|
|
||||||
|
|
||||||
Start both servers:
|
|
||||||
|
|
||||||
```
|
|
||||||
python run_all.py # both servers, default config
|
|
||||||
python run_all.py --config path/to.json # both servers, custom config
|
|
||||||
```
|
|
||||||
|
|
||||||
Or run individually:
|
|
||||||
|
|
||||||
```
|
|
||||||
python stats_server.py # port 8765 only
|
|
||||||
python contents_server.py --config path/to.json # port 8766, custom config
|
|
||||||
python mock_server.py # port 8765, random data (no psutil needed)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Servers
|
|
||||||
|
|
||||||
### stats_server.py -- port 8765
|
|
||||||
|
|
||||||
Pushes a JSON object every 2 seconds with real system metrics from `psutil`:
|
|
||||||
|
|
||||||
- `cpu_pct`, `mem_pct`, `mem_used_mb`, `disk_pct`
|
|
||||||
- `cpu_temp` (reads `/sys/class/thermal/` as fallback)
|
|
||||||
- `uptime_hrs`, `net_rx_kbps`, `net_tx_kbps`
|
|
||||||
- `services` (mocked until systemd integration)
|
|
||||||
- `local_time` fields for RTC sync (`y`, `mo`, `d`, `h`, `m`, `s`)
|
|
||||||
|
|
||||||
### contents_server.py -- port 8766
|
|
||||||
|
|
||||||
Serves alarm audio and status images. Protocol:
|
|
||||||
|
|
||||||
**Status image:**
|
|
||||||
1. Text frame: `{"type":"status_image","width":120,"height":120}`
|
|
||||||
2. Binary frame: 1-bit monochrome bitmap (1800 bytes)
|
|
||||||
|
|
||||||
**Alarm audio:**
|
|
||||||
1. Text frame: `{"type":"alarm_start","sample_rate":N,"channels":N,"bits":N}`
|
|
||||||
2. Binary frames: raw PCM chunks (4096 bytes each, paced at ~90% real-time)
|
|
||||||
3. Text frame: `{"type":"alarm_stop"}`
|
|
||||||
|
|
||||||
Loads alarm config from `config/alarms.json` (override with `--config`). Checks schedule every 5 seconds, fires once per matched minute. If no config or empty config, sends idle image and blocks forever. On alarm: switches to alarm image, streams audio, switches back to idle.
|
|
||||||
|
|
||||||
### mock_server.py -- port 8765
|
|
||||||
|
|
||||||
Same JSON schema and 2-second push interval as `stats_server.py`, but all values are randomized. No `psutil` dependency -- useful for development on non-Pi machines.
|
|
||||||
|
|
||||||
Does not include `local_time` fields.
|
|
||||||
|
|
||||||
## Alarm Configuration
|
|
||||||
|
|
||||||
Config file: `config/alarms.json` -- a single alarm object or an array of alarm objects.
|
|
||||||
|
|
||||||
Example with two alarms:
|
|
||||||
|
|
||||||
```json
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"alarm_time": "0730",
|
|
||||||
"alarm_days": ["Mon", "Tue", "Wed", "Thu", "Fri"],
|
|
||||||
"alarm_audio": "assets/alarm/alarm_test.wav",
|
|
||||||
"alarm_image": "assets/img/on_alarm.png"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"alarm_time": "2300",
|
|
||||||
"alarm_audio": "assets/alarm/sleep.wav",
|
|
||||||
"alarm_image": "assets/img/sleep.png"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
```
|
|
||||||
|
|
||||||
| Field | Type | Required | Description |
|
|
||||||
|-------|------|----------|-------------|
|
|
||||||
| `alarm_time` | `string` | Yes | 4-digit HHMM, 24-hour. Fires on the matched minute. |
|
|
||||||
| `alarm_days` | `string[]` | No | 3-letter abbreviations: `Mon`–`Sun`. If omitted, fires every day. |
|
|
||||||
| `alarm_dates` | `string[]` | No | `MM/DD` strings. Ignored if `alarm_days` is also set. |
|
|
||||||
| `alarm_audio` | `string` | No | WAV path, relative to `pi/`. Default: `assets/alarm/alarm_test.wav`. |
|
|
||||||
| `alarm_image` | `string` | No | Status PNG path, relative to `pi/`. Default: `assets/img/on_alarm.png`. |
|
|
||||||
|
|
||||||
If both `alarm_days` and `alarm_dates` are present, `alarm_days` takes priority.
|
|
||||||
|
|
||||||
## Modules
|
|
||||||
|
|
||||||
### audio_handler.py
|
|
||||||
|
|
||||||
- `find_wav(path=None)` -- uses the given path if it exists, otherwise falls back to glob in `assets/alarm/`
|
|
||||||
- `read_wav(path)` -- reads WAV, returns `(pcm_bytes, sample_rate, channels, bits)`
|
|
||||||
- `stream_alarm(ws, pcm, sr, ch, bits)` -- streams one alarm cycle over WebSocket
|
|
||||||
|
|
||||||
### image_handler.py
|
|
||||||
|
|
||||||
- `load_status_image(path)` -- loads PNG, composites transparency onto white, converts to 1-bit 120x120 monochrome bitmap (black=1, MSB-first)
|
|
||||||
- `send_status_image(ws, img_bytes)` -- sends status image header + binary over WebSocket
|
|
||||||
|
|
||||||
### alarm_scheduler.py
|
|
||||||
|
|
||||||
- `load_config(path)` -- reads and validates alarm JSON; returns list of alarm dicts or `None`
|
|
||||||
- `should_fire(config)` -- checks a single alarm entry against current local time
|
|
||||||
@@ -1,135 +0,0 @@
|
|||||||
"""Alarm scheduler — load config and check firing schedule."""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
DEFAULT_CONFIG_PATH = Path(__file__).parent / "config" / "alarms.json"
|
|
||||||
|
|
||||||
VALID_DAYS = {"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}
|
|
||||||
TIME_RE = re.compile(r"^([01]\d|2[0-3])[0-5]\d$")
|
|
||||||
DATE_RE = re.compile(r"^(0[1-9]|1[0-2])/(0[1-9]|[12]\d|3[01])$")
|
|
||||||
|
|
||||||
|
|
||||||
def _validate_entry(entry: dict, index: int) -> dict | None:
    """Check one alarm entry from the config.

    Returns the entry unchanged when every field is well-formed,
    otherwise logs a warning and returns None.
    """
    if not isinstance(entry, dict):
        log.warning("Alarm #%d: expected object, got %s", index, type(entry).__name__)
        return None

    when = entry.get("alarm_time")
    if when is None:
        log.warning("Alarm #%d: missing required field 'alarm_time'", index)
        return None
    if not (isinstance(when, str) and TIME_RE.match(when)):
        log.warning("Alarm #%d: invalid alarm_time '%s' — must be 4-digit HHMM", index, when)
        return None

    days = entry.get("alarm_days")
    if days is not None:
        if not isinstance(days, list) or any(not isinstance(d, str) for d in days):
            log.warning("Alarm #%d: alarm_days must be a list of strings", index)
            return None
        unknown = [d for d in days if d not in VALID_DAYS]
        if unknown:
            log.warning("Alarm #%d: invalid day abbreviations: %s", index, unknown)
            return None

    dates = entry.get("alarm_dates")
    if dates is not None:
        if not isinstance(dates, list) or any(not isinstance(d, str) for d in dates):
            log.warning("Alarm #%d: alarm_dates must be a list of strings", index)
            return None
        malformed = [d for d in dates if not DATE_RE.match(d)]
        if malformed:
            log.warning("Alarm #%d: invalid date formats (expected MM/DD): %s", index, malformed)
            return None

    log.info("Alarm #%d: time=%s days=%s", index, when, days or "(every day)")
    return entry
|
|
||||||
|
|
||||||
|
|
||||||
def load_config(path: Path) -> list[dict] | None:
    """Read and validate the alarm config JSON at *path*.

    A single alarm object or an array of them is accepted.  The result is
    a list of validated alarm dicts; None signals a missing, empty,
    unreadable, or entirely invalid file.  Never raises — every failure
    mode is logged as a warning instead.
    """
    try:
        raw = path.read_text(encoding="utf-8").strip()
    except FileNotFoundError:
        log.warning("Config file not found: %s", path)
        return None
    except OSError as e:
        log.warning("Cannot read config %s: %s", path, e)
        return None

    if not raw:
        log.warning("Config file is empty: %s", path)
        return None

    try:
        data = json.loads(raw)
    except json.JSONDecodeError as e:
        log.warning("Invalid JSON in %s: %s", path, e)
        return None

    if not data:
        log.info("Config is empty — no alarms configured")
        return None

    # A bare object counts as a one-alarm array.
    if isinstance(data, dict):
        entries = [data]
    elif isinstance(data, list):
        entries = data
    else:
        log.warning("Config must be a JSON object or array, got %s", type(data).__name__)
        return None

    checked = (_validate_entry(entry, i) for i, entry in enumerate(entries))
    valid = [c for c in checked if c is not None]

    if not valid:
        log.warning("No valid alarm entries in %s", path)
        return None

    log.info("Loaded %d alarm(s) from %s", len(valid), path)
    return valid
|
|
||||||
|
|
||||||
|
|
||||||
def should_fire(config: dict) -> bool:
    """Decide whether one (already validated) alarm entry fires right now.

    Rules:
    - alarm_time must equal the current HHMM.
    - With alarm_days set, today's 3-letter day name must be listed.
    - Otherwise, with alarm_dates set, today's MM/DD must be listed.
    - With neither restriction present, the alarm fires every day.
    - alarm_days wins when both restrictions are present (dates ignored).
    """
    now = datetime.now()

    if config["alarm_time"] != now.strftime("%H%M"):
        return False

    days = config.get("alarm_days")
    if days is not None:
        return now.strftime("%a") in days

    dates = config.get("alarm_dates")
    if dates is not None:
        return now.strftime("%m/%d") in dates

    # No day/date restriction: fire daily.
    return True
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
0 700000 j
|
|
||||||
700000 1440000 u
|
|
||||||
1440000 2370000 u
|
|
||||||
2370000 3110000 i
|
|
||||||
3110000 3710000 ch
|
|
||||||
3710000 4380000 i
|
|
||||||
4380000 4800000 g
|
|
||||||
4800000 5560000 a
|
|
||||||
5560000 6180000 ts
|
|
||||||
6180000 6850000 u
|
|
||||||
6850000 7500000 j
|
|
||||||
7500000 8210000 u
|
|
||||||
8210000 9130000 u
|
|
||||||
9130000 9880000 i
|
|
||||||
9880000 10460000 ch
|
|
||||||
10460000 11150000 i
|
|
||||||
11150000 11690000 g
|
|
||||||
11690000 12470000 a
|
|
||||||
12470000 13100000 ts
|
|
||||||
13100000 13770000 u
|
|
||||||
13770000 14420000 j
|
|
||||||
14420000 15140000 u
|
|
||||||
15140000 16070000 u
|
|
||||||
16070000 16810000 i
|
|
||||||
16810000 17420000 ch
|
|
||||||
17420000 18080000 i
|
|
||||||
18080000 18610000 g
|
|
||||||
18610000 19410000 a
|
|
||||||
19410000 20020000 ts
|
|
||||||
20020000 20680000 u
|
|
||||||
20680000 21320000 j
|
|
||||||
21320000 22030000 u
|
|
||||||
22030000 22900000 u
|
|
||||||
22900000 23640000 i
|
|
||||||
23640000 24250000 ch
|
|
||||||
24250000 24920000 i
|
|
||||||
24920000 25460000 g
|
|
||||||
25460000 26200000 a
|
|
||||||
26200000 26840000 ts
|
|
||||||
26840000 27480000 u
|
|
||||||
27480000 28130000 j
|
|
||||||
28130000 28830000 u
|
|
||||||
28830000 29720000 u
|
|
||||||
29720000 30440000 i
|
|
||||||
30440000 31040000 ch
|
|
||||||
31040000 31750000 i
|
|
||||||
31750000 32600000 by
|
|
||||||
32600000 33320000 o
|
|
||||||
33320000 34120000 o
|
|
||||||
34120000 34740000 j
|
|
||||||
34740000 35350000 a
|
|
||||||
35350000 35870000 s
|
|
||||||
35870000 36510000 u
|
|
||||||
36510000 36960000 t
|
|
||||||
36960000 38220000 o
|
|
||||||
Binary file not shown.
@@ -1,42 +0,0 @@
|
|||||||
0 850000 s
|
|
||||||
850000 1580000 u
|
|
||||||
1580000 2220000 i
|
|
||||||
2220000 2660000 m
|
|
||||||
2660000 3370000 i
|
|
||||||
3370000 4140000 N
|
|
||||||
4140000 4610000 g
|
|
||||||
4610000 5780000 a
|
|
||||||
5780000 13780000 pau
|
|
||||||
13780000 14350000 k
|
|
||||||
14350000 15010000 i
|
|
||||||
15010000 15730000 e
|
|
||||||
15730000 16160000 t
|
|
||||||
16160000 16800000 e
|
|
||||||
16800000 17260000 k
|
|
||||||
17260000 17840000 u
|
|
||||||
17840000 18390000 d
|
|
||||||
18390000 19090000 a
|
|
||||||
19090000 19700000 s
|
|
||||||
19700000 20390000 a
|
|
||||||
20390000 20830000 r
|
|
||||||
20830000 22120000 i
|
|
||||||
22120000 23620000 pau
|
|
||||||
23620000 24390000 a
|
|
||||||
24390000 24810000 r
|
|
||||||
24810000 25430000 i
|
|
||||||
25430000 25860000 g
|
|
||||||
25860000 26550000 a
|
|
||||||
26550000 27000000 t
|
|
||||||
27000000 27780000 o
|
|
||||||
27780000 28520000 o
|
|
||||||
28520000 29000000 g
|
|
||||||
29000000 29740000 o
|
|
||||||
29740000 30260000 z
|
|
||||||
30260000 31110000 a
|
|
||||||
31110000 31790000 i
|
|
||||||
31790000 32190000 m
|
|
||||||
32190000 32870000 a
|
|
||||||
32870000 33480000 sh
|
|
||||||
33480000 34040000 i
|
|
||||||
34040000 34520000 t
|
|
||||||
34520000 35950000 a
|
|
||||||
Binary file not shown.
Binary file not shown.
|
Before Width: | Height: | Size: 24 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 33 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 47 KiB |
@@ -1,114 +0,0 @@
|
|||||||
"""Audio alarm functions — WAV loading and PCM streaming."""
|
|
||||||
|
|
||||||
import array
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import math
|
|
||||||
import wave
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
CHUNK_SIZE = 4096
|
|
||||||
AUDIO_DIR = Path(__file__).parent / "assets" / "alarm"
|
|
||||||
|
|
||||||
|
|
||||||
def find_wav(path: Path | None = None) -> Path:
    """Locate a WAV file to play.

    A caller-supplied *path* wins when it names an existing file;
    otherwise the first .wav found in the alarm assets directory is used.
    Raises FileNotFoundError when neither yields a file.
    """
    if path is not None:
        candidate = Path(path)
        if candidate.is_file():
            log.info("Using audio file: %s", candidate)
            return candidate
        log.warning("Specified audio path not found: %s — falling back to glob", candidate)

    matches = list(AUDIO_DIR.glob("*.wav"))
    if not matches:
        raise FileNotFoundError(f"No .wav files found in {AUDIO_DIR}")
    chosen = matches[0]
    log.info("Using audio file: %s", chosen.name)
    return chosen
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_pcm(pcm: bytes, bits: int) -> bytes:
    """Peak-normalize PCM data to 0 dBFS.

    Supports 8-bit (unsigned, centered on 128) and 16-bit (signed) PCM.
    Returns the original bytes unchanged if the audio is already at full
    scale or entirely silent; other bit depths pass through with a warning.
    """
    if bits == 16:
        samples = array.array("h", pcm)  # signed 16-bit
        peak = max(abs(s) for s in samples) if samples else 0
        if peak == 0 or peak == 32767:
            return pcm
        scale = 32767 / peak
        # Clamp after scaling so rounding can never exceed the int16 range.
        samples = array.array("h", (min(32767, max(-32768, int(s * scale))) for s in samples))
    elif bits == 8:
        samples = array.array("B", pcm)  # unsigned 8-bit, center=128
        peak = max(abs(s - 128) for s in samples) if samples else 0
        if peak == 0 or peak == 127:
            return pcm
        scale = 127 / peak
        samples = array.array("B", (max(0, min(255, int((s - 128) * scale) + 128)) for s in samples))
    else:
        log.warning("Normalization not supported for %d-bit audio, skipping", bits)
        return pcm

    # Use the module-level `math` import directly (the original routed this
    # through __import__("math") even though math is imported at the top).
    gain_db = 20 * math.log10(scale) if scale > 0 else 0
    log.info("Normalized: peak %d → 0 dBFS (gain %.1f dB)", peak, gain_db)
    return samples.tobytes()
|
|
||||||
|
|
||||||
|
|
||||||
def read_wav(path: Path) -> tuple[bytes, int, int, int]:
    """Read a WAV file and return (pcm_data, sample_rate, channels, bits).

    The PCM payload is peak-normalized to 0 dBFS before returning.
    Raises ValueError for formats the wave module rejects (e.g. 32-bit float).
    """
    try:
        handle = wave.open(str(path), "rb")
    except wave.Error as e:
        raise ValueError(
            f"{path.name}: unsupported WAV format ({e}). "
            "Only 8/16-bit integer PCM is supported — no 32-bit float."
        ) from e

    with handle:
        rate = handle.getframerate()
        channels = handle.getnchannels()
        bits = handle.getsampwidth() * 8
        pcm = handle.readframes(handle.getnframes())

    duration = len(pcm) / (rate * channels * (bits // 8))
    log.info("WAV loaded: %dHz %dch %dbit, %.1fs, %d bytes",
             rate, channels, bits, duration, len(pcm))
    return _normalize_pcm(pcm, bits), rate, channels, bits
|
|
||||||
|
|
||||||
|
|
||||||
def chunk_bytes(data: bytes, size: int):
    """Yield successive *size*-byte slices of *data*; the last may be short."""
    offset = 0
    while offset < len(data):
        yield data[offset:offset + size]
        offset += size
|
|
||||||
|
|
||||||
|
|
||||||
async def stream_alarm(ws, pcm: bytes, sr: int, ch: int, bits: int):
    """Stream one full alarm cycle (start header, PCM chunks, stop) to *ws*."""
    bytes_per_sec = sr * ch * (bits // 8)
    # Pace at 90% of real-time so the client buffer never underruns.
    pace_delay = (CHUNK_SIZE / bytes_per_sec) * 0.9
    total_chunks = (len(pcm) + CHUNK_SIZE - 1) // CHUNK_SIZE

    await ws.send(json.dumps({
        "type": "alarm_start",
        "sample_rate": sr,
        "channels": ch,
        "bits": bits,
    }))
    log.info("Sent alarm_start (%d chunks, pace %.1fms)", total_chunks, pace_delay * 1000)

    for chunk in chunk_bytes(pcm, CHUNK_SIZE):
        await ws.send(chunk)
        await asyncio.sleep(pace_delay)

    await ws.send(json.dumps({"type": "alarm_stop"}))
    log.info("Sent alarm_stop")
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
[
|
|
||||||
{
|
|
||||||
"alarm_time": "0700",
|
|
||||||
"alarm_days": ["Mon", "Tue", "Wed", "Thu", "Fri"],
|
|
||||||
"alarm_audio": "assets/alarm/alarm_test.wav",
|
|
||||||
"alarm_image": "assets/img/on_alarm.png"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"alarm_time": "2330",
|
|
||||||
"alarm_audio": "assets/alarm/sleep.wav",
|
|
||||||
"alarm_image": "assets/img/sleep.png"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
@@ -1,114 +0,0 @@
|
|||||||
"""
|
|
||||||
Contents server — serves alarm audio and status images over WebSocket.
|
|
||||||
|
|
||||||
Streams WAV PCM chunks and pushes 1-bit monochrome status images to the
|
|
||||||
connected ESP32 dashboard client on port 8766.
|
|
||||||
|
|
||||||
Protocol:
|
|
||||||
Status image:
|
|
||||||
1. Text frame: {"type":"status_image","width":200,"height":200}
|
|
||||||
2. Binary frame: 1-bit monochrome bitmap
|
|
||||||
|
|
||||||
Alarm audio:
|
|
||||||
1. Text frame: {"type":"alarm_start","sample_rate":N,"channels":N,"bits":N}
|
|
||||||
2. Binary frames: raw PCM chunks (4096 bytes each, paced at ~90% real-time)
|
|
||||||
3. Text frame: {"type":"alarm_stop"}
|
|
||||||
"""
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import websockets
|
|
||||||
|
|
||||||
from alarm_scheduler import DEFAULT_CONFIG_PATH, load_config, should_fire
|
|
||||||
from audio_handler import find_wav, read_wav, stream_alarm
|
|
||||||
from image_handler import IMG_DIR, load_status_image, send_status_image
|
|
||||||
|
|
||||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
|
|
||||||
log = logging.getLogger("contents_server")
|
|
||||||
|
|
||||||
PORT = 8766
|
|
||||||
PI_DIR = Path(__file__).parent
|
|
||||||
|
|
||||||
# Set by main(), read by handler()
|
|
||||||
_config_path: Path = DEFAULT_CONFIG_PATH
|
|
||||||
|
|
||||||
TICK_INTERVAL = 5 # seconds between schedule checks
|
|
||||||
|
|
||||||
|
|
||||||
def _resolve_path(relative: str) -> Path:
    """Turn a config-supplied path into an absolute one, rooted at pi/."""
    candidate = Path(relative)
    return candidate if candidate.is_absolute() else PI_DIR / candidate
|
|
||||||
|
|
||||||
|
|
||||||
def _prepare_alarm(entry: dict) -> dict:
    """Resolve paths and preload audio/image resources for one alarm entry."""
    audio = find_wav(_resolve_path(entry.get("alarm_audio", "assets/alarm/alarm_test.wav")))
    image = _resolve_path(entry.get("alarm_image", "assets/img/on_alarm.png"))
    pcm, sr, ch, bits = read_wav(audio)
    return {
        "config": entry,
        "pcm": pcm, "sr": sr, "ch": ch, "bits": bits,
        "img": load_status_image(image),
        "last_fired": None,  # minute stamp of the last firing, for dedup
    }
|
|
||||||
|
|
||||||
|
|
||||||
async def handler(ws):
    """Serve one WebSocket client: idle image first, then the alarm loop."""
    remote = ws.remote_address
    log.info("Client connected: %s:%d", remote[0], remote[1])

    configs = load_config(_config_path)
    img_idle = load_status_image(IMG_DIR / "idle.png")

    try:
        await send_status_image(ws, img_idle)

        if not configs:
            log.info("No alarms configured — idling forever")
            await asyncio.Future()
            return

        alarms = [_prepare_alarm(entry) for entry in configs]

        while True:
            for alarm in alarms:
                if not should_fire(alarm["config"]):
                    continue
                # Fire at most once per minute even though we poll every
                # TICK_INTERVAL seconds within that minute.
                minute = datetime.now().strftime("%Y%m%d%H%M")
                if minute == alarm["last_fired"]:
                    continue
                alarm["last_fired"] = minute
                log.info("Alarm firing: %s at %s",
                         alarm["config"]["alarm_time"], minute)
                await send_status_image(ws, alarm["img"])
                await stream_alarm(ws, alarm["pcm"], alarm["sr"],
                                   alarm["ch"], alarm["bits"])
                await send_status_image(ws, img_idle)

            await asyncio.sleep(TICK_INTERVAL)

    except websockets.exceptions.ConnectionClosed:
        log.info("Client disconnected: %s:%d", remote[0], remote[1])
|
|
||||||
|
|
||||||
|
|
||||||
async def main():
    """Run the contents WebSocket server forever."""
    log.info("Contents server starting on port %d", PORT)
    async with websockets.serve(handler, "0.0.0.0", PORT):
        await asyncio.Future()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Alarm contents server")
    parser.add_argument("--config", type=Path, default=DEFAULT_CONFIG_PATH,
                        help="Path to alarm config JSON (default: %(default)s)")
    # handler() reads the module-level _config_path assigned here.
    _config_path = parser.parse_args().config
    asyncio.run(main())
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
"""Status image functions — loading, alpha compositing, and WS transmission."""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from PIL import Image
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
IMG_DIR = Path(__file__).parent / "assets" / "img"
|
|
||||||
STATUS_IMG_SIZE = 200
|
|
||||||
MONOCHROME_THRESHOLD = 180
|
|
||||||
|
|
||||||
|
|
||||||
def load_status_image(path: Path) -> bytes:
    """Load a PNG and convert it to a 1-bit 200x200 (STATUS_IMG_SIZE)
    monochrome bitmap, MSB-first, black=1.

    Transparent pixels are composited onto white so they don't render as black.
    (Earlier comments claimed 120x120 — stale; the canvas is STATUS_IMG_SIZE.)
    """
    img = Image.open(path)

    # Composite transparent pixels onto white background
    if img.mode in ("RGBA", "LA", "PA"):
        bg = Image.new("RGBA", img.size, (255, 255, 255, 255))
        bg.paste(img, mask=img.split()[-1])
        img = bg

    img = img.convert("L")

    # Resize to fit within STATUS_IMG_SIZE x STATUS_IMG_SIZE, preserving
    # aspect ratio.
    img.thumbnail((STATUS_IMG_SIZE, STATUS_IMG_SIZE), Image.LANCZOS)

    # Paste centered onto white canvas
    canvas = Image.new("L", (STATUS_IMG_SIZE, STATUS_IMG_SIZE), 255)
    x_off = (STATUS_IMG_SIZE - img.width) // 2
    y_off = (STATUS_IMG_SIZE - img.height) // 2
    canvas.paste(img, (x_off, y_off))

    # Threshold to 1-bit: black (< MONOCHROME_THRESHOLD) -> 1, white -> 0
    bw = canvas.point(lambda p: 1 if p < MONOCHROME_THRESHOLD else 0, "1")
    raw = bw.tobytes()
    log.info("Status image loaded: %s -> %d bytes", path.name, len(raw))
    return raw
|
|
||||||
|
|
||||||
|
|
||||||
async def send_status_image(ws, img_bytes: bytes):
    """Push a status image to the client: JSON header frame, then raw bitmap."""
    await ws.send(json.dumps({"type": "status_image", "width": STATUS_IMG_SIZE, "height": STATUS_IMG_SIZE}))
    await ws.send(img_bytes)
    log.info("Sent status image (%d bytes)", len(img_bytes))
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""Mock WebSocket server that sends randomized Pi stats every 2 seconds."""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import random
|
|
||||||
import time
|
|
||||||
|
|
||||||
import websockets
|
|
||||||
|
|
||||||
|
|
||||||
def generate_stats():
    """Build one randomized stats payload matching the real server's schema."""
    # Weighted choices: "running" appears multiple times to skew toward up.
    svc_list = [
        {"name": "docker", "status": random.choice(["running", "running", "running", "stopped"])},
        {"name": "pihole", "status": random.choice(["running", "running", "running", "stopped"])},
        {"name": "nginx", "status": random.choice(["running", "running", "stopped"])},
        {"name": "sshd", "status": "running"},
    ]
    payload = {
        "cpu_pct": round(random.uniform(5, 95), 1),
        "mem_pct": round(random.uniform(30, 85), 1),
        "mem_used_mb": random.randint(512, 3200),
        "disk_pct": round(random.uniform(20, 80), 1),
        "cpu_temp": round(random.uniform(35, 78), 1),
        "uptime_hrs": round(random.uniform(1, 2000), 1),
        "net_rx_kbps": round(random.uniform(0, 5000), 1),
        "net_tx_kbps": round(random.uniform(0, 2000), 1),
        "services": svc_list,
        "timestamp": int(time.time()),
    }
    return payload
|
|
||||||
|
|
||||||
|
|
||||||
async def handler(websocket):
    """Push randomized stats to one client every 2 seconds until it drops."""
    addr = websocket.remote_address
    print(f"Client connected: {addr}")
    try:
        while True:
            await websocket.send(json.dumps(generate_stats()))
            await asyncio.sleep(2)
    except websockets.ConnectionClosed:
        print(f"Client disconnected: {addr}")
|
|
||||||
|
|
||||||
|
|
||||||
async def main():
    """Serve mock stats on port 8765 forever."""
    print("Mock Pi stats server starting on ws://0.0.0.0:8765")
    async with websockets.serve(handler, "0.0.0.0", 8765):
        await asyncio.Future()  # run forever
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
asyncio.run(main())
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
websockets>=12.0
|
|
||||||
psutil>=5.9.0
|
|
||||||
Pillow>=10.0
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
#!/usr/bin/env python3
"""Launch stats_server and contents_server as child processes."""
import subprocess, sys, signal
from pathlib import Path

base = Path(__file__).parent

# Forward any CLI args (e.g. --config) to contents_server
extra_args = sys.argv[1:]

procs = [
    subprocess.Popen([sys.executable, base / "stats_server.py"]),
    subprocess.Popen([sys.executable, base / "contents_server.py"] + extra_args),
]


def _shutdown(*_):
    # Terminate both children on Ctrl-C / SIGTERM.
    for p in procs:
        p.terminate()


signal.signal(signal.SIGINT, _shutdown)
signal.signal(signal.SIGTERM, _shutdown)

print(f"Running stats_server (PID {procs[0].pid}) + contents_server (PID {procs[1].pid})")
for p in procs:
    p.wait()
|
|
||||||
@@ -1,128 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""WebSocket server that sends real Pi system stats every 2 seconds.
|
|
||||||
|
|
||||||
Drop-in replacement for mock_server.py. Same port (8765), same JSON schema,
|
|
||||||
same 2s push interval. Services remain mocked until systemd integration is added.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import random
|
|
||||||
import time
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import psutil
|
|
||||||
import websockets
|
|
||||||
|
|
||||||
# Prime the CPU percent counter (first call always returns 0.0)
|
|
||||||
psutil.cpu_percent(interval=None)
|
|
||||||
|
|
||||||
# Network baseline for delta calculation
|
|
||||||
_prev_net = psutil.net_io_counters()
|
|
||||||
_prev_net_time = time.monotonic()
|
|
||||||
|
|
||||||
|
|
||||||
def _get_cpu_temp() -> float:
    """Read CPU temperature with fallback for different Pi OS versions."""
    # Preferred: psutil's sensors interface ("cpu_thermal" zone on Pi OS).
    try:
        zones = psutil.sensors_temperatures()
        readings = zones.get("cpu_thermal") or []
        if readings:
            return round(readings[0].current, 1)
    except AttributeError:
        pass

    # Fallback: sysfs exposes the value in millidegrees Celsius.
    try:
        raw = Path("/sys/class/thermal/thermal_zone0/temp").read_text()
        return round(int(raw.strip()) / 1000.0, 1)
    except (FileNotFoundError, ValueError, PermissionError):
        return 0.0
|
|
||||||
|
|
||||||
|
|
||||||
def _get_net_throughput() -> tuple[float, float]:
    """Return (rx_kbps, tx_kbps) measured since the previous call."""
    global _prev_net, _prev_net_time

    now = time.monotonic()
    counters = psutil.net_io_counters()
    elapsed = now - _prev_net_time

    if elapsed <= 0:
        return 0.0, 0.0

    def _kbps(byte_delta: int) -> float:
        # bytes -> kilobits over the elapsed window: *8 bits, /1000 per kilo.
        return round(byte_delta * 8 / (elapsed * 1000), 1)

    rx = _kbps(counters.bytes_recv - _prev_net.bytes_recv)
    tx = _kbps(counters.bytes_sent - _prev_net.bytes_sent)

    _prev_net = counters
    _prev_net_time = now

    return rx, tx
|
|
||||||
|
|
||||||
|
|
||||||
def _mock_services() -> list[dict]:
|
|
||||||
"""Mocked service status — same logic as mock_server.py."""
|
|
||||||
return [
|
|
||||||
{"name": "docker", "status": random.choice(["running", "running", "running", "stopped"])},
|
|
||||||
{"name": "pihole", "status": random.choice(["running", "running", "running", "stopped"])},
|
|
||||||
{"name": "nginx", "status": random.choice(["running", "running", "stopped"])},
|
|
||||||
{"name": "sshd", "status": "running"},
|
|
||||||
{"name": "ph1", "status": "running"},
|
|
||||||
{"name": "ph2", "status": "stopped"},
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def _local_time_fields() -> dict:
|
|
||||||
"""Current local time as broken-down fields for RTC sync."""
|
|
||||||
now = datetime.now()
|
|
||||||
return {
|
|
||||||
"y": now.year,
|
|
||||||
"mo": now.month,
|
|
||||||
"d": now.day,
|
|
||||||
"h": now.hour,
|
|
||||||
"m": now.minute,
|
|
||||||
"s": now.second,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_stats() -> dict:
    """Collect one real stats sample in the same JSON schema as mock_server.py."""
    mem = psutil.virtual_memory()
    disk = psutil.disk_usage("/mnt/buffalo")
    rx_kbps, tx_kbps = _get_net_throughput()

    return {
        "cpu_pct": psutil.cpu_percent(interval=None),
        "mem_pct": round(mem.percent, 1),
        "mem_used_mb": int(mem.used // (1024 * 1024)),
        "disk_pct": round(disk.percent, 1),
        "cpu_temp": _get_cpu_temp(),
        "uptime_hrs": round((time.time() - psutil.boot_time()) / 3600, 1),
        # NOTE(review): the /8 converts kbps to kByte/s ("for humans" per the
        # original comment) but the keys still say kbps, and mock_server sends
        # true kbps — confirm which unit the dashboard expects before renaming.
        "net_rx_kbps": rx_kbps / 8,
        "net_tx_kbps": tx_kbps / 8,  # kByte/s for humans
        "services": _mock_services(),
        "timestamp": int(time.time()),
        "local_time": _local_time_fields(),
    }
|
|
||||||
|
|
||||||
|
|
||||||
async def handler(websocket):
    """Send real stats to one client every 2 seconds until it disconnects."""
    addr = websocket.remote_address
    print(f"Client connected: {addr}")
    try:
        while True:
            await websocket.send(json.dumps(generate_stats()))
            await asyncio.sleep(2)
    except websockets.ConnectionClosed:
        print(f"Client disconnected: {addr}")
|
|
||||||
|
|
||||||
|
|
||||||
async def main():
    """Serve live Pi stats on port 8765 forever."""
    print("Pi stats server starting on ws://0.0.0.0:8765")
    async with websockets.serve(handler, "0.0.0.0", 8765):
        await asyncio.Future()  # run forever
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
asyncio.run(main())
|
|
||||||
Reference in New Issue
Block a user