Compare commits
6 Commits
bugfix/ws-
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| 6c48d70284 | |||
| 436e55a0c5 | |||
|
|
776aee18fc | ||
|
|
41e0a6b81d | ||
|
|
e4da3687b4 | ||
|
|
05cd085b89 |
@@ -1,25 +0,0 @@
|
|||||||
{
|
|
||||||
"permissions": {
|
|
||||||
"allow": [
|
|
||||||
"Bash(echo:*)",
|
|
||||||
"Bash(idf.py build:*)",
|
|
||||||
"Bash(for f in DSEG14C_BI_50px.c InziuIosevka_Slab_CC_12px.c InziuIosevka_Slab_CC_16px.c InziuIosevka_Slab_CC_20px.c InziuIosevka_Slab_CC_24px.c InziuIosevka_Slab_CC_32px.c)",
|
|
||||||
"Bash(do sed -i '/\\\\.static_bitmap = 0,/d' \"$f\")",
|
|
||||||
"Bash(done)",
|
|
||||||
"Bash(file:*)",
|
|
||||||
"mcp__ide__getDiagnostics",
|
|
||||||
"Bash(python -m py_compile:*)",
|
|
||||||
"WebSearch",
|
|
||||||
"WebFetch(domain:docs.waveshare.com)",
|
|
||||||
"WebFetch(domain:www.waveshare.com)",
|
|
||||||
"WebFetch(domain:raw.githubusercontent.com)",
|
|
||||||
"Bash(npm view:*)",
|
|
||||||
"WebFetch(domain:raw.githubusercontent.com)",
|
|
||||||
"Bash(docker ps:*)",
|
|
||||||
"Bash(python3:*)"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"outputStyle": "iseri",
|
|
||||||
"spinnerTipsEnabled": false,
|
|
||||||
"prefersReducedMotion": true
|
|
||||||
}
|
|
||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -5,4 +5,7 @@ sdkconfig.old
|
|||||||
dependencies.lock
|
dependencies.lock
|
||||||
|
|
||||||
# vscode local settings
|
# vscode local settings
|
||||||
.vscode/
|
.vscode/
|
||||||
|
|
||||||
|
# claude local settings
|
||||||
|
.claude/
|
||||||
@@ -215,7 +215,7 @@ static void create_top_bar(lv_obj_t *parent)
|
|||||||
/* Battery positive terminal nub */
|
/* Battery positive terminal nub */
|
||||||
lv_obj_t *batt_nub = lv_obj_create(bar_cont);
|
lv_obj_t *batt_nub = lv_obj_create(bar_cont);
|
||||||
lv_obj_set_size(batt_nub, 2, 4);
|
lv_obj_set_size(batt_nub, 2, 4);
|
||||||
lv_obj_align(batt_nub, LV_ALIGN_RIGHT_MID, -37, 0);
|
lv_obj_align(batt_nub, LV_ALIGN_RIGHT_MID, -38, 0);
|
||||||
lv_obj_set_style_bg_color(batt_nub, lv_color_white(), 0);
|
lv_obj_set_style_bg_color(batt_nub, lv_color_white(), 0);
|
||||||
lv_obj_set_style_bg_opa(batt_nub, LV_OPA_COVER, 0);
|
lv_obj_set_style_bg_opa(batt_nub, LV_OPA_COVER, 0);
|
||||||
lv_obj_set_style_border_width(batt_nub, 0, 0);
|
lv_obj_set_style_border_width(batt_nub, 0, 0);
|
||||||
@@ -284,7 +284,7 @@ static void create_main_section(lv_obj_t *parent)
|
|||||||
}
|
}
|
||||||
|
|
||||||
/* Timer for each shift of 1/8 line's height */
|
/* Timer for each shift of 1/8 line's height */
|
||||||
s_scroll_timer = lv_timer_create(scroll_timer_cb, 200, NULL);
|
s_scroll_timer = lv_timer_create(scroll_timer_cb, 120, NULL);
|
||||||
|
|
||||||
/* === Left column: Pi Vitals (below services) === */
|
/* === Left column: Pi Vitals (below services) === */
|
||||||
int rx = 0;
|
int rx = 0;
|
||||||
@@ -338,7 +338,7 @@ static void create_bottom_bar(lv_obj_t *parent)
|
|||||||
lv_obj_t *bot_cont = lv_obj_create(parent);
|
lv_obj_t *bot_cont = lv_obj_create(parent);
|
||||||
lv_obj_set_pos(bot_cont, 0, y0);
|
lv_obj_set_pos(bot_cont, 0, y0);
|
||||||
lv_obj_set_size(bot_cont, SCREEN_W, BOT_H);
|
lv_obj_set_size(bot_cont, SCREEN_W, BOT_H);
|
||||||
lv_obj_set_style_bg_color(bot_cont, lv_color_white(), 0);
|
lv_obj_set_style_bg_color(bot_cont, lv_color_black(), 0);
|
||||||
lv_obj_set_style_bg_opa(bot_cont, LV_OPA_COVER, 0);
|
lv_obj_set_style_bg_opa(bot_cont, LV_OPA_COVER, 0);
|
||||||
lv_obj_set_style_border_color(bot_cont, lv_color_black(), 0);
|
lv_obj_set_style_border_color(bot_cont, lv_color_black(), 0);
|
||||||
lv_obj_set_style_border_width(bot_cont, 1, 0);
|
lv_obj_set_style_border_width(bot_cont, 1, 0);
|
||||||
@@ -349,14 +349,14 @@ static void create_bottom_bar(lv_obj_t *parent)
|
|||||||
|
|
||||||
lbl_net = lv_label_create(bot_cont);
|
lbl_net = lv_label_create(bot_cont);
|
||||||
lv_obj_set_style_text_font(lbl_net, &InziuIosevka_Slab_CC_12px, 0);
|
lv_obj_set_style_text_font(lbl_net, &InziuIosevka_Slab_CC_12px, 0);
|
||||||
lv_obj_set_style_text_color(lbl_net, lv_color_black(), 0);
|
lv_obj_set_style_text_color(lbl_net, lv_color_white(), 0);
|
||||||
lv_obj_align(lbl_net, LV_ALIGN_LEFT_MID, 0, 0);
|
lv_obj_align(lbl_net, LV_ALIGN_LEFT_MID, 0, 0);
|
||||||
lv_label_set_text(lbl_net, "NETWORK DOWN: ---- kBps / UP: ---- kBps");
|
lv_label_set_text(lbl_net, "NETWORK DOWN: ---- kBps / UP: ---- kBps");
|
||||||
|
|
||||||
/* Local sensor readings — right-aligned */
|
/* Local sensor readings — right-aligned */
|
||||||
lbl_local = lv_label_create(bot_cont);
|
lbl_local = lv_label_create(bot_cont);
|
||||||
lv_obj_set_style_text_font(lbl_local, &InziuIosevka_Slab_CC_12px, 0);
|
lv_obj_set_style_text_font(lbl_local, &InziuIosevka_Slab_CC_12px, 0);
|
||||||
lv_obj_set_style_text_color(lbl_local, lv_color_black(), 0);
|
lv_obj_set_style_text_color(lbl_local, lv_color_white(), 0);
|
||||||
lv_obj_align(lbl_local, LV_ALIGN_RIGHT_MID, 0, 0);
|
lv_obj_align(lbl_local, LV_ALIGN_RIGHT_MID, 0, 0);
|
||||||
lv_label_set_text(lbl_local, "T: --.- H: --%");
|
lv_label_set_text(lbl_local, "T: --.- H: --%");
|
||||||
}
|
}
|
||||||
|
|||||||
5
pi/.gitignore
vendored
5
pi/.gitignore
vendored
@@ -1,5 +0,0 @@
|
|||||||
# python artifacts
|
|
||||||
*/__pycache__
|
|
||||||
__pycache__/
|
|
||||||
*.pyo
|
|
||||||
*.pyc
|
|
||||||
36
pi/PLAN.md
36
pi/PLAN.md
@@ -1,36 +0,0 @@
|
|||||||
# Pi Servers -- Roadmap
|
|
||||||
|
|
||||||
## Docker Compose
|
|
||||||
|
|
||||||
Containerize the pi servers for easier deployment.
|
|
||||||
|
|
||||||
### Options
|
|
||||||
|
|
||||||
1. **Single service** -- `run_all.py` as the entrypoint, both servers in one container
|
|
||||||
2. **Split services** -- separate containers for `stats_server.py` and `contents_server.py`
|
|
||||||
|
|
||||||
Single service is simpler. Split services allow independent scaling and restarts.
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- Volume mount `assets/` and `config/alarms.json` so they're editable without rebuilding
|
|
||||||
- Expose ports 8765 and 8766
|
|
||||||
- Network mode `host` or a bridge with known IPs for ESP32 discovery
|
|
||||||
- Restart policy: `unless-stopped`
|
|
||||||
|
|
||||||
## Repository Extraction
|
|
||||||
|
|
||||||
The `pi/` directory will become its own git repository.
|
|
||||||
|
|
||||||
### Steps
|
|
||||||
|
|
||||||
1. Extract `pi/` into a standalone repo with its own `README.md`, `requirements.txt`, and CI
|
|
||||||
2. Add it back to this project as a git submodule
|
|
||||||
3. The interface contract between the two repos is the WebSocket protocol -- JSON schemas and binary frame formats documented in `README.md`
|
|
||||||
|
|
||||||
### Benefits
|
|
||||||
|
|
||||||
- Independent versioning and release cycle
|
|
||||||
- Pi-side contributors don't need the ESP-IDF toolchain
|
|
||||||
- CI can test the Python servers in isolation
|
|
||||||
- Cleaner separation of concerns between embedded firmware and host services
|
|
||||||
132
pi/README.md
132
pi/README.md
@@ -1,132 +0,0 @@
|
|||||||
# Pi Dashboard Servers
|
|
||||||
|
|
||||||
WebSocket servers that feed system stats, alarm audio, and status images to the ESP32-S3 RLCD dashboard.
|
|
||||||
|
|
||||||
## File Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
pi/
|
|
||||||
run_all.py # Launches both servers as child processes
|
|
||||||
stats_server.py # Real system stats over WebSocket (port 8765)
|
|
||||||
contents_server.py # Alarm audio + status images over WebSocket (port 8766)
|
|
||||||
mock_server.py # Drop-in replacement for stats_server with random data
|
|
||||||
audio_handler.py # WAV loading, PCM chunking, alarm streaming
|
|
||||||
image_handler.py # PNG to 1-bit monochrome conversion, alpha compositing
|
|
||||||
alarm_scheduler.py # Loads and validates alarm config, checks firing schedule
|
|
||||||
requirements.txt
|
|
||||||
config/
|
|
||||||
alarms.json # Alarm schedule configuration
|
|
||||||
assets/
|
|
||||||
alarm/ # WAV files for alarm audio
|
|
||||||
img/ # Status images (idle.png, on_alarm.png)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Requirements
|
|
||||||
|
|
||||||
Python 3.10+
|
|
||||||
|
|
||||||
```
|
|
||||||
pip install -r requirements.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
Dependencies: `websockets`, `psutil`, `Pillow`
|
|
||||||
|
|
||||||
## Running
|
|
||||||
|
|
||||||
Start both servers:
|
|
||||||
|
|
||||||
```
|
|
||||||
python run_all.py # both servers, default config
|
|
||||||
python run_all.py --config path/to.json # both servers, custom config
|
|
||||||
```
|
|
||||||
|
|
||||||
Or run individually:
|
|
||||||
|
|
||||||
```
|
|
||||||
python stats_server.py # port 8765 only
|
|
||||||
python contents_server.py --config path/to.json # port 8766, custom config
|
|
||||||
python mock_server.py # port 8765, random data (no psutil needed)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Servers
|
|
||||||
|
|
||||||
### stats_server.py -- port 8765
|
|
||||||
|
|
||||||
Pushes a JSON object every 2 seconds with real system metrics from `psutil`:
|
|
||||||
|
|
||||||
- `cpu_pct`, `mem_pct`, `mem_used_mb`, `disk_pct`
|
|
||||||
- `cpu_temp` (reads `/sys/class/thermal/` as fallback)
|
|
||||||
- `uptime_hrs`, `net_rx_kbps`, `net_tx_kbps`
|
|
||||||
- `services` (mocked until systemd integration)
|
|
||||||
- `local_time` fields for RTC sync (`y`, `mo`, `d`, `h`, `m`, `s`)
|
|
||||||
|
|
||||||
### contents_server.py -- port 8766
|
|
||||||
|
|
||||||
Serves alarm audio and status images. Protocol:
|
|
||||||
|
|
||||||
**Status image:**
|
|
||||||
1. Text frame: `{"type":"status_image","width":120,"height":120}`
|
|
||||||
2. Binary frame: 1-bit monochrome bitmap (1800 bytes)
|
|
||||||
|
|
||||||
**Alarm audio:**
|
|
||||||
1. Text frame: `{"type":"alarm_start","sample_rate":N,"channels":N,"bits":N}`
|
|
||||||
2. Binary frames: raw PCM chunks (4096 bytes each, paced at ~90% real-time)
|
|
||||||
3. Text frame: `{"type":"alarm_stop"}`
|
|
||||||
|
|
||||||
Loads alarm config from `config/alarms.json` (override with `--config`). Checks schedule every 5 seconds, fires once per matched minute. If no config or empty config, sends idle image and blocks forever. On alarm: switches to alarm image, streams audio, switches back to idle.
|
|
||||||
|
|
||||||
### mock_server.py -- port 8765
|
|
||||||
|
|
||||||
Same JSON schema and 2-second push interval as `stats_server.py`, but all values are randomized. No `psutil` dependency -- useful for development on non-Pi machines.
|
|
||||||
|
|
||||||
Does not include `local_time` fields.
|
|
||||||
|
|
||||||
## Alarm Configuration
|
|
||||||
|
|
||||||
Config file: `config/alarms.json` -- a single alarm object or an array of alarm objects.
|
|
||||||
|
|
||||||
Example with two alarms:
|
|
||||||
|
|
||||||
```json
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"alarm_time": "0730",
|
|
||||||
"alarm_days": ["Mon", "Tue", "Wed", "Thu", "Fri"],
|
|
||||||
"alarm_audio": "assets/alarm/alarm_test.wav",
|
|
||||||
"alarm_image": "assets/img/on_alarm.png"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"alarm_time": "2300",
|
|
||||||
"alarm_audio": "assets/alarm/sleep.wav",
|
|
||||||
"alarm_image": "assets/img/sleep.png"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
```
|
|
||||||
|
|
||||||
| Field | Type | Required | Description |
|
|
||||||
|-------|------|----------|-------------|
|
|
||||||
| `alarm_time` | `string` | Yes | 4-digit HHMM, 24-hour. Fires on the matched minute. |
|
|
||||||
| `alarm_days` | `string[]` | No | 3-letter abbreviations: `Mon`–`Sun`. If omitted, fires every day. |
|
|
||||||
| `alarm_dates` | `string[]` | No | `MM/DD` strings. Ignored if `alarm_days` is also set. |
|
|
||||||
| `alarm_audio` | `string` | No | WAV path, relative to `pi/`. Default: `assets/alarm/alarm_test.wav`. |
|
|
||||||
| `alarm_image` | `string` | No | Status PNG path, relative to `pi/`. Default: `assets/img/on_alarm.png`. |
|
|
||||||
|
|
||||||
If both `alarm_days` and `alarm_dates` are present, `alarm_days` takes priority.
|
|
||||||
|
|
||||||
## Modules
|
|
||||||
|
|
||||||
### audio_handler.py
|
|
||||||
|
|
||||||
- `find_wav(path=None)` -- uses the given path if it exists, otherwise falls back to glob in `assets/alarm/`
|
|
||||||
- `read_wav(path)` -- reads WAV, returns `(pcm_bytes, sample_rate, channels, bits)`
|
|
||||||
- `stream_alarm(ws, pcm, sr, ch, bits)` -- streams one alarm cycle over WebSocket
|
|
||||||
|
|
||||||
### image_handler.py
|
|
||||||
|
|
||||||
- `load_status_image(path)` -- loads PNG, composites transparency onto white, converts to 1-bit 120x120 monochrome bitmap (black=1, MSB-first)
|
|
||||||
- `send_status_image(ws, img_bytes)` -- sends status image header + binary over WebSocket
|
|
||||||
|
|
||||||
### alarm_scheduler.py
|
|
||||||
|
|
||||||
- `load_config(path)` -- reads and validates alarm JSON; returns list of alarm dicts or `None`
|
|
||||||
- `should_fire(config)` -- checks a single alarm entry against current local time
|
|
||||||
@@ -1,135 +0,0 @@
|
|||||||
"""Alarm scheduler — load config and check firing schedule."""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
DEFAULT_CONFIG_PATH = Path(__file__).parent / "config" / "alarms.json"
|
|
||||||
|
|
||||||
VALID_DAYS = {"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}
|
|
||||||
TIME_RE = re.compile(r"^([01]\d|2[0-3])[0-5]\d$")
|
|
||||||
DATE_RE = re.compile(r"^(0[1-9]|1[0-2])/(0[1-9]|[12]\d|3[01])$")
|
|
||||||
|
|
||||||
|
|
||||||
def _validate_entry(entry: dict, index: int) -> dict | None:
    """Validate one alarm entry from the config file.

    Returns the entry unchanged when it passes validation, or None after
    logging a warning that pinpoints which field was rejected.
    """
    if not isinstance(entry, dict):
        log.warning("Alarm #%d: expected object, got %s", index, type(entry).__name__)
        return None

    # alarm_time is the only mandatory field: a 4-digit 24-hour HHMM string.
    alarm_time = entry.get("alarm_time")
    if alarm_time is None:
        log.warning("Alarm #%d: missing required field 'alarm_time'", index)
        return None
    time_ok = isinstance(alarm_time, str) and TIME_RE.match(alarm_time)
    if not time_ok:
        log.warning("Alarm #%d: invalid alarm_time '%s' — must be 4-digit HHMM", index, alarm_time)
        return None

    # alarm_days, when present, must be a list of valid 3-letter abbreviations.
    alarm_days = entry.get("alarm_days")
    if alarm_days is not None:
        days_are_strings = isinstance(alarm_days, list) and all(
            isinstance(d, str) for d in alarm_days
        )
        if not days_are_strings:
            log.warning("Alarm #%d: alarm_days must be a list of strings", index)
            return None
        rejected = [d for d in alarm_days if d not in VALID_DAYS]
        if rejected:
            log.warning("Alarm #%d: invalid day abbreviations: %s", index, rejected)
            return None

    # alarm_dates, when present, must be a list of MM/DD strings.
    alarm_dates = entry.get("alarm_dates")
    if alarm_dates is not None:
        dates_are_strings = isinstance(alarm_dates, list) and all(
            isinstance(d, str) for d in alarm_dates
        )
        if not dates_are_strings:
            log.warning("Alarm #%d: alarm_dates must be a list of strings", index)
            return None
        rejected = [d for d in alarm_dates if not DATE_RE.match(d)]
        if rejected:
            log.warning("Alarm #%d: invalid date formats (expected MM/DD): %s", index, rejected)
            return None

    log.info("Alarm #%d: time=%s days=%s", index, alarm_time, alarm_days or "(every day)")
    return entry
|
|
||||||
|
|
||||||
|
|
||||||
def load_config(path: Path) -> list[dict] | None:
    """Read and validate alarm config JSON.

    Accepts either a single alarm object or an array of alarm objects.
    Returns a list of valid alarm dicts, or None if the file is missing,
    empty, or contains no valid entries.
    Never raises — logs warnings and returns None on any problem.
    """
    # Read the raw text; FileNotFoundError is caught before the broader OSError.
    try:
        text = path.read_text(encoding="utf-8").strip()
    except FileNotFoundError:
        log.warning("Config file not found: %s", path)
        return None
    except OSError as e:
        log.warning("Cannot read config %s: %s", path, e)
        return None

    if not text:
        log.warning("Config file is empty: %s", path)
        return None

    try:
        data = json.loads(text)
    except json.JSONDecodeError as e:
        log.warning("Invalid JSON in %s: %s", path, e)
        return None

    # An empty object/array means "no alarms" — informational, not an error.
    if not data:
        log.info("Config is empty — no alarms configured")
        return None

    # Normalize: a lone object is treated as a one-element list.
    if isinstance(data, dict):
        entries = [data]
    elif isinstance(data, list):
        entries = data
    else:
        log.warning("Config must be a JSON object or array, got %s", type(data).__name__)
        return None

    # Keep only entries that survive per-entry validation.
    accepted = [
        checked
        for checked in (_validate_entry(entry, i) for i, entry in enumerate(entries))
        if checked is not None
    ]

    if not accepted:
        log.warning("No valid alarm entries in %s", path)
        return None

    log.info("Loaded %d alarm(s) from %s", len(accepted), path)
    return accepted
|
|
||||||
|
|
||||||
|
|
||||||
def should_fire(config: dict) -> bool:
    """Check if a single alarm entry should fire right now.

    Rules:
    - alarm_time must match current HHMM
    - If alarm_days is present, today's 3-letter abbreviation must be in the list
    - If alarm_days is absent but alarm_dates is present, today's MM/DD must match
    - If neither alarm_days nor alarm_dates is present, fires every day
    - If both are present, alarm_days wins (alarm_dates ignored)
    """
    moment = datetime.now()

    # Time gate first: nothing fires outside the configured minute.
    if moment.strftime("%H%M") != config["alarm_time"]:
        return False

    day_filter = config.get("alarm_days")
    date_filter = config.get("alarm_dates")

    if day_filter is not None:
        # Day-of-week filter takes priority over the date filter.
        return moment.strftime("%a") in day_filter
    if date_filter is not None:
        return moment.strftime("%m/%d") in date_filter

    # No filters — fire every day at the configured time.
    return True
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
0 700000 j
|
|
||||||
700000 1440000 u
|
|
||||||
1440000 2370000 u
|
|
||||||
2370000 3110000 i
|
|
||||||
3110000 3710000 ch
|
|
||||||
3710000 4380000 i
|
|
||||||
4380000 4800000 g
|
|
||||||
4800000 5560000 a
|
|
||||||
5560000 6180000 ts
|
|
||||||
6180000 6850000 u
|
|
||||||
6850000 7500000 j
|
|
||||||
7500000 8210000 u
|
|
||||||
8210000 9130000 u
|
|
||||||
9130000 9880000 i
|
|
||||||
9880000 10460000 ch
|
|
||||||
10460000 11150000 i
|
|
||||||
11150000 11690000 g
|
|
||||||
11690000 12470000 a
|
|
||||||
12470000 13100000 ts
|
|
||||||
13100000 13770000 u
|
|
||||||
13770000 14420000 j
|
|
||||||
14420000 15140000 u
|
|
||||||
15140000 16070000 u
|
|
||||||
16070000 16810000 i
|
|
||||||
16810000 17420000 ch
|
|
||||||
17420000 18080000 i
|
|
||||||
18080000 18610000 g
|
|
||||||
18610000 19410000 a
|
|
||||||
19410000 20020000 ts
|
|
||||||
20020000 20680000 u
|
|
||||||
20680000 21320000 j
|
|
||||||
21320000 22030000 u
|
|
||||||
22030000 22900000 u
|
|
||||||
22900000 23640000 i
|
|
||||||
23640000 24250000 ch
|
|
||||||
24250000 24920000 i
|
|
||||||
24920000 25460000 g
|
|
||||||
25460000 26200000 a
|
|
||||||
26200000 26840000 ts
|
|
||||||
26840000 27480000 u
|
|
||||||
27480000 28130000 j
|
|
||||||
28130000 28830000 u
|
|
||||||
28830000 29720000 u
|
|
||||||
29720000 30440000 i
|
|
||||||
30440000 31040000 ch
|
|
||||||
31040000 31750000 i
|
|
||||||
31750000 32600000 by
|
|
||||||
32600000 33320000 o
|
|
||||||
33320000 34120000 o
|
|
||||||
34120000 34740000 j
|
|
||||||
34740000 35350000 a
|
|
||||||
35350000 35870000 s
|
|
||||||
35870000 36510000 u
|
|
||||||
36510000 36960000 t
|
|
||||||
36960000 38220000 o
|
|
||||||
Binary file not shown.
@@ -1,42 +0,0 @@
|
|||||||
0 850000 s
|
|
||||||
850000 1580000 u
|
|
||||||
1580000 2220000 i
|
|
||||||
2220000 2660000 m
|
|
||||||
2660000 3370000 i
|
|
||||||
3370000 4140000 N
|
|
||||||
4140000 4610000 g
|
|
||||||
4610000 5780000 a
|
|
||||||
5780000 13780000 pau
|
|
||||||
13780000 14350000 k
|
|
||||||
14350000 15010000 i
|
|
||||||
15010000 15730000 e
|
|
||||||
15730000 16160000 t
|
|
||||||
16160000 16800000 e
|
|
||||||
16800000 17260000 k
|
|
||||||
17260000 17840000 u
|
|
||||||
17840000 18390000 d
|
|
||||||
18390000 19090000 a
|
|
||||||
19090000 19700000 s
|
|
||||||
19700000 20390000 a
|
|
||||||
20390000 20830000 r
|
|
||||||
20830000 22120000 i
|
|
||||||
22120000 23620000 pau
|
|
||||||
23620000 24390000 a
|
|
||||||
24390000 24810000 r
|
|
||||||
24810000 25430000 i
|
|
||||||
25430000 25860000 g
|
|
||||||
25860000 26550000 a
|
|
||||||
26550000 27000000 t
|
|
||||||
27000000 27780000 o
|
|
||||||
27780000 28520000 o
|
|
||||||
28520000 29000000 g
|
|
||||||
29000000 29740000 o
|
|
||||||
29740000 30260000 z
|
|
||||||
30260000 31110000 a
|
|
||||||
31110000 31790000 i
|
|
||||||
31790000 32190000 m
|
|
||||||
32190000 32870000 a
|
|
||||||
32870000 33480000 sh
|
|
||||||
33480000 34040000 i
|
|
||||||
34040000 34520000 t
|
|
||||||
34520000 35950000 a
|
|
||||||
Binary file not shown.
Binary file not shown.
|
Before Width: | Height: | Size: 24 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 33 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 47 KiB |
@@ -1,114 +0,0 @@
|
|||||||
"""Audio alarm functions — WAV loading and PCM streaming."""
|
|
||||||
|
|
||||||
import array
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import math
|
|
||||||
import wave
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
CHUNK_SIZE = 4096
|
|
||||||
AUDIO_DIR = Path(__file__).parent / "assets" / "alarm"
|
|
||||||
|
|
||||||
|
|
||||||
def find_wav(path: Path | None = None) -> Path:
    """Return a WAV file path.

    If *path* is given and points to an existing file, use it directly.
    Otherwise fall back to the first .wav found in the alarm assets directory.

    Raises:
        FileNotFoundError: if the fallback glob finds no .wav files at all.
    """
    if path is not None:
        candidate = Path(path)
        if candidate.is_file():
            log.info("Using audio file: %s", candidate)
            return candidate
        # Explicit path was bad — warn, then fall through to the glob.
        log.warning("Specified audio path not found: %s — falling back to glob", candidate)

    matches = list(AUDIO_DIR.glob("*.wav"))
    if not matches:
        raise FileNotFoundError(f"No .wav files found in {AUDIO_DIR}")
    log.info("Using audio file: %s", matches[0].name)
    return matches[0]
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_pcm(pcm: bytes, bits: int) -> bytes:
|
|
||||||
"""Peak-normalize PCM data to 0 dBFS.
|
|
||||||
|
|
||||||
Supports 8-bit (unsigned) and 16-bit (signed) PCM.
|
|
||||||
Returns the original bytes unchanged if already at 0 dB or silent.
|
|
||||||
"""
|
|
||||||
if bits == 16:
|
|
||||||
samples = array.array("h", pcm) # signed 16-bit
|
|
||||||
peak = max(abs(s) for s in samples) if samples else 0
|
|
||||||
if peak == 0 or peak == 32767:
|
|
||||||
return pcm
|
|
||||||
scale = 32767 / peak
|
|
||||||
samples = array.array("h", (min(32767, max(-32768, int(s * scale))) for s in samples))
|
|
||||||
elif bits == 8:
|
|
||||||
samples = array.array("B", pcm) # unsigned 8-bit, center=128
|
|
||||||
peak = max(abs(s - 128) for s in samples) if samples else 0
|
|
||||||
if peak == 0 or peak == 127:
|
|
||||||
return pcm
|
|
||||||
scale = 127 / peak
|
|
||||||
samples = array.array("B", (max(0, min(255, int((s - 128) * scale) + 128)) for s in samples))
|
|
||||||
else:
|
|
||||||
log.warning("Normalization not supported for %d-bit audio, skipping", bits)
|
|
||||||
return pcm
|
|
||||||
|
|
||||||
gain_db = 20 * __import__("math").log10(scale) if scale > 0 else 0
|
|
||||||
log.info("Normalized: peak %d → 0 dBFS (gain %.1f dB)", peak, gain_db)
|
|
||||||
return samples.tobytes()
|
|
||||||
|
|
||||||
|
|
||||||
def read_wav(path: Path) -> tuple[bytes, int, int, int]:
    """Read WAV file, normalize to 0 dBFS, return (pcm_data, sample_rate, channels, bits).

    Raises:
        ValueError: when the stdlib ``wave`` module rejects the file — it only
            understands integer PCM, so e.g. 32-bit float WAVs land here.
            Chained from the underlying ``wave.Error``.
    """
    try:
        wf = wave.open(str(path), "rb")
    except wave.Error as e:
        # Re-raise with an actionable message naming the offending file.
        raise ValueError(
            f"{path.name}: unsupported WAV format ({e}). "
            "Only 8/16-bit integer PCM is supported — no 32-bit float."
        ) from e
    with wf:
        sr = wf.getframerate()
        ch = wf.getnchannels()
        bits = wf.getsampwidth() * 8  # wave reports sample width in bytes
        pcm = wf.readframes(wf.getnframes())
        log.info("WAV loaded: %dHz %dch %dbit, %.1fs, %d bytes",
                 sr, ch, bits, len(pcm) / (sr * ch * (bits // 8)), len(pcm))
    # Normalize after the file is closed; peak-scales to 0 dBFS.
    pcm = _normalize_pcm(pcm, bits)
    return pcm, sr, ch, bits
|
|
||||||
|
|
||||||
|
|
||||||
def chunk_bytes(data: bytes, size: int):
    """Yield *data* as successive slices of at most *size* bytes.

    The final chunk may be shorter than *size*; empty input yields nothing.
    """
    offset = 0
    total = len(data)
    while offset < total:
        yield data[offset : offset + size]
        offset += size
|
|
||||||
|
|
||||||
|
|
||||||
async def stream_alarm(ws, pcm: bytes, sr: int, ch: int, bits: int):
    """Stream one alarm cycle to the connected client.

    Protocol: one JSON "alarm_start" text frame carrying the audio format,
    then the PCM payload as CHUNK_SIZE-byte binary frames paced at ~90% of
    real time, then one JSON "alarm_stop" text frame.

    Args:
        ws: connected WebSocket; must support ``await ws.send(...)`` for both
            text and binary frames.
        pcm: raw interleaved PCM payload.
        sr: sample rate in Hz.
        ch: channel count.
        bits: bits per sample.
    """
    bytes_per_sec = sr * ch * (bits // 8)
    chunk_duration = CHUNK_SIZE / bytes_per_sec
    pace_delay = chunk_duration * 0.9  # 90% real-time to avoid underrun

    # Ceiling division — the trailing partial chunk counts too.
    total_chunks = (len(pcm) + CHUNK_SIZE - 1) // CHUNK_SIZE

    start_msg = json.dumps({
        "type": "alarm_start",
        "sample_rate": sr,
        "channels": ch,
        "bits": bits,
    })
    await ws.send(start_msg)
    log.info("Sent alarm_start (%d chunks, pace %.1fms)", total_chunks, pace_delay * 1000)

    # NOTE(review): the index `i` is unused — enumerate could be dropped.
    for i, chunk in enumerate(chunk_bytes(pcm, CHUNK_SIZE)):
        await ws.send(chunk)
        await asyncio.sleep(pace_delay)

    await ws.send(json.dumps({"type": "alarm_stop"}))
    log.info("Sent alarm_stop")
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
[
|
|
||||||
{
|
|
||||||
"alarm_time": "0700",
|
|
||||||
"alarm_days": ["Mon", "Tue", "Wed", "Thu", "Fri"],
|
|
||||||
"alarm_audio": "assets/alarm/alarm_test.wav",
|
|
||||||
"alarm_image": "assets/img/on_alarm.png"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"alarm_time": "2330",
|
|
||||||
"alarm_audio": "assets/alarm/sleep.wav",
|
|
||||||
"alarm_image": "assets/img/sleep.png"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
@@ -1,130 +0,0 @@
|
|||||||
"""
|
|
||||||
Contents server — serves alarm audio and status images over WebSocket.
|
|
||||||
|
|
||||||
Streams WAV PCM chunks and pushes 1-bit monochrome status images to the
|
|
||||||
connected ESP32 dashboard client on port 8766.
|
|
||||||
|
|
||||||
Protocol:
|
|
||||||
Status image:
|
|
||||||
1. Text frame: {"type":"status_image","width":200,"height":200}
|
|
||||||
2. Binary frame: 1-bit monochrome bitmap
|
|
||||||
|
|
||||||
Alarm audio:
|
|
||||||
1. Text frame: {"type":"alarm_start","sample_rate":N,"channels":N,"bits":N}
|
|
||||||
2. Binary frames: raw PCM chunks (4096 bytes each, paced at ~90% real-time)
|
|
||||||
3. Text frame: {"type":"alarm_stop"}
|
|
||||||
"""
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import websockets
|
|
||||||
|
|
||||||
from alarm_scheduler import DEFAULT_CONFIG_PATH, load_config, should_fire
|
|
||||||
from audio_handler import find_wav, read_wav, stream_alarm
|
|
||||||
from image_handler import IMG_DIR, load_status_image, send_status_image
|
|
||||||
|
|
||||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
|
|
||||||
log = logging.getLogger("contents_server")
|
|
||||||
|
|
||||||
PORT = 8766
|
|
||||||
PI_DIR = Path(__file__).parent
|
|
||||||
|
|
||||||
# Set by main(), read by handler()
|
|
||||||
_config_path: Path = DEFAULT_CONFIG_PATH
|
|
||||||
|
|
||||||
TICK_INTERVAL = 5 # seconds between schedule checks
|
|
||||||
|
|
||||||
|
|
||||||
def _resolve_path(relative: str) -> Path:
|
|
||||||
"""Resolve a config path relative to pi/ directory."""
|
|
||||||
p = Path(relative)
|
|
||||||
if not p.is_absolute():
|
|
||||||
p = PI_DIR / p
|
|
||||||
return p
|
|
||||||
|
|
||||||
|
|
||||||
def _prepare_alarm(entry: dict) -> dict:
    """Pre-resolve paths and load resources for a single alarm entry.

    Decodes the WAV and renders the status image up front so firing the
    alarm later involves no disk I/O.

    Returns a runtime-state dict with the original config, the decoded PCM
    and its format (sample rate / channels / bits), the alarm image bytes,
    and a ``last_fired`` slot (None until the alarm first fires).
    """
    # Optional fields fall back to the bundled default assets.
    audio_path = find_wav(_resolve_path(entry.get("alarm_audio", "assets/alarm/alarm_test.wav")))
    alarm_img_path = _resolve_path(entry.get("alarm_image", "assets/img/on_alarm.png"))
    pcm, sr, ch, bits = read_wav(audio_path)
    img = load_status_image(alarm_img_path)
    return {
        "config": entry,
        "pcm": pcm, "sr": sr, "ch": ch, "bits": bits,
        "img": img,
        "last_fired": None,  # minute-stamp of the last firing; set by the ticker
    }
|
|
||||||
|
|
||||||
|
|
||||||
async def handler(ws):
    """Handle a single WebSocket connection.

    Runs two concurrent tasks for the lifetime of the connection:
    - alarm_ticker: polls the alarm schedule every TICK_INTERVAL seconds and,
      on a match, pushes the alarm image, streams the audio, then restores
      the idle image.
    - receiver: answers client "request_image" messages with whatever image
      is current at that moment.

    Config and images are (re)loaded per connection, so editing the config
    file takes effect on the next client connect.
    """
    remote = ws.remote_address
    log.info("Client connected: %s:%d", remote[0], remote[1])

    configs = load_config(_config_path)
    img_idle = load_status_image(IMG_DIR / "idle.png")
    # Shared between the two tasks below; receiver reads, ticker writes.
    current_img = img_idle

    alarms = [_prepare_alarm(entry) for entry in configs] if configs else []

    async def alarm_ticker():
        """Poll the schedule; fire each alarm at most once per matched minute."""
        nonlocal current_img
        if not alarms:
            log.info("No alarms configured — idling forever")
            # Park this task forever; receiver keeps serving image requests.
            await asyncio.Future()
            return

        while True:
            for alarm in alarms:
                if should_fire(alarm["config"]):
                    current_minute = datetime.now().strftime("%Y%m%d%H%M")

                    # Dedupe: TICK_INTERVAL polls can hit the same minute
                    # several times, but each alarm fires only once per minute.
                    if current_minute != alarm["last_fired"]:
                        alarm["last_fired"] = current_minute
                        log.info("Alarm firing: %s at %s",
                                 alarm["config"]["alarm_time"], current_minute)
                        current_img = alarm["img"]
                        await send_status_image(ws, current_img)
                        await stream_alarm(ws, alarm["pcm"], alarm["sr"],
                                           alarm["ch"], alarm["bits"])
                        # Audio done — revert to the idle image.
                        current_img = img_idle
                        await send_status_image(ws, current_img)

            await asyncio.sleep(TICK_INTERVAL)

    async def receiver():
        """Serve client pull requests for the current status image."""
        async for msg in ws:
            try:
                data = json.loads(msg)
            except (json.JSONDecodeError, TypeError):
                # Ignore binary frames and malformed text frames.
                continue
            if data.get("type") == "request_image":
                log.info("Client requested image — sending current (%d bytes)",
                         len(current_img))
                await send_status_image(ws, current_img)

    try:
        await asyncio.gather(alarm_ticker(), receiver())
    except websockets.exceptions.ConnectionClosed:
        log.info("Client disconnected: %s:%d", remote[0], remote[1])
|
|
||||||
|
|
||||||
|
|
||||||
async def main():
    """Start the WebSocket server and run until the process is stopped."""
    log.info("Contents server starting on port %d", PORT)
    async with websockets.serve(handler, "0.0.0.0", PORT):
        # An awaited bare Future never completes — serve forever.
        await asyncio.Future()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # CLI entry point: parse the optional --config override, publish it to
    # the module-level _config_path (read by handler() per connection),
    # then run the server's event loop.
    parser = argparse.ArgumentParser(description="Alarm contents server")
    parser.add_argument("--config", type=Path, default=DEFAULT_CONFIG_PATH,
                        help="Path to alarm config JSON (default: %(default)s)")
    args = parser.parse_args()
    _config_path = args.config
    asyncio.run(main())
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
"""Status image functions — loading, alpha compositing, and WS transmission."""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from PIL import Image
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Directory holding the bundled status images, relative to this module.
IMG_DIR = Path(__file__).parent / "assets" / "img"
# Output bitmaps are STATUS_IMG_SIZE x STATUS_IMG_SIZE pixels, 1 bit each.
STATUS_IMG_SIZE = 200
# Grayscale values below this become black (bit = 1); others white (bit = 0).
MONOCHROME_THRESHOLD = 180
|
|
||||||
|
|
||||||
|
|
||||||
def load_status_image(path: Path) -> bytes:
    """Load a PNG, convert to 1-bit 200x200 monochrome bitmap (MSB-first, black=1).

    Transparent pixels are composited onto white so they don't render as black.

    Args:
        path: Filesystem path to the source image (any Pillow-readable format).

    Returns:
        Packed 1-bit bitmap bytes, row-major, MSB-first, black pixels set.
    """
    # Context manager releases the underlying file handle; the original
    # called Image.open() without ever closing it.
    with Image.open(path) as src:
        # Composite transparent pixels onto a white background so alpha
        # doesn't decode as black after the grayscale conversion.
        if src.mode in ("RGBA", "LA", "PA"):
            bg = Image.new("RGBA", src.size, (255, 255, 255, 255))
            bg.paste(src, mask=src.split()[-1])
            img = bg
        else:
            img = src

        img = img.convert("L")

        # Resize to fit within STATUS_IMG_SIZE x STATUS_IMG_SIZE, preserving
        # aspect ratio. (A previous comment claimed 120x120, which was wrong.)
        img.thumbnail((STATUS_IMG_SIZE, STATUS_IMG_SIZE), Image.LANCZOS)

        # Paste centered onto a white canvas of the full output size.
        canvas = Image.new("L", (STATUS_IMG_SIZE, STATUS_IMG_SIZE), 255)
        x_off = (STATUS_IMG_SIZE - img.width) // 2
        y_off = (STATUS_IMG_SIZE - img.height) // 2
        canvas.paste(img, (x_off, y_off))

    # Threshold to 1-bit: black (< MONOCHROME_THRESHOLD) -> 1, white -> 0.
    bw = canvas.point(lambda p: 1 if p < MONOCHROME_THRESHOLD else 0, "1")
    raw = bw.tobytes()
    log.info("Status image loaded: %s -> %d bytes", path.name, len(raw))
    return raw
|
|
||||||
|
|
||||||
|
|
||||||
async def send_status_image(ws, img_bytes: bytes):
    """Send a status image over the WebSocket (text header + binary payload).

    The JSON header frame always precedes the binary frame so the client
    knows the bitmap dimensions before the raw bytes arrive.
    """
    meta = {"type": "status_image", "width": STATUS_IMG_SIZE, "height": STATUS_IMG_SIZE}
    await ws.send(json.dumps(meta))
    await ws.send(img_bytes)
    log.info("Sent status image (%d bytes)", len(img_bytes))
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""Mock WebSocket server that sends randomized Pi stats every 2 seconds."""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import random
|
|
||||||
import time
|
|
||||||
|
|
||||||
import websockets
|
|
||||||
|
|
||||||
|
|
||||||
def generate_stats():
    """Build one randomized stats payload mimicking the real Pi server schema."""
    # "running" is repeated in the choice pools so outages stay rare.
    svc_list = [
        {"name": "docker", "status": random.choice(["running", "running", "running", "stopped"])},
        {"name": "pihole", "status": random.choice(["running", "running", "running", "stopped"])},
        {"name": "nginx", "status": random.choice(["running", "running", "stopped"])},
        {"name": "sshd", "status": "running"},
    ]
    payload = {
        "cpu_pct": round(random.uniform(5, 95), 1),
        "mem_pct": round(random.uniform(30, 85), 1),
        "mem_used_mb": random.randint(512, 3200),
        "disk_pct": round(random.uniform(20, 80), 1),
        "cpu_temp": round(random.uniform(35, 78), 1),
        "uptime_hrs": round(random.uniform(1, 2000), 1),
        "net_rx_kbps": round(random.uniform(0, 5000), 1),
        "net_tx_kbps": round(random.uniform(0, 2000), 1),
        "services": svc_list,
        "timestamp": int(time.time()),
    }
    return payload
|
|
||||||
|
|
||||||
|
|
||||||
async def handler(websocket):
    """Per-connection loop: push a fresh randomized payload every 2 seconds."""
    peer = websocket.remote_address
    print(f"Client connected: {peer}")
    try:
        while True:
            await websocket.send(json.dumps(generate_stats()))
            await asyncio.sleep(2)
    except websockets.ConnectionClosed:
        # Normal teardown: the peer went away.
        print(f"Client disconnected: {peer}")
|
|
||||||
|
|
||||||
|
|
||||||
async def main():
    """Serve randomized stats on port 8765 until interrupted."""
    print("Mock Pi stats server starting on ws://0.0.0.0:8765")
    server = websockets.serve(handler, "0.0.0.0", 8765)
    async with server:
        await asyncio.Future()  # block forever; cancellation stops the server
|
|
||||||
|
|
||||||
|
|
||||||
# Script entry point: start the asyncio event loop and serve forever.
if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
websockets>=12.0
|
|
||||||
psutil>=5.9.0
|
|
||||||
Pillow>=10.0
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
#!/usr/bin/env python3
"""Launch stats_server and contents_server as child processes."""
import subprocess, sys, signal
from pathlib import Path

script_dir = Path(__file__).parent

# Forward any CLI args (e.g. --config) to contents_server
extra_args = sys.argv[1:]

procs = [
    subprocess.Popen([sys.executable, script_dir / "stats_server.py"]),
    subprocess.Popen([sys.executable, script_dir / "contents_server.py"] + extra_args),
]


def _shutdown(*_):
    # Propagate SIGINT/SIGTERM to both children.
    for child in procs:
        child.terminate()


signal.signal(signal.SIGINT, _shutdown)
signal.signal(signal.SIGTERM, _shutdown)
print(f"Running stats_server (PID {procs[0].pid}) + contents_server (PID {procs[1].pid})")
for child in procs:
    child.wait()
|
|
||||||
@@ -1,163 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""WebSocket server that sends real Pi system stats every 2 seconds.
|
|
||||||
|
|
||||||
Drop-in replacement for mock_server.py. Same port (8765), same JSON schema,
|
|
||||||
same 2s push interval. Services remain mocked until systemd integration is added.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import psutil
|
|
||||||
import websockets
|
|
||||||
|
|
||||||
# Prime the CPU percent counter (first call always returns 0.0)
psutil.cpu_percent(interval=None)

# Network baseline for delta calculation
# (_get_net_throughput() compares the next sample against these two.)
_prev_net = psutil.net_io_counters()
_prev_net_time = time.monotonic()
|
|
||||||
|
|
||||||
|
|
||||||
def _get_cpu_temp() -> float:
    """Read CPU temperature with fallback for different Pi OS versions.

    Returns 0.0 when no temperature source is available.
    """
    # Preferred path: psutil's sensors API.
    try:
        readings = psutil.sensors_temperatures()
        cpu_readings = readings.get("cpu_thermal") or []
        if cpu_readings:
            return round(cpu_readings[0].current, 1)
    except AttributeError:
        # sensors_temperatures() doesn't exist on this platform/psutil build.
        pass

    # Fallback: read sysfs directly (value is in millidegrees)
    sysfs_node = Path("/sys/class/thermal/thermal_zone0/temp")
    try:
        return round(int(sysfs_node.read_text().strip()) / 1000.0, 1)
    except (FileNotFoundError, ValueError, PermissionError):
        return 0.0
|
|
||||||
|
|
||||||
|
|
||||||
def _get_net_throughput() -> tuple[float, float]:
    """Calculate network rx/tx in kbps since last call.

    Updates the module-level baseline (_prev_net / _prev_net_time) on each
    successful sample; returns (0.0, 0.0) if no time has elapsed.
    """
    global _prev_net, _prev_net_time

    sample_time = time.monotonic()
    sample = psutil.net_io_counters()
    dt = sample_time - _prev_net_time

    if dt <= 0:
        # Clock hasn't advanced — can't form a rate; keep the old baseline.
        return 0.0, 0.0

    # bytes delta -> bits (*8), divided by dt in ms -> kilobits per second
    rx = round((sample.bytes_recv - _prev_net.bytes_recv) * 8 / (dt * 1000), 1)
    tx = round((sample.bytes_sent - _prev_net.bytes_sent) * 8 / (dt * 1000), 1)

    _prev_net, _prev_net_time = sample, sample_time
    return rx, tx
|
|
||||||
|
|
||||||
# only services that matter
# Maps Docker container name -> display label used in the stats payload;
# containers not listed here are ignored by _get_docker_services().
SERVICES_ALIASES = {
    "gitea": "gitea",
    "samba": "samba",
    "pihole": "pihole",
    "qbittorrent": "qbittorrent",
    "frpc-primary": "frpc (ny)",
    "pinepods": "pinepods",
    "frpc-ssh": "frpc (ssh)",
    "jellyfin": "jellyfin",
}
|
|
||||||
def _get_docker_services() -> list[dict]:
    """Query Docker for real container statuses with ternary status model.

    Returns [] when docker is unavailable, times out, or exits non-zero.
    """
    try:
        result = subprocess.run(
            ["docker", "ps", "-a", "--format", "{{.Names}}\t{{.Status}}"],
            capture_output=True, text=True, timeout=5,
        )
    except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
        return []
    if result.returncode != 0:
        return []

    def _classify(raw: str) -> str:
        # "Up ... (unhealthy)" or "Up ... Restarting" degrade to warning;
        # anything not starting with "Up" counts as stopped.
        if not raw.startswith("Up"):
            return "stopped"
        if "unhealthy" in raw or "Restarting" in raw:
            return "warning"
        return "running"

    services = []
    for line in result.stdout.strip().splitlines():
        name, sep, status_text = line.partition("\t")
        if not sep or name not in SERVICES_ALIASES:
            continue
        services.append({"name": SERVICES_ALIASES[name],
                         "status": _classify(status_text)})

    # Sort: warnings first, then stopped, then running (problems float to top)
    rank = {"warning": 0, "stopped": 1, "running": 2}
    services.sort(key=lambda entry: rank.get(entry["status"], 3))

    return services
|
|
||||||
|
|
||||||
|
|
||||||
def _local_time_fields() -> dict:
|
|
||||||
"""Current local time as broken-down fields for RTC sync."""
|
|
||||||
now = datetime.now()
|
|
||||||
return {
|
|
||||||
"y": now.year,
|
|
||||||
"mo": now.month,
|
|
||||||
"d": now.day,
|
|
||||||
"h": now.hour,
|
|
||||||
"m": now.minute,
|
|
||||||
"s": now.second,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_stats() -> dict:
    """Assemble one stats payload: CPU, memory, disk, net, services, clock."""
    mem = psutil.virtual_memory()
    # NOTE(review): hard-coded NAS mount; presumably guaranteed on this host —
    # confirm, since a missing path would raise here.
    disk = psutil.disk_usage("/mnt/buffalo")
    rx_kbps, tx_kbps = _get_net_throughput()

    payload = {
        "cpu_pct": psutil.cpu_percent(interval=None),
        "mem_pct": round(mem.percent, 1),
        "mem_used_mb": int(mem.used // (1024 * 1024)),
        "disk_pct": round(disk.percent, 1),
        "cpu_temp": _get_cpu_temp(),
        "uptime_hrs": round((time.time() - psutil.boot_time()) / 3600, 1),
        # Despite the *_kbps names, the /8 converts to kilobytes per second.
        "net_rx_kbps": rx_kbps / 8,
        "net_tx_kbps": tx_kbps / 8,  # kByte/s for humans
        "services": _get_docker_services(),
        "timestamp": int(time.time()),
        "local_time": _local_time_fields(),
    }
    return payload
|
|
||||||
|
|
||||||
|
|
||||||
async def handler(websocket):
    """Per-connection loop: push a real stats payload every 2 seconds."""
    client = websocket.remote_address
    print(f"Client connected: {client}")
    try:
        while True:
            payload = generate_stats()
            await websocket.send(json.dumps(payload))
            await asyncio.sleep(2)
    except websockets.ConnectionClosed:
        # Normal teardown: the peer went away.
        print(f"Client disconnected: {client}")
|
|
||||||
|
|
||||||
|
|
||||||
async def main():
    """Serve real Pi stats on port 8765 until interrupted."""
    print("Pi stats server starting on ws://0.0.0.0:8765")
    async with websockets.serve(handler, "0.0.0.0", 8765):
        # Sleep forever on a bare Future; cancellation shuts the server down.
        await asyncio.Future()
|
|
||||||
|
|
||||||
|
|
||||||
# Script entry point: start the asyncio event loop and serve forever.
if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
Reference in New Issue
Block a user