diff --git a/.local_runtime/launchd/com.zhouxuanting.opencode.health-monitor.plist b/.local_runtime/launchd/com.zhouxuanting.opencode.health-monitor.plist
new file mode 100644
index 00000000..d8d6c9a8
--- /dev/null
+++ b/.local_runtime/launchd/com.zhouxuanting.opencode.health-monitor.plist
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+    <key>Label</key>
+    <string>com.zhouxuanting.opencode.health-monitor</string>
+    <key>ProgramArguments</key>
+    <array>
+        <string>/Users/zhouxuanting/opencode-context-infrastructure/.local_runtime/scripts/run-scheduled-job</string>
+        <string>health_monitor</string>
+    </array>
+    <key>WorkingDirectory</key>
+    <string>/Users/zhouxuanting/opencode-context-infrastructure</string>
+    <key>StartCalendarInterval</key>
+    <dict>
+        <key>Hour</key>
+        <integer>8</integer>
+        <key>Minute</key>
+        <integer>1</integer>
+    </dict>
+    <key>StandardOutPath</key>
+    <string>/Users/zhouxuanting/opencode-context-infrastructure/.local_runtime/logs/jobs/health_monitor.log</string>
+    <key>StandardErrorPath</key>
+    <string>/Users/zhouxuanting/opencode-context-infrastructure/.local_runtime/logs/jobs/health_monitor.err.log</string>
+</dict>
+</plist>
diff --git a/.local_runtime/launchd/com.zhouxuanting.opencode.health-receiver.plist b/.local_runtime/launchd/com.zhouxuanting.opencode.health-receiver.plist
new file mode 100644
index 00000000..a72e9abd
--- /dev/null
+++ b/.local_runtime/launchd/com.zhouxuanting.opencode.health-receiver.plist
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+    <key>Label</key>
+    <string>com.zhouxuanting.opencode.health-receiver</string>
+    <key>ProgramArguments</key>
+    <array>
+        <string>/Users/zhouxuanting/opencode-context-infrastructure/.venv/bin/python3</string>
+        <string>/Users/zhouxuanting/opencode-context-infrastructure/tools/health_data_receiver.py</string>
+        <string>--port</string>
+        <string>9876</string>
+    </array>
+    <key>WorkingDirectory</key>
+    <string>/Users/zhouxuanting/opencode-context-infrastructure</string>
+    <key>KeepAlive</key>
+    <true/>
+    <key>StandardOutPath</key>
+    <string>/Users/zhouxuanting/opencode-context-infrastructure/.local_runtime/logs/services/health_receiver.log</string>
+    <key>StandardErrorPath</key>
+    <string>/Users/zhouxuanting/opencode-context-infrastructure/.local_runtime/logs/services/health_receiver.err.log</string>
+</dict>
+</plist>
diff --git a/.local_runtime/scripts/run-scheduled-job b/.local_runtime/scripts/run-scheduled-job
new file mode 100755
index 00000000..95fad6e9
--- /dev/null
+++ b/.local_runtime/scripts/run-scheduled-job
@@ -0,0 +1,88 @@
#!/bin/bash
# run-scheduled-job — single entry point for all launchd-scheduled jobs.
#
# Usage: run-scheduled-job <job-name> [extra args forwarded to the job script]
#
# Each job name maps to a Python script under periodic_jobs/; required env
# vars are loaded from the repo's .env and validated per job before exec'ing.
set -euo pipefail
# launchd starts jobs with a minimal PATH; prepend the tool locations we rely on.
export PATH="$HOME/.opencode/bin:/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:${PATH:-}"

# Repo root, resolved relative to this script (.local_runtime/scripts/ -> ../..).
ROOT="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")/../.." && pwd -P)"
RUNTIME_ROOT="$ROOT/.local_runtime"
ENV_FILE="$ROOT/.env"
PYTHON_BIN="$ROOT/.venv/bin/python3"
JOB="${1:-}"
shift || true

# Export every variable defined in .env (if present) into the environment.
load_env() {
  if [[ -f "$ENV_FILE" ]]; then
    set -a
    source "$ENV_FILE"
    set +a
  fi
}

# Exit 0 with a SKIP message when required env vars are absent, so launchd
# does not record a missing credential as a job failure.
require_env() {
  local missing=()
  for name in "$@"; do
    if [[ -z "${!name:-}" ]]; then
      missing+=("$name")
    fi
  done
  if [[ ${#missing[@]} -gt 0 ]]; then
    echo "SKIP: missing env vars: ${missing[*]}"
    exit 0
  fi
}

# NOTE(review): usage text looks truncated — likely meant 'Usage: ... <job>';
# confirm against the original before relying on the message.
[[ -n "$JOB" ]] || { echo "Usage: $(basename "$0") " >&2; exit 1; }
cd "$ROOT"
mkdir -p "$RUNTIME_ROOT/logs/jobs"
load_env

[[ -x "$PYTHON_BIN" ]] || { echo "Missing virtualenv python: $PYTHON_BIN" >&2; exit 1; }

# Dispatch: each branch validates its required env vars, then replaces this
# process with the job via exec (keeps launchd's PID tracking accurate).
case "$JOB" in
  observer)
    require_env OPENCODE_BASE_URL OPENCODE_USERNAME OPENCODE_PASSWORD
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/observer.py" "$@"
    ;;
  reflector)
    require_env OPENCODE_BASE_URL OPENCODE_USERNAME OPENCODE_PASSWORD
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/reflector.py" "$@"
    ;;
  crontab_monitor)
    require_env OPENCODE_BASE_URL OPENCODE_USERNAME OPENCODE_PASSWORD GMAIL_USERNAME GMAIL_APP_PASSWORD
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/jobs/crontab_monitor.py" "$@"
    ;;
  ai_news_daily)
    require_env OPENCODE_BASE_URL OPENCODE_USERNAME OPENCODE_PASSWORD GMAIL_USERNAME GMAIL_APP_PASSWORD TAVILY_API_KEY
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/jobs/ai_news_survey.py" --mode daily "$@"
    ;;
  ai_news_weekly)
    require_env OPENCODE_BASE_URL OPENCODE_USERNAME OPENCODE_PASSWORD GMAIL_USERNAME GMAIL_APP_PASSWORD TAVILY_API_KEY
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/jobs/ai_news_survey.py" --mode weekly "$@"
    ;;
  builders_digest)
    # follow-builders skill: prepare feed JSON → OpenCode remix → deliver
    # NOTE(review): node path is hard-coded; on Apple Silicon Homebrew installs
    # node under /opt/homebrew/bin — confirm this path on the target machine.
    NODE_BIN="/usr/local/bin/node"
    SKILL_DIR="$HOME/.claude/skills/follow-builders/scripts"
    [[ -x "$NODE_BIN" ]] || { echo "Missing node: $NODE_BIN" >&2; exit 1; }
    [[ -d "$SKILL_DIR" ]] || { echo "Missing skill dir: $SKILL_DIR" >&2; exit 1; }
    require_env OPENCODE_BASE_URL OPENCODE_USERNAME OPENCODE_PASSWORD
    # ${SKILL_DIR%/scripts} passes the skill root, not the scripts subdir.
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/jobs/builders_digest.py" --node-bin "$NODE_BIN" --skill-dir "${SKILL_DIR%/scripts}" "$@"
    ;;
  action_advisor)
    require_env OPENCODE_BASE_URL OPENCODE_USERNAME OPENCODE_PASSWORD GMAIL_USERNAME GMAIL_APP_PASSWORD
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/jobs/action_advisor.py" "$@"
    ;;
  life_record_digest)
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/jobs/life_record_digest.py" "$@"
    ;;
  github_trending)
    require_env OPENCODE_BASE_URL OPENCODE_USERNAME OPENCODE_PASSWORD
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/jobs/github_trending.py" "$@"
    ;;
  health_monitor)
    # No env requirements: health_monitor is purely local/deterministic.
    exec "$PYTHON_BIN" "$ROOT/periodic_jobs/ai_heartbeat/src/v0/jobs/health_monitor.py" "$@"
    ;;
  *)
    echo "Unknown job: $JOB" >&2
    exit 1
    ;;
esac
diff --git a/contexts/health/daily/.gitkeep b/contexts/health/daily/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/contexts/health/daily/2026-04-07.md b/contexts/health/daily/2026-04-07.md
new file mode 100644
index 00000000..cc220f73
--- /dev/null
+++ b/contexts/health/daily/2026-04-07.md
@@ -0,0 +1,16 @@
+# Health Summary 2026-04-07
+## Sleep
+- Total: 7h00m (23:15 - 06:15)
+- Deep: 85min | REM: 110min | Light: 195min | Awake: 30min
+## Heart
+- Resting HR: 52 bpm
+- HRV: 45 ms
+- Range: 48-155 bpm
+
+## Activity
+- Steps: 8,234
+- Active energy: 520 kcal | Exercise: 35 min | Stand: 10 hours
+
+## Blood Oxygen
+- Avg: 97% | Min: 94%
+
diff --git a/contexts/health/data/.gitkeep b/contexts/health/data/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/contexts/health/data/2026-04-07.json b/contexts/health/data/2026-04-07.json
new file mode 100644
index 00000000..424ec965
--- /dev/null
+++ b/contexts/health/data/2026-04-07.json
@@ -0,0 +1,29 @@
+{
+ "date": "2026-04-07",
+ "sleep": {
+ "total_minutes": 420,
+ "deep_minutes": 85,
+ "rem_minutes": 110,
+ "light_minutes": 195,
+ "awake_minutes": 30,
+ "bedtime": "23:15",
+ "wakeup": "06:15"
+ },
+ "heart": {
+ "resting_hr": 52,
+ "hrv_avg": 45,
+ "hrv_during_sleep": 52,
+ "hr_min": 48,
+ "hr_max": 155
+ },
+ "activity": {
+ "steps": 8234,
+ "active_energy_kcal": 520,
+ "exercise_minutes": 35,
+ "stand_hours": 10
+ },
+ "blood_oxygen": {
+ "avg": 97,
+ "min": 94
+ }
+}
diff --git a/contexts/health/weekly/.gitkeep b/contexts/health/weekly/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/health_monitor_setup.md b/docs/health_monitor_setup.md
new file mode 100644
index 00000000..52338426
--- /dev/null
+++ b/docs/health_monitor_setup.md
@@ -0,0 +1,332 @@
+# Health Monitor Setup Guide
+
+Apple Watch Ultra 3 健康数据 → Mac 自动化采集 → 每日摘要 → Action Advisor 联动。
+
+## 架构概览
+
+```
+iPhone (08:00) Mac
+┌──────────────┐ ┌──────────────────────┐
+│ Apple Health │ │ health_data_receiver │
+│ (HealthKit) │──── HTTP ────→│ (port 9876) │
+│ │ POST JSON │ → contexts/health/data│
+│ Shortcuts │ │ │
+│ (08:00 触发) │ ├──────────────────────┤
+└──────────────┘ │ health_monitor.py │
+ │ (08:01 launchd) │
+ │ → contexts/health/ │
+ │ daily/YYYY-MM-DD.md │
+ ├──────────────────────┤
+ │ action_advisor.py │
+ │ (09:15 launchd) │
+ │ → 读取健康摘要 │
+ │ → 融合进每日建议邮件 │
+ └──────────────────────┘
+```
+
+## Step 1: Mac 端部署
+
+### 1.1 启动 Health Data Receiver
+
+```bash
+cd ~/opencode-context-infrastructure
+
+# 测试运行
+python3 tools/health_data_receiver.py --port 9876
+
+# 验证
+curl http://localhost:9876/ping
+# → {"status": "ok"}
+```
+
+### 1.2 注册 launchd 服务
+
+```bash
+# Health Data Receiver (常驻服务)
+cp .local_runtime/launchd/com.zhouxuanting.opencode.health-receiver.plist \
+ ~/Library/LaunchAgents/
+launchctl load ~/Library/LaunchAgents/com.zhouxuanting.opencode.health-receiver.plist
+
+# Health Monitor (每日 08:01 定时任务)
+cp .local_runtime/launchd/com.zhouxuanting.opencode.health-monitor.plist \
+ ~/Library/LaunchAgents/
+launchctl load ~/Library/LaunchAgents/com.zhouxuanting.opencode.health-monitor.plist
+```
+
+### 1.3 确认 Mac 局域网 IP
+
+```bash
+ipconfig getifaddr en0
+# 记下这个 IP,Shortcuts 配置时需要用到
+# 例如: 192.168.1.100
+```
+
+### 1.4 (可选) 配置 Token 认证
+
+在 `.env` 中添加:
+```
+HEALTH_RECEIVER_TOKEN=your-secret-token-here
+```
+
+## Step 2: iPhone Shortcuts 配置
+
+### 2.1 创建 Shortcut
+
+在 iPhone 上打开 Shortcuts app,创建一个新 Shortcut,命名为 **"Health Export"**。
+
+按以下顺序添加 Action:
+
+#### Action 1: Find Health Samples (Sleep Analysis)
+
+```
+Find Health Samples where
+ Type is Sleep Analysis
+ Start Date is in the last 1 day
+Sort by Start Date
+Limit to 20
+```
+
+保存到变量: `SleepSamples`
+
+#### Action 2: Find Health Samples (Resting Heart Rate)
+
+```
+Find Health Samples where
+ Type is Resting Heart Rate
+ Start Date is in the last 1 day
+Sort by Start Date
+Limit to 1
+```
+
+保存到变量: `RestingHR`
+
+#### Action 3: Find Health Samples (Heart Rate Variability)
+
+```
+Find Health Samples where
+ Type is Heart Rate Variability
+ Start Date is in the last 1 day
+Sort by Start Date
+Limit to 1
+```
+
+保存到变量: `HRV`
+
+#### Action 4: Find Health Samples (Step Count)
+
+```
+Find Health Samples where
+ Type is Step Count
+ Start Date is in the last 1 day
+Sort by Start Date
+```
+
+保存到变量: `Steps`
+
+#### Action 5: Find Health Samples (Active Energy)
+
+```
+Find Health Samples where
+ Type is Active Energy Burned
+ Start Date is in the last 1 day
+Sort by Start Date
+```
+
+保存到变量: `ActiveEnergy`
+
+#### Action 6: Find Health Samples (Blood Oxygen)
+
+```
+Find Health Samples where
+ Type is Blood Oxygen Saturation
+ Start Date is in the last 1 day
+Sort by Start Date
+```
+
+保存到变量: `SpO2`
+
+#### Action 7: Build JSON (Text action)
+
+使用 Text action 组装 JSON。由于 Shortcuts 对 HealthKit 数据的处理有限,建议用以下模板,手动插入变量:
+
+```json
+{
+ "date": "{CurrentDate format:yyyy-MM-dd, adjusted -1 day}",
+ "sleep": {
+ "total_minutes": {SleepTotal},
+ "deep_minutes": {DeepSleep},
+ "rem_minutes": {REMSleep},
+ "light_minutes": {LightSleep},
+ "awake_minutes": {AwakeMins},
+ "bedtime": "{Bedtime}",
+ "wakeup": "{Wakeup}"
+ },
+ "heart": {
+ "resting_hr": {RestingHR},
+ "hrv_avg": {HRV},
+ "hr_min": 0,
+ "hr_max": 0
+ },
+ "activity": {
+ "steps": {TotalSteps},
+ "active_energy_kcal": {TotalActiveEnergy},
+ "exercise_minutes": 0,
+ "stand_hours": 0
+ },
+ "blood_oxygen": {
+ "avg": {SpO2Avg},
+ "min": {SpO2Min}
+ }
+}
+```
+
+> 注意:Shortcuts 中 Sleep Analysis 返回的数据需要按 category 区分 (In Bed / Asleep - Deep / Asleep - REM / Asleep - Core / Awake),你需要用 Repeat/Filter 来分别统计各阶段时长。
+
+#### Action 8: Get Contents of URL
+
+```
+URL: http://<你的Mac IP>:9876/health
+Method: POST
+Headers:
+ Content-Type: application/json
+ Authorization: Bearer <你的token> (如果配置了的话)
+Request Body: {上一步的 Text 输出}
+```
+
+### 2.2 配置自动化触发
+
+1. 打开 Shortcuts → Automation → + → Time of Day
+2. 时间设为 **08:00**
+3. 选择 "Run Immediately"
+4. Action: Run Shortcut → 选择 "Health Export"
+
+### 2.3 简化方案 (推荐先用这个)
+
+上面的纯 Shortcuts 方案比较复杂(特别是 Sleep 分期的解析)。如果配置困难,可以用 **Health Auto Export** app:
+
+1. App Store 下载 Health Auto Export (Pro 版支持自动化)
+2. 配置导出指标:Sleep Analysis, Resting HR, HRV, Steps, Active Energy, SpO2
+3. 导出格式:JSON
+4. 导出目标:REST API → `http://<你的Mac IP>:9876/health`
+5. 频率:每日 08:00
+
+该 app 会自动处理 HealthKit 数据格式转换,省去手动解析。
+
+## Step 3: 验证
+
+### 手动推送测试数据
+
+```bash
+curl -X POST http://localhost:9876/health \
+ -H "Content-Type: application/json" \
+ -d '{
+ "date": "2026-04-07",
+ "sleep": {
+ "total_minutes": 420,
+ "deep_minutes": 85,
+ "rem_minutes": 110,
+ "light_minutes": 195,
+ "awake_minutes": 30,
+ "bedtime": "23:15",
+ "wakeup": "06:15"
+ },
+ "heart": {
+ "resting_hr": 52,
+ "hrv_avg": 45,
+ "hrv_during_sleep": 52,
+ "hr_min": 48,
+ "hr_max": 155
+ },
+ "activity": {
+ "steps": 8234,
+ "active_energy_kcal": 520,
+ "exercise_minutes": 35,
+ "stand_hours": 10
+ },
+ "blood_oxygen": {
+ "avg": 97,
+ "min": 94
+ }
+ }'
+```
+
+### 运行 health_monitor
+
+```bash
+python3 periodic_jobs/ai_heartbeat/src/v0/jobs/health_monitor.py --date 2026-04-07
+
+# 检查生成的摘要
+cat contexts/health/daily/2026-04-07.md
+```
+
+### 验证 Action Advisor 集成
+
+```bash
+python3 periodic_jobs/ai_heartbeat/src/v0/jobs/action_advisor.py --dry-run
+# 输出中应该能看到 "health" 在 Signals collected 列表里
+```
+
+## 数据格式参考
+
+### 原始 JSON (contexts/health/data/YYYY-MM-DD.json)
+
+```json
+{
+ "date": "2026-04-07",
+ "sleep": {
+ "total_minutes": 420,
+ "deep_minutes": 85,
+ "rem_minutes": 110,
+ "light_minutes": 195,
+ "awake_minutes": 30,
+ "bedtime": "23:15",
+ "wakeup": "06:15"
+ },
+ "heart": {
+ "resting_hr": 52,
+ "hrv_avg": 45,
+ "hrv_during_sleep": 52,
+ "hr_min": 48,
+ "hr_max": 155
+ },
+ "activity": {
+ "steps": 8234,
+ "active_energy_kcal": 520,
+ "exercise_minutes": 35,
+ "stand_hours": 10
+ },
+ "blood_oxygen": {
+ "avg": 97,
+ "min": 94
+ }
+}
+```
+
+## 时间线总览
+
+| 时间 | 组件 | 动作 |
+|------|------|------|
+| 08:00 | iPhone Shortcuts | 查询昨日 HealthKit 数据 → POST JSON 到 Mac |
+| 08:01 | health_monitor.py | 读取 JSON → 生成每日摘要 → 检测异常 → 更新基线 |
+| 09:15 | action_advisor.py | 读取健康摘要 → 融合进今日行动建议 → 发邮件 |
+| 周日 08:01 | health_monitor.py | 额外生成周报(7 天趋势分析) |
+
+## 故障排查
+
+```bash
+# 检查 receiver 是否在运行
+curl http://localhost:9876/ping
+
+# 查看 receiver 日志
+tail -f .local_runtime/logs/services/health_receiver.log
+
+# 查看 monitor 日志
+tail -f .local_runtime/logs/jobs/health_monitor.log
+
+# 手动重启 receiver
+launchctl stop com.zhouxuanting.opencode.health-receiver
+launchctl start com.zhouxuanting.opencode.health-receiver
+
+# 重建基线
+python3 periodic_jobs/ai_heartbeat/src/v0/jobs/health_monitor.py --rebuild-baseline
+```
diff --git a/periodic_jobs/ai_heartbeat/src/v0/jobs/action_advisor.py b/periodic_jobs/ai_heartbeat/src/v0/jobs/action_advisor.py
new file mode 100644
index 00000000..a275e58a
--- /dev/null
+++ b/periodic_jobs/ai_heartbeat/src/v0/jobs/action_advisor.py
@@ -0,0 +1,342 @@
#!/usr/bin/env python3
"""
Action Advisor — the missing gear that turns information into action.

Two-phase architecture:
  Phase 1 (deterministic): read all of today's automatically collected
      signals (builders digest, AI daily newsletter, GitHub trending,
      OBSERVATIONS.md) and assemble them into structured context.
  Phase 2 (agentic): an agent applies a triple filter
      (relevance -> actionability -> axiom check), produces 2-3 concrete
      TODOs, delivers them by email and archives them locally.

Schedule: daily at 9:15 AM (after the observer, so all upstream data is ready).

Usage:
    python action_advisor.py            # normal run
    python action_advisor.py --dry-run  # generate report only, no email
"""

import os
import argparse
from datetime import datetime, timedelta
from pathlib import Path
import sys
import glob

# Make sibling modules (opencode_client, life_record_common) importable when
# this file is run as a script rather than as a package module.
sys.path.append(str(Path(__file__).parent.parent))
sys.path.append(str(Path(__file__).parent))
try:
    from opencode_client import OpenCodeClient
except ImportError:
    print("Error: Could not import OpenCodeClient. Ensure path is correct.")
    sys.exit(1)

from life_record_common import read_latest_work_summary

# Model used for the agentic phase; overridable via environment variables.
DEFAULT_MODEL = os.getenv(
    "OPENCODE_NEWS_MODEL",
    os.getenv("OPENCODE_DEFAULT_MODEL", "openai/gpt-5.4"),
)

WORKSPACE = Path(__file__).resolve().parents[5]  # up to workspace root
+
+
+# ── Phase 1: Deterministic data collection ─────────────────────────────────
+
def read_latest_file(directory: Path, prefix: str, days_back: int = 2) -> str:
    """Return the newest file in *directory* whose name matches *prefix*.

    Files stamped with today's date are tried first, then one day back at a
    time for up to *days_back* days. Content beyond 8000 characters is
    truncated with a pointer to the full file. Returns "" when nothing matches.
    """
    if not directory.exists():
        return ""

    for offset in range(days_back):
        stamp = (datetime.now() - timedelta(days=offset)).strftime("%Y%m%d")
        candidates = glob.glob(str(directory / f"{prefix}*{stamp}*"))
        if not candidates:
            continue
        newest = max(candidates, key=os.path.getmtime)
        try:
            body = Path(newest).read_text(encoding="utf-8")
        except Exception as e:
            return f"(读取失败: {e})"
        if len(body) > 8000:
            body = body[:8000] + "\n\n... [truncated, full file: " + newest + "]"
        return f"**来源**: `{Path(newest).name}`\n\n{body}"

    return ""
+
+
def read_observations_tail(n_lines: int = 30) -> str:
    """Return the last *n_lines* lines of contexts/memory/OBSERVATIONS.md ("" if absent)."""
    obs_path = WORKSPACE / "contexts" / "memory" / "OBSERVATIONS.md"
    if not obs_path.exists():
        return ""

    try:
        all_lines = obs_path.read_text(encoding="utf-8").splitlines()
        # A negative slice yields the whole list when it is shorter than n_lines.
        return "\n".join(all_lines[-n_lines:])
    except Exception as e:
        return f"(读取失败: {e})"
+
+
def read_latest_github_trending() -> str:
    """Return the newest GitHub trending report from the last 7 days, or ""."""
    trend_dir = WORKSPACE / "contexts" / "survey_sessions" / "github_trending"
    if not trend_dir.exists():
        return ""

    # Weekly job, so scan a full week of date stamps, newest first.
    for offset in range(7):
        stamp = (datetime.now() - timedelta(days=offset)).strftime("%Y%m%d")
        found = glob.glob(str(trend_dir / f"github_trending*{stamp}*"))
        if not found:
            continue
        newest = max(found, key=os.path.getmtime)
        try:
            body = Path(newest).read_text(encoding="utf-8")
            if len(body) > 5000:
                body = body[:5000] + "\n\n... [truncated]"
            return f"**来源**: `{Path(newest).name}`\n\n{body}"
        except Exception:
            # Unreadable candidate — keep scanning older days.
            pass

    return ""
+
+
def read_latest_health_summary(days_back: int = 2) -> str:
    """Return the newest daily health summary within *days_back* days, or ""."""
    daily_dir = WORKSPACE / "contexts" / "health" / "daily"
    if not daily_dir.exists():
        return ""

    for offset in range(days_back):
        stamp = (datetime.now() - timedelta(days=offset)).strftime("%Y-%m-%d")
        candidate = daily_dir / f"{stamp}.md"
        if not candidate.exists():
            continue
        try:
            return f"**来源**: `{candidate.name}`\n\n" + candidate.read_text(encoding="utf-8")
        except Exception:
            # Unreadable file — fall through to the previous day.
            pass
    return ""
+
+
def collect_signals() -> dict:
    """Gather every available signal source for today into a single dict.

    Keys are always present; a value of "" means that source produced
    nothing yet (normal early in the day).
    """
    surveys = WORKSPACE / "contexts" / "survey_sessions"
    life_record_root = WORKSPACE / "contexts" / "life_record" / "data"

    signals = {
        "builders_digest": read_latest_file(
            surveys / "ai_builders_digest", "ai_builders_digest"
        ),
        "ai_newsletter": read_latest_file(
            surveys / "daily_ai_newsletter", "daily_ai_newsletter"
        ),
        "github_trending": read_latest_github_trending(),
        "observations": read_observations_tail(30),
        "life_record": read_latest_work_summary(
            life_record_root, reference_date=datetime.now().strftime("%Y%m%d"), days_back=3
        ),
        "health": read_latest_health_summary(),
    }

    present = [name for name, text in signals.items() if text]
    missing = [name for name, text in signals.items() if not text]

    print(f"Signals collected: {present}")
    if missing:
        print(f"Signals empty (normal if not yet generated today): {missing}")

    return signals
+
+
def format_signals_for_agent(signals: dict) -> str:
    """Render the collected signals as numbered markdown sections for the prompt.

    Only non-empty signals are included; sections are separated by a
    horizontal rule. Returns "" when every signal is empty.
    """
    # (dict key, section heading) pairs in fixed display order.
    layout = [
        ("builders_digest", "## 信号 1: AI Builders Digest (今日)"),
        ("ai_newsletter", "## 信号 2: AI 行业日报 (今日)"),
        ("github_trending", "## 信号 3: GitHub Trending (本周)"),
        ("observations", "## 信号 4: 最近的记忆观察"),
        ("life_record", "## 信号 5: 工作录音摘要"),
        ("health", "## 信号 6: 健康数据"),
    ]

    sections = [
        f"{heading}\n\n{signals[key]}"
        for key, heading in layout
        if signals.get(key)
    ]

    if not sections:
        return ""
    return "\n\n---\n\n".join(sections)
+
+
+# ── Phase 2: Agent analysis ────────────────────────────────────────────────
+
def run_action_advisor(dry_run: bool = False, model_id: str = DEFAULT_MODEL) -> None:
    """Run the two-phase advisor: collect signals, then drive an agent session.

    Args:
        dry_run: when True, the email-delivery step is omitted from the
            agent prompt (the report file is still requested).
        model_id: OpenCode model identifier used for the agent session.
    """
    # Phase 1 — deterministic: gather today's signals from local files.
    print("Phase 1: Collecting today's signals...")
    signals = collect_signals()
    formatted = format_signals_for_agent(signals)

    if not formatted:
        # Nothing to act on; skip the (expensive) agent phase entirely.
        print("No signals available today. Skipping Action Advisor.")
        return

    signal_chars = len(formatted)
    print(f"Phase 1 complete. {signal_chars} chars of signal data.\n")

    # Phase 2 — agentic: hand the assembled signals to an OpenCode session.
    client = OpenCodeClient()
    date_str = datetime.now().strftime("%Y-%m-%d")
    date_file = datetime.now().strftime("%Y%m%d")

    # The agent writes the report itself; ensure the archive directory exists.
    report_path = f"contexts/survey_sessions/action_advisor/action_advisor_{date_file}.md"
    archive_dir = WORKSPACE / "contexts" / "survey_sessions" / "action_advisor"
    archive_dir.mkdir(parents=True, exist_ok=True)

    session_title = f"Action Advisor {date_str}"
    session_id = client.create_session(session_title)

    if not session_id:
        print("Failed to create OpenCode session.")
        return

    # Email step is spliced into the prompt only for real (non-dry) runs.
    delivery = ""
    if not dry_run:
        delivery = f"""
### 第四步:发送 Email

使用以下命令发送邮件:
```bash
python3 tools/send_email_to_myself.py "[Action Advisor] {date_str}" "" --file {report_path}
```
"""

    prompt = f"""你是炫汀的 Action Advisor。你的唯一职责是把今天的信息信号转化为具体可执行的动作。

## 今日信号

以下是自动采集系统今天收集到的全部信号:

{formatted}

## 你的任务

### 第一步:读取项目上下文

1. 读取 `AGENTS.md` — 了解 workspace 整体定位
2. 读取 `rules/axioms/INDEX.md` — 浏览公理索引,选 2-3 条最相关的公理

### 第二步:三重过滤

对上面的信号做三层过滤:

**过滤 1 — 相关性**:这条信号跟炫汀的 AI Memory Infrastructure 项目有关吗?具体包括:
- Context engineering / 记忆系统
- Agent 自动化 / 调度 / 编排
- 开发者工具 / AI-native workflow
- 信息架构 / 知识管理
- 当前 workspace 正在活跃开发的模块(从信号 4 的 OBSERVATIONS 判断)

**过滤 2 — 可行动性**:炫汀今天能对这条信号做什么?
- [30min] 可以在 30 分钟内完成的具体动作(试用、配置、对比、阅读)
- [2h] 可以在 2 小时内完成的小型改进(新 skill、新脚本、新公理候选)
- [标记] 需要标记为"下周做"的中型动作

**过滤 3 — 公理校验**:这个动作符合决策原则吗?
- A09(构建者思维)→ 推荐的是"构建"而非"消费"?
- M01(闭环校准)→ 推荐的动作有可验证的结果?
- T05(认知是资产)→ 做完后能沉淀为可复用知识?

### 第三步:写入报告

将结果写入 `{report_path}`,严格按以下模板:

```markdown
# [Action Advisor] {date_str}

## 今日信号

1. [来源名] 一句话描述发现了什么
2. ...
(只列通过相关性过滤的信号,最多 5 条)

## 建议动作

- [ ] [30min] 具体动作描述
  - 为什么:连接到你的什么项目/目标
  - 怎么验证:做完后怎么知道有效
  - 相关公理:Axx 公理名

- [ ] [2h] 具体动作描述
  - 为什么:...
  - 怎么验证:...
  - 相关公理:...

- [ ] [标记] 下周值得做的一件事
  - 为什么:...

## 今日不值得做

- 排除内容 1:原因
- 排除内容 2:原因
(显式列出被过滤的信号及理由,节省筛选时间)

## 反思锚点

今晚写 daily record 时,回顾:
"[一个具体的反思问题,基于今天推荐的动作]"
```

**硬性约束**:
- 最多 3 条建议动作,多了就过滤
- 每条必须有具体的文件路径、命令、或 repo URL,不能只说"关注 X 领域"
- "为什么"必须连接到 workspace 内的具体模块或文件
- "怎么验证"必须是可观察的结果(文件产出、配置变更、测试通过)
- 如果今天没有任何值得做的事,就写:"今天的信号都与你的当前方向无关。安心做手头的事。"
- 全文不超过 50 行 Markdown
{delivery}

请开始执行。
"""

    print(f"Phase 2: Triggering Agent analysis (Session: {session_id})...")
    print(f"Using model: {model_id}")

    result = client.send_message(session_id, prompt, model_id=model_id)

    if not result:
        # First send sometimes yields nothing; nudge the session once.
        print("No immediate response. Sending continuation ping...")
        result = client.send_message(session_id, "继续", model_id=model_id)

    if result:
        # Block until the agent has finished writing (and optionally mailing).
        client.wait_for_session_complete(session_id)
        print("Action Advisor complete.")
    else:
        print("Failed to start Action Advisor session.")
+
+
if __name__ == "__main__":
    # CLI entry point: parse flags, then hand off to run_action_advisor().
    cli = argparse.ArgumentParser(description="Daily Action Advisor")
    cli.add_argument(
        "--dry-run", action="store_true",
        help="Generate report only, skip email delivery"
    )
    cli.add_argument(
        "--model", "-M", default=DEFAULT_MODEL,
        help=f"Model ID (default: {DEFAULT_MODEL})"
    )
    opts = cli.parse_args()

    print(f"Starting Action Advisor (dry_run={opts.dry_run})...")
    run_action_advisor(dry_run=opts.dry_run, model_id=opts.model)
    print("Done.")
diff --git a/periodic_jobs/ai_heartbeat/src/v0/jobs/health_monitor.py b/periodic_jobs/ai_heartbeat/src/v0/jobs/health_monitor.py
new file mode 100644
index 00000000..bd6e849b
--- /dev/null
+++ b/periodic_jobs/ai_heartbeat/src/v0/jobs/health_monitor.py
@@ -0,0 +1,482 @@
#!/usr/bin/env python3
"""
Health Monitor — Apple Watch health-data processing and daily summary generation.

Fully deterministic; no LLM involved:
  1. Read contexts/health/data/YYYY-MM-DD.json (pushed by iPhone Shortcuts)
  2. Compare against the personal baseline and detect anomalies
  3. Write a daily summary markdown -> contexts/health/daily/YYYY-MM-DD.md
  4. On Sundays, write a weekly report -> contexts/health/weekly/YYYY-Wxx.md

Schedule: daily at 08:01 (right after the 08:00 iPhone Shortcuts push).

Usage:
    python health_monitor.py                     # process today's data
    python health_monitor.py --date 2026-04-07   # process a specific date
    python health_monitor.py --rebuild-baseline  # rebuild the baseline
"""

from __future__ import annotations

import argparse
import json
import statistics
from datetime import datetime, timedelta
from pathlib import Path

# Workspace root: this file sits 5 levels below it
# (periodic_jobs/ai_heartbeat/src/v0/jobs/).
ROOT_DIR = Path(__file__).resolve().parents[5]
HEALTH_ROOT = ROOT_DIR / "contexts" / "health"
DATA_DIR = HEALTH_ROOT / "data"      # raw JSON pushed from the phone
DAILY_DIR = HEALTH_ROOT / "daily"    # generated daily summaries
WEEKLY_DIR = HEALTH_ROOT / "weekly"  # generated weekly reports
BASELINE_PATH = HEALTH_ROOT / "baseline.json"

# Metric definitions: (display name, JSON key path, unit, direction, warn threshold).
# direction: "lower_better" means below baseline is good; "higher_better" the opposite.
METRIC_DEFS = {
    "resting_hr": {
        "name": "静息心率",
        "path": ("heart", "resting_hr"),
        "unit": "bpm",
        "direction": "lower_better",
        "warn_pct": 0.10,  # warn when deviating more than 10% from baseline
    },
    "hrv_avg": {
        "name": "HRV",
        "path": ("heart", "hrv_avg"),
        "unit": "ms",
        "direction": "higher_better",
        "warn_pct": 0.15,
    },
    "sleep_total": {
        "name": "总睡眠",
        "path": ("sleep", "total_minutes"),
        "unit": "min",
        "direction": "higher_better",
        "warn_pct": 0.20,
    },
    "sleep_deep": {
        "name": "深睡",
        "path": ("sleep", "deep_minutes"),
        "unit": "min",
        "direction": "higher_better",
        "warn_pct": 0.25,
    },
    "sleep_rem": {
        "name": "REM",
        "path": ("sleep", "rem_minutes"),
        "unit": "min",
        "direction": "higher_better",
        "warn_pct": 0.25,
    },
    "steps": {
        "name": "步数",
        "path": ("activity", "steps"),
        "unit": "步",
        "direction": "higher_better",
        "warn_pct": 0.30,
    },
    "active_energy": {
        "name": "活动消耗",
        "path": ("activity", "active_energy_kcal"),
        "unit": "kcal",
        "direction": "higher_better",
        "warn_pct": 0.30,
    },
    "blood_oxygen_avg": {
        "name": "血氧均值",
        "path": ("blood_oxygen", "avg"),
        "unit": "%",
        "direction": "higher_better",
        "warn_pct": 0.03,  # SpO2 varies very little; 3% off baseline is abnormal
    },
}
+
+
+def _get_nested(data: dict, path: tuple):
+ """Safely traverse nested dict by key path."""
+ current = data
+ for key in path:
+ if not isinstance(current, dict):
+ return None
+ current = current.get(key)
+ return current
+
+
def load_day_data(date_str: str) -> dict | None:
    """Load the raw health JSON for *date_str* (YYYY-MM-DD); None if absent or invalid."""
    source = DATA_DIR / f"{date_str}.json"
    if not source.exists():
        return None
    try:
        return json.loads(source.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError) as e:
        print(f"Error loading {source}: {e}")
        return None
+
+
def load_baseline() -> dict:
    """Return the stored personal baseline, or {} when missing or unreadable."""
    try:
        # A missing file raises FileNotFoundError, a subclass of OSError,
        # so one handler covers both the absent and the corrupt case.
        return json.loads(BASELINE_PATH.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError):
        return {}
+
+
def save_baseline(baseline: dict):
    """Persist *baseline* as pretty-printed UTF-8 JSON with a trailing newline."""
    serialized = json.dumps(baseline, indent=2, ensure_ascii=False)
    BASELINE_PATH.write_text(serialized + "\n", encoding="utf-8")
+
+
def rebuild_baseline(days_back: int = 14) -> dict:
    """Recompute the personal baseline from the last *days_back* days of raw data.

    A metric needs at least 3 observed days before it enters the baseline.
    The result is written to disk and returned.
    """
    today = datetime.now().date()
    samples: dict[str, list] = {key: [] for key in METRIC_DEFS}

    # Collect every numeric observation per metric over the window.
    for offset in range(days_back):
        day = (today - timedelta(days=offset)).isoformat()
        raw = load_day_data(day)
        if not raw:
            continue
        for key, mdef in METRIC_DEFS.items():
            value = _get_nested(raw, mdef["path"])
            if value is not None and isinstance(value, (int, float)):
                samples[key].append(value)

    baseline = {"updated": today.isoformat(), "days_used": 0, "metrics": {}}
    for key, values in samples.items():
        if len(values) < 3:  # need at least 3 days of data for a baseline
            continue
        baseline["metrics"][key] = {
            "mean": round(statistics.mean(values), 1),
            "stdev": round(statistics.stdev(values), 1) if len(values) > 1 else 0,
        }
        baseline["days_used"] = max(baseline["days_used"], len(values))

    save_baseline(baseline)
    print(f"Baseline rebuilt from {baseline['days_used']} days of data.")
    return baseline
+
+
def analyze_day(data: dict, baseline: dict) -> dict:
    """Compare one day's metrics against the personal baseline.

    Returns {"metrics": {...}, "warnings": [...], "highlights": [...]};
    each metric entry carries value/unit/name, plus baseline comparison
    fields and a status when a baseline mean exists.
    """
    result = {"metrics": {}, "warnings": [], "highlights": []}
    baseline_metrics = baseline.get("metrics", {})

    for key, mdef in METRIC_DEFS.items():
        value = _get_nested(data, mdef["path"])
        if value is None:
            continue

        entry = {"value": value, "unit": mdef["unit"], "name": mdef["name"]}
        bl = baseline_metrics.get(key)

        if not (bl and bl.get("mean")):
            # No usable baseline yet for this metric.
            entry["status"] = "no_baseline"
            result["metrics"][key] = entry
            continue

        mean = bl["mean"]
        diff_pct = (value - mean) / mean if mean != 0 else 0
        entry["baseline_mean"] = mean
        entry["diff_pct"] = round(diff_pct * 100, 1)

        # Whether the deviation points in the metric's healthy direction.
        improving = (
            (mdef["direction"] == "lower_better" and diff_pct < 0)
            or (mdef["direction"] == "higher_better" and diff_pct > 0)
        )
        note = f"{mdef['name']} {value}{mdef['unit']}(较基线 {entry['diff_pct']:+.1f}%)"

        if abs(diff_pct) <= mdef["warn_pct"]:
            entry["status"] = "normal"
        elif improving:
            entry["status"] = "good"
            result["highlights"].append(note)
        else:
            entry["status"] = "warning"
            result["warnings"].append(note)

        result["metrics"][key] = entry

    return result
+
+
+def _status_icon(status: str) -> str:
+ if status == "good":
+ return "+"
+ if status == "warning":
+ return "!"
+ return " "
+
+
def format_sleep_time(minutes: int) -> str:
    """Render a minute count as "XhYYm", e.g. 420 -> "7h00m"."""
    hours = minutes // 60
    mins = minutes % 60
    return "%dh%02dm" % (hours, mins)
+
+
def generate_daily_summary(date_str: str, data: dict, analysis: dict) -> str:
    """Generate the daily health summary markdown for *date_str*.

    Args:
        date_str: ISO date (YYYY-MM-DD) used in the title.
        data: raw day JSON as pushed from the phone.
        analysis: output of analyze_day() for the same data.

    Returns:
        Markdown with Sleep/Heart/Activity/Blood Oxygen sections, plus
        Warnings/Highlights and rule-based suggested adjustments.
    """
    lines = [f"# Health Summary {date_str}", ""]

    # Sleep section
    sleep = data.get("sleep", {})
    if sleep:
        total = sleep.get("total_minutes", 0)
        deep = sleep.get("deep_minutes", 0)
        rem = sleep.get("rem_minutes", 0)
        light = sleep.get("light_minutes", 0)
        awake = sleep.get("awake_minutes", 0)
        bedtime = sleep.get("bedtime", "?")
        wakeup = sleep.get("wakeup", "?")

        m = analysis["metrics"]
        # One-char status markers ("+" good / "!" warning) beside deep/REM figures.
        deep_s = f" {_status_icon(m.get('sleep_deep', {}).get('status', ''))}" if "sleep_deep" in m else ""
        rem_s = f" {_status_icon(m.get('sleep_rem', {}).get('status', ''))}" if "sleep_rem" in m else ""

        lines.extend([
            "## Sleep",
            f"- Total: {format_sleep_time(total)} ({bedtime} - {wakeup})",
            f"- Deep: {deep}min{deep_s} | REM: {rem}min{rem_s} | Light: {light}min | Awake: {awake}min",
            "",
        ])

    # Heart section
    heart = data.get("heart", {})
    if heart:
        rhr = heart.get("resting_hr")
        hrv = heart.get("hrv_avg")
        hr_min = heart.get("hr_min")
        hr_max = heart.get("hr_max")

        m = analysis["metrics"]
        rhr_info = ""
        if rhr and "resting_hr" in m:
            e = m["resting_hr"]
            if "diff_pct" in e:
                rhr_info = f" (baseline {e['baseline_mean']}, {e['diff_pct']:+.1f}%)"

        hrv_info = ""
        if hrv and "hrv_avg" in m:
            e = m["hrv_avg"]
            if "diff_pct" in e:
                hrv_info = f" (baseline {e['baseline_mean']}, {e['diff_pct']:+.1f}%)"

        # Conditional entries may be "" (metric absent); they are stripped below.
        lines.extend([
            "## Heart",
            f"- Resting HR: {rhr} bpm{rhr_info}" if rhr else "",
            f"- HRV: {hrv} ms{hrv_info}" if hrv else "",
            f"- Range: {hr_min}-{hr_max} bpm" if hr_min and hr_max else "",
            "",
        ])
        # NOTE(review): this strips ALL blank lines accumulated so far
        # (including the spacing added after the title and the sleep section),
        # then re-adds a single blank line — this matches the compact layout
        # of the sample daily file; confirm the collapse is intentional.
        lines = [ln for ln in lines if ln != ""]  # remove empty
        lines.append("")

    # Activity section
    activity = data.get("activity", {})
    if activity:
        steps = activity.get("steps", 0)
        energy = activity.get("active_energy_kcal", 0)
        exercise = activity.get("exercise_minutes", 0)
        stand = activity.get("stand_hours", 0)

        lines.extend([
            "## Activity",
            f"- Steps: {steps:,}",
            f"- Active energy: {energy} kcal | Exercise: {exercise} min | Stand: {stand} hours",
            "",
        ])

    # Blood oxygen section; skipped entirely when avg is missing (or 0).
    spo2 = data.get("blood_oxygen", {})
    if spo2:
        avg = spo2.get("avg")
        low = spo2.get("min")
        if avg:
            lines.extend([
                "## Blood Oxygen",
                f"- Avg: {avg}% | Min: {low}%",
                "",
            ])

    # Warnings & highlights from the baseline comparison.
    if analysis["warnings"]:
        lines.append("## Warnings")
        for w in analysis["warnings"]:
            lines.append(f"- {w}")
        lines.append("")

    if analysis["highlights"]:
        lines.append("## Highlights")
        for h in analysis["highlights"]:
            lines.append(f"- {h}")
        lines.append("")

    # Action hint (for action_advisor to pick up): keyword-match each warning
    # text against a fixed set of rule-based suggestions.
    if analysis["warnings"]:
        lines.append("## Suggested adjustments")
        for w in analysis["warnings"]:
            if "HRV" in w:
                lines.append("- HRV 偏低,建议今天避免高强度运动,优先恢复性活动")
            if "深睡" in w:
                lines.append("- 深睡不足,建议今晚减少屏幕时间、提前入睡")
            if "步数" in w:
                lines.append("- 活动量偏低,建议增加日间步行")
            if "静息心率" in w:
                lines.append("- 静息心率偏高,关注是否有过度疲劳或感冒前兆")
            if "血氧" in w:
                lines.append("- 血氧偏低,如持续请关注呼吸健康")
        lines.append("")

    return "\n".join(lines)
+
+
def generate_weekly_report(end_date: str) -> str | None:
    """Aggregate the trailing 7 days of raw data into a weekly markdown report.

    Returns the report text, or None when fewer than 3 of the 7 days
    have data files on disk.
    """
    last_day = datetime.fromisoformat(end_date).date()

    # Newest-first list of (iso_date, payload) pairs for the trailing week.
    collected = []
    for offset in range(7):
        day = last_day - timedelta(days=offset)
        payload = load_day_data(day.isoformat())
        if payload:
            collected.append((day.isoformat(), payload))

    if len(collected) < 3:
        print(f"Only {len(collected)} days of data, skipping weekly report (need >= 3)")
        return None

    # Collect the numeric samples seen this week, keyed per metric.
    samples: dict[str, list] = {key: [] for key in METRIC_DEFS}
    for _, payload in collected:
        for key, mdef in METRIC_DEFS.items():
            value = _get_nested(payload, mdef["path"])
            if value is not None and isinstance(value, (int, float)):
                samples[key].append(value)

    iso_year, iso_week, _ = last_day.isocalendar()
    out = [
        f"# Weekly Health Report {iso_year}-W{iso_week:02d}",
        "",
        f"Period: {collected[-1][0]} to {collected[0][0]} ({len(collected)} days)",
        "",
    ]

    baseline = load_baseline()

    out += [
        "## Weekly Averages",
        "",
        "| Metric | Weekly Avg | Baseline | Trend |",
        "|--------|-----------|----------|-------|",
    ]

    for key, mdef in METRIC_DEFS.items():
        values = samples[key]
        if not values:
            continue
        avg = round(statistics.mean(values), 1)
        bl_mean = baseline.get("metrics", {}).get(key, {}).get("mean", "—")
        if isinstance(bl_mean, (int, float)) and bl_mean != 0:
            # Percent deviation of this week's average from the baseline mean.
            trend = f"{round((avg - bl_mean) / bl_mean * 100, 1):+.1f}%"
        else:
            trend = "—"
        out.append(f"| {mdef['name']} | {avg} {mdef['unit']} | {bl_mean} | {trend} |")

    out.append("")

    # Per-day value sequence (oldest first) for a handful of key metrics.
    out += ["## Daily Trend", ""]
    chronological = sorted(collected, key=lambda item: item[0])
    for key in ["sleep_total", "resting_hr", "hrv_avg", "steps"]:
        mdef = METRIC_DEFS[key]
        rendered = [
            "—" if (v := _get_nested(payload, mdef["path"])) is None else str(v)
            for _, payload in chronological
        ]
        out.append(f"- {mdef['name']}: {' → '.join(rendered)} {mdef['unit']}")

    out.append("")
    return "\n".join(out)
+
+
def process_day(date_str: str, force: bool = False):
    """Process one day's health data end to end.

    Writes the daily summary, opportunistically maintains the baseline,
    and emits a weekly report when the day is a Sunday. Returns True
    when a summary exists or was written, False when there is no raw
    data for the requested day.
    """
    day_data = load_day_data(date_str)
    if not day_data:
        print(f"No data found for {date_str}")
        return False

    summary_path = DAILY_DIR / f"{date_str}.md"
    if summary_path.exists() and not force:
        print(f"Daily summary already exists: {summary_path}")
        return True

    baseline = load_baseline()
    if not baseline.get("metrics"):
        print("No baseline yet. Will generate summary without baseline comparison.")

    report = generate_daily_summary(date_str, day_data, analyze_day(day_data, baseline))

    DAILY_DIR.mkdir(parents=True, exist_ok=True)
    summary_path.write_text(report + "\n", encoding="utf-8")
    print(f"Daily summary written: {summary_path}")

    # Baseline maintenance: refresh a stale one, or bootstrap the first one.
    last_built = baseline.get("updated", "")
    if last_built:
        age_days = (datetime.now().date() - datetime.fromisoformat(last_built).date()).days
        if age_days >= 7:
            print("Baseline is stale (>7 days), rebuilding...")
            rebuild_baseline()
    else:
        # No baseline yet: build one once enough raw days have accumulated.
        available = len(list(DATA_DIR.glob("*.json")))
        if available >= 7:
            print(f"Found {available} days of data, building initial baseline...")
            rebuild_baseline()

    # Sunday closes out the week, so emit the weekly report then.
    day = datetime.fromisoformat(date_str).date()
    if day.weekday() == 6:  # Sunday
        weekly_text = generate_weekly_report(date_str)
        if weekly_text:
            iso_year, iso_week, _ = day.isocalendar()
            weekly_path = WEEKLY_DIR / f"{iso_year}-W{iso_week:02d}.md"
            WEEKLY_DIR.mkdir(parents=True, exist_ok=True)
            weekly_path.write_text(weekly_text + "\n", encoding="utf-8")
            print(f"Weekly report written: {weekly_path}")

    return True
+
+
def main():
    """Command-line entry point for the daily health summary generator."""
    parser = argparse.ArgumentParser(description="Health Monitor — daily summary generator")
    parser.add_argument(
        "--date",
        help="Date to process (YYYY-MM-DD, default: today)",
        default=datetime.now().date().isoformat(),
    )
    parser.add_argument(
        "--force",
        help="Overwrite existing daily summary",
        action="store_true",
    )
    parser.add_argument(
        "--rebuild-baseline",
        help="Rebuild personal baseline from last 14 days",
        action="store_true",
    )
    opts = parser.parse_args()

    if opts.rebuild_baseline:
        rebuild_baseline()
    else:
        process_day(opts.date, force=opts.force)


if __name__ == "__main__":
    main()
diff --git a/tools/health_data_receiver.py b/tools/health_data_receiver.py
new file mode 100644
index 00000000..715211dd
--- /dev/null
+++ b/tools/health_data_receiver.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+"""
+Health Data Receiver — 接收 iPhone Shortcuts 推送的健康数据。
+
+轻量 HTTP server,监听本地端口,接收 POST JSON 并落盘到 contexts/health/data/。
+
+启动方式:
+ python3 tools/health_data_receiver.py # 默认 port 9876
+ python3 tools/health_data_receiver.py --port 9876
+
+iPhone Shortcuts 调用:
+ POST http://<mac-lan-ip>:9876/health
+ Content-Type: application/json
+ Body: { "date": "2026-04-07", "sleep": {...}, "heart": {...}, ... }
+
+安全说明:
+ - 仅监听局域网,不暴露公网
+ - 可选 token 认证(通过环境变量 HEALTH_RECEIVER_TOKEN)
+"""
+
import hmac
import json
import os
import sys
from datetime import datetime
from http.server import BaseHTTPRequestHandler, HTTPServer
from pathlib import Path
+
+ROOT_DIR = Path(__file__).resolve().parents[1]
+DATA_DIR = ROOT_DIR / "contexts" / "health" / "data"
+AUTH_TOKEN = os.getenv("HEALTH_RECEIVER_TOKEN", "")
+
+
class HealthHandler(BaseHTTPRequestHandler):
    """HTTP handler that accepts health JSON pushed from iPhone Shortcuts.

    POST /health — optionally check a bearer token, parse the JSON body,
    and persist it as ``DATA_DIR/<YYYY-MM-DD>.json`` (one file per day;
    a later push for the same day overwrites the earlier one).
    GET /ping — liveness probe.
    """

    def do_POST(self):
        if self.path != "/health":
            self._respond(404, {"error": "Not found"})
            return

        # Token auth (optional). Compare in constant time so response
        # timing does not leak how much of the token matched.
        if AUTH_TOKEN:
            token = self.headers.get("Authorization", "").removeprefix("Bearer ").strip()
            if not hmac.compare_digest(token.encode("utf-8"), AUTH_TOKEN.encode("utf-8")):
                self._respond(401, {"error": "Unauthorized"})
                return

        # Read body
        content_length = int(self.headers.get("Content-Length", 0))
        if content_length == 0:
            self._respond(400, {"error": "Empty body"})
            return

        try:
            body = self.rfile.read(content_length)
            data = json.loads(body)
        except (json.JSONDecodeError, UnicodeDecodeError) as e:
            self._respond(400, {"error": f"Invalid JSON: {e}"})
            return

        # Determine date; default to today when the payload omits it.
        date_str = data.get("date", datetime.now().date().isoformat())
        try:
            # Normalize to plain YYYY-MM-DD so the on-disk filename is
            # predictable and free of client-supplied characters even when
            # the client sends a full ISO datetime. TypeError covers a
            # non-string "date" value in the JSON payload.
            date_str = datetime.fromisoformat(date_str).date().isoformat()
        except (ValueError, TypeError):
            self._respond(400, {"error": f"Invalid date format: {date_str}"})
            return

        # Save to disk
        DATA_DIR.mkdir(parents=True, exist_ok=True)
        output_path = DATA_DIR / f"{date_str}.json"
        output_path.write_text(
            json.dumps(data, indent=2, ensure_ascii=False) + "\n",
            encoding="utf-8",
        )

        print(f"[{datetime.now().isoformat()}] Saved health data: {output_path}")
        self._respond(200, {"status": "ok", "path": str(output_path)})

    def do_GET(self):
        if self.path == "/ping":
            self._respond(200, {"status": "ok"})
        else:
            self._respond(404, {"error": "Not found"})

    def _respond(self, code: int, body: dict):
        """Send *body* as a JSON response with HTTP status *code*."""
        payload = json.dumps(body).encode("utf-8")
        self.send_response(code)
        self.send_header("Content-Type", "application/json")
        # Explicit Content-Length lets clients read the body reliably.
        self.send_header("Content-Length", str(len(payload)))
        self.end_headers()
        self.wfile.write(payload)

    def log_message(self, format, *args):
        # Quieter logging: one timestamped line, full message preserved.
        # (format % args avoids the IndexError a bare args[0] risks.)
        print(f"[{datetime.now().strftime('%H:%M:%S')}] {format % args}")
+
+
def main():
    """Parse CLI options and serve until interrupted.

    --port  TCP port to listen on (default 9876, matching the launchd plist)
    --host  Bind address. Default 0.0.0.0 so the iPhone can reach the
            receiver over the LAN. NOTE(review): 0.0.0.0 binds every
            interface, including public ones — confirm this machine sits
            behind NAT/a firewall, or pass an explicit LAN address.
    """
    import argparse
    parser = argparse.ArgumentParser(description="Health Data Receiver")
    parser.add_argument("--port", type=int, default=9876)
    parser.add_argument("--host", default="0.0.0.0")
    args = parser.parse_args()

    server = HTTPServer((args.host, args.port), HealthHandler)
    print(f"Health Data Receiver listening on {args.host}:{args.port}")
    # Hint for configuring the iPhone Shortcut (substitute this Mac's LAN IP).
    print(f"POST http://<this-machine-ip>:{args.port}/health")
    if AUTH_TOKEN:
        print("Token auth: ENABLED")
    else:
        print("Token auth: DISABLED (set HEALTH_RECEIVER_TOKEN to enable)")
    print(f"Data dir: {DATA_DIR}")

    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print("\nShutting down.")
    finally:
        # server_close() releases the listening socket; shutdown() only
        # signals serve_forever(), which has already returned by this point.
        server.server_close()


if __name__ == "__main__":
    main()