diff --git a/docs/juzi/apk_preflight.py b/docs/juzi/apk_preflight.py new file mode 100755 index 0000000000..4c744212e2 --- /dev/null +++ b/docs/juzi/apk_preflight.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +""" +Quick APK integrity preflight before BrowserStack upload. +""" + +from __future__ import annotations + +import argparse +import hashlib +import sys +import zipfile +from pathlib import Path + + +def sha256sum(path: Path) -> str: + h = hashlib.sha256() + with path.open("rb") as f: + for chunk in iter(lambda: f.read(1024 * 1024), b""): + h.update(chunk) + return h.hexdigest() + + +def main() -> int: + parser = argparse.ArgumentParser(description="APK preflight checks") + parser.add_argument("apk", type=Path, help="Path to APK file") + args = parser.parse_args() + + apk = args.apk + if not apk.exists(): + print(f"[FAIL] file not found: {apk}") + return 2 + if not apk.is_file(): + print(f"[FAIL] not a file: {apk}") + return 2 + + size = apk.stat().st_size + print(f"[INFO] file={apk}") + print(f"[INFO] size_bytes={size}") + print(f"[INFO] sha256={sha256sum(apk)}") + + if size <= 0: + print("[FAIL] APK is zero-byte.") + return 3 + if size < 2 * 1024 * 1024: + print("[WARN] APK is very small (<2MB); verify this is expected.") + + try: + with zipfile.ZipFile(apk, "r") as zf: + names = set(zf.namelist()) + except zipfile.BadZipFile: + print("[FAIL] APK is not a valid ZIP container.") + return 4 + + required = {"AndroidManifest.xml", "classes.dex"} + missing = sorted(required - names) + if missing: + print(f"[FAIL] missing required entries: {', '.join(missing)}") + return 5 + + print("[OK] APK preflight passed.") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) + diff --git a/docs/juzi/browserstack-trace-runbook.md b/docs/juzi/browserstack-trace-runbook.md new file mode 100644 index 0000000000..950afd9c0e --- /dev/null +++ b/docs/juzi/browserstack-trace-runbook.md @@ -0,0 +1,36 @@ +# BrowserStack Trace Runbook (Issue #2994) + +## Objective +Produce 
maintainers-grade evidence for where RCS setup fails, without fake success paths. + +## Steps +1. Start a fresh BrowserStack Android session. +2. Install the test APK once. +3. Open Google Messages and trigger RCS setup flow. +4. Open Developer Tools > Logcat. +5. Set app/package filter to `org.microg.gms`. +6. Keep logs running while reproducing setup state changes. +7. Export/copy log text to a local file (example: `rcs_run_01.log`). + +## Local analysis +Run from the repository root: + +```bash +cd "$(git rev-parse --show-toplevel)" +bash docs/juzi/run_rcs_research_pipeline.sh /path/to/rcs_run_01.log docs/juzi/output +``` + +## What to share in PR +- Device + Android version. +- Google Messages version. +- Whether SIM/carrier profile is present. +- Output of `docs/juzi/output/rcs_report.md`. +- Output of `docs/juzi/output/rcs_contracts.json`. +- Output of `docs/juzi/output/rcs_patch_plan.md`. +- Output of `docs/juzi/output/rcs_research_brief.md`. +- Exact first blocking candidate `(token, code, detail)`. + +## Reject patterns to avoid +- Claiming "Connected" with no trace-backed transition chain. +- Any unconditional success parcel response. +- Identity hardcoding that cannot be justified or reproduced. diff --git a/docs/juzi/harvest_downloads_phase2.sh b/docs/juzi/harvest_downloads_phase2.sh new file mode 100755 index 0000000000..0cdf862a75 --- /dev/null +++ b/docs/juzi/harvest_downloads_phase2.sh @@ -0,0 +1,84 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "$0")/../.." 
&& pwd)" +JUZI_DIR="$ROOT_DIR/docs/juzi" +DOWNLOADS_DIR="${1:-$HOME/Downloads}" +INBOX_DIR="${2:-$JUZI_DIR/phase2_inbox}" +OUT_DIR="${3:-$JUZI_DIR/phase2_submissions}" +MIN_LOG_BYTES="${PHASE2_MIN_LOG_BYTES:-64}" + +mkdir -p "$INBOX_DIR" "$OUT_DIR" + +copied=0 +import_one() { + local src_log="$1" + local src_json="${2:-}" + local base size dest_log + base="$(basename "$src_log" .log)" + dest_log="$INBOX_DIR/$base.log" + + case "$base" in + .*|*~) return 0 ;; + esac + + size="$(wc -c < "$src_log" | tr -d ' ')" + if [[ "$size" -lt "$MIN_LOG_BYTES" ]]; then + return 0 + fi + + if [[ -f "$dest_log" && "$src_log" -ot "$dest_log" ]]; then + return 0 + fi + + cp "$src_log" "$dest_log" + if [[ -n "$src_json" && -f "$src_json" ]]; then + cp "$src_json" "$INBOX_DIR/$base.json" + fi + copied=$((copied + 1)) +} + +shopt -s nullglob +for f in "$DOWNLOADS_DIR"/*.log; do + base="$(basename "$f" .log)" + import_one "$f" "$DOWNLOADS_DIR/$base.json" +done + +for z in "$DOWNLOADS_DIR"/*.zip; do + z_name="$(basename "$z")" + z_lower="$(printf '%s' "$z_name" | tr '[:upper:]' '[:lower:]')" + case "$z_lower" in + *phase2*|*rcs*|*logcat*) ;; + *) continue ;; + esac + + z_key="$(printf '%s' "$z_name" | tr -c '[:alnum:]._-' '_')" + z_marker="$INBOX_DIR/.zip_imported_${z_key}.stamp" + z_info="$(stat -f '%m:%z' "$z")" + if [[ -f "$z_marker" && "$(cat "$z_marker")" == "$z_info" ]]; then + continue + fi + + tmp_dir="$(mktemp -d)" + if ! 
unzip -q -o "$z" -d "$tmp_dir"; then + rm -rf "$tmp_dir" + continue + fi + + copied_before="$copied" + + while IFS= read -r log_file; do + log_dir="$(dirname "$log_file")" + base="$(basename "$log_file" .log)" + import_one "$log_file" "$log_dir/$base.json" + done < <(find "$tmp_dir" -type f -name "*.log") + + if [[ "$copied" -gt "$copied_before" ]]; then + echo "$z_info" > "$z_marker" + fi + rm -rf "$tmp_dir" +done +shopt -u nullglob + +echo "harvested logs: $copied" +bash "$JUZI_DIR/process_phase2_inbox.sh" "$INBOX_DIR" "$OUT_DIR" diff --git a/docs/juzi/issue2994-breakthrough-roadmap.md b/docs/juzi/issue2994-breakthrough-roadmap.md new file mode 100644 index 0000000000..7edf43d344 --- /dev/null +++ b/docs/juzi/issue2994-breakthrough-roadmap.md @@ -0,0 +1,63 @@ +# Issue #2994 - Technical Breakthrough Roadmap + +## Goal +Deliver a solution that is technically real (can be reproduced), not UI-level simulation. + +## Hard reality +If RCS fails due to server-side trust gates (carrier/Jibe policy + app integrity coupling), local binder stubs alone will never be sufficient. +So the first breakthrough is not "force connected", but "pinpoint the first authoritative rejection point with proof". + +## Breakthrough Hypothesis +The bottleneck is likely one of these: +1. Contract mismatch between Google Messages and microG RCS/CarrierAuth service behavior. +2. Provisioning dependency on upstream trust signal that is not satisfied in the current environment. +3. State machine regression where client remains in setup loop because one mandatory transition callback is missing. + +## Engineering strategy (what we can actually ship) + +### Phase A - Contract Witness (must-have) +- Add precise binder-level tracing: + - interface token + - transaction code + - caller uid/pid + - call order + timestamp + - response mode (unavailable/passthrough) +- Add blocker detector: + - if the same unhandled `(token, code, detail)` repeats, emit `blocker_candidate`. 
+- Output a deterministic trace set from a real run. + +Success criteria: +- We can produce a single ordered trace showing exactly where setup stalls. + +### Phase B - Compatibility Adapter (minimal, reviewable) +- Implement only required transactions verified by Phase A. +- No synthetic "all good" response. +- Unsupported versions fail closed with explicit traceable reason. + +Success criteria: +- No fake status path. +- Behavior is contract-specific and version-scoped. + +### Phase C - Evidence Matrix (maintainer-grade) +- Each run includes: + - device model + Android version + - Google Messages version + - SIM/carrier status + - key trace excerpt + - observed UI state +- Include at least one negative path and explain why it fails. + +Success criteria: +- Maintainers can reproduce and reason about acceptance/rejection on their side. + +## What not to do (auto-reject patterns) +- Hardcoded IMEI/IMSI/device identity payloads. +- Unconditional `STATUS_OK` parcel replies. +- Bundling unrelated auth/system changes in the same PR. +- Claiming end-to-end support without trace-backed evidence. + +## PR narrative that can win trust +1. "We instrumented first, then implemented the minimum compatible path." +2. "This revision does not fake provisioning success." +3. "Here is the exact trace where setup blocks today, and here is the targeted adapter behavior for that point." +4. "Known limits are explicit." 
diff --git a/docs/juzi/issue2994-community-validation-template.md b/docs/juzi/issue2994-community-validation-template.md new file mode 100644 index 0000000000..6e53e21202 --- /dev/null +++ b/docs/juzi/issue2994-community-validation-template.md @@ -0,0 +1,26 @@ +# Issue #2994 Community Validation Template + +Please post validation results in this exact format: + +``` +Device: +ROM: +Android: +Carrier/Country: +Google Messages version: +microG build commit: + +Result: +- RCS state (Connected / Setting up / Error): +- Time to state: + +Top blocker evidence (if not Connected): +- token: +- code: +- detail: +- repeated: + +Log excerpt (10-30 lines): + +``` + diff --git a/docs/juzi/issue2994-deep-dive.md b/docs/juzi/issue2994-deep-dive.md new file mode 100644 index 0000000000..abef38e463 --- /dev/null +++ b/docs/juzi/issue2994-deep-dive.md @@ -0,0 +1,37 @@ +# Issue #2994 Deep Dive (RCS Bounty) + +## 1) What maintainers actually care about +- Works on **locked bootloader** devices. +- No dependency on **root/Magisk**. +- Compatible with **current Google Messages**, not only one frozen old version. +- Must survive **real-world verification**, not only local mock success. + +## 2) Why prior attempts are rejected +- Returned local `STATUS_OK` without proving end-to-end provisioning semantics. +- Hardcoded identity data (IMEI/IMSI/model) instead of a defensible compatibility path. +- Missing reproducible evidence matrix (device, ROM, app version, network, SIM profile, logs). +- Broad claims without clear failure boundaries and rollback behavior. + +## 3) Current branch risks to fix before trust-building +- `RcsShimServices` must move from observation to contract completion one row at a time, otherwise it remains a diagnostic-only layer. +- Earlier branch variants included identity hardcoding and static provisioning replies; those patterns must stay removed. +- `IdentityFidoProxyActivity` changes are not directly tied to RCS provisioning acceptance criteria. 
+ +## 4) Breakthrough direction (engineering, not theater) +- Replace "success-forcing" behavior with **trace-first** architecture: + - Add a recorder for binder transaction token, code, call order, response latency, and explicit failure reason. + - Keep behavior deterministic and fail-closed when required dependency is absent. +- Implement **compatibility mediation**, not identity spoofing: + - Route only known RCS service contracts. + - Preserve original parcel semantics where possible. + - Explicitly mark unsupported contract versions. +- Add strict evidence artifacts: + - Device + ROM + Google Messages version + SIM/carrier + timestamp. + - Log excerpts that show state transition chain, not only one terminal string. + +## 5) Acceptance gates for a credible PR +- No hardcoded personal/device identifiers. +- No unconditional "STATUS_OK" responses. +- No unrelated subsystem edits bundled into RCS PR. +- Clear unsupported-scope statement. +- Reproducible verification table with at least one physical-SIM validation path. diff --git a/docs/juzi/issue2994-maintainer-comment-draft.md b/docs/juzi/issue2994-maintainer-comment-draft.md new file mode 100644 index 0000000000..2c1d4b6f6b --- /dev/null +++ b/docs/juzi/issue2994-maintainer-comment-draft.md @@ -0,0 +1,18 @@ +Hi maintainers, + +I reviewed the failed attempts on this bounty and I am adjusting the implementation strategy to be evidence-driven rather than mock-driven. + +For PR #3294, I am removing success-forcing behavior and focusing on a compatibility + tracing path: + +1. Binder contract tracing for RCS/provisioning calls (token, transaction code, call order, response semantics). +2. Fail-closed behavior for unsupported contract versions (no fake `STATUS_OK` on unknown paths). +3. A reproducible validation matrix (device, ROM, Messages version, SIM/carrier, logs). 
+ +Before I finalize the next update, I want to align with your review expectations on three points: + +- Which concrete signals do you consider sufficient to prove real RCS readiness (beyond UI state text)? +- For the bounty scope, is one fully documented modern-device success path acceptable as phase 1, followed by broader compatibility in phase 2? +- Are there specific binder interfaces or state transitions you want explicitly logged in the first reviewable revision? + +I will keep the next revision narrow, testable, and directly tied to the acceptance criteria in #2994. + diff --git a/docs/juzi/issue2994-maintainer-new-angle-comment.md b/docs/juzi/issue2994-maintainer-new-angle-comment.md new file mode 100644 index 0000000000..e4d08bff07 --- /dev/null +++ b/docs/juzi/issue2994-maintainer-new-angle-comment.md @@ -0,0 +1,17 @@ +Maintainers, + +I think the key blocker in #2994 is being approached from the wrong angle. +This does not look like a "single provisioning response" issue. It looks like a contract-completeness issue across the RCS + CarrierAuth binder boundary. + +Most previous attempts tried to force a terminal state. That hides the real blocker and fails under review. + +I am taking a different route: + +1. Instrument exact contract rows (`token`, `transaction code`, call order, caller uid/pid). +2. Identify the first blocking row in the real flow. +3. Implement only that row + direct dependencies, keep unknown paths fail-closed. + +No unconditional success stubs, no inflated compatibility claims. + +If this direction matches your expectations, I will post the first blocker row report in the PR and keep the next patch narrowly scoped to that contract. + diff --git a/docs/juzi/issue2994-maintainer-reply-now.md b/docs/juzi/issue2994-maintainer-reply-now.md new file mode 100644 index 0000000000..6e43fa5a3a --- /dev/null +++ b/docs/juzi/issue2994-maintainer-reply-now.md @@ -0,0 +1,17 @@ +Thanks for the clear questions. + +Direct answers: + +1. 
No, I have not yet validated this on a physical SIM-equipped device running a microG-capable custom ROM. +2. No, this patchset has not yet been validated end-to-end in that same carrier-backed physical environment. + +Current PR scope is Phase 1 only: protocol-layer isolation and deterministic binder-contract analysis. +I am not claiming final carrier-backed RCS activation from this patchset yet. + +What this PR currently contributes: +- binder instrumentation for RCS/CarrierAuth contract paths, +- ranked blocker evidence from repeated unhandled rows, +- minimal fail-closed patch iterations per `(token, code)` row. + +I want to keep this PR active and move directly into Phase 2 validation. +If you share a preferred device/ROM matrix, I will align to it and report results in a structured format. diff --git a/docs/juzi/issue2994-maintainer-research-comment.md b/docs/juzi/issue2994-maintainer-research-comment.md new file mode 100644 index 0000000000..c9b81ccbb2 --- /dev/null +++ b/docs/juzi/issue2994-maintainer-research-comment.md @@ -0,0 +1,23 @@ +Maintainers, + +For #2994, I am treating this as a contract-completeness research problem, not a UI-state workaround. + +Working hypothesis: +- the setup loop is caused by one or more missing/incorrect binder contract rows at the RCS + CarrierAuth boundary. + +Method in this revision: +1. instrument binder rows (`token`, `code`, caller package/uid/pid, handled/unhandled), +2. detect repeated unhandled rows as `blocker_candidate`, +3. rank blockers and patch only rank-1 row in the next iteration. + +I am intentionally not claiming end-to-end success in this step. +The goal is to produce reproducible blocker evidence and a minimal patch target that can be reviewed objectively. 
+ +Artifacts generated from a run: +- blocker trace report +- contract map JSON +- next patch suggestion +- research brief + +If this review direction is acceptable, I will post the first rank-1 blocker row and submit a narrow completion patch for that exact `(token, code)` pair. + diff --git a/docs/juzi/issue2994-new-viewpoint.md b/docs/juzi/issue2994-new-viewpoint.md new file mode 100644 index 0000000000..60c0059578 --- /dev/null +++ b/docs/juzi/issue2994-new-viewpoint.md @@ -0,0 +1,43 @@ +# Issue #2994 New Viewpoint (What Others Missed) + +## Core thesis +This is not a "device spoofing" problem. +This is a **contract-completeness** problem between Google Messages and microG at the RCS + CarrierAuth boundary. + +## Why most attempts fail +- They try to force terminal state (`Connected`) instead of implementing required intermediate contracts. +- They treat provisioning as a single response, while real flow is a multi-step state machine with strict call ordering. +- They cannot show which exact `(interface token, transaction code)` is the first hard blocker. + +## New technical angle +Build a **Contract Completion Layer** (CCL), not a mock-success layer: + +1. **Contract Witness** + - Record exact binder contract rows: + - token + - code + - call order + - caller uid/pid + - Output deterministic traces. + - Emit automatic `blocker_candidate` signals when the same unhandled row repeats. + +2. **Minimal Completion** + - Implement only the first blocking contract row and its direct dependencies. + - Keep all unknown rows fail-closed. + - No unconditional `STATUS_OK`. + +3. **Version Drift Guard** + - Detect token/code drift across Google Messages versions. + - Mark unsupported variants explicitly instead of pretending compatibility. + +## Why this is valuable to maintainers +- Gives a reproducible path to reason about real failures. +- Reduces review risk by avoiding broad or deceptive behavior. 
+- Produces a mergeable progression: + - instrumentation -> first contract completion -> compatibility expansion. + +## Deliverable shape for next PR update +- RCS/CarrierAuth trace instrumentation (already in branch). +- First blocking row implementation only (strictly scoped). +- Trace report attached in PR with blocker row + result. +- Known limits section (no inflated claims). diff --git a/docs/juzi/issue2994-phase2-in-progress-comment.md b/docs/juzi/issue2994-phase2-in-progress-comment.md new file mode 100644 index 0000000000..6f7e840cdb --- /dev/null +++ b/docs/juzi/issue2994-phase2-in-progress-comment.md @@ -0,0 +1,15 @@ +Maintainers, + +Phase 2 validation tooling is now in place in this branch. + +- `docs/juzi/rcs_log_extract.py` +- `docs/juzi/run_phase2_validation_bundle.sh` + +Given a raw device logcat, the pipeline now produces: +- `rcs_report.md` +- `rcs_contracts.json` +- `rcs_patch_plan.md` +- `rcs_research_brief.md` +- `rcs_research_artifacts.zip` + +I will post the first carrier-backed run output in this PR using that exact format. diff --git a/docs/juzi/issue2994-phase2-validation-matrix.md b/docs/juzi/issue2994-phase2-validation-matrix.md new file mode 100644 index 0000000000..303b2158b3 --- /dev/null +++ b/docs/juzi/issue2994-phase2-validation-matrix.md @@ -0,0 +1,41 @@ +# Issue #2994 Phase 2 Validation Matrix + +## Goal +Validate whether the current patchset changes real RCS setup behavior on physical SIM-backed devices. + +## Test Matrix (minimum) +- Device: Pixel 6 / Pixel 7 / Samsung S22 (at least one required for first pass) +- Android: 13 or 14 +- ROM: one microG-capable ROM +- microG build: patched build from this PR branch +- Google Messages: current production version +- SIM: active, carrier-provisioned for RCS region + +## Execution Steps +1. Flash/install ROM and microG baseline. +2. Install patched microG build. +3. Install/clear data for Google Messages. +4. Trigger RCS setup from Messages settings. +5. 
Capture logs for tags: +`RcsApiService`, `CarrierAuthService`, and related provisioning tags. +6. Record end state in Messages: +`Connected` / `Setting up` / explicit error. + +## Acceptance Criteria +- Pass: +Messages reaches stable RCS `Connected` with no repeated rank-1 unhandled blocker row in logs. +- Partial: +Setup progresses but stalls with a new ranked blocker row. +- Fail: +No setup progression and same blocker row persists. + +## Required Report Fields +- Device model +- ROM name + version +- Android version +- Carrier + country +- Messages version +- Result state +- Top blocker row (if any): `token`, `code`, `detail`, repeat count +- Log excerpt lines + diff --git a/docs/juzi/issue2994-research-protocol.md b/docs/juzi/issue2994-research-protocol.md new file mode 100644 index 0000000000..6424b8999a --- /dev/null +++ b/docs/juzi/issue2994-research-protocol.md @@ -0,0 +1,54 @@ +# Issue #2994 Research Protocol (Maintainer-Facing) + +## Research Question +Which exact RCS/CarrierAuth contract row is the first authoritative blocker preventing Google Messages from completing RCS setup in a non-root, locked-bootloader context? + +## Hypotheses +1. The blocking point is a **binder contract incompleteness** row, not a single UI-state mismatch. +2. Repeated unhandled rows (`token + code + detail`) can be used as a deterministic blocker signal. +3. Narrow row-by-row completion is safer and more reviewable than broad synthetic success behavior. + +## Method +1. Instrument `RcsService` and `CarrierAuthService` binder boundaries. +2. Capture run traces with: + - trace id + - token/code + - caller uid/pid/package + - handled/unhandled +3. Emit automatic blocker candidates when unhandled rows repeat. +4. Rank blocker rows and patch only rank-1 row in the next iteration. +5. For the selected rank-1 row, use controlled minimal completion mode (`COMPLETE_*_UNAVAILABLE`) before any broad contract expansion. +6. 
Adjust policy rows through runtime config (no source edit) to keep each iteration auditable. + +## Reproducibility Artifacts +- `docs/juzi/rcs_trace_analyzer.py` +- `docs/juzi/rcs_contract_map_builder.py` +- `docs/juzi/rcs_patch_suggester.py` +- `docs/juzi/rcs_blocker_report_template.md` +- `docs/juzi/rcs_policy_overrides.example.json` +- `docs/juzi/run_rcs_research_from_latest_log.sh` +- `docs/juzi/rcs_log_extract.py` +- `docs/juzi/run_phase2_validation_bundle.sh` +- `docs/juzi/process_phase2_inbox.sh` +- `docs/juzi/summarize_phase2_submissions.py` +- `docs/juzi/harvest_downloads_phase2.sh` + +## Runtime Policy Control +- Policy override path: `files/rcs_policy_overrides.json` (inside microG app sandbox). +- Supported keys: + - `enableMinimalCompletion` (boolean) + - `messagesClients` (string array) + - `completionRows` (array of `{token, code}` or `{tokenContains, code}`) +- Default behavior remains fail-closed if no override file exists. + +## Evaluation Criteria +- Deterministic blocker ranking from independent runs. +- Patch scope limited to target row and direct dependencies. +- No unconditional success responses. +- Unsupported rows remain fail-closed. +- Rank-1 completion does not introduce false-positive connected states. + +## Expected Research Output +1. A blocker report naming first blocker row. +2. A minimal patch plan bound to that row. +3. A post-patch delta report showing whether blocker rank shifts or disappears. 
diff --git a/docs/juzi/package_research_artifacts.sh b/docs/juzi/package_research_artifacts.sh new file mode 100755 index 0000000000..2449838855 --- /dev/null +++ b/docs/juzi/package_research_artifacts.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +set -euo pipefail + +if [ "$#" -lt 1 ]; then + echo "Usage: $0 [zip_path]" + exit 2 +fi + +OUT_DIR="$1" +ZIP_PATH="${2:-$OUT_DIR/rcs_research_artifacts.zip}" +ZIP_PATH_ABS="$(python3 - <<'PY' "$ZIP_PATH" +import os, sys +print(os.path.abspath(sys.argv[1])) +PY +)" + +for f in \ + "$OUT_DIR/rcs_report.md" \ + "$OUT_DIR/rcs_contracts.json" \ + "$OUT_DIR/rcs_patch_plan.md" \ + "$OUT_DIR/rcs_research_brief.md"; do + if [ ! -f "$f" ]; then + echo "Missing required artifact: $f" + exit 3 + fi +done + +cd "$OUT_DIR" +zip -q -r "$ZIP_PATH_ABS" rcs_report.md rcs_contracts.json rcs_patch_plan.md rcs_research_brief.md +echo "Packaged: $ZIP_PATH_ABS" diff --git a/docs/juzi/phase2_external_tester_brief_zh.md b/docs/juzi/phase2_external_tester_brief_zh.md new file mode 100644 index 0000000000..04855c51c6 --- /dev/null +++ b/docs/juzi/phase2_external_tester_brief_zh.md @@ -0,0 +1,40 @@ +# Phase 2 外部真机测试任务说明(给测试员) + +## 测试目标 +验证 microG RCS 补丁在真实 `SIM + 运营商 + microG ROM` 场景下的行为,不做主观口头结论,只提交结构化证据。 + +## 设备要求(至少满足一组) +- 设备:Pixel 6/7 或 Samsung S22/S23(同级可接受) +- Android:13 或 14 +- ROM:支持 microG 的 ROM +- SIM:可正常联网,所在地区支持 Google Messages RCS + +## 执行步骤 +1. 安装补丁版 microG(由我们提供 APK)。 +2. 安装或清空 Google Messages 数据。 +3. 打开 Messages > RCS 设置,触发初始化流程。 +4. 保持 3-10 分钟,观察状态。 +5. 导出 logcat(完整原始日志,不要删改)。 + +## 必交付内容(缺一不可) +1. `raw_logcat.log`(原始日志) +2. 
`metadata.json`(按下面模板填写) + +```json +{ + "tester_id": "t001", + "device": "Pixel 6", + "rom": "LineageOS 21 + microG", + "android": "14", + "carrier_country": "CarrierName/Country", + "messages_version": "2026.xx.xx", + "microg_commit": "PR3294-branch-commit", + "result_state": "Connected|Setting up|Error", + "time_to_state_seconds": 0, + "notes": "" +} +``` + +## 通过标准(由我们统一判定) +- `Connected` 且日志无重复 rank-1 blocker 行,为通过。 +- 若未通过,也必须提交完整日志;失败样本同样有价值。 diff --git a/docs/juzi/phase2_watchdog_worker.sh b/docs/juzi/phase2_watchdog_worker.sh new file mode 100755 index 0000000000..38655a7235 --- /dev/null +++ b/docs/juzi/phase2_watchdog_worker.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +set -euo pipefail + +JUZI_DIR="${1:?missing juzi dir}" +INTERVAL_SECONDS="${2:-180}" +DOWNLOADS_DIR="${HOME}/Downloads" + +while true; do + ts="$(date '+%Y-%m-%d %H:%M:%S')" + echo "[$ts] phase2 watchdog sweep begin" + if ! bash "$JUZI_DIR/harvest_downloads_phase2.sh" "$DOWNLOADS_DIR" "$JUZI_DIR/phase2_inbox" "$JUZI_DIR/phase2_submissions"; then + echo "[$ts] phase2 watchdog sweep failed" + fi + ts="$(date '+%Y-%m-%d %H:%M:%S')" + echo "[$ts] phase2 watchdog sweep end" + sleep "$INTERVAL_SECONDS" +done diff --git a/docs/juzi/pr3294-core-scope.md b/docs/juzi/pr3294-core-scope.md new file mode 100644 index 0000000000..164a76696e --- /dev/null +++ b/docs/juzi/pr3294-core-scope.md @@ -0,0 +1,19 @@ +# PR #3294 Core Scope (Keep It Credible) + +## Include in PR +- `play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsShimServices.kt` +- `play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsContractPolicy.kt` +- `docs/juzi/issue2994-breakthrough-roadmap.md` +- `docs/juzi/browserstack-trace-runbook.md` +- `docs/juzi/rcs_trace_analyzer.py` +- `docs/juzi/rcs_contract_map_builder.py` + +## Exclude from this PR (separate later) +- `play-services-core/src/main/kotlin/org/microg/gms/auth/credentials/identity/IdentityFidoProxyActivity.kt` +- `play-services-core/src/main/AndroidManifest.xml` launcher 
additions +- `vending-app/src/main/AndroidManifest.xml` launcher additions +- `play-services-core/src/main/kotlin/org/microg/gms/LauncherActivity.kt` +- `vending-app/src/main/kotlin/com/android/vending/LauncherActivity.kt` + +## Why +Issue #2994 is reviewed as an RCS correctness task. Unrelated auth/launcher edits dilute trust and make rejection more likely. diff --git a/docs/juzi/pr3294-delivery-checklist.md b/docs/juzi/pr3294-delivery-checklist.md new file mode 100644 index 0000000000..e420f48531 --- /dev/null +++ b/docs/juzi/pr3294-delivery-checklist.md @@ -0,0 +1,25 @@ +# PR #3294 Delivery Checklist + +## Scope discipline +- [ ] RCS-related files only. +- [ ] Remove unrelated auth/identity edits from this PR. +- [ ] No UI-only or cosmetic commit noise. + +## Technical correctness +- [ ] No hardcoded IMEI/IMSI/line number payloads. +- [ ] No unconditional `STATUS_OK` response path. +- [ ] Unsupported interface/version returns explicit failure with trace id. +- [ ] Parcel read/write order matches expected contract. + +## Evidence quality +- [ ] Include a state-transition trace, not just final app screenshot. +- [ ] Include device/ROM/Messages/SIM metadata. +- [ ] Include at least one negative-path log (expected failure handling). +- [ ] Include rollback/safety note if upstream contract changes. + +## Reviewability +- [ ] One concern per commit (small and reviewable diffs). +- [ ] Commit messages explain "why", not only "what". +- [ ] PR body lists known limitations explicitly. +- [ ] Comment tone stays technical, no hype language. + diff --git a/docs/juzi/pr3294-final-body.md b/docs/juzi/pr3294-final-body.md new file mode 100644 index 0000000000..9c9dcf94da --- /dev/null +++ b/docs/juzi/pr3294-final-body.md @@ -0,0 +1,36 @@ +# PR #3294: RCS Contract Witness and Minimal Compatibility Strategy + +## Summary +This revision intentionally avoids synthetic provisioning success paths and introduces a contract-first debugging layer for `RCS` and `CarrierAuth`. 
+ +## What changed +- Added binder-level contract witness for `RCS` and `CarrierAuth` services: + - interface token + - transaction code + - caller package / uid / pid + - payload size + - deterministic trace id + - handled/unhandled decision +- Added policy-driven routing (`RcsContractPolicy`) to keep behavior explicit and auditable. +- Unknown/unsupported paths remain fail-closed (`handled=false`) by design. + +## Why this approach +Issue #2994 has repeatedly failed with broad mock-based responses that do not survive real verification. +This change takes the opposite path: +- instrument first +- identify first blocker row `(token, code, detail)` +- implement only minimal required completion next + +## Non-goals in this revision +- No claim of full end-to-end RCS provisioning success. +- No hardcoded identity payloads. +- No unconditional success responses. + +## Reproducibility +Tooling included: +- `docs/juzi/rcs_trace_analyzer.py` +- `docs/juzi/rcs_contract_map_builder.py` +- `docs/juzi/browserstack-trace-runbook.md` + +These artifacts are intended to produce a maintainer-reviewable blocker report and reduce speculation. + diff --git a/docs/juzi/pr3294-progress-comment.md b/docs/juzi/pr3294-progress-comment.md new file mode 100644 index 0000000000..09bd3ae8d2 --- /dev/null +++ b/docs/juzi/pr3294-progress-comment.md @@ -0,0 +1,25 @@ +Maintainers, + +Quick progress update on #2994: + +- I moved the RCS shim to a contract-first workflow: + - binder row tracing (`token`, `code`, caller package/uid/pid, handled/unhandled), + - automatic repeated-blocker detection (`blocker_candidate`), + - blocker ranking for deterministic patch prioritization. + +- I also added a narrow completion mode for rank-1 blocker rows: + - `COMPLETE_*_UNAVAILABLE` is applied only to selected contract rows, + - all other rows remain fail-closed. 
+- I added a runtime policy layer for the row-selection logic: + - completion rows and client allowlist are now externalized via `rcs_policy_overrides.json`, + - this allows deterministic iteration without repeatedly changing core routing code, + - default behavior remains strict and fail-closed when no override file is present. +- I fixed the research pipeline parser so it accepts both full `trace ...` rows and lightweight `trace_decision ...` rows: + - blocker ranking and contract maps are now generated from current instrumentation logs without manual reformatting. + +This is intentionally not a broad success stub. +The goal is to produce reproducible blocker evidence, then patch exactly one contract row at a time. + +Next step I am preparing: +- post the current top-ranked blocker row from a fresh run, +- submit a minimal row patch bound to that blocker and direct dependencies only. diff --git a/docs/juzi/pr3294-technical-brief.md b/docs/juzi/pr3294-technical-brief.md new file mode 100644 index 0000000000..c26ce6110f --- /dev/null +++ b/docs/juzi/pr3294-technical-brief.md @@ -0,0 +1,24 @@ +# PR #3294 Technical Brief (RCS) + +## What this revision changes +- Introduces contract-level tracing for RCS and CarrierAuth binder calls: + - caller package / uid / pid + - transaction code / flags / payload size + - interface token + - deterministic trace id +- Replaces synthetic success behavior with explicit unavailable/failure semantics. +- Adds reproducible BrowserStack trace runbook and a local analyzer to pinpoint first blocking contract row. + +## Why this is different from prior failed attempts +- Does not claim RCS success through static XML or forced status responses. +- Treats unknown/unsupported paths as unsupported, not success. +- Produces evidence maintainers can inspect and reproduce. + +## Expected outcome +- A clear first blocking candidate `(token, code, detail)` from real runs. 
+- Narrow, reviewable next-step implementation focused on that exact contract row. + +## Known limits +- This revision is an instrumentation + contract-hardening step. +- End-to-end provisioning is intentionally not claimed without trace-backed proof. + diff --git a/docs/juzi/process_phase2_inbox.sh b/docs/juzi/process_phase2_inbox.sh new file mode 100755 index 0000000000..d8db9556ef --- /dev/null +++ b/docs/juzi/process_phase2_inbox.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "$0")/../.." && pwd)" +JUZI_DIR="$ROOT_DIR/docs/juzi" +INBOX_DIR="${1:-$JUZI_DIR/phase2_inbox}" +OUT_ROOT="${2:-$JUZI_DIR/phase2_submissions}" + +mkdir -p "$INBOX_DIR" "$OUT_ROOT" + +has_any=0 +for f in "$INBOX_DIR"/*.log; do + if [[ ! -e "$f" ]]; then + continue + fi + has_any=1 + base="$(basename "$f" .log)" + meta="$INBOX_DIR/$base.json" + out_dir="$OUT_ROOT/$base" + mkdir -p "$out_dir" + done_zip="$out_dir/rcs_research_artifacts.zip" + + if [[ -f "$done_zip" && "$f" -ot "$done_zip" ]]; then + continue + fi + + cp "$f" "$out_dir/raw_logcat.log" + if [[ -f "$meta" ]]; then + cp "$meta" "$out_dir/metadata.json" + else + cat > "$out_dir/metadata.json" <<'EOF' +{ + "tester_id": "", + "device": "", + "rom": "", + "android": "", + "carrier_country": "", + "messages_version": "", + "microg_commit": "", + "result_state": "", + "time_to_state_seconds": 0, + "notes": "metadata missing in inbox" +} +EOF + fi + + bash "$JUZI_DIR/run_phase2_validation_bundle.sh" "$out_dir/raw_logcat.log" "$out_dir" +done + +if [[ "$has_any" -eq 0 ]]; then + echo "no log files found in $INBOX_DIR" + exit 0 +fi + +python3 "$JUZI_DIR/summarize_phase2_submissions.py" "$OUT_ROOT" -o "$OUT_ROOT/index.md" +echo "phase2 submissions index: $OUT_ROOT/index.md" diff --git a/docs/juzi/rcs_blocker_report_template.md b/docs/juzi/rcs_blocker_report_template.md new file mode 100644 index 0000000000..8a00085ca0 --- /dev/null +++ b/docs/juzi/rcs_blocker_report_template.md @@ -0,0 +1,36 @@ +# 
RCS Blocker Report (Template) + +## Environment +- Device: +- Android: +- Google Messages: +- microG build: +- SIM/Carrier state: +- Timestamp: + +## Trace Summary +- Parsed rows: +- First blocking candidate: + - token: + - code: + - detail: + - handled: + +## Reproduction +1. +2. +3. + +## Observed Outcome +- UI state: +- Log anchor lines: + +## Engineering Conclusion +- Why this is the first authoritative blocker: +- Why prior assumptions are insufficient: + +## Next Minimal Patch +- Target `(token, code)`: +- Expected behavior: +- Out-of-scope: + diff --git a/docs/juzi/rcs_contract_map_builder.py b/docs/juzi/rcs_contract_map_builder.py new file mode 100755 index 0000000000..d173659f55 --- /dev/null +++ b/docs/juzi/rcs_contract_map_builder.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 +""" +Build a contract map (token+code priority list) from RCS trace logs. +""" + +from __future__ import annotations + +import argparse +import json +import re +from dataclasses import dataclass +from pathlib import Path + + +TRACE_RE = re.compile( + r"trace id=(?P\d+)\s+" + r"service=(?P\S+)\s+" + r"caller=(?P\S+)\s+" + r"uid=(?P-?\d+)\s+" + r"pid=(?P-?\d+)\s+" + r"code=(?P-?\d+)\s+" + r"flags=(?P-?\d+)\s+" + r"size=(?P-?\d+)\s+" + r"token=(?P.*?)\s+" + r"detail=(?P\S+)\s+" + r"handled=(?Ptrue|false)\s+" + r"t=(?P-?\d+)" +) + +TRACE_DECISION_RE = re.compile( + r"trace_decision\s+id=(?P\d+)\s+" + r"detail=(?P\S+)\s+" + r"handled=(?Ptrue|false)\s+" + r"token=(?P.*?)\s+" + r"code=(?P-?\d+)" +) + +TAG_RE = re.compile(r"\b(RcsApiService|CarrierAuthService)\b") + + +@dataclass(frozen=True) +class Key: + service: str + token: str + code: int + + +def main() -> int: + parser = argparse.ArgumentParser(description="Build token+code contract map from trace logs.") + parser.add_argument("input", type=Path, help="Input logcat text file") + parser.add_argument("-o", "--output", type=Path, required=True, help="Output JSON file") + args = parser.parse_args() + + text = 
args.input.read_text(encoding="utf-8", errors="replace") + rows = [] + for line in text.splitlines(): + m = TRACE_RE.search(line) + if m: + rows.append( + { + "trace_id": int(m.group("trace_id")), + "service": m.group("service"), + "token": (m.group("token") or "").strip(), + "code": int(m.group("code")), + "detail": m.group("detail"), + "handled": m.group("handled") == "true", + } + ) + continue + m2 = TRACE_DECISION_RE.search(line) + if not m2: + continue + tag = TAG_RE.search(line) + service = "unknown" + if tag: + service = "rcs" if tag.group(1) == "RcsApiService" else "carrier_auth" + rows.append( + { + "trace_id": int(m2.group("trace_id")), + "service": service, + "token": (m2.group("token") or "").strip(), + "code": int(m2.group("code")), + "detail": m2.group("detail"), + "handled": m2.group("handled") == "true", + } + ) + + index: dict[Key, dict] = {} + order: list[Key] = [] + for r in rows: + key = Key(r["service"], r["token"], r["code"]) + if key not in index: + index[key] = { + "service": r["service"], + "token": r["token"], + "code": r["code"], + "count": 0, + "first_trace_id": r["trace_id"], + "details": {}, + "handled_seen": False, + } + order.append(key) + item = index[key] + item["count"] += 1 + item["details"][r["detail"]] = item["details"].get(r["detail"], 0) + 1 + item["handled_seen"] = item["handled_seen"] or r["handled"] + + ordered = sorted( + (index[k] for k in order), + key=lambda x: (x["first_trace_id"], -x["count"]), + ) + + payload = { + "source": str(args.input), + "total_rows": len(rows), + "contracts": ordered, + } + args.output.write_text(json.dumps(payload, ensure_ascii=False, indent=2) + "\n", encoding="utf-8") + print(f"wrote {args.output} contracts={len(ordered)} rows={len(rows)}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/docs/juzi/rcs_log_extract.py b/docs/juzi/rcs_log_extract.py new file mode 100755 index 0000000000..b5f30dc3a2 --- /dev/null +++ b/docs/juzi/rcs_log_extract.py @@ -0,0 
+1,56 @@ +#!/usr/bin/env python3 +""" +Extract RCS-relevant lines from large Android logcat dumps. +""" + +from __future__ import annotations + +import argparse +import re +from pathlib import Path + +DEFAULT_PATTERNS = [ + r"RcsApiService", + r"CarrierAuthService", + r"trace_decision", + r"blocker_candidate", + r"blocker_summary", + r"\bRCS\b", + r"\brcs\b", + r"\bJibe\b", + r"Provision", + r"SIP/200", + r"SIP/403", +] + + +def main() -> int: + parser = argparse.ArgumentParser(description="Extract RCS-related logcat lines.") + parser.add_argument("input", type=Path, help="Raw logcat input file") + parser.add_argument("-o", "--output", type=Path, required=True, help="Filtered output file") + parser.add_argument( + "--pattern", + action="append", + default=[], + help="Additional regex pattern (repeatable)", + ) + args = parser.parse_args() + + if not args.input.exists(): + raise SystemExit(f"input not found: {args.input}") + + patterns = DEFAULT_PATTERNS + args.pattern + regexes = [re.compile(p) for p in patterns] + + kept = [] + for line in args.input.read_text(encoding="utf-8", errors="replace").splitlines(): + if any(r.search(line) for r in regexes): + kept.append(line) + + args.output.write_text("\n".join(kept) + ("\n" if kept else ""), encoding="utf-8") + print(f"wrote {args.output} lines={len(kept)}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/docs/juzi/rcs_patch_suggester.py b/docs/juzi/rcs_patch_suggester.py new file mode 100755 index 0000000000..d725326e36 --- /dev/null +++ b/docs/juzi/rcs_patch_suggester.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 +""" +Generate next minimal patch suggestion from RCS contract map. +""" + +from __future__ import annotations + +import argparse +import json +from pathlib import Path + + +def choose_target(contracts: list[dict]) -> dict | None: + # Prioritize first unhandled observed contract with highest count and earliest trace. 
+ candidates = [ + c + for c in contracts + if not c.get("handled_seen", False) + and c.get("token") + and c.get("token") != "" + and any(d in c.get("details", {}) for d in ("observe_config_request", "observe_generic_request")) + ] + if not candidates: + return None + candidates.sort(key=lambda c: (c.get("first_trace_id", 10**9), -c.get("count", 0))) + return candidates[0] + + +def decide_mode(target: dict) -> str: + details = target.get("details", {}) + code = int(target.get("code", -1)) + if "observe_config_request" in details or code in (1, 2, 1001): + return "COMPLETE_CONFIG_UNAVAILABLE" + return "COMPLETE_GENERIC_UNAVAILABLE" + + +def build_markdown(source: Path, target: dict | None) -> str: + lines: list[str] = [] + lines.append("# RCS Next Patch Suggestion") + lines.append("") + lines.append(f"- Source contract map: `{source}`") + lines.append("") + + if target is None: + lines.append("No actionable unhandled observed contract row found.") + lines.append("Keep instrumentation-only mode and collect another run.") + return "\n".join(lines) + + mode = decide_mode(target) + token = target.get("token", "") + code = int(target.get("code", -1)) + count = int(target.get("count", 0)) + first_trace_id = int(target.get("first_trace_id", -1)) + service = target.get("service", "rcs") + + lines.append("## Selected Target") + lines.append(f"- service: `{service}`") + lines.append(f"- token: `{token}`") + lines.append(f"- code: `{code}`") + lines.append(f"- first_trace_id: `{first_trace_id}`") + lines.append(f"- repeated_count: `{count}`") + lines.append(f"- suggested_mode: `{mode}`") + lines.append("") + + lines.append("## Minimal Kotlin Patch Direction") + lines.append("Update `RcsContractPolicy` with an explicit row rule, e.g.:") + lines.append("") + lines.append("```kotlin") + lines.append("if (row.token == \"TOKEN_HERE\" && row.code == CODE_HERE) {") + lines.append(" return ContractDecision(") + lines.append(" mode = ContractDecisionMode.COMPLETE_CONFIG_UNAVAILABLE,") 
+ lines.append(" detail = \"targeted_contract_row\",") + lines.append(" handled = true") + lines.append(" )") + lines.append("}") + lines.append("```") + lines.append("") + lines.append("## Guardrails") + lines.append("- Implement only this row + direct dependencies.") + lines.append("- Keep all other rows fail-closed.") + lines.append("- Do not add broad token wildcard handling.") + return "\n".join(lines) + + +def main() -> int: + parser = argparse.ArgumentParser(description="Suggest next minimal RCS patch from contract map.") + parser.add_argument("input", type=Path, help="Input contract map JSON") + parser.add_argument("-o", "--output", type=Path, required=True, help="Output markdown file") + args = parser.parse_args() + + data = json.loads(args.input.read_text(encoding="utf-8")) + contracts = data.get("contracts", []) + target = choose_target(contracts) + out = build_markdown(args.input, target) + args.output.write_text(out + "\n", encoding="utf-8") + print(f"wrote {args.output}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/docs/juzi/rcs_policy_overrides.example.json b/docs/juzi/rcs_policy_overrides.example.json new file mode 100644 index 0000000000..4d3327882b --- /dev/null +++ b/docs/juzi/rcs_policy_overrides.example.json @@ -0,0 +1,21 @@ +{ + "enableMinimalCompletion": true, + "messagesClients": [ + "com.google.android.apps.messaging", + "com.samsung.android.messaging" + ], + "completionRows": [ + { + "token": "com.google.android.gms.rcs.iprovisioning", + "code": 1 + }, + { + "token": "com.google.android.gms.rcs.iprovisioning", + "code": 2 + }, + { + "token": "com.google.android.gms.rcs.iprovisioning", + "code": 1001 + } + ] +} diff --git a/docs/juzi/rcs_research_brief.py b/docs/juzi/rcs_research_brief.py new file mode 100755 index 0000000000..bb882fcfb0 --- /dev/null +++ b/docs/juzi/rcs_research_brief.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 +""" +Generate a concise research-style brief from contract map and patch 
plan. +""" + +from __future__ import annotations + +import argparse +import json +from pathlib import Path + + +def main() -> int: + parser = argparse.ArgumentParser(description="Build research brief for Issue #2994.") + parser.add_argument("--contracts", type=Path, required=True, help="Contracts JSON from rcs_contract_map_builder.py") + parser.add_argument("--patch-plan", type=Path, required=True, help="Patch plan markdown from rcs_patch_suggester.py") + parser.add_argument("-o", "--output", type=Path, required=True, help="Output markdown path") + args = parser.parse_args() + + contract_data = json.loads(args.contracts.read_text(encoding="utf-8")) + patch_plan = args.patch_plan.read_text(encoding="utf-8") + contracts = contract_data.get("contracts", []) + top = contracts[0] if contracts else None + + lines: list[str] = [] + lines.append("# Issue #2994 Research Brief") + lines.append("") + lines.append("## Objective") + lines.append("Identify and patch the first authoritative RCS contract blocker in a reproducible, fail-closed way.") + lines.append("") + lines.append("## Dataset") + lines.append(f"- source: `{contract_data.get('source', '')}`") + lines.append(f"- total_rows: `{contract_data.get('total_rows', 0)}`") + lines.append(f"- unique_contracts: `{len(contracts)}`") + lines.append("") + lines.append("## Top Observed Contract") + if top: + lines.append(f"- service: `{top.get('service')}`") + lines.append(f"- token: `{top.get('token')}`") + lines.append(f"- code: `{top.get('code')}`") + lines.append(f"- repeated_count: `{top.get('count')}`") + lines.append(f"- handled_seen: `{top.get('handled_seen')}`") + lines.append(f"- details: `{top.get('details')}`") + else: + lines.append("- none") + lines.append("") + lines.append("## Next Patch Plan") + lines.append("```md") + lines.extend(patch_plan.rstrip().splitlines()) + lines.append("```") + lines.append("") + lines.append("## Maintainer Review Ask") + lines.append("- Confirm that blocker ranking method is 
acceptable.") + lines.append("- Confirm whether target row semantics align with expected provisioning flow.") + + args.output.write_text("\n".join(lines) + "\n", encoding="utf-8") + print(f"wrote {args.output}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) + diff --git a/docs/juzi/rcs_trace_analyzer.py b/docs/juzi/rcs_trace_analyzer.py new file mode 100755 index 0000000000..3645965011 --- /dev/null +++ b/docs/juzi/rcs_trace_analyzer.py @@ -0,0 +1,261 @@ +#!/usr/bin/env python3 +""" +Parse microG RCS binder traces from logcat text and emit a concise markdown report. +""" + +from __future__ import annotations + +import argparse +import re +from collections import Counter +from dataclasses import dataclass +from pathlib import Path + + +TRACE_RE = re.compile( + r"trace id=(?P\d+)\s+" + r"service=(?P\S+)\s+" + r"caller=(?P\S+)\s+" + r"uid=(?P-?\d+)\s+" + r"pid=(?P-?\d+)\s+" + r"code=(?P-?\d+)\s+" + r"flags=(?P-?\d+)\s+" + r"size=(?P-?\d+)\s+" + r"token=(?P.*?)\s+" + r"detail=(?P\S+)\s+" + r"handled=(?Ptrue|false)\s+" + r"t=(?P-?\d+)" +) + +TRACE_DECISION_RE = re.compile( + r"trace_decision\s+id=(?P\d+)\s+" + r"detail=(?P\S+)\s+" + r"handled=(?Ptrue|false)\s+" + r"token=(?P.*?)\s+" + r"code=(?P-?\d+)" +) + +TAG_RE = re.compile(r"\b(RcsApiService|CarrierAuthService)\b") + +BLOCKER_RE = re.compile( + r"blocker_candidate\s+" + r"service=(?P\S+)\s+" + r"caller=(?P\S+)\s+" + r"token=(?P\S+)\s+" + r"code=(?P-?\d+)\s+" + r"detail=(?P\S+)\s+" + r"repeated=(?P\d+)" +) + +BLOCKER_SUMMARY_RE = re.compile( + r"blocker_summary\s+" + r"rank=(?P\d+)\s+" + r"repeated=(?P\d+)\s+" + r"first_trace=(?P\d+)\s+" + r"last_trace=(?P\d+)\s+" + r"service=(?P\S+)\s+" + r"caller=(?P\S+)\s+" + r"token=(?P\S+)\s+" + r"code=(?P-?\d+)\s+" + r"detail=(?P\S+)" +) + + +@dataclass +class TraceRecord: + line_no: int + trace_id: int + service: str + caller: str + uid: int + pid: int + code: int + flags: int + size: int + token: str + detail: str + handled: bool + elapsed_ms: int + + 
+def parse_records(text: str) -> list[TraceRecord]: + out: list[TraceRecord] = [] + for i, line in enumerate(text.splitlines(), start=1): + m = TRACE_RE.search(line) + if not m: + m2 = TRACE_DECISION_RE.search(line) + if not m2: + continue + out.append( + TraceRecord( + line_no=i, + trace_id=int(m2.group("trace_id")), + service=infer_service(line), + caller="", + uid=-1, + pid=-1, + code=int(m2.group("code")), + flags=-1, + size=-1, + token=(m2.group("token") or "").strip(), + detail=m2.group("detail"), + handled=(m2.group("handled") == "true"), + elapsed_ms=-1, + ) + ) + continue + out.append( + TraceRecord( + line_no=i, + trace_id=int(m.group("trace_id")), + service=m.group("service"), + caller=m.group("caller"), + uid=int(m.group("uid")), + pid=int(m.group("pid")), + code=int(m.group("code")), + flags=int(m.group("flags")), + size=int(m.group("size")), + token=(m.group("token") or "").strip(), + detail=m.group("detail"), + handled=(m.group("handled") == "true"), + elapsed_ms=int(m.group("t")), + ) + ) + return out + + +def infer_service(line: str) -> str: + m = TAG_RE.search(line) + if not m: + return "unknown" + return "rcs" if m.group(1) == "RcsApiService" else "carrier_auth" + + +def first_blocking_candidate(records: list[TraceRecord]) -> TraceRecord | None: + for rec in records: + if rec.detail in {"observe_config_request", "observe_generic_request"}: + return rec + return None + + +def build_report(records: list[TraceRecord], source: Path) -> str: + lines: list[str] = [] + lines.append("# RCS Trace Report") + lines.append("") + lines.append(f"- Source: `{source}`") + lines.append(f"- Parsed trace rows: **{len(records)}**") + lines.append("") + + if not records: + lines.append("No RCS trace rows found. 
Ensure logcat includes `RcsApiService` traces.") + return "\n".join(lines) + + by_detail = Counter(rec.detail for rec in records) + by_contract = Counter((rec.service, rec.token, rec.code, rec.detail, rec.handled) for rec in records) + + lines.append("## Detail Distribution") + for detail, count in by_detail.most_common(): + lines.append(f"- `{detail}`: {count}") + lines.append("") + + lines.append("## Top Contract Rows") + for (service, token, code, detail, handled), count in by_contract.most_common(10): + token_preview = token if len(token) <= 96 else token[:93] + "..." + lines.append( + f"- `{service}` code=`{code}` detail=`{detail}` handled=`{handled}` token=`{token_preview}` -> {count}" + ) + lines.append("") + + blocker_lines = [] + blocker_summaries = [] + for line in source.read_text(encoding="utf-8", errors="replace").splitlines(): + m = BLOCKER_RE.search(line) + if m: + blocker_lines.append( + ( + m.group("service"), + m.group("caller"), + m.group("token"), + int(m.group("code")), + m.group("detail"), + int(m.group("repeated")), + ) + ) + s = BLOCKER_SUMMARY_RE.search(line) + if s: + blocker_summaries.append( + ( + int(s.group("rank")), + int(s.group("repeated")), + int(s.group("first_trace")), + int(s.group("last_trace")), + s.group("service"), + s.group("caller"), + s.group("token"), + int(s.group("code")), + s.group("detail"), + ) + ) + if blocker_lines: + lines.append("## Auto Blocker Signals") + for service, caller, token, code, detail, repeated in blocker_lines[-5:]: + lines.append( + f"- service=`{service}` caller=`{caller}` token=`{token}` code=`{code}` detail=`{detail}` repeated=`{repeated}`" + ) + lines.append("") + if blocker_summaries: + lines.append("## Blocker Ranking (Service-Side)") + for rank, repeated, first_trace, last_trace, service, caller, token, code, detail in blocker_summaries: + lines.append( + f"- rank=`{rank}` repeated=`{repeated}` first_trace=`{first_trace}` last_trace=`{last_trace}` service=`{service}` caller=`{caller}` 
token=`{token}` code=`{code}` detail=`{detail}`" + ) + lines.append("") + + blocker = first_blocking_candidate(records) + lines.append("## First Blocking Candidate") + if blocker is None: + lines.append("- Not detected.") + else: + lines.append( + "- " + f"trace_id=`{blocker.trace_id}` line=`{blocker.line_no}` service=`{blocker.service}` " + f"caller=`{blocker.caller}` uid=`{blocker.uid}` pid=`{blocker.pid}` " + f"code=`{blocker.code}` detail=`{blocker.detail}` handled=`{blocker.handled}` token=`{blocker.token}`" + ) + lines.append("") + + lines.append("## Suggested Next Step") + if blocker is None: + lines.append("- Capture a fresh run with `RcsApiService` tag visible and rerun analyzer.") + else: + lines.append( + "- Implement/adjust only this exact `(token, code)` contract path first; " + "avoid broad success stubs." + ) + return "\n".join(lines) + + +def main() -> int: + parser = argparse.ArgumentParser(description="Analyze microG RCS binder trace logs.") + parser.add_argument("input", type=Path, help="Input logcat text file") + parser.add_argument( + "-o", + "--output", + type=Path, + help="Output markdown report path (default: print stdout)", + ) + args = parser.parse_args() + + text = args.input.read_text(encoding="utf-8", errors="replace") + records = parse_records(text) + report = build_report(records, args.input) + + if args.output: + args.output.write_text(report + "\n", encoding="utf-8") + else: + print(report) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/docs/juzi/run_phase2_validation_bundle.sh b/docs/juzi/run_phase2_validation_bundle.sh new file mode 100755 index 0000000000..5b53f887fc --- /dev/null +++ b/docs/juzi/run_phase2_validation_bundle.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "$0")/../.." 
&& pwd)" +JUZI_DIR="$ROOT_DIR/docs/juzi" + +RAW_LOG="${1:-}" +OUT_DIR="${2:-$JUZI_DIR/phase2_output}" + +if [[ -z "$RAW_LOG" ]]; then + echo "usage: $0 [output_dir]" >&2 + exit 1 +fi +if [[ ! -f "$RAW_LOG" ]]; then + echo "raw log not found: $RAW_LOG" >&2 + exit 1 +fi + +mkdir -p "$OUT_DIR" + +FILTERED_LOG="$OUT_DIR/phase2_filtered.log" +RAW_ABS="$(python3 - <<'PY' "$RAW_LOG" +import os, sys +print(os.path.abspath(sys.argv[1])) +PY +)" +DEST_RAW="$OUT_DIR/raw_logcat.log" +DEST_ABS="$(python3 - <<'PY' "$DEST_RAW" +import os, sys +print(os.path.abspath(sys.argv[1])) +PY +)" +if [[ "$RAW_ABS" != "$DEST_ABS" ]]; then + cp "$RAW_LOG" "$DEST_RAW" +fi + +python3 "$JUZI_DIR/rcs_log_extract.py" "$RAW_LOG" -o "$FILTERED_LOG" +bash "$JUZI_DIR/run_rcs_research_pipeline.sh" "$FILTERED_LOG" "$OUT_DIR" +bash "$JUZI_DIR/package_research_artifacts.sh" "$OUT_DIR" + +echo "phase2 bundle ready:" +echo " $OUT_DIR/rcs_report.md" +echo " $OUT_DIR/rcs_contracts.json" +echo " $OUT_DIR/rcs_patch_plan.md" +echo " $OUT_DIR/rcs_research_brief.md" +echo " $OUT_DIR/rcs_research_artifacts.zip" diff --git a/docs/juzi/run_rcs_research_from_latest_log.sh b/docs/juzi/run_rcs_research_from_latest_log.sh new file mode 100755 index 0000000000..39c9d5b2be --- /dev/null +++ b/docs/juzi/run_rcs_research_from_latest_log.sh @@ -0,0 +1,58 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "$0")/../.." && pwd)" +JUZI_DIR="$ROOT_DIR/docs/juzi" +PIPELINE_SCRIPT="$JUZI_DIR/run_rcs_research_pipeline.sh" + +if [[ ! 
-x "$PIPELINE_SCRIPT" ]]; then + echo "pipeline script missing: $PIPELINE_SCRIPT" >&2 + exit 1 +fi + +INPUT_PATH="${1:-}" +OUT_DIR="${2:-$JUZI_DIR}" + +pick_latest_log() { + local candidates + candidates="$( + ( + find "$ROOT_DIR" -maxdepth 4 -type f \( -name "*.log" -o -name "*.txt" \) 2>/dev/null + if [[ -d "$HOME/Downloads" ]]; then + find "$HOME/Downloads" -maxdepth 3 -type f \( -name "*.log" -o -name "*.txt" \) 2>/dev/null + fi + ) | while IFS= read -r f; do + if rg -q "trace_decision|RcsApiService|CarrierAuthService|blocker_summary" "$f" 2>/dev/null; then + echo "$f" + fi + done + )" + + if [[ -z "$candidates" ]]; then + return 1 + fi + + # shellcheck disable=SC2016 + echo "$candidates" | while IFS= read -r f; do + stat -f "%m|%N" "$f" + done | sort -t'|' -k1,1nr | head -n1 | cut -d'|' -f2- +} + +if [[ -z "$INPUT_PATH" ]]; then + if ! INPUT_PATH="$(pick_latest_log)"; then + echo "no trace log found (need trace_decision/RcsApiService lines)" >&2 + exit 1 + fi +fi + +if [[ ! -f "$INPUT_PATH" ]]; then + echo "input not found: $INPUT_PATH" >&2 + exit 1 +fi + +echo "using log: $INPUT_PATH" +bash "$PIPELINE_SCRIPT" "$INPUT_PATH" "$OUT_DIR" +echo "done: $OUT_DIR/rcs_report.md" +echo "done: $OUT_DIR/rcs_contracts.json" +echo "done: $OUT_DIR/rcs_patch_plan.md" +echo "done: $OUT_DIR/rcs_research_brief.md" diff --git a/docs/juzi/run_rcs_research_pipeline.sh b/docs/juzi/run_rcs_research_pipeline.sh new file mode 100755 index 0000000000..b1ad106bc3 --- /dev/null +++ b/docs/juzi/run_rcs_research_pipeline.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +set -euo pipefail + +if [ "$#" -lt 1 ]; then + echo "Usage: $0 [output_dir]" + exit 2 +fi + +ROOT_DIR="$(cd "$(dirname "$0")/../.." 
&& pwd)" +LOG_PATH="$1" +OUT_DIR="${2:-$ROOT_DIR/docs/juzi/output}" +mkdir -p "$OUT_DIR" + +REPORT_MD="$OUT_DIR/rcs_report.md" +CONTRACTS_JSON="$OUT_DIR/rcs_contracts.json" +PATCH_PLAN_MD="$OUT_DIR/rcs_patch_plan.md" +RESEARCH_BRIEF_MD="$OUT_DIR/rcs_research_brief.md" + +python3 "$ROOT_DIR/docs/juzi/rcs_trace_analyzer.py" "$LOG_PATH" -o "$REPORT_MD" +python3 "$ROOT_DIR/docs/juzi/rcs_contract_map_builder.py" "$LOG_PATH" -o "$CONTRACTS_JSON" +python3 "$ROOT_DIR/docs/juzi/rcs_patch_suggester.py" "$CONTRACTS_JSON" -o "$PATCH_PLAN_MD" +python3 "$ROOT_DIR/docs/juzi/rcs_research_brief.py" --contracts "$CONTRACTS_JSON" --patch-plan "$PATCH_PLAN_MD" -o "$RESEARCH_BRIEF_MD" + +echo "Generated:" +echo " $REPORT_MD" +echo " $CONTRACTS_JSON" +echo " $PATCH_PLAN_MD" +echo " $RESEARCH_BRIEF_MD" + diff --git a/docs/juzi/start_phase2_watchdog.sh b/docs/juzi/start_phase2_watchdog.sh new file mode 100755 index 0000000000..522edaa719 --- /dev/null +++ b/docs/juzi/start_phase2_watchdog.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "$0")/../.." && pwd)" +JUZI_DIR="$ROOT_DIR/docs/juzi" +PID_FILE="$JUZI_DIR/.phase2_watchdog.pid" +LOG_FILE="$JUZI_DIR/.phase2_watchdog.log" +INTERVAL_SECONDS="${PHASE2_WATCHDOG_INTERVAL_SECONDS:-180}" +WORKER_SCRIPT="$JUZI_DIR/phase2_watchdog_worker.sh" + +is_running() { + if [[ ! -f "$PID_FILE" ]]; then + return 1 + fi + local pid + pid="$(cat "$PID_FILE" || true)" + [[ -n "${pid:-}" ]] && kill -0 "$pid" 2>/dev/null +} + +if is_running; then + pid="$(cat "$PID_FILE")" + echo "watchdog already running pid=$pid interval=${INTERVAL_SECONDS}s" + exit 0 +fi + +rm -f "$PID_FILE" +mkdir -p "$JUZI_DIR/phase2_inbox" "$JUZI_DIR/phase2_submissions" +touch "$LOG_FILE" + +nohup bash "$WORKER_SCRIPT" "$JUZI_DIR" "$INTERVAL_SECONDS" >> "$LOG_FILE" 2>&1 & + +pid="$!" 
+echo "$pid" > "$PID_FILE" +echo "watchdog started pid=$pid interval=${INTERVAL_SECONDS}s" +echo "log: $LOG_FILE" diff --git a/docs/juzi/status_phase2_watchdog.sh b/docs/juzi/status_phase2_watchdog.sh new file mode 100755 index 0000000000..0f0e6d0b2a --- /dev/null +++ b/docs/juzi/status_phase2_watchdog.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "$0")/../.." && pwd)" +JUZI_DIR="$ROOT_DIR/docs/juzi" +PID_FILE="$JUZI_DIR/.phase2_watchdog.pid" +LOG_FILE="$JUZI_DIR/.phase2_watchdog.log" + +if [[ ! -f "$PID_FILE" ]]; then + echo "watchdog status: stopped" + [[ -f "$LOG_FILE" ]] && echo "log: $LOG_FILE" + exit 0 +fi + +pid="$(cat "$PID_FILE" || true)" +if [[ -z "${pid:-}" ]]; then + rm -f "$PID_FILE" + echo "watchdog status: stopped (stale pid file cleaned)" + [[ -f "$LOG_FILE" ]] && echo "log: $LOG_FILE" + exit 0 +fi + +if ! kill -0 "$pid" 2>/dev/null; then + rm -f "$PID_FILE" + echo "watchdog status: stopped (stale pid file cleaned)" + [[ -f "$LOG_FILE" ]] && echo "log: $LOG_FILE" + exit 0 +fi + +echo "watchdog status: running pid=$pid" +if [[ -f "$LOG_FILE" ]]; then + echo "log: $LOG_FILE" + echo "--- last 20 lines ---" + tail -n 20 "$LOG_FILE" +fi diff --git a/docs/juzi/stop_phase2_watchdog.sh b/docs/juzi/stop_phase2_watchdog.sh new file mode 100755 index 0000000000..15c934c802 --- /dev/null +++ b/docs/juzi/stop_phase2_watchdog.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "$0")/../.." && pwd)" +JUZI_DIR="$ROOT_DIR/docs/juzi" +PID_FILE="$JUZI_DIR/.phase2_watchdog.pid" + +if [[ ! -f "$PID_FILE" ]]; then + echo "watchdog not running (no pid file)" + exit 0 +fi + +pid="$(cat "$PID_FILE" || true)" +if [[ -z "${pid:-}" ]]; then + rm -f "$PID_FILE" + echo "watchdog not running (empty pid file cleaned)" + exit 0 +fi + +if ! 
kill -0 "$pid" 2>/dev/null; then + rm -f "$PID_FILE" + echo "watchdog not running (stale pid file cleaned)" + exit 0 +fi + +kill "$pid" 2>/dev/null || true +for _ in 1 2 3 4 5; do + if ! kill -0 "$pid" 2>/dev/null; then + break + fi + sleep 1 +done + +if kill -0 "$pid" 2>/dev/null; then + kill -9 "$pid" 2>/dev/null || true +fi + +rm -f "$PID_FILE" +echo "watchdog stopped pid=$pid" diff --git a/docs/juzi/summarize_phase2_submissions.py b/docs/juzi/summarize_phase2_submissions.py new file mode 100755 index 0000000000..828b17bbab --- /dev/null +++ b/docs/juzi/summarize_phase2_submissions.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +from pathlib import Path + + +def load_json(path: Path) -> dict: + try: + return json.loads(path.read_text(encoding="utf-8", errors="replace")) + except Exception: + return {} + + +def main() -> int: + parser = argparse.ArgumentParser(description="Summarize phase2 submission outputs") + parser.add_argument("root", type=Path, help="phase2_submissions root") + parser.add_argument("-o", "--output", type=Path, required=True, help="markdown summary path") + args = parser.parse_args() + + rows = [] + if args.root.exists(): + for d in sorted(p for p in args.root.iterdir() if p.is_dir()): + meta = load_json(d / "metadata.json") + report = d / "rcs_report.md" + contracts = d / "rcs_contracts.json" + patch_plan = d / "rcs_patch_plan.md" + zip_file = d / "rcs_research_artifacts.zip" + rows.append( + { + "name": d.name, + "tester_id": meta.get("tester_id", ""), + "device": meta.get("device", ""), + "rom": meta.get("rom", ""), + "android": meta.get("android", ""), + "carrier_country": meta.get("carrier_country", ""), + "result_state": meta.get("result_state", ""), + "report": report.exists(), + "contracts": contracts.exists(), + "patch_plan": patch_plan.exists(), + "zip": zip_file.exists(), + } + ) + + lines = ["# Phase 2 Submission Index", ""] + lines.append(f"- Total submissions: 
**{len(rows)}**") + lines.append("") + if not rows: + lines.append("No submissions found.") + else: + lines.append("| Submission | Tester | Device | ROM | Android | Carrier | State | Artifacts |") + lines.append("|---|---|---|---|---|---|---|---|") + for r in rows: + artifacts = [] + if r["report"]: + artifacts.append("report") + if r["contracts"]: + artifacts.append("contracts") + if r["patch_plan"]: + artifacts.append("plan") + if r["zip"]: + artifacts.append("zip") + lines.append( + f"| {r['name']} | {r['tester_id']} | {r['device']} | {r['rom']} | {r['android']} | {r['carrier_country']} | {r['result_state']} | {', '.join(artifacts)} |" + ) + + args.output.write_text("\n".join(lines) + "\n", encoding="utf-8") + print(f"wrote {args.output}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/docs/juzi/xianyu_recruitment_message_zh.md b/docs/juzi/xianyu_recruitment_message_zh.md new file mode 100644 index 0000000000..6299106658 --- /dev/null +++ b/docs/juzi/xianyu_recruitment_message_zh.md @@ -0,0 +1,12 @@ +你好,我们在做一个开源项目的真机兼容测试(Android / microG / Google Messages RCS)。 + +需要你提供: +1. 一台可插 SIM 的安卓真机(Pixel 或三星优先) +2. 支持 microG 的 ROM 环境 +3. 
按步骤执行后导出原始 logcat + +我们只要结构化测试证据,不需要你做开发。 +交付是两个文件:`raw_logcat.log` + `metadata.json`。 + +测试规范文档我会提供,过程约 20-40 分钟。 +如果可以做,请回复你的设备型号、ROM、Android 版本和运营商。 diff --git a/play-services-core/src/main/AndroidManifest.xml b/play-services-core/src/main/AndroidManifest.xml index 66038da7e2..4956978006 100644 --- a/play-services-core/src/main/AndroidManifest.xml +++ b/play-services-core/src/main/AndroidManifest.xml @@ -917,6 +917,24 @@ + + + + + + + + + + + + + + - @@ -1271,7 +1288,6 @@ - diff --git a/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsContractPolicy.kt b/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsContractPolicy.kt new file mode 100644 index 0000000000..dbdcccd737 --- /dev/null +++ b/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsContractPolicy.kt @@ -0,0 +1,89 @@ +/* + * SPDX-FileCopyrightText: 2026 microG Project Team + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.microg.gms.rcs + +import java.util.Locale + +internal data class ContractRow( + val token: String?, + val code: Int, + val callingPackage: String +) + +internal enum class ContractDecisionMode { + UNHANDLED, + OBSERVE_CONFIG, + OBSERVE_GENERIC, + COMPLETE_CONFIG_UNAVAILABLE, + COMPLETE_GENERIC_UNAVAILABLE, + REJECT_NON_MESSAGES_CLIENT +} + +internal data class ContractDecision( + val mode: ContractDecisionMode, + val detail: String, + val handled: Boolean +) + +internal object RcsContractPolicy { + fun decide(row: ContractRow, config: RcsPolicyConfig): ContractDecision { + val normalizedCaller = row.callingPackage.lowercase(Locale.US) + if (!config.messagesClients.contains(normalizedCaller)) { + return ContractDecision( + mode = ContractDecisionMode.REJECT_NON_MESSAGES_CLIENT, + detail = "reject_non_messages_client", + handled = false + ) + } + val token = row.token ?: return ContractDecision( + mode = ContractDecisionMode.UNHANDLED, + detail = "passthrough", + handled = false + ) + if (!isKnownRcsContract(token)) { + return ContractDecision( + mode = 
ContractDecisionMode.UNHANDLED, + detail = "passthrough", + handled = false + ) + } + val mode = if (row.code == 1 || row.code == 2 || row.code == 1001) { + ContractDecisionMode.OBSERVE_CONFIG + } else { + ContractDecisionMode.OBSERVE_GENERIC + } + val normalized = token.lowercase(Locale.US) + if (config.enableMinimalCompletion && config.matchesCompletionRow(normalized, row.code)) { + val completionMode = if (mode == ContractDecisionMode.OBSERVE_CONFIG) { + ContractDecisionMode.COMPLETE_CONFIG_UNAVAILABLE + } else { + ContractDecisionMode.COMPLETE_GENERIC_UNAVAILABLE + } + return ContractDecision( + mode = completionMode, + detail = if (completionMode == ContractDecisionMode.COMPLETE_CONFIG_UNAVAILABLE) { + "complete_config_unavailable" + } else { + "complete_generic_unavailable" + }, + handled = true + ) + } + return ContractDecision( + mode = mode, + detail = if (mode == ContractDecisionMode.OBSERVE_CONFIG) "observe_config_request" else "observe_generic_request", + handled = false + ) + } + + private fun isKnownRcsContract(token: String): Boolean { + val normalized = token.lowercase(Locale.US) + if (!normalized.startsWith("com.google.android")) return false + return normalized.contains(".rcs.") || + normalized.contains(".carrierauth.") || + normalized.contains("provisioning") + } +} diff --git a/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsPolicyConfig.kt b/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsPolicyConfig.kt new file mode 100644 index 0000000000..e14bedf9bb --- /dev/null +++ b/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsPolicyConfig.kt @@ -0,0 +1,148 @@ +/* + * SPDX-FileCopyrightText: 2026 microG Project Team + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.microg.gms.rcs + +import android.content.Context +import android.util.Log +import org.json.JSONArray +import org.json.JSONObject +import java.io.File +import java.util.Locale + +private const val RCS_POLICY_TAG = "RcsPolicyConfig" +private const val 
POLICY_FILE_NAME = "rcs_policy_overrides.json" + +internal data class CompletionRowRule( + val code: Int, + val tokenExact: String? = null, + val tokenContains: String? = null +) { + fun matches(normalizedToken: String, code: Int): Boolean { + if (this.code != code) return false + val exact = tokenExact + if (!exact.isNullOrBlank()) return normalizedToken == exact + val contains = tokenContains + if (!contains.isNullOrBlank()) return normalizedToken.contains(contains) + return false + } +} + +internal data class RcsPolicyConfig( + val enableMinimalCompletion: Boolean, + val messagesClients: Set, + val completionRules: List +) { + fun matchesCompletionRow(normalizedToken: String, code: Int): Boolean { + return completionRules.any { it.matches(normalizedToken, code) } + } + + companion object { + private val DEFAULT_MESSAGES_CLIENTS = setOf( + "com.google.android.apps.messaging", + "com.samsung.android.messaging" + ) + private val DEFAULT_COMPLETION_RULES = listOf( + CompletionRowRule(code = 1, tokenExact = "com.google.android.gms.rcs.iprovisioning"), + CompletionRowRule(code = 2, tokenExact = "com.google.android.gms.rcs.iprovisioning"), + CompletionRowRule(code = 1001, tokenExact = "com.google.android.gms.rcs.iprovisioning") + ) + + fun defaults(): RcsPolicyConfig { + return RcsPolicyConfig( + enableMinimalCompletion = true, + messagesClients = DEFAULT_MESSAGES_CLIENTS, + completionRules = DEFAULT_COMPLETION_RULES + ) + } + } +} + +internal object RcsPolicyConfigStore { + @Volatile + private var cachedConfig: RcsPolicyConfig = RcsPolicyConfig.defaults() + @Volatile + private var cachedMtimeMs: Long = -1L + @Volatile + private var lastAttemptMs: Long = 0L + private const val RELOAD_INTERVAL_MS = 3_000L + + fun current(context: Context): RcsPolicyConfig { + val now = System.currentTimeMillis() + if (now - lastAttemptMs < RELOAD_INTERVAL_MS) return cachedConfig + synchronized(this) { + val refreshedNow = System.currentTimeMillis() + if (refreshedNow - lastAttemptMs < 
RELOAD_INTERVAL_MS) return cachedConfig + lastAttemptMs = refreshedNow + reloadIfChanged(context) + return cachedConfig + } + } + + private fun reloadIfChanged(context: Context) { + val policyFile = File(context.filesDir, POLICY_FILE_NAME) + if (!policyFile.exists()) { + if (cachedMtimeMs != -1L) { + cachedConfig = RcsPolicyConfig.defaults() + cachedMtimeMs = -1L + Log.i(RCS_POLICY_TAG, "policy_config reset_to_defaults") + } + return + } + val mtime = policyFile.lastModified() + if (mtime == cachedMtimeMs) return + val parsed = parsePolicy(policyFile.readText()) + cachedConfig = parsed ?: RcsPolicyConfig.defaults() + cachedMtimeMs = mtime + Log.i( + RCS_POLICY_TAG, + "policy_config reloaded completion=${cachedConfig.enableMinimalCompletion} clients=${cachedConfig.messagesClients.size} rows=${cachedConfig.completionRules.size}" + ) + } + + private fun parsePolicy(jsonText: String): RcsPolicyConfig? { + return runCatching { + val root = JSONObject(jsonText) + val defaults = RcsPolicyConfig.defaults() + val completion = root.optBoolean("enableMinimalCompletion", defaults.enableMinimalCompletion) + val clients = parseClients(root.optJSONArray("messagesClients"), defaults.messagesClients) + val rules = parseRows(root.optJSONArray("completionRows"), defaults.completionRules) + RcsPolicyConfig( + enableMinimalCompletion = completion, + messagesClients = clients, + completionRules = rules + ) + }.onFailure { + Log.w(RCS_POLICY_TAG, "policy_config parse_failed: ${it.message}") + }.getOrNull() + } + + private fun parseClients(source: JSONArray?, fallback: Set): Set { + if (source == null) return fallback + val values = mutableSetOf() + for (index in 0 until source.length()) { + val candidate = source.optString(index).orEmpty().trim().lowercase(Locale.US) + if (candidate.isNotEmpty()) values += candidate + } + return if (values.isNotEmpty()) values else fallback + } + + private fun parseRows(source: JSONArray?, fallback: List): List { + if (source == null) return fallback + 
val values = mutableListOf() + for (index in 0 until source.length()) { + val row = source.optJSONObject(index) ?: continue + val code = row.optInt("code", Int.MIN_VALUE) + val token = row.optString("token").orEmpty().trim().lowercase(Locale.US) + val tokenContains = row.optString("tokenContains").orEmpty().trim().lowercase(Locale.US) + if (code == Int.MIN_VALUE) continue + when { + token.isNotEmpty() -> values += CompletionRowRule(code = code, tokenExact = token) + tokenContains.isNotEmpty() -> values += CompletionRowRule(code = code, tokenContains = tokenContains) + } + } + return if (values.isNotEmpty()) values else fallback + } +} diff --git a/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsReplyCodec.kt b/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsReplyCodec.kt new file mode 100644 index 0000000000..3ee0e1a3f2 --- /dev/null +++ b/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsReplyCodec.kt @@ -0,0 +1,27 @@ +/* + * SPDX-FileCopyrightText: 2026 microG Project Team + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.microg.gms.rcs + +import android.os.IBinder +import android.os.Parcel + +internal object RcsReplyCodec { + private const val STATUS_UNAVAILABLE = 0 + + fun writeConfigUnavailable(reply: Parcel?, flags: Int) { + if (reply == null || flags and IBinder.FLAG_ONEWAY != 0) return + reply.writeNoException() + reply.writeInt(STATUS_UNAVAILABLE) + reply.writeString("") + } + + fun writeGenericUnavailable(reply: Parcel?, flags: Int) { + if (reply == null || flags and IBinder.FLAG_ONEWAY != 0) return + reply.writeNoException() + reply.writeInt(STATUS_UNAVAILABLE) + } +} + diff --git a/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsShimServices.kt b/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsShimServices.kt new file mode 100644 index 0000000000..af2c5b6100 --- /dev/null +++ b/play-services-core/src/main/kotlin/org/microg/gms/rcs/RcsShimServices.kt @@ -0,0 +1,273 @@ +/* + * SPDX-FileCopyrightText: 
2026 microG Project Team + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.microg.gms.rcs + +import android.content.Context +import android.os.Binder +import android.os.IBinder +import android.os.IInterface +import android.os.Parcel +import android.os.SystemClock +import android.util.Log +import com.google.android.gms.common.ConnectionResult +import com.google.android.gms.common.internal.GetServiceRequest +import com.google.android.gms.common.internal.IGmsCallbacks +import org.microg.gms.BaseService +import org.microg.gms.common.GmsService +import org.microg.gms.common.PackageUtils +import java.util.ArrayDeque +import java.util.LinkedHashMap +import java.util.Locale + +private const val RCS_TAG = "RcsApiService" +private const val CARRIER_AUTH_TAG = "CarrierAuthService" +private const val TRACE_CAPACITY = 64 +private const val DEFAULT_RCS_DESCRIPTOR = "com.google.android.gms.rcs.IRcsService" +private const val DEFAULT_CARRIER_DESCRIPTOR = "com.google.android.gms.carrierauth.internal.ICarrierAuthService" +private const val BLOCKER_THRESHOLD = 4 + +private data class BinderTrace( + val traceId: Long, + val service: String, + val caller: String, + val callerUid: Int, + val callerPid: Int, + val code: Int, + val flags: Int, + val dataSize: Int, + val token: String?, + val detail: String, + val handled: Boolean, + val elapsedRealtimeMs: Long +) + +private object BinderTraceStore { + private val traces = ArrayDeque() + private var nextTraceId = 1L + + @Synchronized + fun add(trace: BinderTrace): Long { + val traceId = nextTraceId++ + val materialized = trace.copy(traceId = traceId) + while (traces.size >= TRACE_CAPACITY) traces.removeFirst() + traces.addLast(materialized) + return traceId + } + + @Synchronized + fun dump(tag: String) { + Log.d(tag, "trace_summary count=${traces.size}") + traces.forEach { + Log.d( + tag, + "trace id=${it.traceId} service=${it.service} caller=${it.caller} uid=${it.callerUid} pid=${it.callerPid} code=${it.code} flags=${it.flags} 
size=${it.dataSize} token=${it.token} detail=${it.detail} handled=${it.handled} t=${it.elapsedRealtimeMs}" + ) + } + } +} + +private data class BlockerKey( + val service: String, + val token: String, + val code: Int, + val detail: String, + val caller: String +) + +private data class BlockerStats( + val firstTraceId: Long, + var lastTraceId: Long, + var count: Int +) + +private object BlockerDetector { + private val counters = LinkedHashMap() + + @Synchronized + fun observe(trace: BinderTrace): String? { + if (trace.handled) return null + if (trace.detail == "passthrough") return null + val token = trace.token ?: "" + val key = BlockerKey( + service = trace.service, + token = token, + code = trace.code, + detail = trace.detail, + caller = trace.caller + ) + val stats = counters[key] + val nextCount = if (stats == null) { + counters[key] = BlockerStats( + firstTraceId = trace.traceId, + lastTraceId = trace.traceId, + count = 1 + ) + 1 + } else { + stats.lastTraceId = trace.traceId + stats.count += 1 + stats.count + } + if (nextCount == BLOCKER_THRESHOLD || nextCount % 10 == 0) { + return "blocker_candidate service=${key.service} caller=${key.caller} token=${key.token} code=${key.code} detail=${key.detail} repeated=$nextCount" + } + return null + } + + @Synchronized + fun dump(tag: String) { + if (counters.isEmpty()) { + Log.d(tag, "blocker_summary count=0") + return + } + val ranked = counters.entries + .sortedWith( + compareByDescending> { it.value.count } + .thenBy { it.value.firstTraceId } + ) + Log.d(tag, "blocker_summary count=${ranked.size}") + ranked.take(10).forEachIndexed { index, entry -> + val k = entry.key + val v = entry.value + Log.d( + tag, + "blocker_summary rank=${index + 1} repeated=${v.count} first_trace=${v.firstTraceId} last_trace=${v.lastTraceId} service=${k.service} caller=${k.caller} token=${k.token} code=${k.code} detail=${k.detail}" + ) + } + } +} + +class RcsService : BaseService(RCS_TAG, GmsService.RCS) { + override fun 
handleServiceRequest(callback: IGmsCallbacks, request: GetServiceRequest, service: GmsService) { + val packageName = PackageUtils.getAndCheckCallingPackage(this, request.packageName) + ?: throw IllegalArgumentException("Missing package name") + callback.onPostInitComplete( + ConnectionResult.SUCCESS, + DynamicBinderAdapter(applicationContext, "rcs", packageName, DEFAULT_RCS_DESCRIPTOR), + null + ) + } +} + +class CarrierAuthService : BaseService(CARRIER_AUTH_TAG, GmsService.CARRIER_AUTH) { + override fun handleServiceRequest(callback: IGmsCallbacks, request: GetServiceRequest, service: GmsService) { + val packageName = PackageUtils.getAndCheckCallingPackage(this, request.packageName) + ?: throw IllegalArgumentException("Missing package name") + callback.onPostInitComplete( + ConnectionResult.SUCCESS, + DynamicBinderAdapter(applicationContext, "carrier_auth", packageName, DEFAULT_CARRIER_DESCRIPTOR), + null + ) + } +} + +private class DynamicBinderAdapter( + private val context: Context, + private val serviceName: String, + private val callingPackage: String, + private val defaultDescriptor: String +) : Binder() { + private val iface = object : IInterface { + override fun asBinder(): IBinder = this@DynamicBinderAdapter + } + init { + attachInterface(iface, defaultDescriptor) + } + + override fun onTransact(code: Int, data: Parcel, reply: Parcel?, flags: Int): Boolean { + if (code == INTERFACE_TRANSACTION) { + reply?.writeString(defaultDescriptor) + return true + } + if (code == DUMP_TRANSACTION) { + val tag = if (serviceName == "rcs") RCS_TAG else CARRIER_AUTH_TAG + BinderTraceStore.dump(tag) + BlockerDetector.dump(tag) + reply?.writeNoException() + return true + } + + val token = readInterfaceToken(data) + val decision = routeTransaction(code, token) + applyDecisionReply(decision, reply, flags) + val trace = BinderTrace( + traceId = 0L, + service = serviceName, + caller = callingPackage, + callerUid = getCallingUid(), + callerPid = getCallingPid(), + code = code, + 
flags = flags, + dataSize = data.dataSize(), + token = token, + detail = decision.detail, + handled = decision.handled, + elapsedRealtimeMs = SystemClock.elapsedRealtime() + ) + val traceId = BinderTraceStore.add(trace) + if (decision.detail != "passthrough") { + Log.i( + if (serviceName == "rcs") RCS_TAG else CARRIER_AUTH_TAG, + "trace_decision id=$traceId detail=${decision.detail} handled=${decision.handled} token=$token code=$code" + ) + } + val blockerHint = BlockerDetector.observe(trace.copy(traceId = traceId)) + if (blockerHint != null) { + Log.w( + if (serviceName == "rcs") RCS_TAG else CARRIER_AUTH_TAG, + blockerHint + ) + } + return decision.handled + } + + private fun routeTransaction(code: Int, token: String?): ContractDecision { + return RcsContractPolicy.decide( + ContractRow( + token = token, + code = code, + callingPackage = callingPackage + ), + RcsPolicyConfigStore.current(context) + ) + } + + private fun applyDecisionReply(decision: ContractDecision, reply: Parcel?, flags: Int) { + when (decision.mode) { + ContractDecisionMode.COMPLETE_CONFIG_UNAVAILABLE -> { + RcsReplyCodec.writeConfigUnavailable(reply, flags) + } + ContractDecisionMode.COMPLETE_GENERIC_UNAVAILABLE -> { + RcsReplyCodec.writeGenericUnavailable(reply, flags) + } + else -> Unit + } + } + + private fun readInterfaceToken(parcel: Parcel): String? 
{ + val position = parcel.dataPosition() + return try { + parcel.setDataPosition(0) + val raw = parcel.readString() + if (looksLikeInterfaceToken(raw)) return raw + parcel.setDataPosition(0) + parcel.readInt() // strict mode header or parcel preamble + val shifted = parcel.readString() + if (looksLikeInterfaceToken(shifted)) shifted else null + } catch (_: Throwable) { + null + } finally { + parcel.setDataPosition(position) + } + } + + private fun looksLikeInterfaceToken(candidate: String?): Boolean { + if (candidate.isNullOrBlank()) return false + val normalized = candidate.lowercase(Locale.US) + return normalized.startsWith("com.google.android") && + (normalized.contains(".rcs.") || normalized.contains(".carrierauth.") || normalized.contains("provisioning")) + } +}